From 71fce640c49fef64723501f83e2688b21d30eb11 Mon Sep 17 00:00:00 2001
From: matthewhegarty
Date: Wed, 10 Jan 2024 13:49:52 +0000
Subject: [PATCH] updated documentation

---
 docs/advanced_usage.rst     | 14 ++++++++------
 import_export/exceptions.py |  2 +-
 import_export/resources.py  |  9 ++++++---
 import_export/results.py    |  2 +-
 4 files changed, 16 insertions(+), 11 deletions(-)

diff --git a/docs/advanced_usage.rst b/docs/advanced_usage.rst
index 808558649..b8cd6c7f5 100644
--- a/docs/advanced_usage.rst
+++ b/docs/advanced_usage.rst
@@ -243,9 +243,10 @@ to ``True``, which will mean the process will exit at the first row which has er
     resource = BookResource()
     self.resource.import_data(self.dataset, raise_errors=True)

-The above process will exit with a row number and error::
+The above process will exit with a row number and error (formatted for clarity)::

-    import_export.exceptions.RowError: 2: {'published': ['Value could not be parsed using defined date formats.']}
+    ImportError: 2: {'published': ['Value could not be parsed using defined date formats.']}
+    (OrderedDict({'id': 2, 'name': 'The Hobbit', 'published': 'x'}))

 To iterate over all validation errors produced from an import, pass ``False`` to
 ``raise_errors``::
@@ -253,7 +254,7 @@ To iterate over all validation errors produced from an import, pass ``False`` to
     for row in result.invalid_rows:
         print(f"--- row {row.number} ---")
         for field, error in row.error.error_dict.items():
-            print(f"{field}: {error}")
+            print(f"{field}: {error} ({row.values})")

 If using the :ref:`Admin UI`, errors are presented to the user during import
 (see below).
@@ -281,9 +282,10 @@ The ``raise_errors`` parameter can be used during programmatic import to halt th
         raise_errors=True
     )

-The above process will exit with a row number and error::
+The above process will exit with a row number and error (formatted for clarity)::

-    import_export.exceptions.RowError: 2: []
+    ImportError: 1: []
+    (OrderedDict({'id': 1, 'name': 'Lord of the Rings', 'price': '1x'}))

 To iterate over all generic errors produced from an import, pass ``False`` to ``raise_errors``::

@@ -291,7 +293,7 @@ To iterate over all generic errors produced from an import, pass ``False`` to ``
     for row in result.error_rows:
         print(f"--- row {row.number} ---")
         for field, error in row.error.error_dict.items():
-            print(f"{field}: {error}")
+            print(f"{field}: {error} ({error.row})")

 Field level validation
 ----------------------
diff --git a/import_export/exceptions.py b/import_export/exceptions.py
index 3a90ff0fb..e4b3138aa 100644
--- a/import_export/exceptions.py
+++ b/import_export/exceptions.py
@@ -23,4 +23,4 @@ def __init__(self, error, number=None, row=None):
         self.row = row

     def __str__(self):
-        return f"{self.number}: {self.error}"
+        return f"{self.number}: {self.error} ({self.row})"
diff --git a/import_export/resources.py b/import_export/resources.py
index 5ecbe9513..f3e6f2fb0 100644
--- a/import_export/resources.py
+++ b/import_export/resources.py
@@ -793,9 +793,12 @@ def import_data(
         :param use_transactions: If ``True`` the import process will be processed
             inside a transaction.

-        :param collect_failed_rows: If ``True`` the import process will collect
-            failed rows. This can be useful for debugging purposes but will cause
-            higher memory usage for larger datasets.
+        :param collect_failed_rows:
+            If ``True`` the import process will create a new dataset object comprising
+            failed rows and errors.
+            This can be useful for debugging purposes but will cause higher memory usage
+            for larger datasets.
+            See :attr:`~import_export.results.Result.failed_dataset`.

         :param rollback_on_validation_errors: If both ``use_transactions`` and
             ``rollback_on_validation_errors`` are set to ``True``, the import process will
diff --git a/import_export/results.py b/import_export/results.py
index 629871675..fcacfb617 100644
--- a/import_export/results.py
+++ b/import_export/results.py
@@ -170,7 +170,7 @@ def __init__(self, *args, **kwargs):
         self.invalid_rows = []
         #: The collection of rows which had generic errors.
         self.error_rows = []
-        #: A custom Dataset containing only failed rows.
+        #: A custom Dataset containing only failed rows and associated errors.
         self.failed_dataset = Dataset()
         self.totals = OrderedDict(
             [
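
Note (not part of the patch): the sketch below illustrates how the behaviour documented
above might be exercised programmatically. It assumes the ``Book`` model and
``BookResource`` used elsewhere in the django-import-export docs; the ``core.models``
import path and the dataset contents are illustrative only, and whether a given failure
appears in ``invalid_rows`` or ``error_rows`` depends on whether it is a validation
error or a generic error::

    import tablib
    from import_export import resources
    from core.models import Book  # assumed example model path, as in the docs

    class BookResource(resources.ModelResource):
        class Meta:
            model = Book

    # A row whose 'published' value cannot be parsed as a date.
    dataset = tablib.Dataset(headers=["id", "name", "published"])
    dataset.append([2, "The Hobbit", "x"])

    resource = BookResource()

    # raise_errors=False collects problems instead of raising ImportError;
    # collect_failed_rows=True also populates result.failed_dataset.
    result = resource.import_data(
        dataset,
        raise_errors=False,
        collect_failed_rows=True,
    )

    # Validation failures: each row exposes its number, errors and raw values.
    for row in result.invalid_rows:
        print(f"--- row {row.number} ---")
        for field, error in row.error.error_dict.items():
            print(f"{field}: {error} ({row.values})")

    # failed_dataset is a tablib Dataset holding only failed rows and their errors.
    print(result.failed_dataset.export("csv"))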