Skip to content

Commit

Permalink
ManyToManyWidget: fix handling unordered lists in skip_row() (#1391)
Browse files Browse the repository at this point in the history
  • Loading branch information
manelclos committed Apr 6, 2022
1 parent f335dba commit 0ba3582
Show file tree
Hide file tree
Showing 2 changed files with 45 additions and 11 deletions.
20 changes: 10 additions & 10 deletions import_export/resources.py
Expand Up @@ -609,18 +609,17 @@ def skip_row(self, instance, original, row, import_validation_errors=None):
if not self._meta.skip_unchanged or self._meta.skip_diff or import_validation_errors:
return False
for field in self.get_import_fields():
try:
# For fields that are models.fields.related.ManyRelatedManager
# we need to compare the results
if isinstance(field.widget, widgets.ManyToManyWidget):
# compare with the future value to detect changes
instance_value = list(field.clean(row))
else:
instance_value = list(field.get_value(instance).all())
# For fields that are models.fields.related.ManyRelatedManager
# we need to compare the results
if isinstance(field.widget, widgets.ManyToManyWidget):
instance_values = list(field.clean(row))
original_values = list(field.get_value(original).all())
if len(instance_values) != len(original_values):
return False

if instance_value != list(field.get_value(original).all()):
if sorted(v.id for v in instance_values) != sorted(v.id for v in original_values):
return False
except AttributeError:
else:
if field.get_value(instance) != field.get_value(original):
return False
return True
Expand Down Expand Up @@ -713,6 +712,7 @@ def import_row(self, row, instance_loader, using_transactions=True, dry_run=Fals
# validate_instance(), where they can be combined with model
# instance validation errors if necessary
import_validation_errors = e.update_error_dict(import_validation_errors)

if self.skip_row(instance, original, row, import_validation_errors):
row_result.import_type = RowResult.IMPORT_TYPE_SKIP
else:
Expand Down
36 changes: 35 additions & 1 deletion tests/core/tests/test_resources.py
Expand Up @@ -1548,7 +1548,7 @@ def test_many_to_many_widget_no_changes(self):
# should be skipped, because there is no change
book = Book.objects.first()
dataset_headers = ["id", "name", "categories"]
dataset_row = [book.id, book.name, book.categories.all()]
dataset_row = [book.id, book.name, book.categories.first().id]
dataset = tablib.Dataset(headers=dataset_headers)
dataset.append(dataset_row)

Expand All @@ -1560,6 +1560,40 @@ def test_many_to_many_widget_no_changes(self):
self.assertEqual(result.rows[0].import_type, results.RowResult.IMPORT_TYPE_SKIP)
self.assertEqual(1, book.categories.count())

def test_many_to_many_widget_handles_ordering(self):
    """Rows listing the same m2m ids in any order must be skipped as unchanged."""
    # The fixture book starts with one category; attach a second so the
    # row's "categories" column can list two ids in either order.
    book = Book.objects.first()
    self.assertEqual(1, book.categories.count())
    cat1 = Category.objects.get(name="Category 1")
    cat2 = Category.objects.get(name="Category 2")
    book.categories.add(cat1)
    book.save()
    self.assertEqual(2, book.categories.count())
    dataset_headers = ["id", "name", "categories"]

    book_resource = BookResource()
    book_resource._meta.skip_unchanged = True

    # Import the identical association twice: once in natural id order,
    # once reversed. Neither ordering represents a real change, so both
    # rows must come back as IMPORT_TYPE_SKIP.
    for categories_csv in (f"{cat1.id},{cat2.id}", f"{cat2.id},{cat1.id}"):
        dataset = tablib.Dataset(headers=dataset_headers)
        dataset.append([book.id, book.name, categories_csv])
        result = book_resource.import_data(dataset, dry_run=False)
        self.assertEqual(
            result.rows[0].import_type, results.RowResult.IMPORT_TYPE_SKIP
        )

    # The association itself is untouched by the skipped imports.
    self.assertEqual(2, book.categories.count())


@mock.patch("import_export.resources.Diff", spec=True)
class SkipDiffTest(TestCase):
Expand Down

0 comments on commit 0ba3582

Please sign in to comment.