diff --git a/tests/integration/test_data_rows.py b/tests/integration/test_data_rows.py
index df7532cf9..248265848 100644
--- a/tests/integration/test_data_rows.py
+++ b/tests/integration/test_data_rows.py
@@ -922,9 +922,7 @@ def test_data_row_bulk_creation_sync_with_same_global_keys(
         dataset, sample_image, is_adv_enabled):
     global_key_1 = str(uuid.uuid4())
-    if is_adv_enabled:
-        # ADV does not throw an error for duplicate global keys
-        # but rather create the first one and reject the second
+    with pytest.raises(labelbox.exceptions.MalformedQueryException) as exc_info:
         dataset.create_data_rows_sync([{
             DataRow.row_data: sample_image,
             DataRow.global_key: global_key_1
@@ -932,18 +930,13 @@
             DataRow.row_data: sample_image,
             DataRow.global_key: global_key_1
         }])
+
+    if is_adv_enabled:
+        # ADV will import the first data row but not the second (duplicate global key)
         assert len(list(dataset.data_rows())) == 1
-        assert list(dataset.data_rows())[0].global_key == global_key_1
+        assert "Some data rows were not imported. Check error output here" in str(
+            exc_info.value)
     else:
-        with pytest.raises(labelbox.exceptions.MalformedQueryException):
-            dataset.create_data_rows_sync([{
-                DataRow.row_data: sample_image,
-                DataRow.global_key: global_key_1
-            }, {
-                DataRow.row_data: sample_image,
-                DataRow.global_key: global_key_1
-            }])
-
         assert len(list(dataset.data_rows())) == 0
 
     dataset.create_data_rows_sync([{