From e807e774487d2fc44f9bf8ac29b4b0ebcd92a022 Mon Sep 17 00:00:00 2001
From: Paul Noirel
Date: Fri, 10 Feb 2023 19:59:38 +0000
Subject: [PATCH 1/5] Add attachment_name support to create_attachment()

---
 labelbox/schema/data_row.py         | 19 +++++++++++++------
 tests/integration/test_data_rows.py | 12 ++++++++++++
 2 files changed, 25 insertions(+), 6 deletions(-)

diff --git a/labelbox/schema/data_row.py b/labelbox/schema/data_row.py
index d65a222ff..731f2eb98 100644
--- a/labelbox/schema/data_row.py
+++ b/labelbox/schema/data_row.py
@@ -106,8 +106,10 @@ def get_winning_label_id(self, project_id: str) -> Optional[str]:
 
         return res["dataRow"]["labelingActivity"]["selectedLabelId"]
 
-    def create_attachment(self, attachment_type,
-                          attachment_value) -> "AssetAttachment":
+    def create_attachment(self,
+                          attachment_type,
+                          attachment_value,
+                          attachment_name=None) -> "AssetAttachment":
         """ Adds an AssetAttachment to a DataRow.
             Labelers can view these attachments while labeling.
 
@@ -117,6 +119,7 @@ def create_attachment(self, attachment_type,
             attachment_type (str): Asset attachment type, must be one of:
                 VIDEO, IMAGE, TEXT, IMAGE_OVERLAY (AssetAttachment.AttachmentType)
             attachment_value (str): Asset attachment value.
+            attachment_name (str): (Optional) Asset attachment name.
         Returns:
             `AssetAttachment` DB object.
         Raises:
@@ -126,19 +129,23 @@ def create_attachment(self, attachment_type,
         attachment_type_param = "type"
         attachment_value_param = "value"
+        attachment_name_param = "name"
         data_row_id_param = "dataRowId"
+
         query_str = """mutation CreateDataRowAttachmentPyApi(
-            $%s: AttachmentType!, $%s: String!, $%s: ID!) {
+            $%s: AttachmentType!, $%s: String!, $%s: String, $%s: ID!) {
             createDataRowAttachment(data: {
-                type: $%s value: $%s dataRowId: $%s}) {%s}} """ % (
-            attachment_type_param, attachment_value_param, data_row_id_param,
-            attachment_type_param, attachment_value_param, data_row_id_param,
+                type: $%s value: $%s name: $%s dataRowId: $%s}) {%s}} """ % (
+            attachment_type_param, attachment_value_param,
+            attachment_name_param, data_row_id_param, attachment_type_param,
+            attachment_value_param, attachment_name_param, data_row_id_param,
             query.results_query_part(Entity.AssetAttachment))
         res = self.client.execute(
             query_str, {
                 attachment_type_param: attachment_type,
                 attachment_value_param: attachment_value,
+                attachment_name_param: attachment_name,
                 data_row_id_param: self.uid
             })
         return Entity.AssetAttachment(self.client,
diff --git a/tests/integration/test_data_rows.py b/tests/integration/test_data_rows.py
index 7f0e6bc8a..2c1eddcc6 100644
--- a/tests/integration/test_data_rows.py
+++ b/tests/integration/test_data_rows.py
@@ -685,12 +685,24 @@ def test_create_data_rows_sync_mixed_upload(dataset, image_url):
 
 
 def test_delete_data_row_attachment(datarow, image_url):
     attachments = []
+
+    # Anonymous attachment
     to_attach = [("IMAGE", image_url), ("TEXT", "test-text"),
                  ("IMAGE_OVERLAY", image_url), ("HTML", image_url)]
     for attachment_type, attachment_value in to_attach:
         attachments.append(
             datarow.create_attachment(attachment_type, attachment_value))
 
+    # Attachment with a name
+    to_attach = [("IMAGE", image_url, "Att. Image"),
+                 ("TEXT", "test-text", "Att. Text"),
+                 ("IMAGE_OVERLAY", image_url, "Image Overlay"),
+                 ("HTML", image_url, "Att. HTML")]
+    for attachment_type, attachment_value, attachment_name in to_attach:
+        attachments.append(
+            datarow.create_attachment(attachment_type, attachment_value,
+                                      attachment_name))
+
     for attachment in attachments:
         attachment.delete()

From 4ee7cf8613b34e3457eded0b73e1710a7420ae9f Mon Sep 17 00:00:00 2001
From: Paul Noirel
Date: Mon, 13 Feb 2023 13:53:13 +0000
Subject: [PATCH 2/5] Add unit tests

---
 tests/integration/test_data_rows.py | 18 ++++++++++--------
 1 file changed, 10 insertions(+), 8 deletions(-)

diff --git a/tests/integration/test_data_rows.py b/tests/integration/test_data_rows.py
index 2c1eddcc6..b2cca83e9 100644
--- a/tests/integration/test_data_rows.py
+++ b/tests/integration/test_data_rows.py
@@ -621,16 +621,17 @@ def test_data_row_iteration(dataset, image_url) -> None:
 
 
 def test_data_row_attachments(dataset, image_url):
-    attachments = [("IMAGE", image_url), ("TEXT", "test-text"),
-                   ("IMAGE_OVERLAY", image_url), ("HTML", image_url)]
+    attachments = [("IMAGE", image_url, "attachment image"), ("TEXT", "test-text", None),
+                   ("IMAGE_OVERLAY", image_url, "Overlay"), ("HTML", image_url)]
     task = dataset.create_data_rows([{
         "row_data": image_url,
         "external_id": "test-id",
         "attachments": [{
             "type": attachment_type,
-            "value": attachment_value
+            "value": attachment_value,
+            "name": attachment_name
         }]
-    } for attachment_type, attachment_value in attachments])
+    } for attachment_type, attachment_value, attachment_name in attachments])
 
     task.wait_till_done()
     assert task.status == "COMPLETE"
@@ -652,8 +653,8 @@ def test_create_data_rows_sync_attachments(dataset, image_url):
-    attachments = [("IMAGE", image_url), ("TEXT", "test-text"),
-                   ("IMAGE_OVERLAY", image_url), ("HTML", image_url)]
+    attachments = [("IMAGE", image_url, "image URL"), ("TEXT", "test-text", None),
+                   ("IMAGE_OVERLAY", image_url, "Overlay"), ("HTML", image_url, None)]
     attachments_per_data_row = 3
     dataset.create_data_rows_sync([{
         "row_data":
             image_url,
         "external_id":
             "test-id",
         "attachments": [{
             "type": attachment_type,
-            "value": attachment_value
+            "value": attachment_value,
+            "name": attachment_name
         } for _ in range(attachments_per_data_row)]
-    } for attachment_type, attachment_value in attachments])
+    } for attachment_type, attachment_value, attachment_name in attachments])
     data_rows = list(dataset.data_rows())
     assert len(data_rows) == len(attachments)
     for data_row in data_rows:

From 2b8a5c60eb724c977d6f4c56ffc3d81210235755 Mon Sep 17 00:00:00 2001
From: Paul Noirel
Date: Mon, 13 Feb 2023 14:00:28 +0000
Subject: [PATCH 3/5] Add unit tests

---
 tests/integration/test_data_rows.py | 15 ++++++++++-----
 1 file changed, 10 insertions(+), 5 deletions(-)

diff --git a/tests/integration/test_data_rows.py b/tests/integration/test_data_rows.py
index b2cca83e9..b318f5808 100644
--- a/tests/integration/test_data_rows.py
+++ b/tests/integration/test_data_rows.py
@@ -621,11 +621,14 @@ def test_data_row_iteration(dataset, image_url) -> None:
 
 
 def test_data_row_attachments(dataset, image_url):
-    attachments = [("IMAGE", image_url, "attachment image"), ("TEXT", "test-text", None),
+    attachments = [("IMAGE", image_url, "attachment image"),
+                   ("TEXT", "test-text", None),
                    ("IMAGE_OVERLAY", image_url, "Overlay"), ("HTML", image_url)]
     task = dataset.create_data_rows([{
-        "row_data": image_url,
-        "external_id": "test-id",
+        "row_data":
+            image_url,
+        "external_id":
+            "test-id",
         "attachments": [{
             "type": attachment_type,
             "value": attachment_value,
@@ -653,8 +656,10 @@ def test_create_data_rows_sync_attachments(dataset, image_url):
-    attachments = [("IMAGE", image_url, "image URL"), ("TEXT", "test-text", None),
-                   ("IMAGE_OVERLAY", image_url, "Overlay"), ("HTML", image_url, None)]
+    attachments = [("IMAGE", image_url, "image URL"),
+                   ("TEXT", "test-text", None),
+                   ("IMAGE_OVERLAY", image_url, "Overlay"),
+                   ("HTML", image_url, None)]
     attachments_per_data_row = 3
     dataset.create_data_rows_sync([{
         "row_data":

From 1f7f643bf36d2ce3094eebbd8cdc9c5b369c9741 Mon Sep 17 00:00:00 2001
From: Paul Noirel
Date: Mon, 13 Feb 2023 15:14:35 +0000
Subject: [PATCH 4/5] Update Unit tests

---
 tests/integration/test_data_rows.py | 18 ++++++++++--------
 1 file changed, 10 insertions(+), 8 deletions(-)

diff --git a/tests/integration/test_data_rows.py b/tests/integration/test_data_rows.py
index b318f5808..1607c48b1 100644
--- a/tests/integration/test_data_rows.py
+++ b/tests/integration/test_data_rows.py
@@ -133,7 +133,7 @@ def test_lookup_data_rows(client, dataset):
     assert all([len(x) == 1 for x in lookup.values()])
     assert lookup[uid][0] == dr.uid
     assert lookup[uid2][0] == dr2.uid
-    #1 external id : 2 uid
+    # 1 external id : 2 uid
     dr3 = dataset.create_data_row(row_data="123", external_id=uid2)
     lookup = client.get_data_row_ids_for_external_ids([uid2])
     assert len(lookup) == 1
@@ -307,7 +307,7 @@ def test_create_data_row_with_metadata(mdo, dataset, image_url):
     assert len(metadata_fields) == 3
     assert len(metadata) == 3
     assert [m["schemaId"] for m in metadata_fields
-           ].sort() == EXPECTED_METADATA_SCHEMA_IDS
+            ].sort() == EXPECTED_METADATA_SCHEMA_IDS
     for m in metadata:
         assert mdo._parse_upsert(m)
 
@@ -331,7 +331,7 @@ def test_create_data_row_with_metadata_dict(mdo, dataset, image_url):
     assert len(metadata_fields) == 3
     assert len(metadata) == 3
     assert [m["schemaId"] for m in metadata_fields
-           ].sort() == EXPECTED_METADATA_SCHEMA_IDS
+            ].sort() == EXPECTED_METADATA_SCHEMA_IDS
     for m in metadata:
         assert mdo._parse_upsert(m)
 
@@ -388,7 +388,7 @@ def test_create_data_rows_with_metadata(mdo, dataset, image_url):
     assert len(metadata_fields) == 3
     assert len(metadata) == 3
     assert [m["schemaId"] for m in metadata_fields
-           ].sort() == EXPECTED_METADATA_SCHEMA_IDS
+            ].sort() == EXPECTED_METADATA_SCHEMA_IDS
     for m in metadata:
         assert mdo._parse_upsert(m)
 
@@ -623,7 +623,8 @@ def test_data_row_iteration(dataset, image_url) -> None:
 def test_data_row_attachments(dataset, image_url):
     attachments = [("IMAGE", image_url, "attachment image"),
                    ("TEXT", "test-text", None),
-                   ("IMAGE_OVERLAY", image_url, "Overlay"), ("HTML", image_url)]
+                   ("IMAGE_OVERLAY", image_url, "Overlay"),
+                   ("HTML", image_url, None)]
     task = dataset.create_data_rows([{
         "row_data":
             image_url,
@@ -776,7 +777,7 @@ def test_data_row_bulk_creation_with_unique_global_keys(dataset, sample_image):
     task.wait_till_done()
 
     assert {row.global_key for row in dataset.data_rows()
-           } == {global_key_1, global_key_2, global_key_3}
+            } == {global_key_1, global_key_2, global_key_3}
 
 
 def test_data_row_bulk_creation_with_same_global_keys(dataset, sample_image):
@@ -864,7 +865,7 @@ def test_data_row_bulk_creation_sync_with_unique_global_keys(
     ])
 
     assert {row.global_key for row in dataset.data_rows()
-           } == {global_key_1, global_key_2, global_key_3}
+            } == {global_key_1, global_key_2, global_key_3}
 
 
 def test_data_row_rulk_creation_sync_with_same_global_keys(
@@ -933,7 +934,8 @@ def test_create_tiled_layer(dataset, tile_content):
         **tile_content, 'media_type': 'TMS_SIMPLE'
     },
                 tile_content,
-                tile_content['row_data']  # Old way to check for backwards compatibility
+                # Old way to check for backwards compatibility
+                tile_content['row_data']
     ]
     dataset.create_data_rows_sync(examples)
     data_rows = list(dataset.data_rows())

From 385ff1c7ab552281dc88a6e34887919eeb23fb9f Mon Sep 17 00:00:00 2001
From: Paul Noirel
Date: Mon, 13 Feb 2023 15:27:42 +0000
Subject: [PATCH 5/5] Update Unit tests

---
 tests/integration/test_data_rows.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/tests/integration/test_data_rows.py b/tests/integration/test_data_rows.py
index 1607c48b1..677348934 100644
--- a/tests/integration/test_data_rows.py
+++ b/tests/integration/test_data_rows.py
@@ -307,7 +307,7 @@ def test_create_data_row_with_metadata(mdo, dataset, image_url):
     assert len(metadata_fields) == 3
     assert len(metadata) == 3
     assert [m["schemaId"] for m in metadata_fields
-            ].sort() == EXPECTED_METADATA_SCHEMA_IDS
+           ].sort() == EXPECTED_METADATA_SCHEMA_IDS
     for m in metadata:
         assert mdo._parse_upsert(m)
 
@@ -331,7 +331,7 @@ def test_create_data_row_with_metadata_dict(mdo, dataset, image_url):
     assert len(metadata_fields) == 3
     assert len(metadata) == 3
     assert [m["schemaId"] for m in metadata_fields
-            ].sort() == EXPECTED_METADATA_SCHEMA_IDS
+           ].sort() == EXPECTED_METADATA_SCHEMA_IDS
     for m in metadata:
         assert mdo._parse_upsert(m)
 
@@ -388,7 +388,7 @@ def test_create_data_rows_with_metadata(mdo, dataset, image_url):
     assert len(metadata_fields) == 3
     assert len(metadata) == 3
     assert [m["schemaId"] for m in metadata_fields
-            ].sort() == EXPECTED_METADATA_SCHEMA_IDS
+           ].sort() == EXPECTED_METADATA_SCHEMA_IDS
     for m in metadata:
         assert mdo._parse_upsert(m)
 
@@ -777,7 +777,7 @@ def test_data_row_bulk_creation_with_unique_global_keys(dataset, sample_image):
     task.wait_till_done()
 
     assert {row.global_key for row in dataset.data_rows()
-            } == {global_key_1, global_key_2, global_key_3}
+           } == {global_key_1, global_key_2, global_key_3}
 
 
 def test_data_row_bulk_creation_with_same_global_keys(dataset, sample_image):
@@ -865,7 +865,7 @@ def test_data_row_bulk_creation_sync_with_unique_global_keys(
     ])
 
     assert {row.global_key for row in dataset.data_rows()
-            } == {global_key_1, global_key_2, global_key_3}
+           } == {global_key_1, global_key_2, global_key_3}
 
 
 def test_data_row_rulk_creation_sync_with_same_global_keys(
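
A minimal usage sketch of the API change in PATCH 1/5, assuming a configured labelbox.Client and an existing data row; the API key, data row UID, and URL below are placeholders, not values from this patch series:

from labelbox import Client

client = Client(api_key="<YOUR_API_KEY>")         # placeholder API key
data_row = client.get_data_row("<DATA_ROW_UID>")  # placeholder data row UID

# Existing behaviour: attachment type and value only.
data_row.create_attachment("IMAGE", "https://example.com/reference.png")

# With attachment_name support, an optional display name can also be passed.
data_row.create_attachment("IMAGE", "https://example.com/reference.png",
                           "Reference image")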