From 521388d57f263f96f44aa6d7dd4a510d14e8e787 Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Sat, 18 Feb 2023 10:43:37 -0700
Subject: [PATCH 01/18] [PTDT-1107] Implemented is_feature_schema_archived
 client function

---
 labelbox/client.py | 47 +++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 46 insertions(+), 1 deletion(-)

diff --git a/labelbox/client.py b/labelbox/client.py
index 172184608..3ae3599d6 100644
--- a/labelbox/client.py
+++ b/labelbox/client.py
@@ -35,6 +35,7 @@
 from labelbox.schema.role import Role
 from labelbox.schema.slice import CatalogSlice
 from labelbox.schema.queue_mode import QueueMode
+from labelbox.schema.ontology import Ontology
 from labelbox.schema.media_type import MediaType, get_media_type_validation_error


@@ -55,7 +56,8 @@ def __init__(self,
                  api_key=None,
                  endpoint='https://api.labelbox.com/graphql',
                  enable_experimental=False,
-                 app_url="https://app.labelbox.com"):
+                 app_url="https://app.labelbox.com",
+                 rest_endpoint="https://api.labelbox.com/api/v1"):
         """ Creates and initializes a Labelbox Client.

         Logging is defaulted to level WARNING. To receive more verbose
@@ -95,6 +97,12 @@ def __init__(self,
             'X-User-Agent': f'python-sdk {SDK_VERSION}'
         }
         self._data_row_metadata_ontology = None
+        self.rest_endpoint = rest_endpoint
+        self.rest_endpoint_headers = {
+            "authorization": "Bearer %s" % self.api_key,
+            'X-User-Agent': 'python-sdk 0.0.0',
+            'Content-Type': 'application/json',
+        }

     @retry.Retry(predicate=retry.if_exception_type(
         labelbox.exceptions.InternalServerError,
@@ -1384,3 +1392,40 @@ def get_catalog_slice(self, slice_id) -> CatalogSlice:
         """
         res = self.execute(query_str, {'id': slice_id})
         return Entity.CatalogSlice(self, res['getSavedQuery'])
+
+    def is_feature_schema_archived(self, feature_schema_id: str, ontology_id: str) -> bool:
+        """
+        Returns true if a feature schema is archived in the specified ontology, returns false otherwise.
+
+        Args:
+            feature_schema_id (str): The ID of the feature schema
+            ontology_id (str): The ID of the ontology
+        Returns:
+            bool
+        """
+
+        ontology_endpoint = self.rest_endpoint + "/ontologies/" + ontology_id
+        response = requests.get(
+            ontology_endpoint,
+            headers=self.rest_endpoint_headers,
+        )
+
+        if response.status_code == 200:
+            feature_schema_nodes = response.json()['featureSchemaNodes']
+            tools = feature_schema_nodes['tools']
+            classifications = feature_schema_nodes['classifications']
+            relationships = feature_schema_nodes['relationships']
+            feature_schema_node_list = tools + classifications + relationships
+
+            is_feature = lambda f: f['featureSchemaId'] == feature_schema_id
+            feature_schema_node = list(filter(is_feature, feature_schema_node_list))
+            if len(feature_schema_node) > 0:
+                return feature_schema_node[0]['archived']
+            else:
+                raise labelbox.exceptions.LabelboxError(
+                    "The specified feature schema was not in the ontology.")
+
+            return True
+        elif response.status_code == 404:
+            raise labelbox.exceptions.ResourceNotFoundError(Ontology, ontology_id)
+

From ab200bcb78871d18055b65ad1fb2cf9b38dab5f1 Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Sat, 18 Feb 2023 10:53:57 -0700
Subject: [PATCH 02/18] Error handling

---
 labelbox/client.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/labelbox/client.py b/labelbox/client.py
index 3ae3599d6..2677f893d 100644
--- a/labelbox/client.py
+++ b/labelbox/client.py
@@ -1428,4 +1428,7 @@ def is_feature_schema_archived(self, feature_schema_id: str, ontology_id: str) -

             return True
         elif response.status_code == 404:
             raise labelbox.exceptions.ResourceNotFoundError(Ontology, ontology_id)
+        else:
+            raise labelbox.exceptions.LabelboxError(
+                "Failed to get the feature schema archived status.")

From d686fcb5448071eb432bc893b886385479af5a7f Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Sat, 18 Feb 2023 11:03:09 -0700
Subject: [PATCH 03/18] Ran the yapf formatter

---
 labelbox/client.py | 54 +++++++++++++++++++++++++---------------------
 1 file changed, 30 insertions(+), 24 deletions(-)

diff --git a/labelbox/client.py b/labelbox/client.py
index 2677f893d..f28b69c8b 100644
--- a/labelbox/client.py
+++ b/labelbox/client.py
@@ -157,7 +157,8 @@ def convert_value(value):
         if query is not None:
             if params is not None:
                 params = {
-                    key: convert_value(value) for key, value in params.items()
+                    key: convert_value(value)
+                    for key, value in params.items()
                 }
             data = json.dumps({
                 'query': query,
@@ -358,18 +359,18 @@ def upload_data(self,

         request_data = {
             "operations":
-                json.dumps({
-                    "variables": {
-                        "file": None,
-                        "contentLength": len(content),
-                        "sign": sign
-                    },
-                    "query":
-                        """mutation UploadFile($file: Upload!, $contentLength: Int!,
+            json.dumps({
+                "variables": {
+                    "file": None,
+                    "contentLength": len(content),
+                    "sign": sign
+                },
+                "query":
+                """mutation UploadFile($file: Upload!, $contentLength: Int!,
                             $sign: Boolean) {
                         uploadFile(file: $file, contentLength: $contentLength,
                             sign: $sign) {url filename} } """,
-                }),
+            }),
             "map": (None, json.dumps({"1": ["variables.file"]})),
         }
         response = requests.post(
@@ -378,7 +379,7 @@ def upload_data(self,
             data=request_data,
             files={
                 "1": (filename, content, content_type) if
-                     (filename and content_type) else content
+                (filename and content_type) else content
             })

         if response.status_code == 502:
@@ -666,7 +667,8 @@ def create_project(self, **kwargs) -> Project:
         elif queue_mode == QueueMode.Dataset:
             logger.warning(
                 "QueueMode.Dataset will eventually be deprecated, and is no longer "
-                "recommended for new projects. Prefer QueueMode.Batch instead.")
+                "recommended for new projects. Prefer QueueMode.Batch instead."
+            )

         return self._create(Entity.Project, {
             **kwargs,
@@ -778,7 +780,7 @@ def get_data_row_ids_for_external_ids(
                 for row in self.execute(
                         query_str,
                         {'externalId_in': external_ids[i:i + max_ids_per_request]
-                        })['externalIdsToDataRowIds']:
+                         })['externalIdsToDataRowIds']:
                     result[row['externalId']].append(row['dataRowId'])
         return result
@@ -1070,9 +1072,10 @@ def _format_failed_rows(rows: Dict[str, str],
             }
         """
         params = {
-            'globalKeyDataRowLinks': [{
-                utils.camel_case(key): value for key, value in input.items()
-            } for input in global_key_to_data_row_inputs]
+            'globalKeyDataRowLinks':
+                [{utils.camel_case(key): value
+                  for key, value in input.items()}
+                 for input in global_key_to_data_row_inputs]
         }
         assign_global_keys_to_data_rows_job = self.execute(query_str, params)
@@ -1101,8 +1104,8 @@ def _format_failed_rows(rows: Dict[str, str],
         """
         result_params = {
             "jobId":
-                assign_global_keys_to_data_rows_job["assignGlobalKeysToDataRows"
-                                                   ]["jobId"]
+                assign_global_keys_to_data_rows_job["assignGlobalKeysToDataRows"]
+                ["jobId"]
         }

         # Poll job status until finished, then retrieve results
@@ -1218,7 +1221,7 @@ def _format_failed_rows(rows: List[str],
         """
         result_params = {
             "jobId":
-                data_rows_for_global_keys_job["dataRowsForGlobalKeys"]["jobId"]
+            data_rows_for_global_keys_job["dataRowsForGlobalKeys"]["jobId"]
         }

         # Poll job status until finished, then retrieve results
@@ -1340,7 +1343,8 @@ def _format_failed_rows(rows: List[str],
             errors.extend(
                 _format_failed_rows(
                     data['notFoundGlobalKeys'],
-                    "Failed to find data row matching provided global key"))
+                    "Failed to find data row matching provided global key")
+            )
             errors.extend(
                 _format_failed_rows(
                     data['accessDeniedGlobalKeys'],
@@ -1393,7 +1397,8 @@ def get_catalog_slice(self, slice_id) -> CatalogSlice:
         res = self.execute(query_str, {'id': slice_id})
         return Entity.CatalogSlice(self, res['getSavedQuery'])

-    def is_feature_schema_archived(self, feature_schema_id: str, ontology_id: str) -> bool:
+    def is_feature_schema_archived(self, feature_schema_id: str,
+                                   ontology_id: str) -> bool:
         """
         Returns true if a feature schema is archived in the specified ontology, returns false otherwise.

@@ -1418,7 +1423,8 @@ def is_feature_schema_archived(self, feature_schema_id: str, ontology_id: str) -
             feature_schema_node_list = tools + classifications + relationships

             is_feature = lambda f: f['featureSchemaId'] == feature_schema_id
-            feature_schema_node = list(filter(is_feature, feature_schema_node_list))
+            feature_schema_node = list(
+                filter(is_feature, feature_schema_node_list))
             if len(feature_schema_node) > 0:
                 return feature_schema_node[0]['archived']
             else:
@@ -1427,8 +1433,8 @@ def is_feature_schema_archived(self, feature_schema_id: str, ontology_id: str) -

             return True
         elif response.status_code == 404:
-            raise labelbox.exceptions.ResourceNotFoundError(Ontology, ontology_id)
+            raise labelbox.exceptions.ResourceNotFoundError(
+                Ontology, ontology_id)
         else:
             raise labelbox.exceptions.LabelboxError(
                 "Failed to get the feature schema archived status.")
-

From 7ac551fb56694000bf4ff4121b7b3f81a8817abd Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Sat, 25 Feb 2023 12:33:22 -0700
Subject: [PATCH 04/18] Ran yapf formatter

---
 labelbox/client.py | 44 ++++++++++++++++++++------------------------
 1 file changed, 20 insertions(+), 24 deletions(-)

diff --git a/labelbox/client.py b/labelbox/client.py
index f28b69c8b..69d0dfb0c 100644
--- a/labelbox/client.py
+++ b/labelbox/client.py
@@ -157,8 +157,7 @@ def convert_value(value):
         if query is not None:
             if params is not None:
                 params = {
-                    key: convert_value(value)
-                    for key, value in params.items()
+                    key: convert_value(value) for key, value in params.items()
                 }
             data = json.dumps({
                 'query': query,
@@ -359,18 +358,18 @@ def upload_data(self,

         request_data = {
             "operations":
-            json.dumps({
-                "variables": {
-                    "file": None,
-                    "contentLength": len(content),
-                    "sign": sign
-                },
-                "query":
-                """mutation UploadFile($file: Upload!, $contentLength: Int!,
+                json.dumps({
+                    "variables": {
+                        "file": None,
+                        "contentLength": len(content),
+                        "sign": sign
+                    },
+                    "query":
+                        """mutation UploadFile($file: Upload!, $contentLength: Int!,
                             $sign: Boolean) {
                         uploadFile(file: $file, contentLength: $contentLength,
                             sign: $sign) {url filename} } """,
-            }),
+                }),
             "map": (None, json.dumps({"1": ["variables.file"]})),
         }
         response = requests.post(
@@ -379,7 +378,7 @@ def upload_data(self,
             data=request_data,
             files={
                 "1": (filename, content, content_type) if
-                (filename and content_type) else content
+                     (filename and content_type) else content
             })

         if response.status_code == 502:
@@ -667,8 +666,7 @@ def create_project(self, **kwargs) -> Project:
         elif queue_mode == QueueMode.Dataset:
             logger.warning(
                 "QueueMode.Dataset will eventually be deprecated, and is no longer "
-                "recommended for new projects. Prefer QueueMode.Batch instead."
-            )
+                "recommended for new projects. Prefer QueueMode.Batch instead.")

         return self._create(Entity.Project, {
             **kwargs,
@@ -780,7 +778,7 @@ def get_data_row_ids_for_external_ids(
                 for row in self.execute(
                         query_str,
                         {'externalId_in': external_ids[i:i + max_ids_per_request]
-                         })['externalIdsToDataRowIds']:
+                        })['externalIdsToDataRowIds']:
                     result[row['externalId']].append(row['dataRowId'])
         return result
@@ -1072,10 +1070,9 @@ def _format_failed_rows(rows: Dict[str, str],
             }
         """
         params = {
-            'globalKeyDataRowLinks':
-                [{utils.camel_case(key): value
-                  for key, value in input.items()}
-                 for input in global_key_to_data_row_inputs]
+            'globalKeyDataRowLinks': [{
+                utils.camel_case(key): value for key, value in input.items()
+            } for input in global_key_to_data_row_inputs]
         }
         assign_global_keys_to_data_rows_job = self.execute(query_str, params)
@@ -1104,8 +1101,8 @@ def _format_failed_rows(rows: Dict[str, str],
         """
         result_params = {
             "jobId":
-                assign_global_keys_to_data_rows_job["assignGlobalKeysToDataRows"]
-                ["jobId"]
+                assign_global_keys_to_data_rows_job["assignGlobalKeysToDataRows"
+                                                   ]["jobId"]
         }

         # Poll job status until finished, then retrieve results
@@ -1221,7 +1218,7 @@ def _format_failed_rows(rows: List[str],
         """
         result_params = {
             "jobId":
-            data_rows_for_global_keys_job["dataRowsForGlobalKeys"]["jobId"]
+                data_rows_for_global_keys_job["dataRowsForGlobalKeys"]["jobId"]
         }

         # Poll job status until finished, then retrieve results
@@ -1343,8 +1340,7 @@ def _format_failed_rows(rows: List[str],
             errors.extend(
                 _format_failed_rows(
                     data['notFoundGlobalKeys'],
-                    "Failed to find data row matching provided global key")
-            )
+                    "Failed to find data row matching provided global key"))
             errors.extend(
                 _format_failed_rows(
                     data['accessDeniedGlobalKeys'],

From 48f7392d7283d0a3b60c21a7cad5706da02ce2bd Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Wed, 1 Mar 2023 08:55:57 -0700
Subject: [PATCH 05/18] Swapped order of args

---
 labelbox/client.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/labelbox/client.py b/labelbox/client.py
index 929c78059..41b2eed2e 100644
--- a/labelbox/client.py
+++ b/labelbox/client.py
@@ -1393,8 +1393,8 @@ def get_catalog_slice(self, slice_id) -> CatalogSlice:
         res = self.execute(query_str, {'id': slice_id})
         return Entity.CatalogSlice(self, res['getSavedQuery'])

-    def is_feature_schema_archived(self, feature_schema_id: str,
-                                   ontology_id: str) -> bool:
+    def is_feature_schema_archived(self, ontology_id: str,
+                                   feature_schema_id: str) -> bool:
         """
         Returns true if a feature schema is archived in the specified ontology, returns false otherwise.

From 478c9fa389d227175b881775103d9d58f0d1c323 Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Wed, 1 Mar 2023 08:56:26 -0700
Subject: [PATCH 06/18] Ran yapf formatter

---
 labelbox/client.py | 44 ++++++++++++++++++++++++--------------------
 1 file changed, 24 insertions(+), 20 deletions(-)

diff --git a/labelbox/client.py b/labelbox/client.py
index 41b2eed2e..e045bb580 100644
--- a/labelbox/client.py
+++ b/labelbox/client.py
@@ -157,7 +157,8 @@ def convert_value(value):
         if query is not None:
             if params is not None:
                 params = {
-                    key: convert_value(value) for key, value in params.items()
+                    key: convert_value(value)
+                    for key, value in params.items()
                 }
             data = json.dumps({
                 'query': query,
@@ -358,18 +359,18 @@ def upload_data(self,

         request_data = {
             "operations":
-                json.dumps({
-                    "variables": {
-                        "file": None,
-                        "contentLength": len(content),
-                        "sign": sign
-                    },
-                    "query":
-                        """mutation UploadFile($file: Upload!, $contentLength: Int!,
+            json.dumps({
+                "variables": {
+                    "file": None,
+                    "contentLength": len(content),
+                    "sign": sign
+                },
+                "query":
+                """mutation UploadFile($file: Upload!, $contentLength: Int!,
                             $sign: Boolean) {
                         uploadFile(file: $file, contentLength: $contentLength,
                             sign: $sign) {url filename} } """,
-                }),
+            }),
             "map": (None, json.dumps({"1": ["variables.file"]})),
         }
         response = requests.post(
@@ -378,7 +379,7 @@ def upload_data(self,
             data=request_data,
             files={
                 "1": (filename, content, content_type) if
-                     (filename and content_type) else content
+                (filename and content_type) else content
             })

         if response.status_code == 502:
@@ -666,7 +667,8 @@ def create_project(self, **kwargs) -> Project:
         elif queue_mode == QueueMode.Dataset:
             logger.warning(
                 "QueueMode.Dataset will eventually be deprecated, and is no longer "
-                "recommended for new projects. Prefer QueueMode.Batch instead.")
+                "recommended for new projects. Prefer QueueMode.Batch instead."
+            )

         return self._create(Entity.Project, {
             **kwargs,
@@ -778,7 +780,7 @@ def get_data_row_ids_for_external_ids(
                 for row in self.execute(
                         query_str,
                         {'externalId_in': external_ids[i:i + max_ids_per_request]
-                        })['externalIdsToDataRowIds']:
+                         })['externalIdsToDataRowIds']:
                     result[row['externalId']].append(row['dataRowId'])
         return result
@@ -1070,9 +1072,10 @@ def _format_failed_rows(rows: Dict[str, str],
             }
         """
         params = {
-            'globalKeyDataRowLinks': [{
-                utils.camel_case(key): value for key, value in input.items()
-            } for input in global_key_to_data_row_inputs]
+            'globalKeyDataRowLinks':
+                [{utils.camel_case(key): value
+                  for key, value in input.items()}
+                 for input in global_key_to_data_row_inputs]
         }
         assign_global_keys_to_data_rows_job = self.execute(query_str, params)
@@ -1101,8 +1104,8 @@ def _format_failed_rows(rows: Dict[str, str],
         """
         result_params = {
             "jobId":
-                assign_global_keys_to_data_rows_job["assignGlobalKeysToDataRows"
-                                                   ]["jobId"]
+                assign_global_keys_to_data_rows_job["assignGlobalKeysToDataRows"]
+                ["jobId"]
         }

         # Poll job status until finished, then retrieve results
@@ -1218,7 +1221,7 @@ def _format_failed_rows(rows: List[str],
         """
         result_params = {
             "jobId":
-                data_rows_for_global_keys_job["dataRowsForGlobalKeys"]["jobId"]
+            data_rows_for_global_keys_job["dataRowsForGlobalKeys"]["jobId"]
         }

         # Poll job status until finished, then retrieve results
@@ -1340,7 +1343,8 @@ def _format_failed_rows(rows: List[str],
             errors.extend(
                 _format_failed_rows(
                     data['notFoundGlobalKeys'],
-                    "Failed to find data row matching provided global key"))
+                    "Failed to find data row matching provided global key")
+            )
             errors.extend(
                 _format_failed_rows(
                     data['accessDeniedGlobalKeys'],

From 345f016d4e6e425bf8dc0d89544bdda469f9fc60 Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Wed, 1 Mar 2023 08:58:57 -0700
Subject: [PATCH 07/18] Now returning bool

---
 labelbox/client.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/labelbox/client.py b/labelbox/client.py
index e045bb580..c5bf3e805 100644
--- a/labelbox/client.py
+++ b/labelbox/client.py
@@ -1426,7 +1426,7 @@ def is_feature_schema_archived(self, ontology_id: str,
             feature_schema_node = list(
                 filter(is_feature, feature_schema_node_list))
             if len(feature_schema_node) > 0:
-                return feature_schema_node[0]['archived']
+                return bool(feature_schema_node[0]['archived'])
             else:
                 raise labelbox.exceptions.LabelboxError(
                     "The specified feature schema was not in the ontology.")

From 436a25b413a7c6298b5bb277af4c92714ddb007d Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Wed, 1 Mar 2023 11:35:17 -0700
Subject: [PATCH 08/18] Ran linter

---
 labelbox/client.py | 44 ++++++++++++++++++++------------------------
 1 file changed, 20 insertions(+), 24 deletions(-)

diff --git a/labelbox/client.py b/labelbox/client.py
index c5bf3e805..fc1255aef 100644
--- a/labelbox/client.py
+++ b/labelbox/client.py
@@ -157,8 +157,7 @@ def convert_value(value):
         if query is not None:
             if params is not None:
                 params = {
-                    key: convert_value(value)
-                    for key, value in params.items()
+                    key: convert_value(value) for key, value in params.items()
                 }
             data = json.dumps({
                 'query': query,
@@ -359,18 +358,18 @@ def upload_data(self,

         request_data = {
             "operations":
-            json.dumps({
-                "variables": {
-                    "file": None,
-                    "contentLength": len(content),
-                    "sign": sign
-                },
-                "query":
-                """mutation UploadFile($file: Upload!, $contentLength: Int!,
+                json.dumps({
+                    "variables": {
+                        "file": None,
+                        "contentLength": len(content),
+                        "sign": sign
+                    },
+                    "query":
+                        """mutation UploadFile($file: Upload!, $contentLength: Int!,
                             $sign: Boolean) {
                         uploadFile(file: $file, contentLength: $contentLength,
                             sign: $sign) {url filename} } """,
-            }),
+                }),
             "map": (None, json.dumps({"1": ["variables.file"]})),
         }
         response = requests.post(
@@ -379,7 +378,7 @@ def upload_data(self,
             data=request_data,
             files={
                 "1": (filename, content, content_type) if
-                (filename and content_type) else content
+                     (filename and content_type) else content
             })

         if response.status_code == 502:
@@ -667,8 +666,7 @@ def create_project(self, **kwargs) -> Project:
         elif queue_mode == QueueMode.Dataset:
             logger.warning(
                 "QueueMode.Dataset will eventually be deprecated, and is no longer "
-                "recommended for new projects. Prefer QueueMode.Batch instead."
-            )
+                "recommended for new projects. Prefer QueueMode.Batch instead.")

         return self._create(Entity.Project, {
             **kwargs,
@@ -780,7 +778,7 @@ def get_data_row_ids_for_external_ids(
                 for row in self.execute(
                         query_str,
                         {'externalId_in': external_ids[i:i + max_ids_per_request]
-                         })['externalIdsToDataRowIds']:
+                        })['externalIdsToDataRowIds']:
                     result[row['externalId']].append(row['dataRowId'])
         return result
@@ -1072,10 +1070,9 @@ def _format_failed_rows(rows: Dict[str, str],
             }
         """
         params = {
-            'globalKeyDataRowLinks':
-                [{utils.camel_case(key): value
-                  for key, value in input.items()}
-                 for input in global_key_to_data_row_inputs]
+            'globalKeyDataRowLinks': [{
+                utils.camel_case(key): value for key, value in input.items()
+            } for input in global_key_to_data_row_inputs]
         }
         assign_global_keys_to_data_rows_job = self.execute(query_str, params)
@@ -1104,8 +1101,8 @@ def _format_failed_rows(rows: Dict[str, str],
         """
         result_params = {
             "jobId":
-                assign_global_keys_to_data_rows_job["assignGlobalKeysToDataRows"]
-                ["jobId"]
+                assign_global_keys_to_data_rows_job["assignGlobalKeysToDataRows"
+                                                   ]["jobId"]
         }

         # Poll job status until finished, then retrieve results
@@ -1221,7 +1218,7 @@ def _format_failed_rows(rows: List[str],
         """
         result_params = {
             "jobId":
-            data_rows_for_global_keys_job["dataRowsForGlobalKeys"]["jobId"]
+                data_rows_for_global_keys_job["dataRowsForGlobalKeys"]["jobId"]
         }

         # Poll job status until finished, then retrieve results
@@ -1343,8 +1340,7 @@ def _format_failed_rows(rows: List[str],
             errors.extend(
                 _format_failed_rows(
                     data['notFoundGlobalKeys'],
-                    "Failed to find data row matching provided global key")
-            )
+                    "Failed to find data row matching provided global key"))
             errors.extend(
                 _format_failed_rows(
                     data['accessDeniedGlobalKeys'],

From 0355561c22dca1cb089e8b8443ead79e418ceb07 Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Thu, 2 Mar 2023 14:32:52 -0700
Subject: [PATCH 09/18] removed unused headers

---
 labelbox/client.py | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)

diff --git a/labelbox/client.py b/labelbox/client.py
index 12389f8ca..4f043c466 100644
--- a/labelbox/client.py
+++ b/labelbox/client.py
@@ -99,12 +99,6 @@ def __init__(self,
             'X-User-Agent': f'python-sdk {SDK_VERSION}'
         }
         self._data_row_metadata_ontology = None
-        self.rest_endpoint = rest_endpoint
-        self.rest_endpoint_headers = {
-            "authorization": "Bearer %s" % self.api_key,
-            'X-User-Agent': 'python-sdk 0.0.0',
-            'Content-Type': 'application/json',
-        }

     @retry.Retry(predicate=retry.if_exception_type(
         labelbox.exceptions.InternalServerError,
@@ -1596,7 +1590,7 @@ def is_feature_schema_archived(self, ontology_id: str,

         ontology_endpoint = self.rest_endpoint + "/ontologies/" + ontology_id
         response = requests.get(
             ontology_endpoint,
-            headers=self.rest_endpoint_headers,
+            headers=self.headers,
         )

         if response.status_code == 200:

From f5a2c1b76462bf6fea3a2832dd40db48e02eb2dc Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Thu, 2 Mar 2023 14:34:01 -0700
Subject: [PATCH 10/18] url encode rest endpoint params

---
 labelbox/client.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/labelbox/client.py b/labelbox/client.py
index 4f043c466..e54e41e53 100644
--- a/labelbox/client.py
+++ b/labelbox/client.py
@@ -1587,7 +1587,7 @@ def is_feature_schema_archived(self, ontology_id: str,
             bool
         """

-        ontology_endpoint = self.rest_endpoint + "/ontologies/" + ontology_id
+        ontology_endpoint = self.rest_endpoint + "/ontologies/" + urllib.parse.quote(ontology_id)
         response = requests.get(
             ontology_endpoint,
             headers=self.headers,
         )

From 85f5b84467c19959a1759b1d525001496ba75681 Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Thu, 2 Mar 2023 14:34:44 -0700
Subject: [PATCH 11/18] Updated status code check

---
 labelbox/client.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/labelbox/client.py b/labelbox/client.py
index e54e41e53..a6328d40f 100644
--- a/labelbox/client.py
+++ b/labelbox/client.py
@@ -1593,7 +1593,7 @@ def is_feature_schema_archived(self, ontology_id: str,
             headers=self.headers,
         )

-        if response.status_code == 200:
+        if response.status_code == requests.codes.ok:
             feature_schema_nodes = response.json()['featureSchemaNodes']
             tools = feature_schema_nodes['tools']
             classifications = feature_schema_nodes['classifications']

From 2c0f994ddd6903c95a12cf18338708728c538826 Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Thu, 2 Mar 2023 14:35:36 -0700
Subject: [PATCH 12/18] Ran yapf formatter

---
 labelbox/client.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/labelbox/client.py b/labelbox/client.py
index a6328d40f..ca7f94698 100644
--- a/labelbox/client.py
+++ b/labelbox/client.py
@@ -1587,7 +1587,8 @@ def is_feature_schema_archived(self, ontology_id: str,
             bool
         """

-        ontology_endpoint = self.rest_endpoint + "/ontologies/" + urllib.parse.quote(ontology_id)
+        ontology_endpoint = self.rest_endpoint + "/ontologies/" + urllib.parse.quote(
+            ontology_id)
         response = requests.get(
             ontology_endpoint,
             headers=self.headers,
         )

From 5f4ca70a5b6b54939efdd2aac9683921c07410c1 Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Fri, 3 Mar 2023 13:11:41 -0700
Subject: [PATCH 13/18] Simplified feature schema filtering logic

---
 labelbox/client.py | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/labelbox/client.py b/labelbox/client.py
index ca7f94698..042e49b99 100644
--- a/labelbox/client.py
+++ b/labelbox/client.py
@@ -1600,12 +1600,13 @@ def is_feature_schema_archived(self, ontology_id: str,
             classifications = feature_schema_nodes['classifications']
             relationships = feature_schema_nodes['relationships']
             feature_schema_node_list = tools + classifications + relationships
-
-            is_feature = lambda f: f['featureSchemaId'] == feature_schema_id
-            feature_schema_node = list(
-                filter(is_feature, feature_schema_node_list))
-            if len(feature_schema_node) > 0:
-                return bool(feature_schema_node[0]['archived'])
+            filtered_feature_schema_nodes = [
+                feature_schema_node
+                for feature_schema_node in feature_schema_node_list
+                if feature_schema_node['featureSchemaId'] == feature_schema_id
+            ]
+            if filtered_feature_schema_nodes:
+                return bool(filtered_feature_schema_nodes[0]['archived'])
             else:
                 raise labelbox.exceptions.LabelboxError(
                     "The specified feature schema was not in the ontology.")

From 770674248026dfece323ac7a43e1ccb05d1e2ddf Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Wed, 8 Mar 2023 12:54:54 -0700
Subject: [PATCH 14/18] Added integration test

---
 tests/integration/test_ontology.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/tests/integration/test_ontology.py b/tests/integration/test_ontology.py
index 5fa004ebb..c537604c6 100644
--- a/tests/integration/test_ontology.py
+++ b/tests/integration/test_ontology.py
@@ -6,6 +6,13 @@
 import time


+def test_is_feature_schema_archived(client, ontology):
+    feature_schema_to_check = ontology.normalized['tools'][0]
+    result = client.is_feature_schema_archived(
+        ontology.uid, feature_schema_to_check['featureSchemaId'])
+    assert result == False
+
+
 def test_delete_tool_feature_from_ontology(client, ontology):
     feature_schema_to_delete = ontology.normalized['tools'][0]
     assert len(ontology.normalized['tools']) == 2

From 1ec3d970c12274200ea22c0e4b537c362804280f Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Wed, 8 Mar 2023 14:54:28 -0700
Subject: [PATCH 15/18] Added unhappy path integration tests

---
 tests/integration/test_ontology.py | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/tests/integration/test_ontology.py b/tests/integration/test_ontology.py
index c537604c6..e718823e1 100644
--- a/tests/integration/test_ontology.py
+++ b/tests/integration/test_ontology.py
@@ -13,6 +13,25 @@ def test_is_feature_schema_archived(client, ontology):
     assert result == False


+def test_is_feature_schema_archived_for_non_existing_feature_schema(
+        client, ontology):
+    with pytest.raises(
+            Exception,
+            match="The specified feature schema was not in the ontology"):
+        client.is_feature_schema_archived(ontology.uid,
+                                          'invalid-feature-schema-id')
+
+
+def test_is_feature_schema_archived_for_non_existing_ontology(client, ontology):
+    feature_schema_to_unarchive = ontology.normalized['tools'][0]
+    with pytest.raises(
+            Exception,
+            match="Resource 'Ontology' not found for params: 'invalid-ontology'"
+    ):
+        client.is_feature_schema_archived(
+            'invalid-ontology', feature_schema_to_unarchive['featureSchemaId'])
+
+
 def test_delete_tool_feature_from_ontology(client, ontology):
     feature_schema_to_delete = ontology.normalized['tools'][0]
     assert len(ontology.normalized['tools']) == 2

From a6395333a413d7bcc3ba0e168a87ea2d13c4e139 Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Thu, 9 Mar 2023 10:39:18 -0700
Subject: [PATCH 16/18] Updated changelog

---
 CHANGELOG.md | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0ad3fe7f5..2c2a17808 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,9 @@
 # Changelog

 # Version 3.40.0 (YYYY-MM-DD)
+* Added new client method is_feature_schema_archived
+* Added new client method unarchive_feature_schema_node
+* Added new client method delete_feature_schema_from_ontology

 ## Added
 * Insert newest changelogs here

From c578c4800432ccb84813bd1d82710b0363463df4 Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Thu, 9 Mar 2023 17:29:42 -0700
Subject: [PATCH 17/18] Added integration test test_feature_schema_is_archived

---
 labelbox/client.py                 |  7 +------
 labelbox/schema/ontology.py        | 10 ++++++++++
 tests/integration/test_ontology.py | 27 ++++++++++++---------------
 3 files changed, 23 insertions(+), 21 deletions(-)

diff --git a/labelbox/client.py b/labelbox/client.py
index 6b816037a..a15c62a37 100644
--- a/labelbox/client.py
+++ b/labelbox/client.py
@@ -36,7 +36,7 @@
 from labelbox.schema.role import Role
 from labelbox.schema.slice import CatalogSlice, ModelSlice
 from labelbox.schema.queue_mode import QueueMode
-from labelbox.schema.ontology import Ontology
+from labelbox.schema.ontology import Ontology, DeleteFeatureFromOntologyResult
 from labelbox.schema.media_type import MediaType, get_media_type_validation_error


@@ -45,11 +45,6 @@
 _LABELBOX_API_KEY = "LABELBOX_API_KEY"


-class DeleteFeatureFromOntologyResult:
-    archived: bool
-    deleted: bool
-
-
 class Client:
     """ A Labelbox client.

diff --git a/labelbox/schema/ontology.py b/labelbox/schema/ontology.py
index f487b8b6e..08d3e62a8 100644
--- a/labelbox/schema/ontology.py
+++ b/labelbox/schema/ontology.py
@@ -11,11 +11,21 @@
 from labelbox.exceptions import InconsistentOntologyException
 from labelbox.orm.db_object import DbObject
 from labelbox.orm.model import Field, Relationship
+import json

 FeatureSchemaId: Type[str] = constr(min_length=25, max_length=25)
 SchemaId: Type[str] = constr(min_length=25, max_length=25)


+class DeleteFeatureFromOntologyResult:
+    archived: bool
+    deleted: bool
+
+    def __str__(self):
+        return "<%s %s>" % (self.__class__.__name__.split(".")[-1],
+                            json.dumps(self.__dict__))
+
+
 class FeatureSchema(DbObject):
     name = Field.String("name")
     color = Field.String("name")

diff --git a/tests/integration/test_ontology.py b/tests/integration/test_ontology.py
index e718823e1..030e7386c 100644
--- a/tests/integration/test_ontology.py
+++ b/tests/integration/test_ontology.py
@@ -5,14 +5,24 @@
 import json
 import time

-
-def test_is_feature_schema_archived(client, ontology):
+def test_feature_schema_is_not_archived(client, ontology):
     feature_schema_to_check = ontology.normalized['tools'][0]
     result = client.is_feature_schema_archived(
         ontology.uid, feature_schema_to_check['featureSchemaId'])
     assert result == False


+def test_feature_schema_is_archived(client, configured_project_with_label):
+    project, _, _, label = configured_project_with_label
+    ontology = project.ontology()
+    feature_schema_id = ontology.normalized['tools'][0]['featureSchemaId']
+    result = client.delete_feature_schema_from_ontology(ontology.uid,
+                                                        feature_schema_id)
+    assert result.archived == True and result.deleted == False
+    assert client.is_feature_schema_archived(ontology.uid,
+                                             feature_schema_id) == True
+
+
 def test_is_feature_schema_archived_for_non_existing_feature_schema(
         client, ontology):
     with pytest.raises(
@@ -21,7 +31,6 @@ def test_is_feature_schema_archived_for_non_existing_feature_schema(
         client.is_feature_schema_archived(ontology.uid,
                                           'invalid-feature-schema-id')

-
 def test_is_feature_schema_archived_for_non_existing_ontology(client, ontology):
     feature_schema_to_unarchive = ontology.normalized['tools'][0]
     with pytest.raises(
@@ -31,7 +40,6 @@ def test_is_feature_schema_archived_for_non_existing_ontology(client, ontology):
         client.is_feature_schema_archived(
             'invalid-ontology', feature_schema_to_unarchive['featureSchemaId'])

-
 def test_delete_tool_feature_from_ontology(client, ontology):
     feature_schema_to_delete = ontology.normalized['tools'][0]
     assert len(ontology.normalized['tools']) == 2
@@ -42,7 +50,6 @@ def test_delete_tool_feature_from_ontology(client, ontology):
     updatedOntology = client.get_ontology(ontology.uid)
     assert len(updatedOntology.normalized['tools']) == 1

-
 @pytest.mark.skip(reason="normalized ontology contains Relationship, "
                   "which is not finalized yet. introduce this back when"
                   "Relationship feature is complete and we introduce"
@@ -51,14 +58,12 @@ def test_from_project_ontology(project) -> None:
     o = OntologyBuilder.from_project(project)
     assert o.asdict() == project.ontology().normalized

-
 point = Tool(
     tool=Tool.Type.POINT,
     name="name",
     color="#ff0000",
 )

-
 def test_deletes_an_ontology(client):
     tool = client.upsert_feature_schema(point.asdict())
     feature_schema_id = tool.normalized['featureSchemaId']
@@ -71,7 +76,6 @@ def test_deletes_an_ontology(client):

     client.delete_unused_feature_schema(feature_schema_id)

-
 def test_cant_delete_an_ontology_with_project(client):
     project = client.create_project(name="test project",
                                     media_type=MediaType.Image)
@@ -94,7 +98,6 @@ def test_cant_delete_an_ontology_with_project(client):
     client.delete_unused_ontology(ontology.uid)
     client.delete_unused_feature_schema(feature_schema_id)

-
 def test_cant_delete_an_ontology_that_doesnt_exist(client):
     with pytest.raises(
             Exception,
@@ -103,7 +106,6 @@ def test_cant_delete_an_ontology_that_doesnt_exist(client):
     ):
         client.delete_unused_ontology("doesntexist")

-
 def test_inserts_a_feature_schema_at_given_position(client):
     tool1 = {'tool': 'polygon', 'name': 'tool1', 'color': 'blue'}
     tool2 = {'tool': 'polygon', 'name': 'tool2', 'color': 'blue'}
@@ -121,7 +123,6 @@ def test_inserts_a_feature_schema_at_given_position(client):

     client.delete_unused_ontology(ontology.uid)

-
 def test_moves_already_added_feature_schema_in_ontology(client):
     tool1 = {'tool': 'polygon', 'name': 'tool1', 'color': 'blue'}
     ontology_normalized_json = {"tools": [tool1], "classifications": []}
@@ -144,7 +145,6 @@ def test_moves_already_added_feature_schema_in_ontology(client):

     client.delete_unused_ontology(ontology.uid)

-
 def test_does_not_include_used_ontologies(client):
     tool = client.upsert_feature_schema(point.asdict())
     feature_schema_id = tool.normalized['featureSchemaId']
@@ -163,14 +163,12 @@ def test_does_not_include_used_ontologies(client):
     client.delete_unused_ontology(ontology_with_project.uid)
     client.delete_unused_feature_schema(feature_schema_id)

-
 def _get_attr_stringify_json(obj, attr):
     value = getattr(obj, attr.name)
     if attr.field_type.name.lower() == "json":
         return json.dumps(value, sort_keys=True)
     return value

-
 def test_feature_schema_create_read(client, rand_gen):
     name = f"test-root-schema-{rand_gen(str)}"
     feature_schema_cat_normalized = {
@@ -199,7 +197,6 @@ def test_feature_schema_create_read(client, rand_gen):
         attr) == _get_attr_stringify_json(
             queried_feature_schema, attr)

-
 def test_ontology_create_read(client, rand_gen):
     ontology_name = f"test-ontology-{rand_gen(str)}"
     tool_name = f"test-ontology-tool-{rand_gen(str)}"

From 739d8f923de4d5e8c0e966eee8adee5405c1bc41 Mon Sep 17 00:00:00 2001
From: Tim Kerr
Date: Thu, 9 Mar 2023 17:32:26 -0700
Subject: [PATCH 18/18] Ran formatter

---
 tests/integration/test_ontology.py | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/tests/integration/test_ontology.py b/tests/integration/test_ontology.py
index 030e7386c..415cfa80e 100644
--- a/tests/integration/test_ontology.py
+++ b/tests/integration/test_ontology.py
@@ -5,6 +5,7 @@
 import json
 import time

+
 def test_feature_schema_is_not_archived(client, ontology):
     feature_schema_to_check = ontology.normalized['tools'][0]
     result = client.is_feature_schema_archived(
@@ -31,6 +32,7 @@ def test_is_feature_schema_archived_for_non_existing_feature_schema(
     client.is_feature_schema_archived(ontology.uid,
                                       'invalid-feature-schema-id')

+
 def test_is_feature_schema_archived_for_non_existing_ontology(client, ontology):
     feature_schema_to_unarchive = ontology.normalized['tools'][0]
     with pytest.raises(
@@ -40,6 +42,7 @@ def test_is_feature_schema_archived_for_non_existing_ontology(client, ontology):
         client.is_feature_schema_archived(
             'invalid-ontology', feature_schema_to_unarchive['featureSchemaId'])

+
 def test_delete_tool_feature_from_ontology(client, ontology):
     feature_schema_to_delete = ontology.normalized['tools'][0]
     assert len(ontology.normalized['tools']) == 2
@@ -50,6 +53,7 @@ def test_delete_tool_feature_from_ontology(client, ontology):
     updatedOntology = client.get_ontology(ontology.uid)
     assert len(updatedOntology.normalized['tools']) == 1

+
 @pytest.mark.skip(reason="normalized ontology contains Relationship, "
                   "which is not finalized yet. introduce this back when"
                   "Relationship feature is complete and we introduce"
@@ -58,12 +62,14 @@ def test_from_project_ontology(project) -> None:
     o = OntologyBuilder.from_project(project)
     assert o.asdict() == project.ontology().normalized

+
 point = Tool(
     tool=Tool.Type.POINT,
     name="name",
     color="#ff0000",
 )

+
 def test_deletes_an_ontology(client):
     tool = client.upsert_feature_schema(point.asdict())
     feature_schema_id = tool.normalized['featureSchemaId']
@@ -76,6 +82,7 @@ def test_deletes_an_ontology(client):

     client.delete_unused_feature_schema(feature_schema_id)

+
 def test_cant_delete_an_ontology_with_project(client):
     project = client.create_project(name="test project",
                                     media_type=MediaType.Image)
@@ -98,6 +105,7 @@ def test_cant_delete_an_ontology_with_project(client):
     client.delete_unused_ontology(ontology.uid)
     client.delete_unused_feature_schema(feature_schema_id)

+
 def test_cant_delete_an_ontology_that_doesnt_exist(client):
     with pytest.raises(
             Exception,
@@ -106,6 +114,7 @@ def test_cant_delete_an_ontology_that_doesnt_exist(client):
     ):
         client.delete_unused_ontology("doesntexist")

+
 def test_inserts_a_feature_schema_at_given_position(client):
     tool1 = {'tool': 'polygon', 'name': 'tool1', 'color': 'blue'}
     tool2 = {'tool': 'polygon', 'name': 'tool2', 'color': 'blue'}
@@ -123,6 +132,7 @@ def test_inserts_a_feature_schema_at_given_position(client):

     client.delete_unused_ontology(ontology.uid)

+
 def test_moves_already_added_feature_schema_in_ontology(client):
     tool1 = {'tool': 'polygon', 'name': 'tool1', 'color': 'blue'}
     ontology_normalized_json = {"tools": [tool1], "classifications": []}
@@ -145,6 +155,7 @@ def test_moves_already_added_feature_schema_in_ontology(client):

     client.delete_unused_ontology(ontology.uid)

+
 def test_does_not_include_used_ontologies(client):
     tool = client.upsert_feature_schema(point.asdict())
     feature_schema_id = tool.normalized['featureSchemaId']
@@ -163,12 +174,14 @@ def test_does_not_include_used_ontologies(client):
     client.delete_unused_ontology(ontology_with_project.uid)
     client.delete_unused_feature_schema(feature_schema_id)

+
 def _get_attr_stringify_json(obj, attr):
     value = getattr(obj, attr.name)
     if attr.field_type.name.lower() == "json":
         return json.dumps(value, sort_keys=True)
     return value

+
 def test_feature_schema_create_read(client, rand_gen):
     name = f"test-root-schema-{rand_gen(str)}"
     feature_schema_cat_normalized = {
@@ -197,6 +210,7 @@ def test_feature_schema_create_read(client, rand_gen):
         attr) == _get_attr_stringify_json(
             queried_feature_schema, attr)

+
 def test_ontology_create_read(client, rand_gen):
     ontology_name = f"test-ontology-{rand_gen(str)}"
     tool_name = f"test-ontology-tool-{rand_gen(str)}"
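
Usage sketch for the new client method, assuming the final state of this series: the argument order from PATCH 05 (ontology ID first, feature schema ID second) and the exceptions raised in PATCHes 01-02. The API key, ontology ID, and feature schema ID below are placeholders, not values from the patches.

from labelbox import Client
import labelbox.exceptions

client = Client(api_key="<YOUR_API_KEY>")   # placeholder key
ontology_id = "<ONTOLOGY_ID>"               # placeholder ontology ID
feature_schema_id = "<FEATURE_SCHEMA_ID>"   # placeholder feature schema ID

try:
    # Ontology ID first, feature schema ID second (order fixed in PATCH 05).
    archived = client.is_feature_schema_archived(ontology_id, feature_schema_id)
    print("archived:", archived)
except labelbox.exceptions.ResourceNotFoundError:
    # Raised on a 404 from the REST ontology endpoint: the ontology does not exist.
    print("ontology not found")
except labelbox.exceptions.LabelboxError as err:
    # Raised when the feature schema is not in the ontology, or on any other failure.
    print("lookup failed:", err)

ResourceNotFoundError subclasses LabelboxError in the SDK, so it must be caught first, as above.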
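A companion sketch of the archive-on-delete behavior exercised by test_feature_schema_is_archived in PATCH 17: deleting a feature schema that already has labeled instances archives it rather than removing it, which is_feature_schema_archived then reports. This assumes `client` from the sketch above and an `ontology` object with at least one tool in use, as in the test fixture.

# The tool is assumed to be referenced by existing labels, so the delete
# falls back to archiving instead of outright removal.
feature_schema_id = ontology.normalized['tools'][0]['featureSchemaId']
result = client.delete_feature_schema_from_ontology(ontology.uid,
                                                    feature_schema_id)
# DeleteFeatureFromOntologyResult (moved to labelbox/schema/ontology.py in
# PATCH 17) reports which action was actually taken via two booleans.
print(result)  # e.g. <DeleteFeatureFromOntologyResult {"archived": true, "deleted": false}>
if result.archived and not result.deleted:
    assert client.is_feature_schema_archived(ontology.uid, feature_schema_id)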