From fefee3a399c4484d72602e0bdc9af8d2c43ddc41 Mon Sep 17 00:00:00 2001
From: jtsodapop <67922677+jtsodapop@users.noreply.github.com>
Date: Thu, 14 Jul 2022 18:39:28 -0400
Subject: [PATCH 1/5] update to have optional include metadata field

---
 labelbox/schema/batch.py   | 17 +++++++++--------
 labelbox/schema/dataset.py | 19 +++++++++++--------
 labelbox/schema/project.py | 11 ++++++-----
 3 files changed, 26 insertions(+), 21 deletions(-)

diff --git a/labelbox/schema/batch.py b/labelbox/schema/batch.py
index b4c2373f0..8a0e75755 100644
--- a/labelbox/schema/batch.py
+++ b/labelbox/schema/batch.py
@@ -77,7 +77,7 @@ def remove_queued_data_rows(self) -> None:
                             },
                             experimental=True)
 
-    def export_data_rows(self, timeout_seconds=120) -> Generator:
+    def export_data_rows(self, timeout_seconds=120, include_metadata: bool=False) -> Generator:
         """ Returns a generator that produces all data rows that are currently
         in this batch.
 
@@ -92,22 +92,23 @@ def export_data_rows(self, timeout_seconds=120, include_metadata: bool=False) ->
             LabelboxError: if the export fails or is unable to download within the specified time.
         """
         id_param = "batchId"
-        query_str = """mutation GetBatchDataRowsExportUrlPyApi($%s: ID!)
-        {exportBatchDataRows(data:{batchId: $%s }) {downloadUrl createdAt status}}
-        """ % (id_param, id_param)
+        metadata_param = "includeMetadataInput"
+        query_str = """mutation GetBatchDataRowsExportUrlPyApi($%s: ID!, $%s: Boolean!)
+        {exportBatchDataRows(data:{batchId: $%s , includeMetadataInput: $%s}) {downloadUrl createdAt status}}
+        """ % (id_param, metadata_param, id_param, metadata_param)
         sleep_time = 2
         while True:
-            res = self.client.execute(query_str, {id_param: self.uid})
+            res = self.client.execute(query_str, {id_param: self.uid, metadata_param: include_metadata})
            res = res["exportBatchDataRows"]
             if res["status"] == "COMPLETE":
                 download_url = res["downloadUrl"]
                 response = requests.get(download_url)
                 response.raise_for_status()
                 reader = ndjson.reader(StringIO(response.text))
-                # TODO: Update result to parse metadataFields when resolver returns
                 return (Entity.DataRow(self.client, {
-                    **result, 'metadataFields': [],
-                    'customMetadata': []
+                    **result,
+                    'customMetadata': result['metadata'],
+                    'metadataFields': result['metadataFields']
                 }) for result in reader)
             elif res["status"] == "FAILED":
                 raise LabelboxError("Data row export failed.")
diff --git a/labelbox/schema/dataset.py b/labelbox/schema/dataset.py
index 9ba1d86de..d647fe9cc 100644
--- a/labelbox/schema/dataset.py
+++ b/labelbox/schema/dataset.py
@@ -413,7 +413,7 @@ def data_row_for_external_id(self, external_id) -> "DataRow":
                 external_id)
         return data_rows[0]
 
-    def export_data_rows(self, timeout_seconds=120) -> Generator:
+    def export_data_rows(self, timeout_seconds=120, include_metadata: bool=False) -> Generator:
         """ Returns a generator that produces all data rows that are currently
         attached to this dataset.
 
@@ -428,22 +428,23 @@ def export_data_rows(self, timeout_seconds=120, include_metadata: bool=False) ->
             LabelboxError: if the export fails or is unable to download within the specified time.
         """
         id_param = "datasetId"
-        query_str = """mutation GetDatasetDataRowsExportUrlPyApi($%s: ID!)
-        {exportDatasetDataRows(data:{datasetId: $%s }) {downloadUrl createdAt status}}
-        """ % (id_param, id_param)
+        metadata_param = "includeMetadataInput"
+        query_str = """mutation GetDatasetDataRowsExportUrlPyApi($%s: ID!, $%s: Boolean!)
+        {exportDatasetDataRows(data:{datasetId: $%s , includeMetadataInput: $%s}) {downloadUrl createdAt status}}
+        """ % (id_param, metadata_param, id_param, metadata_param)
         sleep_time = 2
         while True:
-            res = self.client.execute(query_str, {id_param: self.uid})
+            res = self.client.execute(query_str, {id_param: self.uid, metadata_param: include_metadata})
             res = res["exportDatasetDataRows"]
             if res["status"] == "COMPLETE":
                 download_url = res["downloadUrl"]
                 response = requests.get(download_url)
                 response.raise_for_status()
                 reader = ndjson.reader(StringIO(response.text))
-                # TODO: Update result to parse metadataFields when resolver returns
                 return (Entity.DataRow(self.client, {
-                    **result, 'metadataFields': [],
-                    'customMetadata': []
+                    **result,
+                    'customMetadata': result['metadata'],
+                    'metadataFields': result['metadataFields']
                 }) for result in reader)
             elif res["status"] == "FAILED":
                 raise LabelboxError("Data row export failed.")
@@ -457,3 +458,5 @@ def export_data_rows(self, timeout_seconds=120, include_metadata: bool=False) ->
             logger.debug("Dataset '%s' data row export, waiting for server...",
                          self.uid)
             time.sleep(sleep_time)
+
+
diff --git a/labelbox/schema/project.py b/labelbox/schema/project.py
index 34a039db7..94299b755 100644
--- a/labelbox/schema/project.py
+++ b/labelbox/schema/project.py
@@ -186,7 +186,7 @@ def labels(self, datasets=None, order_by=None) -> PaginatedCollection:
                                    ["project", "labels"], Label)
 
     def export_queued_data_rows(self,
-                                timeout_seconds=120) -> List[Dict[str, str]]:
+                                timeout_seconds=120, include_metadata: bool=False) -> List[Dict[str, str]]:
         """ Returns all data rows that are currently enqueued for this project.
 
         Args:
             timeout_seconds (float): Max waiting time, in seconds.
         Returns:
             Data row fields for all data rows in the queue as json
         Raises:
             LabelboxError: if the export fails or is unable to download within the specified time.
         """
         id_param = "projectId"
-        query_str = """mutation GetQueuedDataRowsExportUrlPyApi($%s: ID!)
-        {exportQueuedDataRows(data:{projectId: $%s }) {downloadUrl createdAt status} }
-        """ % (id_param, id_param)
+        metadata_param = "includeMetadataInput"
+        query_str = """mutation GetQueuedDataRowsExportUrlPyApi($%s: ID!, $%s: Boolean!)
+        {exportQueuedDataRows(data:{projectId: $%s , includeMetadataInput: $%s}) {downloadUrl createdAt status} }
+        """ % (id_param, metadata_param, id_param, metadata_param)
         sleep_time = 2
         while True:
-            res = self.client.execute(query_str, {id_param: self.uid})
+            res = self.client.execute(query_str, {id_param: self.uid, metadata_param: include_metadata})
             res = res["exportQueuedDataRows"]
             if res["status"] == "COMPLETE":
                 download_url = res["downloadUrl"]

From ebb0844f4ae6a3f58c014c04bfd91827b31c2d5f Mon Sep 17 00:00:00 2001
From: jtsodapop <67922677+jtsodapop@users.noreply.github.com>
Date: Thu, 14 Jul 2022 18:50:01 -0400
Subject: [PATCH 2/5] yapf

---
 labelbox/schema/batch.py   | 19 ++++++++++++-------
 labelbox/schema/dataset.py | 21 ++++++++++++---------
 labelbox/schema/project.py | 11 ++++++++---
 3 files changed, 32 insertions(+), 19 deletions(-)

diff --git a/labelbox/schema/batch.py b/labelbox/schema/batch.py
index 8a0e75755..09e56918c 100644
--- a/labelbox/schema/batch.py
+++ b/labelbox/schema/batch.py
@@ -77,7 +77,9 @@ def remove_queued_data_rows(self) -> None:
                             },
                             experimental=True)
 
-    def export_data_rows(self, timeout_seconds=120, include_metadata: bool=False) -> Generator:
+    def export_data_rows(self,
+                         timeout_seconds=120,
+                         include_metadata: bool = False) -> Generator:
         """ Returns a generator that produces all data rows that are currently
         in this batch.
 
@@ -98,18 +100,21 @@ def export_data_rows(self, timeout_seconds=120, include_metadata: bool=False) ->
         """ % (id_param, metadata_param, id_param, metadata_param)
         sleep_time = 2
         while True:
-            res = self.client.execute(query_str, {id_param: self.uid, metadata_param: include_metadata})
+            res = self.client.execute(query_str, {
+                id_param: self.uid,
+                metadata_param: include_metadata
+            })
             res = res["exportBatchDataRows"]
             if res["status"] == "COMPLETE":
                 download_url = res["downloadUrl"]
                 response = requests.get(download_url)
                 response.raise_for_status()
                 reader = ndjson.reader(StringIO(response.text))
-                return (Entity.DataRow(self.client, {
-                    **result,
-                    'customMetadata': result['metadata'],
-                    'metadataFields': result['metadataFields']
-                }) for result in reader)
+                return (Entity.DataRow(
+                    self.client, {
+                        **result, 'customMetadata': result['metadata'],
+                        'metadataFields': result['metadataFields']
+                    }) for result in reader)
             elif res["status"] == "FAILED":
                 raise LabelboxError("Data row export failed.")
 
diff --git a/labelbox/schema/dataset.py b/labelbox/schema/dataset.py
index 746faf0ae..e0ec8d8d3 100644
--- a/labelbox/schema/dataset.py
+++ b/labelbox/schema/dataset.py
@@ -462,7 +462,9 @@ def data_row_for_external_id(self, external_id) -> "DataRow":
                 external_id)
         return data_rows[0]
 
-    def export_data_rows(self, timeout_seconds=120, include_metadata: bool=False) -> Generator:
+    def export_data_rows(self,
+                         timeout_seconds=120,
+                         include_metadata: bool = False) -> Generator:
         """ Returns a generator that produces all data rows that are currently
         attached to this dataset.
 
@@ -483,18 +485,21 @@ def export_data_rows(self, timeout_seconds=120, include_metadata: bool=False) ->
         """ % (id_param, metadata_param, id_param, metadata_param)
         sleep_time = 2
         while True:
-            res = self.client.execute(query_str, {id_param: self.uid, metadata_param: include_metadata})
+            res = self.client.execute(query_str, {
+                id_param: self.uid,
+                metadata_param: include_metadata
+            })
             res = res["exportDatasetDataRows"]
             if res["status"] == "COMPLETE":
                 download_url = res["downloadUrl"]
                 response = requests.get(download_url)
                 response.raise_for_status()
                 reader = ndjson.reader(StringIO(response.text))
-                return (Entity.DataRow(self.client, {
-                    **result,
-                    'customMetadata': result['metadata'],
-                    'metadataFields': result['metadataFields']
-                }) for result in reader)
+                return (Entity.DataRow(
+                    self.client, {
+                        **result, 'customMetadata': result['metadata'],
+                        'metadataFields': result['metadataFields']
+                    }) for result in reader)
             elif res["status"] == "FAILED":
                 raise LabelboxError("Data row export failed.")
@@ -507,5 +512,3 @@ def export_data_rows(self, timeout_seconds=120, include_metadata: bool=False) ->
             logger.debug("Dataset '%s' data row export, waiting for server...",
                          self.uid)
             time.sleep(sleep_time)
-
-
diff --git a/labelbox/schema/project.py b/labelbox/schema/project.py
index 8557efb60..e391f1b54 100644
--- a/labelbox/schema/project.py
+++ b/labelbox/schema/project.py
@@ -185,8 +185,10 @@ def labels(self, datasets=None, order_by=None) -> PaginatedCollection:
         return PaginatedCollection(self.client, query_str, {id_param: self.uid},
                                    ["project", "labels"], Label)
 
-    def export_queued_data_rows(self,
-                                timeout_seconds=120, include_metadata: bool=False) -> List[Dict[str, str]]:
+    def export_queued_data_rows(
+            self,
+            timeout_seconds=120,
+            include_metadata: bool = False) -> List[Dict[str, str]]:
         """ Returns all data rows that are currently enqueued for this project.
 
         Args:
@@ -203,7 +205,10 @@ def export_queued_data_rows(self,
         """ % (id_param, metadata_param, id_param, metadata_param)
         sleep_time = 2
         while True:
-            res = self.client.execute(query_str, {id_param: self.uid, metadata_param: include_metadata})
+            res = self.client.execute(query_str, {
+                id_param: self.uid,
+                metadata_param: include_metadata
+            })
             res = res["exportQueuedDataRows"]
             if res["status"] == "COMPLETE":
                 download_url = res["downloadUrl"]

From 9bfaa639db059124bb36fa8fc800bde73e44c8f4 Mon Sep 17 00:00:00 2001
From: jtsodapop <67922677+jtsodapop@users.noreply.github.com>
Date: Fri, 15 Jul 2022 10:45:30 -0400
Subject: [PATCH 3/5] cleanup so we do not need to unpack response

---
 labelbox/schema/batch.py   |   7 +-
 labelbox/schema/dataset.py |   7 +-
 test.py                    | 216 +++++++++++++++++++++++++++++++++++++
 3 files changed, 220 insertions(+), 10 deletions(-)
 create mode 100644 test.py

diff --git a/labelbox/schema/batch.py b/labelbox/schema/batch.py
index 09e56918c..22b6d7a32 100644
--- a/labelbox/schema/batch.py
+++ b/labelbox/schema/batch.py
@@ -110,11 +110,8 @@ def export_data_rows(self,
                 response = requests.get(download_url)
                 response.raise_for_status()
                 reader = ndjson.reader(StringIO(response.text))
-                return (Entity.DataRow(
-                    self.client, {
-                        **result, 'customMetadata': result['metadata'],
-                        'metadataFields': result['metadataFields']
-                    }) for result in reader)
+                return (
+                    Entity.DataRow(self.client, result) for result in reader)
             elif res["status"] == "FAILED":
                 raise LabelboxError("Data row export failed.")
 
diff --git a/labelbox/schema/dataset.py b/labelbox/schema/dataset.py
index e0ec8d8d3..af6400bd6 100644
--- a/labelbox/schema/dataset.py
+++ b/labelbox/schema/dataset.py
@@ -495,11 +495,8 @@ def export_data_rows(self,
                 response = requests.get(download_url)
                 response.raise_for_status()
                 reader = ndjson.reader(StringIO(response.text))
-                return (Entity.DataRow(
-                    self.client, {
-                        **result, 'customMetadata': result['metadata'],
-                        'metadataFields': result['metadataFields']
-                    }) for result in reader)
+                return (
+                    Entity.DataRow(self.client, result) for result in reader)
             elif res["status"] == "FAILED":
                 raise LabelboxError("Data row export failed.")
 
diff --git a/test.py b/test.py
new file mode 100644
index 000000000..96ebdd184
--- /dev/null
+++ b/test.py
@@ -0,0 +1,216 @@
+from json import tool
+from xml.etree.ElementInclude import include
+from labelbox import Client, Project
+import time
+from labelbox.data.serialization import NDJsonConverter, LBV1Converter
+import os
+import requests
+from datetime import datetime, timezone
+from pprint import pprint
+from labelbox.orm.db_object import experimental
+from labelbox.schema.data_row import DataRow
+
+from labelbox.schema.data_row_metadata import (
+    DataRowMetadata,
+    DataRowMetadataField,
+    DeleteDataRowMetadata,
+)
+
+import uuid
+# import re
+import json
+from dataclasses import dataclass, field
+from labelbox import Tool, Classification
+from typing import List
+
+from labelbox.schema.ontology import OntologyBuilder
+
+from labelbox.schema.annotation_import import LabelImport
+# from labelbox.schema.bulk_import_request import DataRow
+from labelbox.schema.labeling_frontend import LabelingFrontend
+from labelbox.schema.ontology import Classification, OntologyBuilder, Ontology, Tool, Option
+from labelbox import Label
+
+# from labelbox.data.annotation_types.classification import ClassificationAnswer, Dropdown, Text, Radio
+# from labelbox.data.metrics.confusion_matrix import feature_confusion_matrix_metric, confusion_matrix_metric
+# from labelbox.data.annotation_types import ObjectAnnotation, ClassificationAnnotation, TextEntity, Mask
+# import numpy as np
+# from labelbox.data.serialization.labelbox_v1.label import LBV1LabelAnnotationsVideo
+
+import logging
+# logging.basicConfig(level=logging.DEBUG)
+
+
+# __________________________________________
+def cleanup_my_org():
+    from datetime import datetime, timezone
+
+    date = datetime.strptime("2022-06-01",
+                             "%Y-%m-%d").replace(tzinfo=timezone.utc)
+
+    for project in client.get_projects():
+        if project.created_at > date:
+            print(project.name)
+            project.delete()
+    for dataset in client.get_datasets():
+        if dataset.created_at > date:
+            print(dataset.name)
+            dataset.delete()
+    for model in client.get_models():
+        model.delete()
+
+
+os.system('clear')
+
+# API_KEY = os.environ.get('apikey')
+# client = Client(API_KEY, enable_experimental=True)
+
+
+# API_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VySWQiOiJjbDN1OXJ3NzIwMDlvMHl4a2ViOHhkdml0Iiwib3JnYW5pemF0aW9uSWQiOiJjbDN1OXJ3Nm4wMDluMHl4azYzczVnNjZwIiwiYXBpS2V5SWQiOiJjbDU4YWhsaHEwYzR0MHkxZjhsYzc4NWxzIiwic2VjcmV0IjoiZTdlZjg5NTE2YjExNjQ5ZDE2MGQ1MTQxMGU1ZGUwYzIiLCJpYXQiOjE2NTcwMzI3MDQsImV4cCI6MjI4ODE4NDcwNH0.uQeS5xWVokFx5qQb-IXEgTnYnDfg_sH3jGUNd5Mw8Zc"
+# client = Client(API_KEY, endpoint="https://api.lb-stage.xyz/graphql")
+
+# print(client.get_organization())
+
+#Python SDK Staging API Key 2
+
+
+
+
+API_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VySWQiOiJjbDI5M2N2OWUwMDBicnRvdTBneWgyZ3RvIiwib3JnYW5pemF0aW9uSWQiOiJjbDI5M2N2NjAwMDBhcnRvdTl0M3JhMGpzIiwiYXBpS2V5SWQiOiJjbDN2c21icWgwMDBhbjdvdWFldWMwMmcyIiwic2VjcmV0IjoiYTQ4MmZjODU4OGU5YmI0NmJhZDU2YjljZDBhZDcyZTUiLCJpYXQiOjE2NTQxMDAzMTUsImV4cCI6MjI4NTI1MjMxNX0.UJC2uF8Cu6WwBSIZGUZUY8UznP7RCRsG4ns616OFXjI"
+client = Client(API_KEY, endpoint='http://localhost:8080/graphql')
+
+# API_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VySWQiOiJjbDN1OXJ3NzIwMDlvMHl4a2ViOHhkdml0Iiwib3JnYW5pemF0aW9uSWQiOiJjbDN1OXJ3Nm4wMDluMHl4azYzczVnNjZwIiwiYXBpS2V5SWQiOiJjbDUxZWxxd3EwMGphMHl3MTZpMWdhdmdyIiwic2VjcmV0IjoiOWFlN2E1MjhiM2JkNjY2N2ZiNThhNTU4NzMwYzc3MjYiLCJpYXQiOjE2NTY2MTYzNTMsImV4cCI6MjI4Nzc2ODM1M30.Ej8Gk01QSpAV-55UEUIxiKV5Glu1uu_BryOQwBxc9HM"
+# client = Client(API_KEY, endpoint="https://api.lb-stage.xyz/graphql")
+
+start = time.time()
+project = client.get_project("cl5cwcz57000ae2ou1aoi9gld") #test queued data rows
+# rows = project.export_queued_data_rows(include_metadata=True)
+
+# dataset = client.get_dataset("cl3w2r147000qj6ou307h4045")
+# rows = dataset.export_data_rows(include_metadata=True)
+
+# project.update(queue_mode=Project.QueueMode.Batch)
+# batch_rows = [row['id'] for row in project.export_queued_data_rows()][:5000]
+# batch = project.create_batch(name="hello world", data_rows=batch_rows)
+# print(batch.uid)
+batch = list(project.batches())[0]
+# rows = batch.export_data_rows(include_metadata=True)
+rows = batch.export_data_rows()
+#batch id is ae4dbe30-03c3-11ed-8a4d-870387097bd0
+
+end = time.time()
+print(f"{end-start} seconds")
+# print(labels[-1])
+count = 0
+MAXIMUM = 5
+for row in rows:
+    # print(row.media_attributes, "\t\t", row.metadata, "\n")
+    print(row,"\n")
+    count +=1
+    if count > MAXIMUM:
+        break
+# Label.bulk_delete([label for label in project.labels()])
+
+
+
+
+# [print(f"""\n
+# {label['Media Attributes']},
+# {label['DataRow ID']},
+# {label['DataRow Metadata']}
+# """) for label in labels]
+# bbox = [0,1,2,3]
+# annotations = []
+
+# for datarow in project.export_queued_data_rows():
+#     annotations.append({
+#         "uuid": str(uuid.uuid4()),
+#         "name": "bbox",
+#         "dataRow": {
+#             "id": datarow['id']
+#         },
+#         "bbox": {
+#             "left": bbox[0],
+#             "top": bbox[1],
+#             "height": bbox[2],
+#             "width": bbox[3]
+#         }
+#     })
+
+# import_annotations = LabelImport.create_from_objects(client=client, project_id = project.uid, name=f"import {str(uuid.uuid4())}", labels=annotations)
+# import_annotations.wait_until_done()
+# print("\nthis is complete")
+
+
+
+# ____________________________________________________________________________________
+
+# editor = next(client.get_labeling_frontends(where = LabelingFrontend.name == 'editor'))
+
+
+# alt = {
+#     "tools": [
+#         {
+#             "tool": "polygon",
+#             "name": "jyjyjyy33",
+#             "color": "#1CE6FF",
+#             "label": "pgon",
+#             "classifications": []
+#         },
+#         {
+#             "tool": "rectangle",
+#             "name": "jyjyjyy33",
+#             "color": "#FF34FF",
+#             "label": "bbox",
+#             "classifications": [
+#                 {
+#                     "type": "radio",
+#                     "name": "radio_sub",
+#                     "instructions": "radio sub",
+#                     "options": [
+#                         {
+#                             "value": "ans_1",
+#                             "label": "ans 1",
+#                             "options": []
+#                         },
+#                         {
+#                             "value": "ans_12",
+#                             "label": "ans 12",
+#                             "options": []
+#                         }
+#                     ]
+#                 },
+#                 {
+#                     "type": "text",
+#                     "name": "text_sub",
+#                     "instructions": "text sub",
+#                     "uiMode": "hotkey",
+#                     "options": []
+#                 }
+#             ]
+#         },
+#         {
+#             "tool": "polygon",
+#             "name": "pgon",
+#             "color": "#FF4A46",
+#             "label": "pgon",
+#             "classifications": []
+#         }
+#     ],
+#     "relationships": [],
+#     "classifications": [
+#         {
+#             "type": "text",
+#             "name": "bbox",
+#             "instructions": "bbox",
+#             "uiMode": "hotkey",
+#             "scope": "global",
+#             "options": []
+#         }
+#     ],
+# }
+# project.setup(editor, alt)
+
+
+
+

From a74db7a007aec7e83440a62cd757ab0e2e48b42d Mon Sep 17 00:00:00 2001
From: jtsodapop <67922677+jtsodapop@users.noreply.github.com>
Date: Fri, 15 Jul 2022 10:59:43 -0400
Subject: [PATCH 4/5] yapf

From daded19771e5180668c584d734a845318cedd914 Mon Sep 17 00:00:00 2001
From: jtsodapop <67922677+jtsodapop@users.noreply.github.com>
Date: Fri, 15 Jul 2022 11:00:48 -0400
Subject: [PATCH 5/5] Delete test.py

---
 test.py | 216 --------------------------------------------------------
 1 file changed, 216 deletions(-)
 delete mode 100644 test.py

diff --git a/test.py b/test.py
deleted file mode 100644
index 96ebdd184..000000000
--- a/test.py
+++ /dev/null
@@ -1,216 +0,0 @@
-from json import tool
-from xml.etree.ElementInclude import include
-from labelbox import Client, Project
-import time
-from labelbox.data.serialization import NDJsonConverter, LBV1Converter
-import os
-import requests
-from datetime import datetime, timezone
-from pprint import pprint
-from labelbox.orm.db_object import experimental
-from labelbox.schema.data_row import DataRow
-
-from labelbox.schema.data_row_metadata import (
-    DataRowMetadata,
-    DataRowMetadataField,
-    DeleteDataRowMetadata,
-)
-
-import uuid
-# import re
-import json
-from dataclasses import dataclass, field
-from labelbox import Tool, Classification
-from typing import List
-
-from labelbox.schema.ontology import OntologyBuilder
-
-from labelbox.schema.annotation_import import LabelImport
-# from labelbox.schema.bulk_import_request import DataRow
-from labelbox.schema.labeling_frontend import LabelingFrontend
-from labelbox.schema.ontology import Classification, OntologyBuilder, Ontology, Tool, Option
-from labelbox import Label
-
-# from labelbox.data.annotation_types.classification import ClassificationAnswer, Dropdown, Text, Radio
-# from labelbox.data.metrics.confusion_matrix import feature_confusion_matrix_metric, confusion_matrix_metric
-# from labelbox.data.annotation_types import ObjectAnnotation, ClassificationAnnotation, TextEntity, Mask
-# import numpy as np
-# from labelbox.data.serialization.labelbox_v1.label import LBV1LabelAnnotationsVideo
-
-import logging
-# logging.basicConfig(level=logging.DEBUG)
-
-
-# __________________________________________
-def cleanup_my_org():
-    from datetime import datetime, timezone
-
-    date = datetime.strptime("2022-06-01",
-                             "%Y-%m-%d").replace(tzinfo=timezone.utc)
-
-    for project in client.get_projects():
-        if project.created_at > date:
-            print(project.name)
-            project.delete()
-    for dataset in client.get_datasets():
-        if dataset.created_at > date:
-            print(dataset.name)
-            dataset.delete()
-    for model in client.get_models():
-        model.delete()
-
-
-os.system('clear')
-
-# API_KEY = os.environ.get('apikey')
-# client = Client(API_KEY, enable_experimental=True)
-
-
-# API_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VySWQiOiJjbDN1OXJ3NzIwMDlvMHl4a2ViOHhkdml0Iiwib3JnYW5pemF0aW9uSWQiOiJjbDN1OXJ3Nm4wMDluMHl4azYzczVnNjZwIiwiYXBpS2V5SWQiOiJjbDU4YWhsaHEwYzR0MHkxZjhsYzc4NWxzIiwic2VjcmV0IjoiZTdlZjg5NTE2YjExNjQ5ZDE2MGQ1MTQxMGU1ZGUwYzIiLCJpYXQiOjE2NTcwMzI3MDQsImV4cCI6MjI4ODE4NDcwNH0.uQeS5xWVokFx5qQb-IXEgTnYnDfg_sH3jGUNd5Mw8Zc"
-# client = Client(API_KEY, endpoint="https://api.lb-stage.xyz/graphql")
-
-# print(client.get_organization())
-
-#Python SDK Staging API Key 2
-
-
-
-
-API_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VySWQiOiJjbDI5M2N2OWUwMDBicnRvdTBneWgyZ3RvIiwib3JnYW5pemF0aW9uSWQiOiJjbDI5M2N2NjAwMDBhcnRvdTl0M3JhMGpzIiwiYXBpS2V5SWQiOiJjbDN2c21icWgwMDBhbjdvdWFldWMwMmcyIiwic2VjcmV0IjoiYTQ4MmZjODU4OGU5YmI0NmJhZDU2YjljZDBhZDcyZTUiLCJpYXQiOjE2NTQxMDAzMTUsImV4cCI6MjI4NTI1MjMxNX0.UJC2uF8Cu6WwBSIZGUZUY8UznP7RCRsG4ns616OFXjI"
-client = Client(API_KEY, endpoint='http://localhost:8080/graphql')
-
-# API_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VySWQiOiJjbDN1OXJ3NzIwMDlvMHl4a2ViOHhkdml0Iiwib3JnYW5pemF0aW9uSWQiOiJjbDN1OXJ3Nm4wMDluMHl4azYzczVnNjZwIiwiYXBpS2V5SWQiOiJjbDUxZWxxd3EwMGphMHl3MTZpMWdhdmdyIiwic2VjcmV0IjoiOWFlN2E1MjhiM2JkNjY2N2ZiNThhNTU4NzMwYzc3MjYiLCJpYXQiOjE2NTY2MTYzNTMsImV4cCI6MjI4Nzc2ODM1M30.Ej8Gk01QSpAV-55UEUIxiKV5Glu1uu_BryOQwBxc9HM"
-# client = Client(API_KEY, endpoint="https://api.lb-stage.xyz/graphql")
-
-start = time.time()
-project = client.get_project("cl5cwcz57000ae2ou1aoi9gld") #test queued data rows
-# rows = project.export_queued_data_rows(include_metadata=True)
-
-# dataset = client.get_dataset("cl3w2r147000qj6ou307h4045")
-# rows = dataset.export_data_rows(include_metadata=True)
-
-# project.update(queue_mode=Project.QueueMode.Batch)
-# batch_rows = [row['id'] for row in project.export_queued_data_rows()][:5000]
-# batch = project.create_batch(name="hello world", data_rows=batch_rows)
-# print(batch.uid)
-batch = list(project.batches())[0]
-# rows = batch.export_data_rows(include_metadata=True)
-rows = batch.export_data_rows()
-#batch id is ae4dbe30-03c3-11ed-8a4d-870387097bd0
-
-end = time.time()
-print(f"{end-start} seconds")
-# print(labels[-1])
-count = 0
-MAXIMUM = 5
-for row in rows:
-    # print(row.media_attributes, "\t\t", row.metadata, "\n")
-    print(row,"\n")
-    count +=1
-    if count > MAXIMUM:
-        break
-# Label.bulk_delete([label for label in project.labels()])
-
-
-
-
-# [print(f"""\n
-# {label['Media Attributes']},
-# {label['DataRow ID']},
-# {label['DataRow Metadata']}
-# """) for label in labels]
-# bbox = [0,1,2,3]
-# annotations = []
-
-# for datarow in project.export_queued_data_rows():
-#     annotations.append({
-#         "uuid": str(uuid.uuid4()),
-#         "name": "bbox",
-#         "dataRow": {
-#             "id": datarow['id']
-#         },
-#         "bbox": {
-#             "left": bbox[0],
-#             "top": bbox[1],
-#             "height": bbox[2],
-#             "width": bbox[3]
-#         }
-#     })
-
-# import_annotations = LabelImport.create_from_objects(client=client, project_id = project.uid, name=f"import {str(uuid.uuid4())}", labels=annotations)
-# import_annotations.wait_until_done()
-# print("\nthis is complete")
-
-
-
-# ____________________________________________________________________________________
-
-# editor = next(client.get_labeling_frontends(where = LabelingFrontend.name == 'editor'))
-
-
-# alt = {
-#     "tools": [
-#         {
-#             "tool": "polygon",
-#             "name": "jyjyjyy33",
-#             "color": "#1CE6FF",
-#             "label": "pgon",
-#             "classifications": []
-#         },
-#         {
-#             "tool": "rectangle",
-#             "name": "jyjyjyy33",
-#             "color": "#FF34FF",
-#             "label": "bbox",
-#             "classifications": [
-#                 {
-#                     "type": "radio",
-#                     "name": "radio_sub",
-#                     "instructions": "radio sub",
-#                     "options": [
-#                         {
-#                             "value": "ans_1",
-#                             "label": "ans 1",
-#                             "options": []
-#                         },
-#                         {
-#                             "value": "ans_12",
-#                             "label": "ans 12",
-#                             "options": []
-#                         }
-#                     ]
-#                 },
-#                 {
-#                     "type": "text",
-#                     "name": "text_sub",
-#                     "instructions": "text sub",
-#                     "uiMode": "hotkey",
-#                     "options": []
-#                 }
-#             ]
-#         },
-#         {
-#             "tool": "polygon",
-#             "name": "pgon",
-#             "color": "#FF4A46",
-#             "label": "pgon",
-#             "classifications": []
-#         }
-#     ],
-#     "relationships": [],
-#     "classifications": [
-#         {
-#             "type": "text",
-#             "name": "bbox",
-#             "instructions": "bbox",
-#             "uiMode": "hotkey",
-#             "scope": "global",
-#             "options": []
-#         }
-#     ],
-# }
-# project.setup(editor, alt)
-
-
-
-