From 92ac33d31497837000d51f8d967bc2ceaf66f3d8 Mon Sep 17 00:00:00 2001 From: Vaghinak Basentsyan Date: Tue, 10 May 2022 11:46:49 +0400 Subject: [PATCH 1/3] 4.3.4dev1 initial changes --- src/superannotate/lib/core/entities/base.py | 1 + src/superannotate/lib/core/entities/items.py | 7 +++++-- src/superannotate/lib/core/enums.py | 6 ++++++ src/superannotate/lib/core/usecases/items.py | 14 ++++++-------- src/superannotate/lib/core/usecases/models.py | 4 ++-- src/superannotate/lib/core/usecases/projects.py | 1 + src/superannotate/lib/infrastructure/services.py | 5 +++++ src/superannotate/version.py | 3 +-- tests/integration/items/test_get_item_metadata.py | 9 +++++---- tests/integration/items/test_saqul_query.py | 10 ++++++---- tests/integration/projects/test_clone_project.py | 2 ++ 11 files changed, 40 insertions(+), 22 deletions(-) diff --git a/src/superannotate/lib/core/entities/base.py b/src/superannotate/lib/core/entities/base.py index 52bdc4c51..1ee41d156 100644 --- a/src/superannotate/lib/core/entities/base.py +++ b/src/superannotate/lib/core/entities/base.py @@ -44,6 +44,7 @@ class BaseEntity(TimedBaseModel): entropy_value: Optional[float] = Field(description="Priority score of given item") createdAt: str = Field(description="Date of creation") updatedAt: str = Field(description="Update date") + is_pinned: Optional[bool] class Config: extra = Extra.allow diff --git a/src/superannotate/lib/core/entities/items.py b/src/superannotate/lib/core/entities/items.py index 599a16b16..5b5d1f975 100644 --- a/src/superannotate/lib/core/entities/items.py +++ b/src/superannotate/lib/core/entities/items.py @@ -1,6 +1,7 @@ from typing import Optional from lib.core.entities.base import BaseEntity +from lib.core.enums import ApprovalStatus from lib.core.enums import SegmentationStatus from pydantic import Extra from pydantic import Field @@ -11,7 +12,7 @@ class Config: extra = Extra.allow def add_path(self, project_name: str, folder_name: str): - self.path = 
f"{project_name}{f'/{folder_name}' if folder_name != 'root' else ''}/{self.name}" + self.path = f"{project_name}{f'/{folder_name}' if folder_name != 'root' else ''}" return self @staticmethod @@ -30,13 +31,15 @@ class TmpImageEntity(Entity): segmentation_status: Optional[SegmentationStatus] = Field( SegmentationStatus.NOT_STARTED ) - approval_status: Optional[int] = Field(None) + approval_status: Optional[ApprovalStatus] = Field(None) class Config: extra = Extra.ignore class VideoEntity(Entity): + approval_status: Optional[ApprovalStatus] = Field(None) + class Config: extra = Extra.ignore diff --git a/src/superannotate/lib/core/enums.py b/src/superannotate/lib/core/enums.py index dca8ded74..12c250575 100644 --- a/src/superannotate/lib/core/enums.py +++ b/src/superannotate/lib/core/enums.py @@ -127,6 +127,12 @@ class SegmentationStatus(BaseTitledEnum): FAILED = "Failed", 4 +class ApprovalStatus(BaseTitledEnum): + NONE = None, 0 + DISAPPROVED = "disapproved", 1 + APPROVED = "approved", 2 + + class ProjectState(BaseTitledEnum): NOT_SYNCED = "Not synced", 1 SYNCED = "Synced", 2 diff --git a/src/superannotate/lib/core/usecases/items.py b/src/superannotate/lib/core/usecases/items.py index 0ee456f0a..a98f0da9a 100644 --- a/src/superannotate/lib/core/usecases/items.py +++ b/src/superannotate/lib/core/usecases/items.py @@ -9,7 +9,6 @@ from lib.core.entities import Entity from lib.core.entities import FolderEntity from lib.core.entities import ProjectEntity -from lib.core.entities import TmpBaseEntity from lib.core.entities import TmpImageEntity from lib.core.entities import VideoEntity from lib.core.exceptions import AppException @@ -20,7 +19,6 @@ from lib.core.response import Response from lib.core.serviceproviders import SuperannotateServiceProvider from lib.core.usecases.base import BaseReportableUseCae -from pydantic import parse_obj_as class GetItem(BaseReportableUseCae): @@ -113,12 +111,12 @@ def execute(self) -> Response: folder_id=None if self._folder.name == "root" 
else self._folder.uuid, ) if service_response.ok: - data = parse_obj_as( - List[TmpBaseEntity], - [Entity.map_fields(i) for i in service_response.data], - ) - for i, item in enumerate(data): - data[i] = GetItem.serialize_entity(item, self._project) + data = [] + for i, item in enumerate(service_response.data): + tmp_item = GetItem.serialize_entity(Entity(**Entity.map_fields(item)), self._project) + folder_path = f"{'/' + item['folder_name'] if not item['is_root_folder'] else ''}" + tmp_item.path = f"{self._project.name}" + folder_path + data.append(tmp_item) self._response.data = data else: self._response.errors = service_response.data diff --git a/src/superannotate/lib/core/usecases/models.py b/src/superannotate/lib/core/usecases/models.py index 9a2861811..581042161 100644 --- a/src/superannotate/lib/core/usecases/models.py +++ b/src/superannotate/lib/core/usecases/models.py @@ -74,8 +74,8 @@ def validate_fuse(self): def validate_folder_names(self): if self._folder_names: condition = ( - Condition("team_id", self._project.team_id, EQ) & - Condition("project_id", self._project.id, EQ) + Condition("team_id", self._project.team_id, EQ) + & Condition("project_id", self._project.id, EQ) ) existing_folders = {folder.name for folder in self._folders.get_all(condition)} folder_names_set = set(self._folder_names) diff --git a/src/superannotate/lib/core/usecases/projects.py b/src/superannotate/lib/core/usecases/projects.py index fa29c5e1d..def0f7466 100644 --- a/src/superannotate/lib/core/usecases/projects.py +++ b/src/superannotate/lib/core/usecases/projects.py @@ -546,6 +546,7 @@ def execute(self): self._project_to_create.upload_state = ( constances.UploadState.INITIAL.value ) + self._project_to_create.status = constances.ProjectStatus.NotStarted.value project = self._projects.insert(self._project_to_create) self.reporter.log_info( f"Created project {self._project_to_create.name} with type" diff --git a/src/superannotate/lib/infrastructure/services.py 
b/src/superannotate/lib/infrastructure/services.py index 9f5cf8552..796c32976 100644 --- a/src/superannotate/lib/infrastructure/services.py +++ b/src/superannotate/lib/infrastructure/services.py @@ -1075,6 +1075,10 @@ def get_annotations( reporter: Reporter, ) -> List[dict]: import nest_asyncio + import platform + + if platform.system().lower() == 'windows': + asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) nest_asyncio.apply() @@ -1140,6 +1144,7 @@ def saqul_query( params = { "team_id": team_id, "project_id": project_id, + "includeFolderNames": True } if folder_id: params["folder_id"] = folder_id diff --git a/src/superannotate/version.py b/src/superannotate/version.py index 300a46621..356d66d4d 100644 --- a/src/superannotate/version.py +++ b/src/superannotate/version.py @@ -1,2 +1 @@ -__version__ = "4.3.3b1" - +__version__ = "4.3.4dev1" diff --git a/tests/integration/items/test_get_item_metadata.py b/tests/integration/items/test_get_item_metadata.py index 1dff00cd9..b7a850126 100644 --- a/tests/integration/items/test_get_item_metadata.py +++ b/tests/integration/items/test_get_item_metadata.py @@ -27,10 +27,11 @@ def test_get_item_metadata(self): self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" ) item_metadata = sa.get_item_metadata(self.PROJECT_NAME, self.IMAGE_NAME) - assert item_metadata["path"] == f"{self.PROJECT_NAME}/{self.IMAGE_NAME}" + assert item_metadata["path"] == f"{self.PROJECT_NAME}" assert item_metadata["prediction_status"] == "NotStarted" assert item_metadata["segmentation_status"] == None assert item_metadata["annotation_status"] == "InProgress" + assert item_metadata["approval_status"] == None def test_attached_items_paths(self): sa.attach_image_urls_to_project(self.PROJECT_NAME, self.scv_path) @@ -38,7 +39,7 @@ def test_attached_items_paths(self): sa.assign_images(self.PROJECT_NAME, [self.ATTACHED_IMAGE_NAME], "shab.prog@gmail.com") item = sa.get_item_metadata(self.PROJECT_NAME, self.ATTACHED_IMAGE_NAME) 
assert item["url"] == 'https://drive.google.com/uc?export=download&id=1vwfCpTzcjxoEA4hhDxqapPOVvLVeS7ZS' - assert item["path"] == f"{self.PROJECT_NAME}/{self.ATTACHED_IMAGE_NAME}" + assert item["path"] == f"{self.PROJECT_NAME}" class TestGetEntityMetadataPixel(BaseTestCase): @@ -57,7 +58,7 @@ def test_get_item_metadata(self): self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" ) item_metadata = sa.get_item_metadata(self.PROJECT_NAME, self.IMAGE_NAME) - assert item_metadata["path"] == f"{self.PROJECT_NAME}/{self.IMAGE_NAME}" + assert item_metadata["path"] == f"{self.PROJECT_NAME}" assert item_metadata["prediction_status"] == "NotStarted" assert item_metadata["segmentation_status"] == "NotStarted" assert item_metadata["annotation_status"] == "InProgress" @@ -84,6 +85,6 @@ def test_get_item_metadata(self): ] ) item_metadata = sa.get_item_metadata(self.PROJECT_NAME, self.ITEM_NAME) - assert item_metadata["path"] == f"{self.PROJECT_NAME}/{self.ITEM_NAME}" + assert item_metadata["path"] == f"{self.PROJECT_NAME}" assert "prediction_status" not in item_metadata assert "segmentation_status" not in item_metadata diff --git a/tests/integration/items/test_saqul_query.py b/tests/integration/items/test_saqul_query.py index c4b0c1ff7..2d19e2394 100644 --- a/tests/integration/items/test_saqul_query.py +++ b/tests/integration/items/test_saqul_query.py @@ -9,6 +9,7 @@ class TestEntitiesSearchVector(BaseTestCase): PROJECT_NAME = "TestEntitiesSearchVector" PROJECT_DESCRIPTION = "TestEntitiesSearchVector" PROJECT_TYPE = "Vector" + FOLDER_NAME = "test_folder" TEST_FOLDER_PATH = "data_set/sample_project_vector" TEST_QUERY = "instance(type =bbox )" TEST_INVALID_QUERY = "!instance(type =bbox )!" 
@@ -18,19 +19,20 @@ def folder_path(self): return os.path.join(Path(__file__).parent.parent.parent, self.TEST_FOLDER_PATH) def test_query(self): + sa.create_folder(self.PROJECT_NAME, self.FOLDER_NAME) sa.upload_images_from_folder_to_project( - self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" + f"{self.PROJECT_NAME}/{self.FOLDER_NAME}", self.folder_path, annotation_status="InProgress" ) sa.create_annotation_classes_from_classes_json( self.PROJECT_NAME, f"{self.folder_path}/classes/classes.json" ) _, _, _ = sa.upload_annotations_from_folder_to_project( - self.PROJECT_NAME, self.folder_path + f"{self.PROJECT_NAME}/{self.FOLDER_NAME}", self.folder_path ) - entities = sa.query(self.PROJECT_NAME, self.TEST_QUERY) + entities = sa.query(f"{self.PROJECT_NAME}/{self.FOLDER_NAME}", self.TEST_QUERY) self.assertEqual(len(entities), 1) - assert all([entity["path"] is None for entity in entities]) + assert all([entity["path"] == f"{self.PROJECT_NAME}/{self.FOLDER_NAME}" for entity in entities]) def test_validate_saqul_query(self): try: diff --git a/tests/integration/projects/test_clone_project.py b/tests/integration/projects/test_clone_project.py index c4f77841c..1942a906e 100644 --- a/tests/integration/projects/test_clone_project.py +++ b/tests/integration/projects/test_clone_project.py @@ -107,6 +107,7 @@ def test_create_like_project(self): "tall", ) self.assertEqual(ann_classes[0]["color"], "#FFAAFF") + assert new_project["status"], constances.ProjectStatus.NotStarted.name class TestCloneProjectAttachedUrls(TestCase): @@ -161,3 +162,4 @@ def test_create_like_project(self): self.assertEqual(ann_classes[0]["color"], "#FFAAFF") self.assertEqual(ann_classes[0]["type"], "object") self.assertIn("Workflow copy is deprecated for Document projects.", self._caplog.text) + assert new_project["status"], constances.ProjectStatus.NotStarted.name \ No newline at end of file From 786eb9cf7e07c37bebc5ef95b946816f57f9f588 Mon Sep 17 00:00:00 2001 From: Vaghinak Basentsyan 
Date: Mon, 16 May 2022 15:28:47 +0400 Subject: [PATCH 2/3] changed video handeling --- docs/source/superannotate.sdk.rst | 56 +- docs/source/tutorial.sdk.rst | 10 - pytest.ini | 2 +- requirements_dev.txt | 2 +- requirements_extra.txt | 3 +- src/superannotate/__init__.py | 18 +- src/superannotate/lib/app/common.py | 82 --- src/superannotate/lib/app/helpers.py | 71 --- .../lib/app/input_converters/conversion.py | 2 +- .../lib/app/interface/cli_interface.py | 28 +- .../lib/app/interface/sdk_interface.py | 545 ++++-------------- src/superannotate/lib/app/interface/types.py | 12 - .../lib/app/mixp/utils/parsers.py | 108 +--- src/superannotate/lib/app/serializers.py | 64 -- src/superannotate/lib/core/data_handlers.py | 53 +- src/superannotate/lib/core/entities/base.py | 1 - src/superannotate/lib/core/entities/items.py | 5 +- src/superannotate/lib/core/enums.py | 8 + src/superannotate/lib/core/helpers.py | 8 - src/superannotate/lib/core/reporter.py | 36 ++ src/superannotate/lib/core/service_types.py | 4 - .../lib/core/serviceproviders.py | 45 +- .../lib/core/usecases/annotations.py | 153 ++++- src/superannotate/lib/core/usecases/images.py | 236 -------- src/superannotate/lib/core/usecases/items.py | 4 +- src/superannotate/lib/core/usecases/models.py | 68 +-- .../lib/core/usecases/projects.py | 31 - .../lib/infrastructure/controller.py | 116 ++-- .../lib/infrastructure/services.py | 143 ++--- .../lib/infrastructure/stream_data_handler.py | 90 ++- src/superannotate/version.py | 2 +- .../video.mp4.json | 154 +---- .../annotations/test_download_annotations.py | 63 ++ .../annotations/test_get_annotations.py | 22 +- .../test_get_annotations_per_frame.py | 2 +- .../test_text_annotation_upload.py | 6 +- .../test_video_annotation_upload.py | 59 +- .../items/test_get_item_metadata.py | 8 - .../test_add_contributors_to_project.py | 34 -- .../projects/test_basic_project.py | 89 --- .../projects/test_clone_project.py | 2 +- tests/integration/test_assign_images.py | 144 ----- 
.../integration/test_attach_document_urls.py | 55 -- tests/integration/test_attach_video_urls.py | 85 --- tests/integration/test_cli.py | 148 ++--- .../integration/test_create_from_full_info.py | 48 -- .../test_depricated_functions_document.py | 22 - .../test_depricated_functions_video.py | 22 - tests/integration/test_export_upload_s3.py | 62 +- tests/integration/test_interface.py | 8 - .../integration/test_validate_upload_state.py | 57 -- 51 files changed, 829 insertions(+), 2267 deletions(-) delete mode 100644 src/superannotate/lib/core/helpers.py create mode 100644 tests/integration/annotations/test_download_annotations.py delete mode 100644 tests/integration/test_assign_images.py delete mode 100644 tests/integration/test_attach_document_urls.py delete mode 100644 tests/integration/test_attach_video_urls.py delete mode 100644 tests/integration/test_create_from_full_info.py delete mode 100644 tests/integration/test_validate_upload_state.py diff --git a/docs/source/superannotate.sdk.rst b/docs/source/superannotate.sdk.rst index 588af30ec..224765118 100644 --- a/docs/source/superannotate.sdk.rst +++ b/docs/source/superannotate.sdk.rst @@ -34,8 +34,6 @@ ________ .. autofunction:: superannotate.create_folder .. autofunction:: superannotate.delete_folders .. autofunction:: superannotate.upload_images_to_project -.. autofunction:: superannotate.attach_image_urls_to_project -.. autofunction:: superannotate.attach_document_urls_to_project .. autofunction:: superannotate.attach_items_from_integrated_storage .. autofunction:: superannotate.upload_image_to_project .. autofunction:: superannotate.delete_annotations @@ -43,11 +41,9 @@ ________ .. autofunction:: superannotate.upload_images_from_folder_to_project .. autofunction:: superannotate.upload_video_to_project .. autofunction:: superannotate.upload_videos_from_folder_to_project -.. autofunction:: superannotate.attach_video_urls_to_project .. _ref_upload_annotations_from_folder_to_project: .. 
autofunction:: superannotate.upload_annotations_from_folder_to_project .. autofunction:: superannotate.upload_preannotations_from_folder_to_project -.. autofunction:: superannotate.share_project .. autofunction:: superannotate.add_contributors_to_project .. autofunction:: superannotate.get_project_settings .. autofunction:: superannotate.set_project_default_image_quality_in_editor @@ -73,6 +69,7 @@ ______ .. autofunction:: superannotate.query .. autofunction:: superannotate.search_items +.. autofunction:: superannotate.download_annotations .. autofunction:: superannotate.attach_items .. autofunction:: superannotate.copy_items .. autofunction:: superannotate.move_items @@ -92,8 +89,6 @@ ______ .. autofunction:: superannotate.download_image_annotations .. autofunction:: superannotate.upload_image_annotations .. autofunction:: superannotate.copy_image -.. autofunction:: superannotate.copy_images -.. autofunction:: superannotate.move_images .. autofunction:: superannotate.pin_image .. autofunction:: superannotate.assign_images .. autofunction:: superannotate.delete_images @@ -159,11 +154,26 @@ Project metadata example: "attachment_name": None, "attachment_path": None, "entropy_status": 1, - "status": 0, + "status": "NotStarted", "...": "..." } +---------- + +Setting metadata +_________________ + +Setting metadata example: + +.. code-block:: python + + { + "attribute": "FrameRate", + "value": 3 + } + + ---------- Export metadata @@ -250,6 +260,38 @@ Image metadata example: } +---------- + +Priority score +_______________ + + +Priority score example: + +.. code-block:: python + + { + "name" : "image1.png", + "priority": 0.567 + } + + +---------- + +Attachment +_______________ + + +Attachment example: + +.. code-block:: python + + { + "url": "https://sa-public-files.s3.../text_file_example_1.jpeg", + "name": "example.jpeg" + } + + ---------- .. 
_ref_class: diff --git a/docs/source/tutorial.sdk.rst b/docs/source/tutorial.sdk.rst index 302842fca..ebb52279b 100644 --- a/docs/source/tutorial.sdk.rst +++ b/docs/source/tutorial.sdk.rst @@ -390,16 +390,6 @@ A team contributor can be invited to the team with: sa.invite_contributors_to_team(emails=["admin@superannotate.com"], admin=False) - -This invitation should be accepted by the contributor. After which, to share the -project with the found contributor as an QA: - -.. code-block:: python - - sa.share_project(project, "admin@superannotate.com", user_role="QA") - - - ---------- diff --git a/pytest.ini b/pytest.ini index 86c2d4c63..de724250b 100644 --- a/pytest.ini +++ b/pytest.ini @@ -2,4 +2,4 @@ minversion = 3.7 log_cli=true python_files = test_*.py -addopts = -n auto --dist=loadscope \ No newline at end of file +;addopts = -n auto --dist=loadscope diff --git a/requirements_dev.txt b/requirements_dev.txt index 02595ac5b..85a0bfdee 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -1,2 +1,2 @@ -superannotate_schemas>=v1.0.42dev2 +superannotate_schemas>=v1.0.43dev1 diff --git a/requirements_extra.txt b/requirements_extra.txt index 65165a3b1..05bc9b5eb 100644 --- a/requirements_extra.txt +++ b/requirements_extra.txt @@ -5,4 +5,5 @@ pytest==6.2.4 pytest-xdist==2.3.0 pytest-parallel==0.1.0 pytest-rerunfailures==10.2 -sphinx_rtd_theme==1.0.0 \ No newline at end of file +sphinx_rtd_theme==1.0.0 +pytest-cov diff --git a/src/superannotate/__init__.py b/src/superannotate/__init__.py index d55efa2b0..ff7c2394e 100644 --- a/src/superannotate/__init__.py +++ b/src/superannotate/__init__.py @@ -3,8 +3,8 @@ import sys import requests -import superannotate.lib.core as constances from packaging.version import parse +from superannotate.lib import core as constances from superannotate.lib import get_default_controller from superannotate.lib.app.analytics.class_analytics import class_distribution from superannotate.lib.app.exceptions import AppException @@ -21,20 
+21,14 @@ from superannotate.lib.app.interface.sdk_interface import aggregate_annotations_as_df from superannotate.lib.app.interface.sdk_interface import assign_folder from superannotate.lib.app.interface.sdk_interface import assign_images -from superannotate.lib.app.interface.sdk_interface import ( - attach_document_urls_to_project, -) -from superannotate.lib.app.interface.sdk_interface import attach_image_urls_to_project from superannotate.lib.app.interface.sdk_interface import attach_items from superannotate.lib.app.interface.sdk_interface import ( attach_items_from_integrated_storage, ) -from superannotate.lib.app.interface.sdk_interface import attach_video_urls_to_project from superannotate.lib.app.interface.sdk_interface import benchmark from superannotate.lib.app.interface.sdk_interface import clone_project from superannotate.lib.app.interface.sdk_interface import consensus from superannotate.lib.app.interface.sdk_interface import copy_image -from superannotate.lib.app.interface.sdk_interface import copy_images from superannotate.lib.app.interface.sdk_interface import copy_items from superannotate.lib.app.interface.sdk_interface import create_annotation_class from superannotate.lib.app.interface.sdk_interface import ( @@ -51,6 +45,7 @@ from superannotate.lib.app.interface.sdk_interface import ( download_annotation_classes_json, ) +from superannotate.lib.app.interface.sdk_interface import download_annotations from superannotate.lib.app.interface.sdk_interface import download_export from superannotate.lib.app.interface.sdk_interface import download_image from superannotate.lib.app.interface.sdk_interface import download_image_annotations @@ -68,7 +63,6 @@ from superannotate.lib.app.interface.sdk_interface import get_team_metadata from superannotate.lib.app.interface.sdk_interface import init from superannotate.lib.app.interface.sdk_interface import invite_contributors_to_team -from superannotate.lib.app.interface.sdk_interface import move_images from 
superannotate.lib.app.interface.sdk_interface import move_items from superannotate.lib.app.interface.sdk_interface import pin_image from superannotate.lib.app.interface.sdk_interface import prepare_export @@ -89,7 +83,6 @@ set_project_default_image_quality_in_editor, ) from superannotate.lib.app.interface.sdk_interface import set_project_workflow -from superannotate.lib.app.interface.sdk_interface import share_project from superannotate.lib.app.interface.sdk_interface import unassign_folder from superannotate.lib.app.interface.sdk_interface import unassign_images from superannotate.lib.app.interface.sdk_interface import ( @@ -133,6 +126,7 @@ "get_exports", # annotations "get_annotations", + "download_annotations", "get_annotations_per_frame", # integrations "get_integrations", @@ -154,7 +148,6 @@ "search_projects", "create_project", "clone_project", - "share_project", "delete_project", "rename_project", "upload_priority_scores", @@ -176,8 +169,6 @@ "move_items", "set_annotation_statuses", # Image Section - "copy_images", - "move_images", "delete_images", "download_image", "pin_image", @@ -189,9 +180,6 @@ "upload_image_to_project", "upload_image_annotations", "upload_images_from_folder_to_project", - "attach_image_urls_to_project", - "attach_video_urls_to_project", - "attach_document_urls_to_project", # Video Section "upload_videos_from_folder_to_project", # Annotation Section diff --git a/src/superannotate/lib/app/common.py b/src/superannotate/lib/app/common.py index e27b3d5d5..de05b04f6 100644 --- a/src/superannotate/lib/app/common.py +++ b/src/superannotate/lib/app/common.py @@ -1,26 +1,11 @@ import json -import os -import sys -import time import numpy as np -from PIL import Image from superannotate.logger import get_default_logger from tqdm import tqdm logger = get_default_logger() -_PROJECT_TYPES = {"Vector": 1, "Pixel": 2} - -_ANNOTATION_STATUSES = { - "NotStarted": 1, - "InProgress": 2, - "QualityCheck": 3, - "Returned": 4, - "Completed": 5, - "Skipped": 6, 
-} - def hex_to_rgb(hex_string): """Converts HEX values to RGB values @@ -68,73 +53,6 @@ def id2rgb(id_map): return color -def save_desktop_format(output_dir, classes, files_dict): - cat_id_map = {} - new_classes = [] - for idx, class_ in enumerate(classes): - cat_id_map[class_["name"]] = idx + 2 - class_["id"] = idx + 2 - new_classes.append(class_) - with open(output_dir.joinpath("classes.json"), "w") as fw: - json.dump(new_classes, fw) - - meta = { - "type": "meta", - "name": "lastAction", - "timestamp": int(round(time.time() * 1000)), - } - new_json = {} - files_path = [] - (output_dir / "images" / "thumb").mkdir() - for file_name, json_data in files_dict.items(): - file_name = file_name.replace("___objects.json", "") - if not (output_dir / "images" / file_name).exists(): - continue - - for js_data in json_data: - if "className" in js_data: - js_data["classId"] = cat_id_map[js_data["className"]] - json_data.append(meta) - new_json[file_name] = json_data - - files_path.append( - { - "srcPath": str(output_dir.resolve() / file_name), - "name": file_name, - "imagePath": str(output_dir.resolve() / file_name), - "thumbPath": str( - output_dir.resolve() - / "images" - / "thumb" - / ("thmb_" + file_name + ".jpg") - ), - "valid": True, - } - ) - - img = Image.open(output_dir / "images" / file_name) - img.thumbnail((168, 120), Image.ANTIALIAS) - img.save(output_dir / "images" / "thumb" / ("thmb_" + file_name + ".jpg")) - - with open(output_dir / "images" / "images.sa", "w") as fw: - fw.write(json.dumps(files_path)) - - with open(output_dir.joinpath("annotations.json"), "w") as fw: - json.dump(new_json, fw) - - with open(output_dir / "config.json", "w") as fw: - json.dump({"pathSeparator": os.sep, "os": sys.platform}, fw) - - -def save_web_format(output_dir, classes, files_dict): - for key, value in files_dict.items(): - with open(output_dir.joinpath(key), "w") as fw: - json.dump(value, fw, indent=2) - - with open(output_dir.joinpath("classes", "classes.json"), "w") as fw: 
- json.dump(classes, fw) - - def write_to_json(output_path, json_data): with open(output_path, "w") as fw: json.dump(json_data, fw, indent=2) diff --git a/src/superannotate/lib/app/helpers.py b/src/superannotate/lib/app/helpers.py index 39c2e12ab..cbdc19dea 100644 --- a/src/superannotate/lib/app/helpers.py +++ b/src/superannotate/lib/app/helpers.py @@ -1,5 +1,4 @@ import uuid -from ast import literal_eval from pathlib import Path from typing import List from typing import Optional @@ -101,76 +100,6 @@ def get_s3_annotation_paths(folder_path, s3_bucket, annotation_paths, recursive) return list(set(annotation_paths)) -def reformat_metrics_json(data, name): - continuous_metrics = [] - per_evaluation_metrics = [] - for item in data: - for key in item: - try: - item[key] = literal_eval(item[key]) - except Exception as e: - pass - if "data_time" in item and item["eta_seconds"] != 0: - continuous_metrics += [item] - else: - per_evaluation_metrics += [item] - continuous_metrics_df = pd.DataFrame.from_dict(continuous_metrics) - per_evaluation_metrics_df = pd.DataFrame.from_dict(per_evaluation_metrics) - continuous_metrics_df = drop_non_plotable_cols(continuous_metrics_df) - per_evaluation_metrics_df = drop_non_plotable_cols(per_evaluation_metrics_df) - continuous_metrics_df["model"] = name - per_evaluation_metrics_df["model"] = name - if "total_loss" in per_evaluation_metrics_df: - per_evaluation_metrics_df = per_evaluation_metrics_df.drop(columns="total_loss") - - per_evaluation_metrics_df = per_evaluation_metrics_df.dropna(axis="rows") - return continuous_metrics_df, per_evaluation_metrics_df - - -def drop_non_plotable_cols(df): - for column in df: - if metric_is_plottable(column): - continue - df = df.drop(columns=column) - return df - - -def metric_is_plottable(key): - if key == "total_loss" or "mIoU" in key or "mAP" in key or key == "iteration": - return True - return False - - -def get_paths_and_duplicated_from_csv(csv_path): - image_data = pd.read_csv(csv_path, 
dtype=str) - image_data = image_data[~image_data["url"].isnull()] - if "name" in image_data.columns: - image_data["name"] = ( - image_data["name"] - .fillna("") - .apply(lambda cell: cell if str(cell).strip() else str(uuid.uuid4())) - ) - else: - image_data["name"] = [str(uuid.uuid4()) for _ in range(len(image_data.index))] - - image_data = pd.DataFrame(image_data, columns=["name", "url"]) - img_names_urls = image_data.rename(columns={"url": "path"}).to_dict( - orient="records" - ) - duplicate_images = [] - seen = [] - images_to_upload = [] - for i in img_names_urls: - temp = i["name"] - i["name"] = i["name"].strip() - if i["name"] not in seen: - seen.append(i["name"]) - images_to_upload.append(i) - else: - duplicate_images.append(temp) - return images_to_upload, duplicate_images - - def get_name_url_duplicated_from_csv(csv_path): image_data = pd.read_csv(csv_path, dtype=str) if "url" not in image_data.columns: diff --git a/src/superannotate/lib/app/input_converters/conversion.py b/src/superannotate/lib/app/input_converters/conversion.py index a57be3f92..f9d1a4199 100644 --- a/src/superannotate/lib/app/input_converters/conversion.py +++ b/src/superannotate/lib/app/input_converters/conversion.py @@ -145,7 +145,7 @@ def export_annotation( task="object_detection", ): """ - Converts SuperAnnotate annotation formate to the other annotation formats. Currently available (project_type, task) combinations for converter + Converts SuperAnnotate annotation format to the other annotation formats. 
Currently available (project_type, task) combinations for converter presented below: ============== ====================== diff --git a/src/superannotate/lib/app/interface/cli_interface.py b/src/superannotate/lib/app/interface/cli_interface.py index 793517d02..570d49b96 100644 --- a/src/superannotate/lib/app/interface/cli_interface.py +++ b/src/superannotate/lib/app/interface/cli_interface.py @@ -9,9 +9,7 @@ from lib.app.helpers import split_project_path from lib.app.input_converters.conversion import import_annotation from lib.app.interface.base_interface import BaseInterfaceFacade -from lib.app.interface.sdk_interface import attach_document_urls_to_project -from lib.app.interface.sdk_interface import attach_image_urls_to_project -from lib.app.interface.sdk_interface import attach_video_urls_to_project +from lib.app.interface.sdk_interface import attach_items from lib.app.interface.sdk_interface import create_folder from lib.app.interface.sdk_interface import create_project from lib.app.interface.sdk_interface import upload_annotations_from_folder_to_project @@ -125,17 +123,13 @@ def export_project( ) export_name = export_res.data["name"] - use_case = Controller.get_default().download_export( + Controller.get_default().download_export( project_name=project_name, export_name=export_name, folder_path=folder, extract_zip_contents=not disable_extract_zip_contents, to_s3_bucket=False, ) - if use_case.is_valid(): - for _ in use_case.execute(): - continue - sys.exit(0) def upload_preannotations( @@ -223,13 +217,16 @@ def _upload_annotations( sys.exit(0) def attach_image_urls( - self, project: str, attachments: str, annotation_status: Optional[Any] = None + self, + project: str, + attachments: str, + annotation_status: Optional[Any] = "NotStarted", ): """ To attach image URLs to project use: """ - attach_image_urls_to_project( + attach_items( project=project, attachments=attachments, annotation_status=annotation_status, @@ -237,9 +234,12 @@ def attach_image_urls( 
sys.exit(0) def attach_video_urls( - self, project: str, attachments: str, annotation_status: Optional[Any] = None + self, + project: str, + attachments: str, + annotation_status: Optional[Any] = "NotStarted", ): - attach_video_urls_to_project( + attach_items( project=project, attachments=attachments, annotation_status=annotation_status, @@ -248,9 +248,9 @@ def attach_video_urls( @staticmethod def attach_document_urls( - project: str, attachments: str, annotation_status: Optional[Any] = None + project: str, attachments: str, annotation_status: Optional[Any] = "NotStarted" ): - attach_document_urls_to_project( + attach_items( project=project, attachments=attachments, annotation_status=annotation_status, diff --git a/src/superannotate/lib/app/interface/sdk_interface.py b/src/superannotate/lib/app/interface/sdk_interface.py index 346040ea3..967ac64ce 100644 --- a/src/superannotate/lib/app/interface/sdk_interface.py +++ b/src/superannotate/lib/app/interface/sdk_interface.py @@ -5,6 +5,7 @@ import tempfile import warnings from pathlib import Path +from typing import Callable from typing import Iterable from typing import List from typing import Optional @@ -19,7 +20,6 @@ from lib.app.helpers import extract_project_folder from lib.app.helpers import get_annotation_paths from lib.app.helpers import get_name_url_duplicated_from_csv -from lib.app.helpers import get_paths_and_duplicated_from_csv from lib.app.interface.types import AnnotationStatuses from lib.app.interface.types import AnnotationType from lib.app.interface.types import AnnotatorRole @@ -36,7 +36,6 @@ from lib.app.mixp.decorators import Trackable from lib.app.serializers import BaseSerializer from lib.app.serializers import FolderSerializer -from lib.app.serializers import ImageSerializer from lib.app.serializers import ProjectSerializer from lib.app.serializers import SettingsSerializer from lib.app.serializers import TeamSerializer @@ -233,10 +232,14 @@ def create_project( @Trackable @validate_arguments def 
create_project_from_metadata(project_metadata: Project): - """Create a new project in the team using project metadata object dict. + """ + Create a new project in the team using project metadata object dict. Mandatory keys in project_metadata are "name", "description" and "type" (Vector or Pixel) Non-mandatory keys: "workflow", "settings" and "annotation_classes". + :param project_metadata: project metadata + :type project_metadata: dict + :return: dict object metadata the new project :rtype: dict """ @@ -516,153 +519,6 @@ def copy_image( ) -@Trackable -@validate_arguments -def copy_images( - source_project: Union[NotEmptyStr, dict], - image_names: Optional[List[NotEmptyStr]], - destination_project: Union[NotEmptyStr, dict], - include_annotations: Optional[StrictBool] = True, - copy_pin: Optional[StrictBool] = True, -): - """Copy images in bulk between folders in a project - - :param source_project: project name or folder path (e.g., "project1/folder1") - :type source_project: str` - :param image_names: image names. If None, all images from source project will be copied - :type image_names: list of str - :param destination_project: project name or folder path (e.g., "project1/folder2") - :type destination_project: str - :param include_annotations: enables annotations copy - :type include_annotations: bool - :param copy_pin: enables image pin status copy - :type copy_pin: bool - :return: list of skipped image names - :rtype: list of strs - """ - warning_msg = ( - "We're deprecating the copy_images function. Please use copy_items instead. Learn more. 
\n" - "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.copy_items" - ) - logger.warning(warning_msg) - warnings.warn(warning_msg, DeprecationWarning) - project_name, source_folder_name = extract_project_folder(source_project) - - to_project_name, destination_folder_name = extract_project_folder( - destination_project - ) - if project_name != to_project_name: - raise AppException("Source and destination projects should be the same") - if not image_names: - images = ( - Controller.get_default() - .search_images(project_name=project_name, folder_path=source_folder_name) - .data - ) - image_names = [image.name for image in images] - - res = Controller.get_default().bulk_copy_images( - project_name=project_name, - from_folder_name=source_folder_name, - to_folder_name=destination_folder_name, - image_names=image_names, - include_annotations=include_annotations, - include_pin=copy_pin, - ) - if res.errors: - raise AppException(res.errors) - skipped_images = res.data - done_count = len(image_names) - len(skipped_images) - message_postfix = "{from_path} to {to_path}." - message_prefix = "Copied images from " - if done_count > 1 or done_count == 0: - message_prefix = f"Copied {done_count}/{len(image_names)} images from " - elif done_count == 1: - message_prefix = "Copied an image from " - logger.info( - message_prefix - + message_postfix.format(from_path=source_project, to_path=destination_project) - ) - - return skipped_images - - -@Trackable -@validate_arguments -def move_images( - source_project: Union[NotEmptyStr, dict], - image_names: Optional[List[NotEmptyStr]], - destination_project: Union[NotEmptyStr, dict], - *args, - **kwargs, -): - """Move images in bulk between folders in a project - - :param source_project: project name or folder path (e.g., "project1/folder1") - :type source_project: str - :param image_names: image names. 
If None, all images from source project will be moved - :type image_names: list of str - :param destination_project: project name or folder path (e.g., "project1/folder2") - :type destination_project: str - :return: list of skipped image names - :rtype: list of strs - """ - warning_msg = ( - "We're deprecating the move_images function. Please use move_items instead. Learn more." - "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.move_items" - ) - logger.warning(warning_msg) - warnings.warn(warning_msg, DeprecationWarning) - project_name, source_folder_name = extract_project_folder(source_project) - - project = Controller.get_default().get_project_metadata(project_name).data - if project["project"].type in [ - constances.ProjectType.VIDEO.value, - constances.ProjectType.DOCUMENT.value, - ]: - raise AppException(LIMITED_FUNCTIONS[project["project"].type]) - - to_project_name, destination_folder_name = extract_project_folder( - destination_project - ) - - if project_name != to_project_name: - raise AppException( - "Source and destination projects should be the same for move_images" - ) - - if not image_names: - images = Controller.get_default().search_images( - project_name=project_name, folder_path=source_folder_name - ) - images = images.data - image_names = [image.name for image in images] - - response = Controller.get_default().bulk_move_images( - project_name=project_name, - from_folder_name=source_folder_name, - to_folder_name=destination_folder_name, - image_names=image_names, - ) - if response.errors: - raise AppException(response.errors) - moved_images = response.data - moved_count = len(moved_images) - message_postfix = "{from_path} to {to_path}." 
- message_prefix = "Moved images from " - if moved_count > 1 or moved_count == 0: - message_prefix = f"Moved {moved_count}/{len(image_names)} images from " - elif moved_count == 1: - message_prefix = "Moved an image from" - - logger.info( - message_prefix - + message_postfix.format(from_path=source_project, to_path=destination_project) - ) - - return list(set(image_names) - set(moved_images)) - - @Trackable @validate_arguments def get_project_metadata( @@ -767,13 +623,15 @@ def get_project_workflow(project: Union[str, dict]): def search_annotation_classes( project: Union[NotEmptyStr, dict], name_contains: Optional[str] = None ): - """Searches annotation classes by name_prefix (case-insensitive) + """ + Searches annotation classes by name_prefix (case-insensitive) :param project: project name :type project: str + :param name_contains: search string. Returns those classes, where the given string is found anywhere within its name. If None, all annotation classes will be returned. - :type name_prefix: str + :type name_contains: str :return: annotation classes of the project :rtype: list of dicts @@ -850,6 +708,13 @@ def set_images_annotation_statuses( should be one of NotStarted InProgress QualityCheck Returned Completed Skipped :type annotation_status: str """ + warning_msg = ( + "We're deprecating the set_images_annotation_statuses function. Please use set_annotation_statuses instead. " + "Learn more. 
\n" + "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.set_annotation_statuses" + ) + logger.warning(warning_msg) + warnings.warn(warning_msg, DeprecationWarning) project_name, folder_name = extract_project_folder(project) response = Controller.get_default().set_images_annotation_statuses( project_name, folder_name, image_names, annotation_status @@ -1018,40 +883,6 @@ def assign_folder( raise AppException(response.errors) -@Trackable -@validate_arguments -def share_project( - project_name: NotEmptyStr, user: Union[str, dict], user_role: NotEmptyStr -): - """Share project with user. - - :param project_name: project name - :type project_name: str - :param user: user email or metadata of the user to share project with - :type user: str or dict - :param user_role: user role to apply, one of Admin , Annotator , QA , Customer , Viewer - :type user_role: str - """ - warning_msg = ( - "The share_project function is deprecated and will be removed with the coming release, " - "please use add_contributors_to_project instead." - ) - logger.warning(warning_msg) - warnings.warn(warning_msg, DeprecationWarning) - if isinstance(user, dict): - user_id = user["id"] - else: - response = Controller.get_default().search_team_contributors(email=user) - if not response.data: - raise AppException(f"User {user} not found.") - user_id = response.data[0]["id"] - response = Controller.get_default().share_project( - project_name=project_name, user_id=user_id, user_role=user_role - ) - if response.errors: - raise AppException(response.errors) - - @validate_arguments def upload_images_from_folder_to_project( project: Union[NotEmptyStr, dict], @@ -1067,9 +898,9 @@ def upload_images_from_folder_to_project( recursive_subfolders: Optional[StrictBool] = False, image_quality_in_editor: Optional[str] = None, ): - """Uploads all images with given extensions from folder_path to the project. 
+ """ + Uploads all images with given extensions from folder_path to the project. Sets status of all the uploaded images to set_status if it is not None. - If an image with existing name already exists in the project it won't be uploaded, and its path will be appended to the third member of return value of this function. @@ -1084,7 +915,7 @@ def upload_images_from_folder_to_project( :type extensions: tuple or list of strs :param annotation_status: value to set the annotation statuses of the uploaded images - NotStarted InProgress QualityCheck Returned Completed Skipped + NotStarted InProgress QualityCheck Returned Completed Skipped :type annotation_status: str :param from_s3_bucket: AWS S3 bucket to use. If None then folder_path is in local filesystem @@ -1101,7 +932,7 @@ def upload_images_from_folder_to_project( Can be either "compressed" or "original". If None then the default value in project settings will be used. :type image_quality_in_editor: str - :return: uploaded, could-not-upload, existing-images filepaths + :return: uploaded, could-not-upload, existing-images file-paths :rtype: tuple (3 members) of list of strs """ @@ -1562,7 +1393,7 @@ def create_annotation_classes_from_classes_json( def download_export( project: Union[NotEmptyStr, dict], export: Union[NotEmptyStr, dict], - folder_path: Union[str, Path], + folder_path: Union[str, Path] = None, extract_zip_contents: Optional[StrictBool] = True, to_s3_bucket=None, ): @@ -1573,40 +1404,32 @@ def download_export( :param project: project name :type project: str + :param export: export name :type export: str, dict + :param folder_path: where to download the export :type folder_path: Path-like (str or Path) + :param extract_zip_contents: if False then a zip file will be downloaded, if True the zip file will be extracted at folder_path :type extract_zip_contents: bool + :param to_s3_bucket: AWS S3 bucket to use for download. If None then folder_path is in local filesystem. 
:type to_s3_bucket: Bucket object """ project_name, folder_name = extract_project_folder(project) export_name = export["name"] if isinstance(export, dict) else export - use_case = Controller.get_default().download_export( + response = Controller.get_default().download_export( project_name=project_name, export_name=export_name, folder_path=folder_path, extract_zip_contents=extract_zip_contents, to_s3_bucket=to_s3_bucket, ) - if use_case.is_valid(): - if to_s3_bucket: - with tqdm( - total=use_case.get_upload_files_count(), desc="Uploading" - ) as progress_bar: - for _ in use_case.execute(): - progress_bar.update() - progress_bar.close() - else: - for _ in use_case.execute(): - continue - logger.info(use_case.response.data) - else: - raise AppException(use_case.response.errors) + if response.errors: + raise AppException(response.errors) @Trackable @@ -1629,6 +1452,13 @@ def set_image_annotation_status( :return: metadata of the updated image :rtype: dict """ + warning_msg = ( + "We're deprecating the set_image_annotation_status function. Please use set_annotation_statuses instead. " + "Learn more. \n" + "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.set_annotation_statuses" + ) + logger.warning(warning_msg) + warnings.warn(warning_msg, DeprecationWarning) project_name, folder_name = extract_project_folder(project) response = Controller.get_default().set_images_annotation_statuses( project_name, folder_name, [image_name], annotation_status @@ -1713,150 +1543,6 @@ def download_image( return response.data -@Trackable -@validate_arguments -def attach_image_urls_to_project( - project: Union[NotEmptyStr, dict], - attachments: Union[str, Path], - annotation_status: Optional[AnnotationStatuses] = "NotStarted", -): - """Link images on external storage to SuperAnnotate. 
- - :param project: project name or project folder path - :type project: str or dict - :param attachments: path to csv file on attachments metadata - :type attachments: Path-like (str or Path) - :param annotation_status: value to set the annotation statuses of the linked images: NotStarted InProgress QualityCheck Returned Completed Skipped - :type annotation_status: str - - :return: list of linked image names, list of failed image names, list of duplicate image names - :rtype: tuple - """ - warning_msg = ( - "We're deprecating the attach_image_urls_to_project function. Please use attach_items instead. Learn more." - "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.attach_items" - ) - logger.warning(warning_msg) - warnings.warn(warning_msg, DeprecationWarning) - project_name, folder_name = extract_project_folder(project) - project = Controller.get_default().get_project_metadata(project_name).data - project_folder_name = project_name + (f"/{folder_name}" if folder_name else "") - - if project["project"].type in [ - constances.ProjectType.VIDEO.value, - constances.ProjectType.DOCUMENT.value, - ]: - raise AppException( - constances.INVALID_PROJECT_TYPE_TO_PROCESS.format( - constances.ProjectType.get_name(project["project"].type) - ) - ) - images_to_upload, duplicate_images = get_paths_and_duplicated_from_csv(attachments) - use_case = Controller.get_default().interactive_attach_urls( - project_name=project_name, - folder_name=folder_name, - files=ImageSerializer.deserialize(images_to_upload), # noqa: E203 - annotation_status=annotation_status, - ) - if len(duplicate_images): - logger.warning( - constances.ALREADY_EXISTING_FILES_WARNING.format(len(duplicate_images)) - ) - - if use_case.is_valid(): - logger.info( - constances.ATTACHING_FILES_MESSAGE.format( - len(images_to_upload), project_folder_name - ) - ) - with tqdm( - total=use_case.attachments_count, desc="Attaching urls" - ) as progress_bar: - for attached in use_case.execute(): 
- progress_bar.update(attached) - uploaded, duplications = use_case.data - uploaded = [i["name"] for i in uploaded] - duplications.extend(duplicate_images) - failed_images = [ - image["name"] - for image in images_to_upload - if image["name"] not in uploaded + duplications - ] - return uploaded, failed_images, duplications - raise AppException(use_case.response.errors) - - -@Trackable -@validate_arguments -def attach_video_urls_to_project( - project: Union[NotEmptyStr, dict], - attachments: Union[str, Path], - annotation_status: Optional[AnnotationStatuses] = "NotStarted", -): - """Link videos on external storage to SuperAnnotate. - - :param project: project name or project folder path - :type project: str or dict - :param attachments: path to csv file on attachments metadata - :type attachments: Path-like (str or Path) - :param annotation_status: value to set the annotation statuses of the linked videos: NotStarted InProgress QualityCheck Returned Completed Skipped - :type annotation_status: str - - :return: attached videos, failed videos, skipped videos - :rtype: (list, list, list) - """ - warning_msg = ( - "We're deprecating the attach_video_urls_to_project function. Please use attach_items instead. Learn more." 
- "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.attach_items" - ) - logger.warning(warning_msg) - warnings.warn(warning_msg, DeprecationWarning) - project_name, folder_name = extract_project_folder(project) - project = Controller.get_default().get_project_metadata(project_name).data - project_folder_name = project_name + (f"/{folder_name}" if folder_name else "") - - if project["project"].type != constances.ProjectType.VIDEO.value: - raise AppException( - constances.INVALID_PROJECT_TYPE_TO_PROCESS.format( - constances.ProjectType.get_name(project["project"].type) - ) - ) - - images_to_upload, duplicate_images = get_paths_and_duplicated_from_csv(attachments) - use_case = Controller.get_default().interactive_attach_urls( - project_name=project_name, - folder_name=folder_name, - files=ImageSerializer.deserialize(images_to_upload), # noqa: E203 - annotation_status=annotation_status, - ) - if len(duplicate_images): - logger.warning( - constances.ALREADY_EXISTING_FILES_WARNING.format(len(duplicate_images)) - ) - - if use_case.is_valid(): - logger.info( - constances.ATTACHING_FILES_MESSAGE.format( - len(images_to_upload), project_folder_name - ) - ) - with tqdm( - total=use_case.attachments_count, desc="Attaching urls" - ) as progress_bar: - for attached in use_case.execute(): - progress_bar.update(attached) - uploaded, duplications = use_case.data - uploaded = [i["name"] for i in uploaded] - duplications.extend(duplicate_images) - failed_images = [ - image["name"] - for image in images_to_upload - if image["name"] not in uploaded + duplications - ] - return uploaded, failed_images, duplications - raise AppException(use_case.response.errors) - - @Trackable @validate_arguments def upload_annotations_from_folder_to_project( @@ -1997,15 +1683,21 @@ def upload_image_annotations( mask: Optional[Union[str, Path, bytes]] = None, verbose: Optional[StrictBool] = True, ): - """Upload annotations from JSON (also mask for pixel annotations) - to 
the image. + """ + Upload annotations from JSON (also mask for pixel annotations) to the image. :param project: project name or folder path (e.g., "project1/folder1") :type project: str + :param image_name: image name :type image_name: str + :param annotation_json: annotations in SuperAnnotate format JSON dict or path to JSON file :type annotation_json: dict or Path-like (str or Path) + + :param verbose: provide detailed log + :type verbose: bool + :param mask: BytesIO object or filepath to mask annotation for pixel projects in SuperAnnotate format :type mask: BytesIO or Path-like (str or Path) """ @@ -2463,12 +2155,16 @@ def search_models( :param name: search string :type name: str + :param type_: ml model type string :type type_: str + :param project_id: project id :type project_id: int + :param task: training task :type task: str + :param include_global: include global ml models :type include_global: bool @@ -2600,97 +2296,26 @@ def aggregate_annotations_as_df( @Trackable @validate_arguments def delete_annotations( - project: NotEmptyStr, image_names: Optional[List[NotEmptyStr]] = None + project: NotEmptyStr, item_names: Optional[List[NotEmptyStr]] = None ): """ Delete image annotations from a given list of images. :param project: project name or folder path (e.g., "project1/folder1") :type project: str - :param image_names: image names. If None, all image annotations from a given project/folder will be deleted. - :type image_names: list of strs + :param item_names: image names. If None, all image annotations from a given project/folder will be deleted. 
+ :type item_names: list of strs """ project_name, folder_name = extract_project_folder(project) response = Controller.get_default().delete_annotations( - project_name=project_name, folder_name=folder_name, image_names=image_names + project_name=project_name, folder_name=folder_name, item_names=item_names ) if response.errors: raise AppException(response.errors) -@Trackable -@validate_arguments -def attach_document_urls_to_project( - project: Union[NotEmptyStr, dict], - attachments: Union[Path, NotEmptyStr], - annotation_status: Optional[AnnotationStatuses] = "NotStarted", -): - """Link documents on external storage to SuperAnnotate. - - :param project: project name or project folder path - :type project: str or dict - :param attachments: path to csv file on attachments metadata - :type attachments: Path-like (str or Path) - :param annotation_status: value to set the annotation statuses of the linked documents: NotStarted InProgress QualityCheck Returned Completed Skipped - :type annotation_status: str - - :return: list of attached documents, list of not attached documents, list of skipped documents - :rtype: tuple - """ - warning_msg = ( - "We're deprecating the attach_document_urls_to_project function. Please use attach_items instead. Learn more." 
- "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.attach_items" - ) - logger.warning(warning_msg) - warnings.warn(warning_msg, DeprecationWarning) - project_name, folder_name = extract_project_folder(project) - project = Controller.get_default().get_project_metadata(project_name).data - project_folder_name = project_name + (f"/{folder_name}" if folder_name else "") - - if project["project"].type != constances.ProjectType.DOCUMENT.value: - raise AppException( - constances.INVALID_PROJECT_TYPE_TO_PROCESS.format( - constances.ProjectType.get_name(project["project"].type) - ) - ) - - images_to_upload, duplicate_images = get_paths_and_duplicated_from_csv(attachments) - - use_case = Controller.get_default().interactive_attach_urls( - project_name=project_name, - folder_name=folder_name, - files=ImageSerializer.deserialize(images_to_upload), # noqa: E203 - annotation_status=annotation_status, - ) - if len(duplicate_images): - logger.warning( - constances.ALREADY_EXISTING_FILES_WARNING.format(len(duplicate_images)) - ) - if use_case.is_valid(): - logger.info( - constances.ATTACHING_FILES_MESSAGE.format( - len(images_to_upload), project_folder_name - ) - ) - with tqdm( - total=use_case.attachments_count, desc="Attaching urls" - ) as progress_bar: - for attached in use_case.execute(): - progress_bar.update(attached) - uploaded, duplications = use_case.data - uploaded = [i["name"] for i in uploaded] - duplications.extend(duplicate_images) - failed_images = [ - image["name"] - for image in images_to_upload - if image["name"] not in uploaded + duplications - ] - return uploaded, failed_images, duplications - raise AppException(use_case.response.errors) - - @Trackable @validate_arguments def validate_annotations( @@ -2809,7 +2434,7 @@ def get_annotations_per_frame(project: NotEmptyStr, video: NotEmptyStr, fps: int :param fps: how many frames per second needs to be extracted from the video. Will extract 1 frame per second by default. 
- :type fps: str + :type fps: int :return: list of annotation objects :rtype: list of dicts @@ -2951,7 +2576,6 @@ def search_items( :param project: project name or folder path (e.g., “project1/folder1”). - If recursive=False=True, then only the project name is required. :type project: str :param name_contains: Returns those items, where the given string is found anywhere within an item’s name. @@ -2960,12 +2584,12 @@ def search_items( :param annotation_status: if not None, filters items by annotation status. Values are: - “NotStarted” - “InProgress” - “QualityCheck” - “Returned” - “Completed” - “Skipped” + ♦ “NotStarted” \n + ♦ “InProgress” \n + ♦ “QualityCheck” \n + ♦ “Returned” \n + ♦ “Completed” \n + ♦ “Skipped” \n :type annotation_status: str @@ -3004,7 +2628,7 @@ def search_items( def attach_items( project: Union[NotEmptyStr, dict], attachments: AttachmentArg, - annotation_status: Optional[AnnotationStatuses] = "NotStarted", + annotation_status: AnnotationStatuses = "NotStarted", ): """Link items from external storage to SuperAnnotate using URLs. @@ -3081,7 +2705,7 @@ def copy_items( :type destination: str :param items: names of items to copy. If None, all items from the source directory will be copied. - :type itmes: list of str + :type items: list of str :param include_annotations: enables annotations copy :type include_annotations: bool @@ -3116,7 +2740,7 @@ def move_items( destination: Union[NotEmptyStr, dict], items: Optional[List[NotEmptyStr]] = None, ): - """Copy images in bulk between folders in a project + """Move images in bulk between folders in a project :param source: project name or folder path to pick items from (e.g., “project1/folder1”). 
:type source: str
@@ -3183,3 +2807,48 @@ def set_annotation_statuses(
     if response.errors:
         raise AppException(response.errors)
     return response.data
+
+
+@Trackable
+@validate_arguments
+def download_annotations(
+    project: Union[NotEmptyStr, dict],
+    path: Union[str, Path] = None,
+    items: Optional[List[NotEmptyStr]] = None,
+    recursive: bool = False,
+    callback: Callable = None,
+):
+    """Downloads annotation JSON files of the selected items to the local directory.
+
+    :param project: project name or folder path (e.g., “project1/folder1”).
+    :type project: str
+
+    :param path: local directory path where the annotations will be downloaded. If None, the current directory is used.
+    :type path: Path-like (str or Path)
+
+    :param items: item names. If None, all items in the given project/folder will be downloaded.
+    :type items: list of str
+
+    :param recursive: download annotations from the project’s root and all of its folders with the preserved structure.
+        If False download only from the project’s root or given directory.
+    :type recursive: bool
+
+    :param callback: a function that allows you to modify each annotation’s dict before downloading.
+        The function receives each annotation as an argument and the returned value will be applied to the download.
+    :type callback: callable
+
+    :return: local path of the downloaded annotations folder. 
+ :rtype: str + """ + project_name, folder_name = extract_project_folder(project) + response = Controller.get_default().download_annotations( + project_name=project_name, + folder_name=folder_name, + destination=path, + recursive=recursive, + item_names=items, + callback=callback, + ) + if response.errors: + raise AppException(response.errors) + return response.data diff --git a/src/superannotate/lib/app/interface/types.py b/src/superannotate/lib/app/interface/types.py index c262d0776..cf0903d6b 100644 --- a/src/superannotate/lib/app/interface/types.py +++ b/src/superannotate/lib/app/interface/types.py @@ -40,18 +40,6 @@ def validate(cls, value: Union[str]) -> Union[str]: return value -class Status(StrictStr): - @classmethod - def validate(cls, value: Union[str]) -> Union[str]: - if cls.curtail_length and len(value) > cls.curtail_length: - value = value[: cls.curtail_length] - if value.lower() not in AnnotationStatus.values(): - raise TypeError( - f"Available statuses is {', '.join(AnnotationStatus.titles())}. 
" - ) - return value - - class ProjectStatusEnum(StrictStr): @classmethod def validate(cls, value: Union[str]) -> Union[str]: diff --git a/src/superannotate/lib/app/mixp/utils/parsers.py b/src/superannotate/lib/app/mixp/utils/parsers.py index b2b91537f..a67a85b40 100644 --- a/src/superannotate/lib/app/mixp/utils/parsers.py +++ b/src/superannotate/lib/app/mixp/utils/parsers.py @@ -55,7 +55,7 @@ def search_projects(**kwargs): "properties": { "Metadata": bool(kwargs.get("return_metadata")), "project_name": get_project_name(project[0]) if project else None, - "status": status + "status": status, }, } @@ -147,18 +147,6 @@ def upload_video_to_project(**kwargs): } -def attach_image_urls_to_project(**kwargs): - project = kwargs["project"] - - return { - "event_name": "attach_image_urls_to_project", - "properties": { - "project_name": get_project_name(project), - "Annotation Status": bool(kwargs.get("annotation_status")), - }, - } - - def set_images_annotation_statuses(**kwargs): project = kwargs["project"] annotation_status = kwargs.get("annotation_status") @@ -420,44 +408,6 @@ def import_annotation(**kwargs): } -def move_images(**kwargs): - project = kwargs["source_project"] - project_name, folder_name = extract_project_folder(project) - image_names = kwargs.get("image_names", False) - if image_names is None: - res = Controller.get_default().search_images(project_name, folder_name) - image_names = res.data - - return { - "event_name": "move_images", - "properties": { - "project_name": project_name, - "Image Count": len(image_names), - "Copy Annotations": bool("include_annotations" in kwargs), - "Copy Annotation Status": bool("copy_annotation_status" in kwargs), - "Copy Pin": bool("copy_pin" in kwargs), - }, - } - - -def copy_images(**kwargs): - project = kwargs["source_project"] - project_name, folder_name = extract_project_folder(project) - image_names = kwargs.get("image_names", False) - if not image_names: - res = Controller.get_default().search_images(project_name, 
folder_name) - image_names = res.data - return { - "event_name": "copy_images", - "properties": { - "project_name": project_name, - "Image Count": len(image_names), - "Copy Annotations": bool("include_annotations" in kwargs), - "Copy Annotation Status": bool("copy_annotation_status" in kwargs), - }, - } - - def consensus(**kwargs): folder_names = kwargs["folder_names"] image_list = kwargs["image_list"] @@ -719,19 +669,6 @@ def class_distribution(**kwargs): } -def share_project(**kwargs): - project = kwargs["project_name"] - - user_role = kwargs.get("user_role") - return { - "event_name": "share_project", - "properties": { - "project_name": get_project_name(project), - "User Role": user_role, - }, - } - - def set_project_default_image_quality_in_editor(**kwargs): project = kwargs["project"] @@ -835,30 +772,6 @@ def unassign_images(**kwargs): } -def attach_video_urls_to_project(**kwargs): - project = kwargs["project"] - project_name, _ = extract_project_folder(project) - return { - "event_name": "attach_video_urls_to_project", - "properties": { - "project_name": project_name, - "Annotation Status": bool(kwargs.get("annotation_status")), - }, - } - - -def attach_document_urls_to_project(**kwargs): - project = kwargs["project"] - project_name, _ = extract_project_folder(project) - return { - "event_name": "attach_document_urls_to_project", - "properties": { - "project_name": project_name, - "Annotation Status": bool(kwargs.get("annotation_status")), - }, - } - - def delete_annotations(**kwargs): return {"event_name": "delete_annotations", "properties": {}} @@ -1054,3 +967,22 @@ def set_annotation_statuses(**kwargs): "root": folder_name == "root", }, } + + +def download_annotations(**kwargs): + project = kwargs["project"] + project_name, folder_name = extract_project_folder(project) + project = ( + Controller.get_default().get_project_metadata(project_name).data["project"] + ) + return { + "event_name": "download_annotations", + "properties": { + "project_name": 
project_name, + "project_type": ProjectType.get_name(project.type), + "root": bool(folder_name), + "recursive": kwargs["recursive"], + "path": bool(kwargs["path"]), + "callback": bool(kwargs["callback"]), + }, + } diff --git a/src/superannotate/lib/app/serializers.py b/src/superannotate/lib/app/serializers.py index 14686fad1..c7594bcbb 100644 --- a/src/superannotate/lib/app/serializers.py +++ b/src/superannotate/lib/app/serializers.py @@ -7,8 +7,6 @@ import superannotate.lib.core as constance from pydantic import BaseModel from superannotate.lib.core.entities import BaseEntity -from superannotate.lib.core.entities import ImageEntity -from superannotate.lib.core.entities import ProjectEntity class BaseSerializer(ABC): @@ -84,13 +82,6 @@ def serialize_iterable( return serialized_data -class UserSerializer(BaseSerializer): - def serialize(self): - data = super().serialize() - data["user_role"] = constance.UserRole[data["user_role"]].name - return data - - class TeamSerializer(BaseSerializer): def serialize(self): data = super().serialize() @@ -144,61 +135,6 @@ def serialize(self): return data -class ImageSerializer(BaseSerializer): - def serialize(self): - data = super().serialize() - data["annotation_status"] = constance.AnnotationStatus.get_name( - data["annotation_status"] - ) - return data - - def serialize_by_project(self, project: ProjectEntity): - data = super().serialize() - data = { - "name": data.get("name"), - "path": data.get("path"), - "annotation_status": data.get("annotation_status"), - "prediction_status": data.get("prediction_status"), - "segmentation_status": data.get("segmentation_status"), - "approval_status": data.get("approval_status"), - "is_pinned": data.get("is_pinned"), - "annotator_name": data.get("annotator_name"), - "qa_name": data.get("qa_name"), - "entropy_value": data.get("entropy_value"), - "createdAt": data.get("createdAt"), - "updatedAt": data.get("updatedAt"), - } - - data["annotation_status"] = constance.AnnotationStatus.get_name( 
- data["annotation_status"] - ) - - if project.upload_state == constance.UploadState.EXTERNAL.value: - data["prediction_status"] = None - data["segmentation_status"] = None - else: - if project.type == constance.ProjectType.VECTOR.value: - data["prediction_status"] = constance.SegmentationStatus.get_name( - data["prediction_status"] - ) - data["segmentation_status"] = None - if project.type == constance.ProjectType.PIXEL.value: - data["prediction_status"] = constance.SegmentationStatus.get_name( - data["prediction_status"] - ) - data["segmentation_status"] = constance.SegmentationStatus.get_name( - data["segmentation_status"] - ) - data["path"] = None - return data - - @staticmethod - def deserialize(data): - if isinstance(data, list): - return [ImageEntity(**image) for image in data] - return ImageEntity(**data) - - class SettingsSerializer(BaseSerializer): def serialize( self, diff --git a/src/superannotate/lib/core/data_handlers.py b/src/superannotate/lib/core/data_handlers.py index 8b352cb23..8f9a6deae 100644 --- a/src/superannotate/lib/core/data_handlers.py +++ b/src/superannotate/lib/core/data_handlers.py @@ -5,11 +5,12 @@ from collections import defaultdict from functools import lru_cache from typing import Any +from typing import Callable from typing import Dict from typing import List import lib.core as constances -from lib.core.enums import ClassTypeEnum +from lib.core.enums import AnnotationTypes from lib.core.reporter import Reporter from superannotate_schemas.schemas.classes import AnnotationClass from superannotate_schemas.schemas.classes import Attribute @@ -46,7 +47,7 @@ def get_annotation_class(self, name: str) -> AnnotationClass: @lru_cache() def get_attribute_group( - self, annotation_class: AnnotationClass, attr_group_name: str + self, annotation_class: AnnotationClass, attr_group_name: str ) -> AttributeGroup: for attr_group in annotation_class.attribute_groups: if attr_group.name == attr_group_name: @@ -113,10 +114,10 @@ def handle(self, 
annotation: dict): class MissingIDsHandler(BaseAnnotationDateHandler): def __init__( - self, - annotation_classes: List[AnnotationClass], - templates: List[dict], - reporter: Reporter, + self, + annotation_classes: List[AnnotationClass], + templates: List[dict], + reporter: Reporter, ): super().__init__(annotation_classes) self.validate_existing_classes(annotation_classes) @@ -156,11 +157,6 @@ def validate_existing_classes(self, annotation_classes: List[AnnotationClass]): " This will result in errors in annotation upload.", ) - def _get_class_type(self, annotation_type: str): - if annotation_type == ClassTypeEnum.TAG.name: - return ClassTypeEnum.TAG - return ClassTypeEnum.OBJECT - def handle(self, annotation: dict): if "instances" not in annotation: return annotation @@ -191,7 +187,7 @@ def handle(self, annotation: dict): template["name"]: template["id"] for template in self._templates } for annotation_instance in ( - i for i in annotation["instances"] if i.get("type", None) == "template" + i for i in annotation["instances"] if i.get("type", None) == "template" ): annotation_instance["templateId"] = template_name_id_map.get( annotation_instance.get("templateName", ""), -1 @@ -241,6 +237,20 @@ def handle(self, annotation: dict): class VideoFormatHandler(BaseAnnotationDateHandler): + @staticmethod + def _point_handler(time_stamp): + pass + + HANDLERS: Dict[str, Callable] = { + AnnotationTypes.EVENT: lambda timestamp: {}, + AnnotationTypes.BBOX: lambda timestamp: {"points": timestamp["points"]}, + AnnotationTypes.POINT: lambda timestamp: {"x": timestamp["x"], "y": timestamp["y"]}, + AnnotationTypes.POLYLINE: lambda timestamp: {"points": timestamp["points"]}, + AnnotationTypes.POLYGON: lambda timestamp: { + "points": timestamp["points"], "exclude": timestamp.get("exclude", []) + }, + } + def __init__(self, annotation_classes: List[AnnotationClass], reporter: Reporter): super().__init__(annotation_classes) self.reporter = reporter @@ -288,13 +298,10 @@ def 
convert_timestamp(timestamp): if meta.get("pointLabels", None): editor_instance["pointLabels"] = meta["pointLabels"] - active_attributes = set() for parameter in instance["parameters"]: - start_time = safe_time(convert_timestamp(parameter["start"])) end_time = safe_time(convert_timestamp(parameter["end"])) - for timestamp_data in parameter["timestamps"]: timestamp = safe_time( convert_timestamp(timestamp_data["timestamp"]) @@ -306,11 +313,13 @@ def convert_timestamp(timestamp): if timestamp == end_time: editor_instance["timeline"][timestamp]["active"] = False - - if timestamp_data.get("points", None): - editor_instance["timeline"][timestamp][ - "points" - ] = timestamp_data["points"] + handler: Callable = self.HANDLERS.get(meta["type"]) + if handler: + editor_instance["timeline"][timestamp].update(handler(timestamp_data)) + # if timestamp_data.get("points", None): + # editor_instance["timeline"][timestamp][ + # "points" + # ] = timestamp_data["points"] if not class_name: continue annotation_class = self.get_annotation_class(class_name) @@ -348,10 +357,10 @@ def convert_timestamp(timestamp): (group_name, attr_name) ) attributes_to_add = ( - existing_attributes_in_current_instance - active_attributes + existing_attributes_in_current_instance - active_attributes ) attributes_to_delete = ( - active_attributes - existing_attributes_in_current_instance + active_attributes - existing_attributes_in_current_instance ) if attributes_to_add or attributes_to_delete: editor_instance["timeline"][timestamp][ diff --git a/src/superannotate/lib/core/entities/base.py b/src/superannotate/lib/core/entities/base.py index 1ee41d156..52bdc4c51 100644 --- a/src/superannotate/lib/core/entities/base.py +++ b/src/superannotate/lib/core/entities/base.py @@ -44,7 +44,6 @@ class BaseEntity(TimedBaseModel): entropy_value: Optional[float] = Field(description="Priority score of given item") createdAt: str = Field(description="Date of creation") updatedAt: str = Field(description="Update date") - 
is_pinned: Optional[bool] class Config: extra = Extra.allow diff --git a/src/superannotate/lib/core/entities/items.py b/src/superannotate/lib/core/entities/items.py index 5b5d1f975..1a8fabb21 100644 --- a/src/superannotate/lib/core/entities/items.py +++ b/src/superannotate/lib/core/entities/items.py @@ -12,7 +12,9 @@ class Config: extra = Extra.allow def add_path(self, project_name: str, folder_name: str): - self.path = f"{project_name}{f'/{folder_name}' if folder_name != 'root' else ''}" + self.path = ( + f"{project_name}{f'/{folder_name}' if folder_name != 'root' else ''}" + ) return self @staticmethod @@ -32,6 +34,7 @@ class TmpImageEntity(Entity): SegmentationStatus.NOT_STARTED ) approval_status: Optional[ApprovalStatus] = Field(None) + is_pinned: Optional[bool] class Config: extra = Extra.ignore diff --git a/src/superannotate/lib/core/enums.py b/src/superannotate/lib/core/enums.py index 12c250575..62a5bcc58 100644 --- a/src/superannotate/lib/core/enums.py +++ b/src/superannotate/lib/core/enums.py @@ -42,6 +42,14 @@ def equals(self, other: Enum): return self.__doc__.lower() == other.__doc__.lower() +class AnnotationTypes(str, Enum): + BBOX = "bbox" + EVENT = "event" + POINT = "point" + POLYGON = "polygon" + POLYLINE = "polyline" + + class ProjectType(BaseTitledEnum): VECTOR = "Vector", 1 PIXEL = "Pixel", 2 diff --git a/src/superannotate/lib/core/helpers.py b/src/superannotate/lib/core/helpers.py deleted file mode 100644 index 0a36cf5da..000000000 --- a/src/superannotate/lib/core/helpers.py +++ /dev/null @@ -1,8 +0,0 @@ -import json - - -class SetEncoder(json.JSONEncoder): - def default(self, obj): - if isinstance(obj, set): - return list(obj) - return json.JSONEncoder.default(self, obj) diff --git a/src/superannotate/lib/core/reporter.py b/src/superannotate/lib/core/reporter.py index 75193342a..365d1aec8 100644 --- a/src/superannotate/lib/core/reporter.py +++ b/src/superannotate/lib/core/reporter.py @@ -1,3 +1,7 @@ +import itertools +import sys +import 
threading +import time from collections import defaultdict from typing import Union @@ -5,6 +9,28 @@ from superannotate.logger import get_default_logger +class Spinner: + spinner_cycle = iter(itertools.cycle(["⣾", "⣽", "⣻", "⢿", "⡿", "⣟", "⣯", "⣷"])) + + def __init__(self): + self.stop_running = threading.Event() + self.spin_thread = threading.Thread(target=self.init_spin) + + def start(self): + self.spin_thread.start() + + def stop(self): + self.stop_running.set() + self.spin_thread.join() + + def init_spin(self): + while not self.stop_running.is_set(): + sys.stdout.write(next(self.spinner_cycle)) + sys.stdout.flush() + time.sleep(0.25) + sys.stdout.write("\b") + + class Reporter: def __init__( self, @@ -23,6 +49,16 @@ def __init__( self.debug_messages = [] self.custom_messages = defaultdict(set) self.progress_bar = None + self._spinner = None + + def start_spinner(self): + if self._log_info: + self._spinner = Spinner() + self._spinner.start() + + def stop_spinner(self): + if self._spinner: + self._spinner.stop() def disable_warnings(self): self._log_warning = False diff --git a/src/superannotate/lib/core/service_types.py b/src/superannotate/lib/core/service_types.py index 97f9a058f..f793234f3 100644 --- a/src/superannotate/lib/core/service_types.py +++ b/src/superannotate/lib/core/service_types.py @@ -8,10 +8,6 @@ from pydantic import Extra -class ErrorMessage(BaseModel): - error: str - - class Limit(BaseModel): max_image_count: Optional[int] remaining_image_count: int diff --git a/src/superannotate/lib/core/serviceproviders.py b/src/superannotate/lib/core/serviceproviders.py index c5d92732e..ea120e4ea 100644 --- a/src/superannotate/lib/core/serviceproviders.py +++ b/src/superannotate/lib/core/serviceproviders.py @@ -1,5 +1,6 @@ from abc import abstractmethod from typing import Any +from typing import Callable from typing import Dict from typing import Iterable from typing import List @@ -51,10 +52,6 @@ def prepare_export( ): raise NotImplementedError - 
@abstractmethod - def delete_team_invitation(self, team_id: int, token: str, email: str): - raise NotImplementedError - @abstractmethod def search_team_contributors(self, team_id: int, query_string: str = None): raise NotImplementedError @@ -141,11 +138,6 @@ def move_images_between_folders( """ raise NotImplementedError - def get_duplicated_images( - self, project_id: int, team_id: int, folder_id: int, images: List[str] - ): - raise NotImplementedError - def get_progress( self, project_id: int, team_id: int, poll_id: int ) -> Tuple[int, int]: @@ -204,21 +196,6 @@ def un_share_project( ): raise NotImplementedError - def upload_form_s3( - self, - project_id: int, - team_id: int, - access_key: str, - secret_key: str, - bucket_name: str, - from_folder_name: str, - to_folder_id: int, - ): - raise NotImplementedError - - def get_upload_status(self, project_id: int, team_id: int, folder_id: int): - raise NotImplementedError - def get_exports(self, team_id: int, project_id: int): raise NotImplementedError @@ -266,11 +243,6 @@ def start_model_training(self, team_id: int, hyper_parameters: dict) -> dict: def get_model_metrics(self, team_id: int, model_id: int) -> dict: raise NotImplementedError - def get_models( - self, name: str, team_id: int, project_id: int, model_type: str - ) -> List: - raise NotImplementedError - def bulk_get_folders(self, team_id: int, project_ids: List[int]): raise NotImplementedError @@ -317,6 +289,21 @@ def get_annotations( folder_id: int, items: List[str], reporter: Reporter, + callback: Callable = None, + ) -> List[dict]: + raise NotImplementedError + + @abstractmethod + async def download_annotations( + self, + project_id: int, + team_id: int, + folder_id: int, + reporter: Reporter, + download_path: str, + postfix: str, + items: List[str] = None, + callback: Callable = None, ) -> List[dict]: raise NotImplementedError diff --git a/src/superannotate/lib/core/usecases/annotations.py b/src/superannotate/lib/core/usecases/annotations.py index 
8f4d7dd81..8b1666bba 100644 --- a/src/superannotate/lib/core/usecases/annotations.py +++ b/src/superannotate/lib/core/usecases/annotations.py @@ -1,8 +1,13 @@ +import asyncio import concurrent.futures import io import json import os +import platform from collections import namedtuple +from datetime import datetime +from pathlib import Path +from typing import Callable from typing import List from typing import Optional from typing import Tuple @@ -24,6 +29,7 @@ from lib.core.exceptions import AppException from lib.core.reporter import Reporter from lib.core.repositories import BaseManageableRepository +from lib.core.repositories import BaseReadOnlyRepository from lib.core.service_types import UploadAnnotationAuthData from lib.core.serviceproviders import SuperannotateServiceProvider from lib.core.types import PriorityScore @@ -36,6 +42,9 @@ logger = get_default_logger() +if platform.system().lower() == "windows": + asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) + class UploadAnnotationsUseCase(BaseReportableUseCae): MAX_WORKERS = 10 @@ -616,7 +625,6 @@ def execute(self): self.reporter.enable_info() if response.data: generator = VideoFrameGenerator(response.data[0], fps=self._fps) - self.reporter.log_info( f"Getting annotations for {generator.frames_count} frames from {self._video_name}." 
) @@ -723,3 +731,146 @@ def execute(self): else: self.reporter.warning_messages("Empty scores.") return self._response + + +class DownloadAnnotations(BaseReportableUseCae): + def __init__( + self, + reporter: Reporter, + project: ProjectEntity, + folder: FolderEntity, + destination: str, + recursive: bool, + item_names: List[str], + backend_service_provider: SuperannotateServiceProvider, + items: BaseReadOnlyRepository, + folders: BaseReadOnlyRepository, + classes: BaseReadOnlyRepository, + callback: Callable = None, + ): + super().__init__(reporter) + self._project = project + self._folder = folder + self._destination = destination + self._recursive = recursive + self._item_names = item_names + self._backend_client = backend_service_provider + self._items = items + self._folders = folders + self._classes = classes + self._callback = callback + + def validate_item_names(self): + if self._item_names: + item_names = list(dict.fromkeys(self._item_names)) + len_unique_items, len_items = len(item_names), len(self._item_names) + if len_unique_items < len_items: + self.reporter.log_info( + f"Dropping duplicates. Found {len_unique_items}/{len_items} unique items." + ) + self._item_names = item_names + + def validate_destination(self): + if self._destination: + destination = str(self._destination) + if not os.path.exists(destination) or not os.access( + destination, os.X_OK | os.W_OK + ): + raise AppException( + f"Local path {destination} is not an existing directory or access denied." 
+ ) + + @property + def destination(self) -> Path: + return Path(self._destination if self._destination else "") + + def get_postfix(self): + if self._project.type == constances.ProjectType.VECTOR: + return "___objects.json" + elif self._project.type == constances.ProjectType.PIXEL.value: + return "___pixel.json" + return ".json" + + def download_annotation_classes(self, path: str): + classes = self._classes.get_all() + os.mkdir(f"{path}/classes") + with open(f"{path}/classes/classes.json", "w+") as file: + json.dump([i.dict() for i in classes], file, indent=4) + + @staticmethod + def get_items_count(path: str): + return sum([len(files) for r, d, files in os.walk(path)]) + + @staticmethod + def coroutine_wrapper(coroutine): + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + loop.run_until_complete(coroutine) + loop.close() + + def execute(self): + if self.is_valid(): + export_prefix = f"{self._project.name}{f'/{self._folder.name}' if not self._folder.is_root else ''}" + export_path = str( + self.destination + / Path(f"{export_prefix} {datetime.now().strftime('%B %d %Y %H_%M')}") + ) + self.reporter.log_info( + f"Downloading the annotations of the requested items to {export_path} \nThis might take a while…" + ) + self.reporter.start_spinner() + folders = [] + if self._folder.is_root and self._recursive: + folders = self._folders.get_all( + Condition("team_id", self._project.team_id, EQ) + & Condition("project_id", self._project.id, EQ), + ) + folders.append(self._folder) + postfix = self.get_postfix() + import nest_asyncio + import platform + + if platform.system().lower() == "windows": + asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) + + nest_asyncio.apply() + + if not folders: + loop = asyncio.new_event_loop() + loop.run_until_complete( + self._backend_client.download_annotations( + team_id=self._project.team_id, + project_id=self._project.id, + folder_id=self._folder.uuid, + items=self._item_names, + 
reporter=self.reporter, + download_path=f"{export_path}{'/' + self._folder.name if not self._folder.is_root else ''}", + postfix=postfix, + callback=self._callback, + ) + ) + else: + with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor: + coroutines = [] + for folder in folders: + coroutines.append( + self._backend_client.download_annotations( + team_id=self._project.team_id, + project_id=self._project.id, + folder_id=folder.uuid, + items=self._item_names, + reporter=self.reporter, + download_path=f"{export_path}{'/' + folder.name if not folder.is_root else ''}", + postfix=postfix, + callback=self._callback, + ) + ) + _ = [_ for _ in executor.map(self.coroutine_wrapper, coroutines)] + + self.reporter.stop_spinner() + self.reporter.log_info( + f"SA-PYTHON-SDK - INFO - Downloaded annotations for {self.get_items_count(export_path)} items." + ) + self.download_annotation_classes(export_path) + self._response.data = os.path.abspath(export_path) + return self._response diff --git a/src/superannotate/lib/core/usecases/images.py b/src/superannotate/lib/core/usecases/images.py index faf8422da..f0519bbb6 100644 --- a/src/superannotate/lib/core/usecases/images.py +++ b/src/superannotate/lib/core/usecases/images.py @@ -515,151 +515,6 @@ def execute(self): self._images.update(self._image) -class ImagesBulkCopyUseCase(BaseUseCase): - """ - Copy images in bulk between folders in a project. - Return skipped image names. 
- """ - - CHUNK_SIZE = 1000 - - def __init__( - self, - project: ProjectEntity, - from_folder: FolderEntity, - to_folder: FolderEntity, - image_names: List[str], - backend_service_provider: SuperannotateServiceProvider, - include_annotations: bool, - include_pin: bool, - ): - super().__init__() - self._project = project - self._from_folder = from_folder - self._to_folder = to_folder - self._image_names = image_names - self._backend_service = backend_service_provider - self._include_annotations = include_annotations - self._include_pin = include_pin - - def _validate_limitations(self, images_to_copy_count): - response = self._backend_service.get_limitations( - team_id=self._project.team_id, - project_id=self._project.id, - folder_id=self._to_folder.uuid, - ) - if not response.ok: - raise AppValidationException(response.error) - if images_to_copy_count > response.data.folder_limit.remaining_image_count: - raise AppValidationException(constances.COPY_FOLDER_LIMIT_ERROR_MESSAGE) - if images_to_copy_count > response.data.project_limit.remaining_image_count: - raise AppValidationException(constances.COPY_PROJECT_LIMIT_ERROR_MESSAGE) - - def validate_project_type(self): - if self._project.type in constances.LIMITED_FUNCTIONS: - raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.type] - ) - - def execute(self): - if self.is_valid(): - images = self._backend_service.get_bulk_images( - project_id=self._project.id, - team_id=self._project.team_id, - folder_id=self._to_folder.uuid, - images=self._image_names, - ) - duplications = [image["name"] for image in images] - images_to_copy = set(self._image_names) - set(duplications) - skipped_images = duplications - try: - self._validate_limitations(len(images_to_copy)) - except AppValidationException as e: - self._response.errors = e - return self._response - - for i in range(0, len(images_to_copy), self.CHUNK_SIZE): - poll_id = self._backend_service.copy_items_between_folders_transaction( - 
team_id=self._project.team_id, - project_id=self._project.id, - from_folder_id=self._from_folder.uuid, - to_folder_id=self._to_folder.uuid, - items=self._image_names[i : i + self.CHUNK_SIZE], - include_annotations=self._include_annotations, - include_pin=self._include_pin, - ) - if not poll_id: - skipped_images.append(self._image_names[i : i + self.CHUNK_SIZE]) - continue - - await_time = len(images_to_copy) * 0.3 - timeout_start = time.time() - while time.time() < timeout_start + await_time: - done_count, skipped_count = self._backend_service.get_progress( - self._project.id, self._project.team_id, poll_id - ) - if done_count + skipped_count == len(images_to_copy): - break - time.sleep(4) - - self._response.data = skipped_images - return self._response - - -class ImagesBulkMoveUseCase(BaseUseCase): - """ - Copy images in bulk between folders in a project. - Return skipped image names. - """ - - CHUNK_SIZE = 1000 - - def __init__( - self, - project: ProjectEntity, - from_folder: FolderEntity, - to_folder: FolderEntity, - image_names: List[str], - backend_service_provider: SuperannotateServiceProvider, - ): - super().__init__() - self._project = project - self._from_folder = from_folder - self._to_folder = to_folder - self._image_names = image_names - self._backend_service = backend_service_provider - - def validate_limitations(self): - response = self._backend_service.get_limitations( - team_id=self._project.team_id, - project_id=self._project.id, - folder_id=self._to_folder.uuid, - ) - to_upload_count = len(self._image_names) - if not response.ok: - raise AppValidationException(response.error) - if to_upload_count > response.data.folder_limit.remaining_image_count: - raise AppValidationException(constances.MOVE_FOLDER_LIMIT_ERROR_MESSAGE) - if to_upload_count > response.data.project_limit.remaining_image_count: - raise AppValidationException(constances.MOVE_PROJECT_LIMIT_ERROR_MESSAGE) - - def execute(self): - if self.is_valid(): - moved_images = [] - for i in 
range(0, len(self._image_names), self.CHUNK_SIZE): - moved_images.extend( - self._backend_service.move_images_between_folders( - team_id=self._project.team_id, - project_id=self._project.id, - from_folder_id=self._from_folder.uuid, - to_folder_id=self._to_folder.uuid, - images=self._image_names[i : i + self.CHUNK_SIZE], # noqa: E203 - ) - ) - self._response.data = moved_images - return self._response - - class CreateFuseImageUseCase(BaseUseCase): TRANSPARENCY = 128 @@ -1576,97 +1431,6 @@ def execute(self): return self._response -class InteractiveAttachFileUrlsUseCase(BaseInteractiveUseCase): - CHUNK_SIZE = 500 - - def __init__( - self, - project: ProjectEntity, - folder: FolderEntity, - attachments: List[ImageEntity], - backend_service_provider: SuperannotateServiceProvider, - annotation_status: str = None, - upload_state_code: int = constances.UploadState.EXTERNAL.value, - ): - super().__init__() - self._attachments = attachments - self._project = project - self._folder = folder - self._backend_service = backend_service_provider - self._annotation_status = annotation_status - self._upload_state_code = upload_state_code - - @property - def attachments_count(self): - return len(self._attachments) - - @property - def chunks_count(self): - return int(self.attachments_count / self.CHUNK_SIZE) - - def validate_limitations(self): - attachments_count = self.attachments_count - response = self._backend_service.get_limitations( - team_id=self._project.team_id, - project_id=self._project.id, - folder_id=self._folder.uuid, - ) - if not response.ok: - raise AppValidationException(response.error) - if attachments_count > response.data.folder_limit.remaining_image_count: - raise AppValidationException(constances.ATTACH_FOLDER_LIMIT_ERROR_MESSAGE) - elif attachments_count > response.data.project_limit.remaining_image_count: - raise AppValidationException(constances.ATTACH_PROJECT_LIMIT_ERROR_MESSAGE) - elif ( - response.data.user_limit - and attachments_count > 
response.data.user_limit.remaining_image_count - ): - raise AppValidationException(constances.ATTACH_USER_LIMIT_ERROR_MESSAGE) - - def validate_upload_state(self): - if ( - self._upload_state_code - and self._upload_state_code != self._project.upload_state - ) or self._project.upload_state == constances.UploadState.BASIC.value: - raise AppValidationException(constances.ATTACHING_UPLOAD_STATE_ERROR) - - @property - def annotation_status_code(self): - if self._annotation_status: - return constances.AnnotationStatus.get_value(self._annotation_status) - return constances.AnnotationStatus.NOT_STARTED.value - - @property - def upload_state_code(self) -> int: - if not self._upload_state_code: - return constances.UploadState.EXTERNAL.value - return self._upload_state_code - - def execute(self): - if self.is_valid(): - uploaded_files, duplicated_files = [], [] - for i in range(0, self.attachments_count, self.CHUNK_SIZE): - response = AttachFileUrlsUseCase( - project=self._project, - folder=self._folder, - attachments=self._attachments[ - i : i + self.CHUNK_SIZE # noqa: E203 - ], - backend_service_provider=self._backend_service, - annotation_status=self._annotation_status, - upload_state_code=self._upload_state_code, - ).execute() - if response.errors: - self._response.errors = response.errors - continue - uploaded, duplicated = response.data - uploaded_files.extend(uploaded) - duplicated_files.extend(duplicated) - yield len(uploaded) + len(duplicated) - self._response.data = uploaded_files, duplicated_files - return self._response - - class CopyImageUseCase(BaseUseCase): def __init__( self, diff --git a/src/superannotate/lib/core/usecases/items.py b/src/superannotate/lib/core/usecases/items.py index a98f0da9a..0bb2fa294 100644 --- a/src/superannotate/lib/core/usecases/items.py +++ b/src/superannotate/lib/core/usecases/items.py @@ -113,7 +113,9 @@ def execute(self) -> Response: if service_response.ok: data = [] for i, item in enumerate(service_response.data): - tmp_item = 
GetItem.serialize_entity(Entity(**Entity.map_fields(item)), self._project) + tmp_item = GetItem.serialize_entity( + Entity(**Entity.map_fields(item)), self._project + ) folder_path = f"{'/' + item['folder_name'] if not item['is_root_folder'] else ''}" tmp_item.path = f"{self._project.name}" + folder_path data.append(tmp_item) diff --git a/src/superannotate/lib/core/usecases/models.py b/src/superannotate/lib/core/usecases/models.py index 581042161..e987c3912 100644 --- a/src/superannotate/lib/core/usecases/models.py +++ b/src/superannotate/lib/core/usecases/models.py @@ -22,14 +22,14 @@ from lib.core.enums import ExportStatus from lib.core.exceptions import AppException from lib.core.exceptions import AppValidationException +from lib.core.reporter import Reporter from lib.core.repositories import BaseManageableRepository from lib.core.serviceproviders import SuperannotateServiceProvider -from lib.core.usecases.base import BaseInteractiveUseCase +from lib.core.usecases.base import BaseReportableUseCae from lib.core.usecases.base import BaseUseCase from lib.core.usecases.images import GetBulkImages from superannotate.logger import get_default_logger - logger = get_default_logger() @@ -73,11 +73,12 @@ def validate_fuse(self): def validate_folder_names(self): if self._folder_names: - condition = ( - Condition("team_id", self._project.team_id, EQ) - & Condition("project_id", self._project.id, EQ) + condition = Condition("team_id", self._project.team_id, EQ) & Condition( + "project_id", self._project.id, EQ ) - existing_folders = {folder.name for folder in self._folders.get_all(condition)} + existing_folders = { + folder.name for folder in self._folders.get_all(condition) + } folder_names_set = set(self._folder_names) if not folder_names_set.issubset(existing_folders): raise AppException( @@ -176,7 +177,7 @@ def execute(self): return self._response -class DownloadExportUseCase(BaseInteractiveUseCase): +class DownloadExportUseCase(BaseReportableUseCae): def __init__( self, 
service: SuperannotateServiceProvider, @@ -185,33 +186,35 @@ def __init__( folder_path: str, extract_zip_contents: bool, to_s3_bucket: bool, + reporter: Reporter, ): - super().__init__() + super().__init__(reporter) self._service = service self._project = project self._export_name = export_name - self._folder_path = folder_path + self._folder_path = folder_path if folder_path else "" self._extract_zip_contents = extract_zip_contents self._to_s3_bucket = to_s3_bucket - self._temp_dir = None - def upload_to_s3_from_folder(self, folder_path: str): + def upload_to_s3_from_folder(self, source: str, folder_path: str): to_s3_bucket = boto3.Session().resource("s3").Bucket(self._to_s3_bucket) - files_to_upload = list(Path(folder_path).rglob("*.*")) + files_to_upload = list(Path(source).rglob("*.*")) def _upload_file_to_s3(_to_s3_bucket, _path, _s3_key) -> None: - _to_s3_bucket.upload_file(_path, _s3_key) + _to_s3_bucket.upload_file(str(_path), _s3_key) + self.reporter.update_progress() with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor: results = [] + self.reporter.start_progress(len(files_to_upload), "Uploading") for path in files_to_upload: - s3_key = f"{self._folder_path}/{path.name}" + s3_key = f"{folder_path + '/' if folder_path else ''}{str(Path(path).relative_to(Path(source)))}" results.append( - executor.submit(_upload_file_to_s3, to_s3_bucket, str(path), s3_key) + executor.submit(_upload_file_to_s3, to_s3_bucket, path, s3_key) ) - yield + self.reporter.finish_progress() - def download_to_local_storage(self, destination: str): + def download_to_local_storage(self, destination: str, extract_zip=False): exports = self._service.get_exports( team_id=self._project.team_id, project_id=self._project.id ) @@ -247,38 +250,35 @@ def download_to_local_storage(self, destination: str): with open(filepath, "wb") as f: for chunk in response.iter_content(chunk_size=8192): f.write(chunk) - if self._extract_zip_contents: + if extract_zip: with 
zipfile.ZipFile(filepath, "r") as f: f.extractall(destination) Path.unlink(filepath) return export["id"], filepath, destination - def get_upload_files_count(self): - if not self._temp_dir: - self._temp_dir = tempfile.TemporaryDirectory() - self.download_to_local_storage(self._temp_dir.name) - return len(list(Path(self._temp_dir.name).rglob("*.*"))) - def execute(self): if self.is_valid(): - report = [] if self._to_s3_bucket: - self.get_upload_files_count() - yield from self.upload_to_s3_from_folder(self._temp_dir.name) - report.append( + with tempfile.TemporaryDirectory() as temp_dir: + self.download_to_local_storage( + temp_dir, extract_zip=self._extract_zip_contents + ) + self.upload_to_s3_from_folder(temp_dir, self._folder_path) + self.reporter.log_info( f"Exported to AWS {self._to_s3_bucket}/{self._folder_path}" ) - self._temp_dir.cleanup() else: export_id, filepath, destination = self.download_to_local_storage( - self._folder_path + self._folder_path, self._extract_zip_contents ) if self._extract_zip_contents: - report.append(f"Extracted {filepath} to folder {destination}") + self.reporter.log_info( + f"Extracted {filepath} to folder {destination}" + ) else: - report.append(f"Downloaded export ID {export_id} to {filepath}") - yield - self._response.data = "\n".join(report) + self.reporter.log_info( + f"Downloaded export ID {export_id} to {filepath}" + ) return self._response diff --git a/src/superannotate/lib/core/usecases/projects.py b/src/superannotate/lib/core/usecases/projects.py index def0f7466..fb97e87de 100644 --- a/src/superannotate/lib/core/usecases/projects.py +++ b/src/superannotate/lib/core/usecases/projects.py @@ -623,37 +623,6 @@ def execute(self): return self._response -class ShareProjectUseCase(BaseUseCase): - def __init__( - self, - service: SuperannotateServiceProvider, - project_entity: ProjectEntity, - user_id: str, - user_role: str, - ): - super().__init__() - self._service = service - self._project_entity = project_entity - self._user_id 
= user_id - self._user_role = user_role - - @property - def user_role(self): - return constances.UserRole.get_value(self._user_role) - - def execute(self): - self._response.data = self._service.share_project_bulk( - team_id=self._project_entity.team_id, - project_id=self._project_entity.id, - users=[{"user_id": self._user_id, "user_role": self.user_role}], - ) - if not self._response.errors: - logger.info( - f"Shared project {self._project_entity.name} with user {self._user_id} and role {constances.UserRole.get_name(self.user_role)}" - ) - return self._response - - class UnShareProjectUseCase(BaseUseCase): def __init__( self, diff --git a/src/superannotate/lib/infrastructure/controller.py b/src/superannotate/lib/infrastructure/controller.py index 3ea2835dd..0a28148a4 100644 --- a/src/superannotate/lib/infrastructure/controller.py +++ b/src/superannotate/lib/infrastructure/controller.py @@ -4,6 +4,7 @@ from abc import ABCMeta from os.path import expanduser from pathlib import Path +from typing import Callable from typing import Iterable from typing import List from typing import Optional @@ -498,26 +499,6 @@ def clone_project( ) return use_case.execute() - def interactive_attach_urls( - self, - project_name: str, - files: List[ImageEntity], - folder_name: str = None, - annotation_status: str = None, - upload_state_code: int = None, - ): - project = self._get_project(project_name) - folder = self._get_folder(project, folder_name) - - return usecases.InteractiveAttachFileUrlsUseCase( - project=project, - folder=folder, - attachments=files, - backend_service_provider=self._backend_client, - annotation_status=annotation_status, - upload_state_code=upload_state_code, - ) - def create_folder(self, project: str, folder_name: str): project = self._get_project(project) folder = FolderEntity( @@ -724,48 +705,6 @@ def update_image( use_case = usecases.UpdateImageUseCase(image=image, images=self.images) return use_case.execute() - def bulk_copy_images( - self, - project_name: 
str, - from_folder_name: str, - to_folder_name: str, - image_names: List[str], - include_annotations: bool, - include_pin: bool, - ): - project = self._get_project(project_name) - from_folder = self._get_folder(project, from_folder_name) - to_folder = self._get_folder(project, to_folder_name) - use_case = usecases.ImagesBulkCopyUseCase( - project=project, - from_folder=from_folder, - to_folder=to_folder, - image_names=image_names, - backend_service_provider=self._backend_client, - include_annotations=include_annotations, - include_pin=include_pin, - ) - return use_case.execute() - - def bulk_move_images( - self, - project_name: str, - from_folder_name: str, - to_folder_name: str, - image_names: List[str], - ): - project = self._get_project(project_name) - from_folder = self._get_folder(project, from_folder_name) - to_folder = self._get_folder(project, to_folder_name) - use_case = usecases.ImagesBulkMoveUseCase( - project=project, - from_folder=from_folder, - to_folder=to_folder, - image_names=image_names, - backend_service_provider=self._backend_client, - ) - return use_case.execute() - def get_project_metadata( self, project_name: str, @@ -928,16 +867,6 @@ def assign_folder(self, project_name: str, folder_name: str, users: List[str]): ) return use_case.execute() - def share_project(self, project_name: str, user_id: str, user_role: str): - project_entity = self._get_project(project_name) - use_case = usecases.ShareProjectUseCase( - service=self._backend_client, - project_entity=project_entity, - user_id=user_id, - user_role=user_role, - ) - return use_case.execute() - def un_share_project(self, project_name: str, user_id: str): project_entity = self._get_project(project_name) use_case = usecases.UnShareProjectUseCase( @@ -1195,14 +1124,16 @@ def download_export( to_s3_bucket: bool, ): project = self._get_project(project_name) - return usecases.DownloadExportUseCase( + use_case = usecases.DownloadExportUseCase( service=self._backend_client, project=project, 
export_name=export_name, folder_path=folder_path, extract_zip_contents=extract_zip_contents, to_s3_bucket=to_s3_bucket, + reporter=self.default_reporter, ) + return use_case.execute() def download_ml_model(self, model_data: dict, download_path: str): model = MLModelEntity( @@ -1243,12 +1174,14 @@ def benchmark( if export_response.errors: return export_response - download_use_case = self.download_export( - project_name=project.name, + download_use_case = usecases.DownloadExportUseCase( + service=self._backend_client, + project=project, export_name=export_response.data["name"], folder_path=export_root, extract_zip_contents=True, to_s3_bucket=False, + reporter=self.default_reporter, ) if download_use_case.is_valid(): for _ in download_use_case.execute(): @@ -1368,7 +1301,7 @@ def delete_annotations( self, project_name: str, folder_name: str, - image_names: Optional[List[str]] = None, + item_names: Optional[List[str]] = None, ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) @@ -1376,7 +1309,7 @@ def delete_annotations( project=project, folder=folder, backend_service=self._backend_client, - image_names=image_names, + image_names=item_names, ) return use_case.execute() @@ -1683,3 +1616,32 @@ def set_annotation_statuses( backend_service_provider=self.backend_client, ) return use_case.execute() + + def download_annotations( + self, + project_name: str, + folder_name: str, + destination: str, + recursive: bool, + item_names: Optional[List[str]], + callback: Optional[Callable], + ): + project = self._get_project(project_name) + folder = self._get_folder(project, folder_name) + + use_case = usecases.DownloadAnnotations( + reporter=self.default_reporter, + project=project, + folder=folder, + destination=destination, + recursive=recursive, + item_names=item_names, + items=self.items, + folders=self.folders, + classes=AnnotationClassRepository( + service=self._backend_client, project=project + ), + 
backend_service_provider=self.backend_client, + callback=callback, + ) + return use_case.execute() diff --git a/src/superannotate/lib/infrastructure/services.py b/src/superannotate/lib/infrastructure/services.py index 796c32976..51bc674b6 100644 --- a/src/superannotate/lib/infrastructure/services.py +++ b/src/superannotate/lib/infrastructure/services.py @@ -3,6 +3,7 @@ import json import time from contextlib import contextmanager +from typing import Callable from typing import Dict from typing import Iterable from typing import List @@ -205,7 +206,6 @@ class SuperannotateBackendService(BaseBackendService): URL_SHARE_PROJECT_BULK = "project/{}/share/bulk" URL_ANNOTATION_CLASSES = "classes" URL_TEAM = "team" - URL_INVITE_CONTRIBUTOR = "team/{}/invite" URL_INVITE_CONTRIBUTORS = "team/{}/inviteUsers" URL_PREPARE_EXPORT = "export" URL_COPY_IMAGES_FROM_FOLDER = "images/copy-image-or-folders" @@ -213,8 +213,6 @@ class SuperannotateBackendService(BaseBackendService): URL_GET_COPY_PROGRESS = "images/copy-image-progress" URL_ASSIGN_IMAGES = "images/editAssignment/" URL_ASSIGN_FOLDER = "folder/editAssignment" - URL_S3_ACCESS_POINT = "/project/{}/get-image-s3-access-point" - URL_S3_UPLOAD_STATUS = "/project/{}/getS3UploadStatus" URL_GET_EXPORTS = "exports" URL_GET_CLASS = "class/{}" URL_ANNOTATION_UPLOAD_PATH_TOKEN = "images/getAnnotationsPathsAndTokens" @@ -559,15 +557,6 @@ def get_team(self, team_id: int): res = self._request(get_team_url, "get") return res.json() - def delete_team_invitation(self, team_id: int, token: str, email: str) -> bool: - invite_contributor_url = urljoin( - self.api_url, self.URL_INVITE_CONTRIBUTOR.format(team_id) - ) - res = self._request( - invite_contributor_url, "delete", data={"token": token, "e_mail": email} - ) - return res.ok - def invite_contributors( self, team_id: int, team_role: int, emails: list ) -> Tuple[List[str], List[str]]: @@ -669,33 +658,6 @@ def await_progress(self, project_id: int, team_id: int, poll_id: int, items_coun except 
(AppException, Exception) as e: raise BackendError(e) - def get_duplicated_images( - self, project_id: int, team_id: int, folder_id: int, images: List[str] - ) -> List[str]: - get_duplications_url = urljoin(self.api_url, self.URL_BULK_GET_IMAGES) - - res = self._request( - get_duplications_url, - "post", - data={ - "project_id": project_id, - "team_id": team_id, - "folder_id": folder_id, - "names": images, - }, - ) - return res.json() - - def delete_image(self, image_id, team_id: int, project_id: int): - delete_image_url = urljoin(self.api_url, self.URL_GET_IMAGE.format(image_id)) - - res = self._request( - delete_image_url, - "delete", - params={"team_id": team_id, "project_id": project_id}, - ) - return res.ok - def set_images_statuses_bulk( self, image_names: list, @@ -821,44 +783,6 @@ def get_export(self, team_id: int, project_id: int, export_id: int): ) return res.json() - def upload_form_s3( - self, - project_id: int, - team_id: int, - access_key: str, - secret_key: str, - bucket_name: str, - from_folder_name: str, - to_folder_id: int, - ): - upload_from_s3_url = urljoin( - self.api_url, self.URL_S3_ACCESS_POINT.format(project_id) - ) - response = self._request( - upload_from_s3_url, - "post", - params={"team_id": team_id}, - data={ - "accessKeyID": access_key, - "secretAccessKey": secret_key, - "bucketName": bucket_name, - "folderName": from_folder_name, - "folder_id": to_folder_id, - }, - ) - return response - - def get_upload_status(self, project_id: int, team_id: int, folder_id: int): - get_upload_status_url = urljoin( - self.api_url, self.URL_S3_UPLOAD_STATUS.format(project_id) - ) - res = self._request( - get_upload_status_url, - "get", - params={"team_id": team_id, "folder_id": folder_id}, - ) - return res.json().get("progress") - def get_project_images_count(self, team_id: int, project_id: int): get_images_count_url = urljoin(self.api_url, self.URL_FOLDERS_IMAGES) res = self._request( @@ -956,17 +880,6 @@ def get_model_metrics(self, team_id: int, 
model_id: int) -> dict: res = self._request(get_metrics_url, "get", params={"team_id": team_id}) return res.json() - def get_models( - self, name: str, team_id: int, project_id: int, model_type: str - ) -> List: - search_model_url = urljoin(self.api_url, self.URL_MODELS) - res = self._request( - search_model_url, - "get", - params={"team_id": team_id, "project_id": project_id, "name": name}, - ) - return res.json() - def search_models(self, query_string: str): search_model_url = urljoin(self.api_url, self.URL_MODELS) if query_string: @@ -1073,11 +986,12 @@ def get_annotations( folder_id: int, items: List[str], reporter: Reporter, + callback: Callable = None, ) -> List[dict]: import nest_asyncio import platform - if platform.system().lower() == 'windows': + if platform.system().lower() == "windows": asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) nest_asyncio.apply() @@ -1089,7 +1003,12 @@ def get_annotations( if folder_id: query_params["folder_id"] = folder_id - handler = StreamedAnnotations(self.default_headers, reporter) + handler = StreamedAnnotations( + self.default_headers, + reporter, + map_function=lambda x: {"image_names": x}, + callback=callback, + ) loop = asyncio.new_event_loop() return loop.run_until_complete( @@ -1098,10 +1017,50 @@ def get_annotations( data=items, params=query_params, chunk_size=self.DEFAULT_CHUNK_SIZE, - map_function=lambda x: {"image_names": x}, ) ) + async def download_annotations( + self, + project_id: int, + team_id: int, + folder_id: int, + reporter: Reporter, + download_path: str, + postfix: str, + items: List[str] = None, + callback: Callable = None, + ) -> List[dict]: + import aiohttp + + async with aiohttp.ClientSession( + raise_for_status=True, + headers=self.default_headers, + connector=aiohttp.TCPConnector(ssl=False), + ) as session: + query_params = { + "team_id": team_id, + "project_id": project_id, + } + if folder_id: + query_params["folder_id"] = folder_id + handler = StreamedAnnotations( + 
self.default_headers, + reporter, + map_function=lambda x: {"image_names": x}, + callback=callback, + ) + + return await handler.download_data( + url=urljoin(self.assets_provider_url, self.URL_GET_ANNOTATIONS), + data=items, + params=query_params, + chunk_size=self.DEFAULT_CHUNK_SIZE, + download_path=download_path, + postfix=postfix, + session=session, + ) + def get_integrations(self, team_id: int) -> List[dict]: get_integrations_url = urljoin( self.api_url, self.URL_GET_INTEGRATIONS.format(team_id) @@ -1144,7 +1103,7 @@ def saqul_query( params = { "team_id": team_id, "project_id": project_id, - "includeFolderNames": True + "includeFolderNames": True, } if folder_id: params["folder_id"] = folder_id diff --git a/src/superannotate/lib/infrastructure/stream_data_handler.py b/src/superannotate/lib/infrastructure/stream_data_handler.py index 4d61e9630..3be1cf8d7 100644 --- a/src/superannotate/lib/infrastructure/stream_data_handler.py +++ b/src/superannotate/lib/infrastructure/stream_data_handler.py @@ -1,4 +1,5 @@ import json +import os from typing import Callable import aiohttp @@ -8,10 +9,18 @@ class StreamedAnnotations: DELIMITER = b"\\n;)\\n" - def __init__(self, headers: dict, reporter: Reporter): + def __init__( + self, + headers: dict, + reporter: Reporter, + callback: Callable = None, + map_function: Callable = None, + ): self._headers = headers self._annotations = [] self._reporter = reporter + self._callback = callback + self._map_function = map_function async def fetch( self, @@ -21,7 +30,10 @@ async def fetch( data: dict = None, params: dict = None, ): - response = await session._request(method, url, json=data, params=params) + kwargs = {"params": params} + if data: + kwargs["json"] = data + response = await session._request(method, url, **kwargs) buffer = b"" async for line in response.content.iter_any(): slices = line.split(self.DELIMITER) @@ -29,16 +41,15 @@ async def fetch( buffer += slices[0] continue elif slices[0]: - 
self._annotations.append(json.loads(buffer + slices[0])) self._reporter.update_progress() + yield json.loads(buffer + slices[0]) for data in slices[1:-1]: - self._annotations.append(json.loads(data)) self._reporter.update_progress() + yield json.loads(data) buffer = slices[-1] if buffer: - self._annotations.append(json.loads(buffer)) + yield json.loads(buffer) self._reporter.update_progress() - return self._annotations async def get_data( self, @@ -47,7 +58,6 @@ async def get_data( method: str = "post", params=None, chunk_size: int = 100, - map_function: Callable = lambda x: x, verify_ssl: bool = False, ): async with aiohttp.ClientSession( @@ -55,19 +65,71 @@ async def get_data( headers=self._headers, connector=aiohttp.TCPConnector(ssl=verify_ssl), ) as session: - if chunk_size: for i in range(0, len(data), chunk_size): data_to_process = data[i : i + chunk_size] - await self.fetch( + async for annotation in self.fetch( method, session, url, - map_function(data_to_process), + self._process_data(data_to_process), params=params, - ) + ): + self._annotations.append( + self._callback(annotation) if self._callback else annotation + ) else: - await self.fetch( - method, session, url, map_function(data), params=params - ) + async for annotation in self.fetch( + method, session, url, self._process_data(data), params=params + ): + self._annotations.append( + self._callback(annotation) if self._callback else annotation + ) return self._annotations + + @staticmethod + def _store_annotation(path, postfix, annotation: dict): + os.makedirs(path, exist_ok=True) + with open(f"{path}/{annotation['metadata']['name']}{postfix}", "w") as file: + json.dump(annotation, file) + + def _process_data(self, data): + if data and self._map_function: + return self._map_function(data) + return data + + async def download_data( + self, + url: str, + data: list, + download_path: str, + postfix: str, + session, + method: str = "post", + params=None, + chunk_size: int = 100, + ): + if chunk_size and 
data: + for i in range(0, len(data), chunk_size): + data_to_process = data[i : i + chunk_size] + async for annotation in self.fetch( + method, + session, + url, + self._process_data(data_to_process), + params=params, + ): + self._store_annotation( + download_path, + postfix, + self._callback(annotation) if self._callback else annotation, + ) + else: + async for annotation in self.fetch( + method, session, url, self._process_data(data), params=params + ): + self._store_annotation( + download_path, + postfix, + self._callback(annotation) if self._callback else annotation, + ) diff --git a/src/superannotate/version.py b/src/superannotate/version.py index 356d66d4d..6914911d9 100644 --- a/src/superannotate/version.py +++ b/src/superannotate/version.py @@ -1 +1 @@ -__version__ = "4.3.4dev1" +__version__ = "4.3.5dev1" diff --git a/tests/data_set/video_annotation_minimal_fields/video.mp4.json b/tests/data_set/video_annotation_minimal_fields/video.mp4.json index a389a1ee8..647b211c7 100644 --- a/tests/data_set/video_annotation_minimal_fields/video.mp4.json +++ b/tests/data_set/video_annotation_minimal_fields/video.mp4.json @@ -46,142 +46,6 @@ } ] }, - { - "points": { - "x1": 182.32, - "y1": 36.33, - "x2": 284.01, - "y2": 134.54 - }, - "timestamp": 18271058, - "attributes": [ - { - "id": 1175876, - "groupId": 338357, - "name": "attr", - "groupName": "attr g" - } - ] - }, - { - "points": { - "x1": 181.49, - "y1": 45.09, - "x2": 283.18, - "y2": 143.3 - }, - "timestamp": 19271058, - "attributes": [ - { - "id": 1175876, - "groupId": 338357, - "name": "attr", - "groupName": "attr g" - } - ] - }, - { - "points": { - "x1": 181.9, - "y1": 48.35, - "x2": 283.59, - "y2": 146.56 - }, - "timestamp": 19725864, - "attributes": [ - { - "id": 1175876, - "groupId": 338357, - "name": "attr", - "groupName": "attr g" - } - ] - }, - { - "points": { - "x1": 181.49, - "y1": 52.46, - "x2": 283.18, - "y2": 150.67 - }, - "timestamp": 20271058, - "attributes": [ - { - "id": 1175876, - "groupId": 
338357, - "name": "attr", - "groupName": "attr g" - } - ] - }, - { - "points": { - "x1": 181.49, - "y1": 63.7, - "x2": 283.18, - "y2": 161.91 - }, - "timestamp": 21271058, - "attributes": [ - { - "id": 1175876, - "groupId": 338357, - "name": "attr", - "groupName": "attr g" - } - ] - }, - { - "points": { - "x1": 182.07, - "y1": 72.76, - "x2": 283.76, - "y2": 170.97 - }, - "timestamp": 22271058, - "attributes": [ - { - "id": 1175876, - "groupId": 338357, - "name": "attr", - "groupName": "attr g" - } - ] - }, - { - "points": { - "x1": 182.07, - "y1": 81.51, - "x2": 283.76, - "y2": 179.72 - }, - "timestamp": 23271058, - "attributes": [ - { - "id": 1175876, - "groupId": 338357, - "name": "attr", - "groupName": "attr g" - } - ] - }, - { - "points": { - "x1": 182.42, - "y1": 97.19, - "x2": 284.11, - "y2": 195.4 - }, - "timestamp": 24271058, - "attributes": [ - { - "id": 1175876, - "groupId": 338357, - "name": "attr", - "groupName": "attr g" - } - ] - }, { "points": { "x1": 182.42, @@ -205,7 +69,7 @@ }, { "meta": { - "type": "bbox", + "type": "point", "classId": 859496, "className": "vid", "start": 29713736, @@ -217,22 +81,14 @@ "end": 30526667, "timestamps": [ { - "points": { - "x1": 132.82, - "y1": 129.12, - "x2": 175.16, - "y2": 188 - }, + "x": 1, + "y": 2, "timestamp": 29713736, "attributes": [] }, { - "points": { - "x1": 132.82, - "y1": 129.12, - "x2": 175.16, - "y2": 188 - }, + "x": 2, + "y": 3, "timestamp": 30526667, "attributes": [] } diff --git a/tests/integration/annotations/test_download_annotations.py b/tests/integration/annotations/test_download_annotations.py new file mode 100644 index 000000000..4223b9010 --- /dev/null +++ b/tests/integration/annotations/test_download_annotations.py @@ -0,0 +1,63 @@ +import json +import os +import tempfile +from pathlib import Path + +import pytest + +import src.superannotate as sa +from tests.integration.base import BaseTestCase + + +class TestDownloadAnnotations(BaseTestCase): + PROJECT_NAME = "Test-download_annotations" + 
FOLDER_NAME = "FOLDER_NAME" + FOLDER_NAME_2 = "FOLDER_NAME_2" + PROJECT_DESCRIPTION = "Desc" + PROJECT_TYPE = "Vector" + TEST_FOLDER_PATH = "data_set/sample_project_vector" + IMAGE_NAME = "example_image_1.jpg" + + @property + def folder_path(self): + return os.path.join(Path(__file__).parent.parent.parent, self.TEST_FOLDER_PATH) + + @pytest.mark.flaky(reruns=3) + def test_download_annotations(self): + sa.init() + sa.upload_images_from_folder_to_project( + self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" + ) + sa.create_annotation_classes_from_classes_json( + self.PROJECT_NAME, f"{self.folder_path}/classes/classes.json" + ) + _, _, _ = sa.upload_annotations_from_folder_to_project( + self.PROJECT_NAME, self.folder_path + ) + with tempfile.TemporaryDirectory() as temp_dir: + annotations_path = sa.download_annotations(f"{self.PROJECT_NAME}", temp_dir, [self.IMAGE_NAME]) + self.assertEqual(len(os.listdir(temp_dir)), 1) + with open(f"{self.folder_path}/{self.IMAGE_NAME}___objects.json", "r") as pre_annotation_file, open( + f"{annotations_path}/{self.IMAGE_NAME}___objects.json") as post_annotation_file: + pre_annotation_data = json.load(pre_annotation_file) + post_annotation_data = json.load(post_annotation_file) + self.assertEqual(len(pre_annotation_data["instances"]), len(post_annotation_data["instances"])) + + @pytest.mark.flaky(reruns=3) + def test_download_annotations_from_folders(self): + sa.init() + sa.create_folder(self.PROJECT_NAME, self.FOLDER_NAME) + sa.create_folder(self.PROJECT_NAME, self.FOLDER_NAME_2) + sa.create_annotation_classes_from_classes_json( + self.PROJECT_NAME, f"{self.folder_path}/classes/classes.json" + ) + for folder in [self.FOLDER_NAME, self.FOLDER_NAME_2, ""]: + sa.upload_images_from_folder_to_project( + f"{self.PROJECT_NAME}{'/' + folder if folder else ''}", self.folder_path, annotation_status="InProgress" + ) + _, _, _ = sa.upload_annotations_from_folder_to_project( + f"{self.PROJECT_NAME}{'/' + folder if folder else 
''}", self.folder_path + ) + with tempfile.TemporaryDirectory() as temp_dir: + annotations_path = sa.download_annotations(f"{self.PROJECT_NAME}", temp_dir) + self.assertEqual(len(os.listdir(annotations_path)), 5) diff --git a/tests/integration/annotations/test_get_annotations.py b/tests/integration/annotations/test_get_annotations.py index 1d4cddc1d..d277a69dd 100644 --- a/tests/integration/annotations/test_get_annotations.py +++ b/tests/integration/annotations/test_get_annotations.py @@ -100,6 +100,24 @@ def test_get_annotations_all(self): annotations = sa.get_annotations(f"{self.PROJECT_NAME}") self.assertEqual(len(annotations), 4) + @pytest.mark.flaky(reruns=3) + def test_get_annotations_all_plus_folder(self): + sa.create_folder(self.PROJECT_NAME, self.FOLDER_NAME) + sa.upload_images_from_folder_to_project( + self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" + ) + sa.upload_images_from_folder_to_project( + f"{self.PROJECT_NAME}/{self.FOLDER_NAME}", self.folder_path, annotation_status="InProgress" + ) + sa.create_annotation_classes_from_classes_json( + self.PROJECT_NAME, f"{self.folder_path}/classes/classes.json" + ) + _, _, _ = sa.upload_annotations_from_folder_to_project( + self.PROJECT_NAME, self.folder_path + ) + annotations = sa.get_annotations(f"{self.PROJECT_NAME}") + self.assertEqual(len(annotations), 4) + class TestGetAnnotationsVideo(BaseTestCase): PROJECT_NAME = "test attach multiple video urls" @@ -132,7 +150,7 @@ def test_video_annotation_upload_root(self): sa.init() sa.create_annotation_classes_from_classes_json(self.PROJECT_NAME, self.classes_path) - _, _, _ = sa.attach_video_urls_to_project( + _, _, _ = sa.attach_items( self.PROJECT_NAME, self.csv_path, ) @@ -145,7 +163,7 @@ def test_video_annotation_upload_folder(self): sa.create_annotation_classes_from_classes_json(self.PROJECT_NAME, self.classes_path) sa.create_folder(self.PROJECT_NAME, "folder") path = f"{self.PROJECT_NAME}/folder" - _, _, _ = sa.attach_video_urls_to_project( 
+ _, _, _ = sa.attach_items( path, self.csv_path, ) diff --git a/tests/integration/annotations/test_get_annotations_per_frame.py b/tests/integration/annotations/test_get_annotations_per_frame.py index 565d38464..1624a1d73 100644 --- a/tests/integration/annotations/test_get_annotations_per_frame.py +++ b/tests/integration/annotations/test_get_annotations_per_frame.py @@ -38,7 +38,7 @@ def test_video_annotation_upload(self): sa.init() sa.create_annotation_classes_from_classes_json(self.PROJECT_NAME, self.classes_path) - _, _, _ = sa.attach_video_urls_to_project( + _, _, _ = sa.attach_items( self.PROJECT_NAME, self.csv_path, ) diff --git a/tests/integration/annotations/test_text_annotation_upload.py b/tests/integration/annotations/test_text_annotation_upload.py index 98368c31c..a44f51027 100644 --- a/tests/integration/annotations/test_text_annotation_upload.py +++ b/tests/integration/annotations/test_text_annotation_upload.py @@ -48,7 +48,7 @@ def inject_fixtures(self, caplog): def test_document_annotation_upload_invalid_json(self): sa.create_annotation_classes_from_classes_json(self.PROJECT_NAME, self.classes_path) - _, _, _ = sa.attach_document_urls_to_project( + _, _, _ = sa.attach_items( self.PROJECT_NAME, self.csv_path, ) @@ -63,7 +63,7 @@ def test_document_annotation_upload_invalid_json(self): def test_text_annotation_upload(self): sa.create_annotation_classes_from_classes_json(self.PROJECT_NAME, self.classes_path) - _, _, _ = sa.attach_document_urls_to_project( + _, _, _ = sa.attach_items( self.PROJECT_NAME, self.csv_path, ) @@ -81,7 +81,7 @@ def test_text_annotation_upload(self): def test_document_annotation_without_class_data(self): sa.create_annotation_classes_from_classes_json(self.PROJECT_NAME, self.classes_path) - _, _, _ = sa.attach_document_urls_to_project( + _, _, _ = sa.attach_items( self.PROJECT_NAME, self.csv_path, ) diff --git a/tests/integration/annotations/test_video_annotation_upload.py 
b/tests/integration/annotations/test_video_annotation_upload.py index 8f3f23a6f..cc7c262eb 100644 --- a/tests/integration/annotations/test_video_annotation_upload.py +++ b/tests/integration/annotations/test_video_annotation_upload.py @@ -1,13 +1,14 @@ +import json import os import tempfile -import json from pathlib import Path import pytest + import src.superannotate as sa -from tests.integration.base import BaseTestCase -from src.superannotate.lib.core.data_handlers import VideoFormatHandler from lib.core.reporter import Reporter +from src.superannotate.lib.core.data_handlers import VideoFormatHandler +from tests.integration.base import BaseTestCase class TestUploadVideoAnnotation(BaseTestCase): @@ -63,7 +64,7 @@ def inject_fixtures(self, caplog): def test_video_annotation_upload_invalid_json(self): sa.create_annotation_classes_from_classes_json(self.PROJECT_NAME, self.classes_path) - _, _, _ = sa.attach_video_urls_to_project( + _, _, _ = sa.attach_items( self.PROJECT_NAME, self.csv_path, ) @@ -77,7 +78,7 @@ def test_video_annotation_upload_invalid_json(self): def test_video_annotation_upload(self): sa.create_annotation_classes_from_classes_json(self.PROJECT_NAME, self.classes_path) - _, _, _ = sa.attach_video_urls_to_project( + _, _, _ = sa.attach_items( self.PROJECT_NAME, self.csv_path, ) @@ -114,7 +115,7 @@ def test_video_annotation_upload(self): def test_upload_annotations_without_class_name(self): sa.create_annotation_classes_from_classes_json(self.PROJECT_NAME, self.classes_path) - _, _, _ = sa.attach_video_urls_to_project( + _, _, _ = sa.attach_items( self.PROJECT_NAME, self.csv_path, ) @@ -123,7 +124,7 @@ def test_upload_annotations_without_class_name(self): def test_upload_annotations_empty_json(self): sa.create_annotation_classes_from_classes_json(self.PROJECT_NAME, self.classes_path) - _, _, _ = sa.attach_video_urls_to_project( + _, _, _ = sa.attach_items( self.PROJECT_NAME, self.csv_path, ) @@ -141,28 +142,24 @@ def 
test_video_annotation_converter(self): json.loads(open(f'{self.minimal_annotations_path}/video.mp4.json', 'r').read()) ) - data = {'instances': [{'attributes': [], 'timeline': { - '0': {'active': True, 'points': {'x1': 223.32, 'y1': 78.45, 'x2': 312.31, 'y2': 176.66}}, - 17.271058: {'points': {'x1': 182.08, 'y1': 33.18, 'x2': 283.45, 'y2': 131.39}}, - 18.271058: {'points': {'x1': 182.32, 'y1': 36.33, 'x2': 284.01, 'y2': 134.54}}, - 19.271058: {'points': {'x1': 181.49, 'y1': 45.09, 'x2': 283.18, 'y2': 143.3}}, - 19.725864: {'points': {'x1': 181.9, 'y1': 48.35, 'x2': 283.59, 'y2': 146.56}}, - 20.271058: {'points': {'x1': 181.49, 'y1': 52.46, 'x2': 283.18, 'y2': 150.67}}, - 21.271058: {'points': {'x1': 181.49, 'y1': 63.7, 'x2': 283.18, 'y2': 161.91}}, - 22.271058: {'points': {'x1': 182.07, 'y1': 72.76, 'x2': 283.76, 'y2': 170.97}}, - 23.271058: {'points': {'x1': 182.07, 'y1': 81.51, 'x2': 283.76, 'y2': 179.72}}, - 24.271058: {'points': {'x1': 182.42, 'y1': 97.19, 'x2': 284.11, 'y2': 195.4}}, - 30.526667: {'active': False, 'points': {'x1': 182.42, 'y1': 97.19, 'x2': 284.11, 'y2': 195.4}}}, - 'type': 'bbox', 'locked': False, 'classId': -1, 'pointLabels': {'3': 'point label bro'}}, - {'attributes': [], 'timeline': {29.713736: {'active': True, - 'points': {'x1': 132.82, 'y1': 129.12, - 'x2': 175.16, 'y2': 188}}, - 30.526667: {'active': False, - 'points': {'x1': 132.82, 'y1': 129.12, - 'x2': 175.16, 'y2': 188}}}, - 'type': 'bbox', 'locked': False, 'classId': -1}, {'attributes': [], 'timeline': { - 5.528212: {'active': True}, 6.702957: {}, 7.083022: {'active': False}}, 'type': 'event', - 'locked': False, 'classId': -1}], - 'tags': ['some tag'], 'name': 'video.mp4', - 'metadata': {'name': 'video.mp4', 'width': None, 'height': None}} + data = {'instances': [ + { + 'attributes': [], 'timeline': { + '0': {'active': True, 'points': {'x1': 223.32, 'y1': 78.45, 'x2': 312.31, 'y2': 176.66}}, + 17.271058: {'points': {'x1': 182.08, 'y1': 33.18, 'x2': 283.45, 'y2': 131.39}}, + 
30.526667: {'active': False, 'points': {'x1': 182.42, 'y1': 97.19, 'x2': 284.11, 'y2': 195.4}}}, + 'type': 'bbox', 'locked': False, 'classId': -1, 'pointLabels': {'3': 'point label bro'} + }, + { + 'attributes': [], + 'timeline': {29.713736: {'active': True, 'x': 1, 'y': 2}, 30.526667: {'active': False, 'x': 2, 'y': 3}}, + 'type': 'point', 'locked': False, 'classId': -1 + }, + { + 'attributes': [], 'timeline': {5.528212: {'active': True}, 6.702957: {}, 7.083022: {'active': False}}, + 'type': 'event', 'locked': False, 'classId': -1 + } + ], + 'tags': ['some tag'], 'name': 'video.mp4', + 'metadata': {'name': 'video.mp4', 'width': None, 'height': None}} self.assertEqual(data, converted_video) diff --git a/tests/integration/items/test_get_item_metadata.py b/tests/integration/items/test_get_item_metadata.py index b7a850126..ca805b776 100644 --- a/tests/integration/items/test_get_item_metadata.py +++ b/tests/integration/items/test_get_item_metadata.py @@ -33,14 +33,6 @@ def test_get_item_metadata(self): assert item_metadata["annotation_status"] == "InProgress" assert item_metadata["approval_status"] == None - def test_attached_items_paths(self): - sa.attach_image_urls_to_project(self.PROJECT_NAME, self.scv_path) - sa.add_contributors_to_project(self.PROJECT_NAME, ["shab.prog@gmail.com"], "QA") - sa.assign_images(self.PROJECT_NAME, [self.ATTACHED_IMAGE_NAME], "shab.prog@gmail.com") - item = sa.get_item_metadata(self.PROJECT_NAME, self.ATTACHED_IMAGE_NAME) - assert item["url"] == 'https://drive.google.com/uc?export=download&id=1vwfCpTzcjxoEA4hhDxqapPOVvLVeS7ZS' - assert item["path"] == f"{self.PROJECT_NAME}" - class TestGetEntityMetadataPixel(BaseTestCase): PROJECT_NAME = "TestGetEntityMetadataPixel" diff --git a/tests/integration/projects/test_add_contributors_to_project.py b/tests/integration/projects/test_add_contributors_to_project.py index e5629f183..c4c2f34c6 100644 --- a/tests/integration/projects/test_add_contributors_to_project.py +++ 
b/tests/integration/projects/test_add_contributors_to_project.py @@ -23,40 +23,6 @@ class TestProject(BaseTestCase): def random_email(self): return f"{''.join(random.choice(string.ascii_letters) for _ in range(7))}@gmail.com" - @patch("lib.infrastructure.controller.Controller.get_team") - @patch("lib.infrastructure.controller.Controller.get_project_metadata") - @patch("lib.infrastructure.controller.Controller.backend_client", new_callable=PropertyMock) - def test_add_contributors(self, client, get_project_metadata_mock, get_team_mock): - client.return_value.share_project_bulk.return_value = dict(invalidUsers=[]) - - random_emails = [self.random_email for i in range(20)] - - team_users = [UserEntity(email=email, user_role=3) for email in random_emails[: 10]] - project_users = [dict(user_id=email, user_role=4) for email in random_emails[: 10]] - to_add_emails = random_emails[8: 18] - pending_users = [dict(email=email, user_role=3) for email in random_emails[15: 20]] - unverified_users = [dict(email=email, user_role=4) for email in random_emails[18: 20]] - - team_data = MagicMock() - project_data = MagicMock() - get_team_mock.return_value = team_data - team_data.data = TeamEntity( - uuid=controller.team_id, - users=team_users, - pending_invitations=pending_users - ) - get_project_metadata_mock.return_value = project_data - project_data.data = dict( - project=ProjectEntity( - uuid=controller.team_id, - users=project_users, - unverified_users=unverified_users, - ) - ) - added, skipped = sa.add_contributors_to_project(self.PROJECT_NAME, to_add_emails, "QA") - self.assertEqual(len(added), 3) - self.assertEqual(len(skipped), 7) - @patch("lib.infrastructure.controller.Controller.get_team") @patch("lib.infrastructure.controller.Controller.backend_client", new_callable=PropertyMock) def test_invite_contributors(self, client, get_team_mock): diff --git a/tests/integration/projects/test_basic_project.py b/tests/integration/projects/test_basic_project.py index 
f7264c196..03be9cab7 100644 --- a/tests/integration/projects/test_basic_project.py +++ b/tests/integration/projects/test_basic_project.py @@ -98,95 +98,6 @@ def test_workflow_get(self): self.assertEqual(workflows[1]['className'], "class2") -class TestProjectCreateMetadata(BaseTestCase): - PROJECT_NAME = "TestProjectCreateMetadata" - PROJECT_TYPE = "Vector" - OTHER_PROJECT_NAME = "other_project" - PROJECT_DESCRIPTION = "DESCRIPTION" - IMAGE_QUALITY_ORIGINAL = "original" - - def setUp(self, *args, **kwargs): - super(TestProjectCreateMetadata, self).setUp() - try: - sa.delete_project(self.OTHER_PROJECT_NAME) - except: - pass - - def tearDown(self) -> None: - super(TestProjectCreateMetadata, self).tearDown() - try: - sa.delete_project(self.OTHER_PROJECT_NAME) - except: - pass - - @pytest.mark.flaky(reruns=2) - def test_create_project_from_metadata(self): - sa.create_annotation_class( - self.PROJECT_NAME, - "rrr", - "#FFAAFF", - [ - { - "name": "tall", - "is_multiselect": 0, - "attributes": [{"name": "yes"}, {"name": "no"}], - }, - { - "name": "age", - "is_multiselect": 0, - "attributes": [{"name": "young"}, {"name": "old"}], - }, - ], - ) - sa.set_project_workflow( - self.PROJECT_NAME, - [ - { - "step": 1, - "className": "rrr", - "tool": 3, - "attribute": [ - { - "attribute": { - "name": "young", - "attribute_group": {"name": "age"}, - } - }, - { - "attribute": { - "name": "yes", - "attribute_group": {"name": "tall"}, - } - }, - ], - } - ], - ) - - team_users = sa.search_team_contributors() - sa.share_project(self.PROJECT_NAME, team_users[0], "QA") - - sa.set_project_default_image_quality_in_editor(self.PROJECT_NAME, self.IMAGE_QUALITY_ORIGINAL) - meta = sa.get_project_metadata(self.PROJECT_NAME, - include_workflow=True, - include_settings=True, - include_contributors=True, - include_annotation_classes=True - ) - meta["name"] = self.OTHER_PROJECT_NAME - sa.create_project_from_metadata(meta) - created = sa.get_project_metadata(self.OTHER_PROJECT_NAME, - 
include_workflow=True, - include_settings=True, - include_contributors=True, - include_annotation_classes=True - ) - self.assertEqual(len(created["classes"]), 1) - self.assertEqual([f"{i['attribute']}_{i['value']}" for i in meta["settings"]], - [f"{i['attribute']}_{i['value']}" for i in created["settings"]]) - self.assertEqual(len(created['workflows']), 1) - - class TestProject(BaseTestCase): PROJECT_NAME = "sample_basic_project" PROJECT_TYPE = "Pixel" diff --git a/tests/integration/projects/test_clone_project.py b/tests/integration/projects/test_clone_project.py index 1942a906e..f26a2e179 100644 --- a/tests/integration/projects/test_clone_project.py +++ b/tests/integration/projects/test_clone_project.py @@ -25,7 +25,7 @@ def tearDown(self) -> None: sa.delete_project(self.PROJECT_NAME_2) def test_create_like_project(self): - _, _, _ = sa.attach_image_urls_to_project( + _, _, _ = sa.attach_items( self.PROJECT_NAME_1, os.path.join(DATA_SET_PATH, self.PATH_TO_URLS), ) diff --git a/tests/integration/test_assign_images.py b/tests/integration/test_assign_images.py deleted file mode 100644 index ab0b8e567..000000000 --- a/tests/integration/test_assign_images.py +++ /dev/null @@ -1,144 +0,0 @@ -import os -import pytest -from os.path import dirname - -import src.superannotate as sa -from tests.integration.base import BaseTestCase - - -class TestAnnotationClasses(BaseTestCase): - PROJECT_NAME = "test_assign_images" - TEST_FOLDER_PATH = "data_set/sample_project_vector" - TEST_FOLDER_NAME = "test_folder" - PROJECT_DESCRIPTION = "desc" - PROJECT_TYPE = "Vector" - EXAMPLE_IMAGE_1 = "example_image_1.jpg" - EXAMPLE_IMAGE_2 = "example_image_2.jpg" - - @property - def folder_path(self): - return os.path.join(dirname(dirname(__file__)), self.TEST_FOLDER_PATH) - - def test_assign_images(self): - email = sa.get_team_metadata()["users"][0]["email"] - sa.share_project(self._project["name"], email, "QA") - - sa.upload_images_from_folder_to_project( - project=self._project["name"], 
folder_path=self.folder_path - ) - - sa.assign_images( - self._project["name"], [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2], email - ) - image_metadata = sa.get_item_metadata( - self._project["name"], self.EXAMPLE_IMAGE_1 - ) - self.assertIsNotNone(image_metadata["qa_email"]) - - def test_assign_images_folder(self): - - email = sa.get_team_metadata()["users"][0]["email"] - - sa.share_project(self.PROJECT_NAME, email, "QA") - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME) - - project_folder = f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME}" - - sa.upload_images_from_folder_to_project(project_folder, self.folder_path) - - sa.assign_images( - project_folder, [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2], email - ) - - im1_metadata = sa.get_item_metadata(project_folder, self.EXAMPLE_IMAGE_1) - im2_metadata = sa.get_item_metadata(project_folder, self.EXAMPLE_IMAGE_2) - - self.assertIsNotNone(im1_metadata["qa_email"]) - self.assertIsNotNone(im2_metadata["qa_email"]) - - @pytest.mark.flaky(reruns=4) - def test_un_assign_images(self): - - email = sa.get_team_metadata()["users"][0]["email"] - sa.share_project(self.PROJECT_NAME, email, "QA") - sa.upload_images_from_folder_to_project(self.PROJECT_NAME, self.folder_path) - sa.assign_images( - self.PROJECT_NAME, [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2], email - ) - sa.unassign_images( - self.PROJECT_NAME, [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2], - ) - - im1_metadata = sa.get_item_metadata(self.PROJECT_NAME, self.EXAMPLE_IMAGE_1) - im2_metadata = sa.get_item_metadata(self.PROJECT_NAME, self.EXAMPLE_IMAGE_2) - - self.assertIsNone(im1_metadata["qa_email"]) - self.assertIsNone(im2_metadata["qa_email"]) - - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME) - project = self.PROJECT_NAME + "/" + self.TEST_FOLDER_NAME - - sa.move_images( - self.PROJECT_NAME, [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2], project - ) - sa.assign_images(project, [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2], email) - 
sa.unassign_images( - project, [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2], - ) - - sa.search_items(project) - im1_metadata = sa.get_item_metadata(project, self.EXAMPLE_IMAGE_1) - - im2_metadata = sa.get_item_metadata(project, self.EXAMPLE_IMAGE_2) - - self.assertIsNone(im1_metadata["qa_email"]) - self.assertIsNone(im2_metadata["qa_email"]) - - def test_assign_folder(self): - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME) - email = sa.get_team_metadata()["users"][0]["email"] - sa.share_project(self.PROJECT_NAME, email, "QA") - sa.assign_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME, [email]) - folders = sa.search_folders( - self.PROJECT_NAME, self.TEST_FOLDER_NAME, return_metadata=True - ) - self.assertGreater(len(folders[0]["folder_users"]), 0) - - def test_un_assign_folder(self): - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME) - email = sa.get_team_metadata()["users"][0]["email"] - sa.share_project(self.PROJECT_NAME, email, "QA") - sa.assign_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME, [email]) - folders = sa.search_folders( - self.PROJECT_NAME, folder_name=self.TEST_FOLDER_NAME, return_metadata=True - ) - self.assertGreater(len(folders[0]["folder_users"]), 0) - sa.unassign_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME) - - folders = sa.search_folders( - self.PROJECT_NAME, self.TEST_FOLDER_NAME, return_metadata=True - ) - self.assertEqual(len(folders[0]["folder_users"]), 0) - - def test_assign_folder_unverified_users(self): - - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME) - email = "unverified_user@mail.com" - try: - sa.assign_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME, [email]) - except Exception: - pass - - # assert "Skipping unverified_user@mail.com from assignees." 
in caplog.text - - def test_assign_images_unverified_user(self): - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME) - project_folder = self.PROJECT_NAME + "/" + self.TEST_FOLDER_NAME - sa.upload_images_from_folder_to_project(project_folder, self.folder_path) - email = "unverified_user@email.com" - try: - sa.assign_images( - project_folder, [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2], email - ) - except Exception: - pass diff --git a/tests/integration/test_attach_document_urls.py b/tests/integration/test_attach_document_urls.py deleted file mode 100644 index 2f819c1b5..000000000 --- a/tests/integration/test_attach_document_urls.py +++ /dev/null @@ -1,55 +0,0 @@ -import os -from os.path import dirname -from os.path import join - -import src.superannotate as sa -from src.superannotate import AppException -import src.superannotate.lib.core as constances -from tests.integration.base import BaseTestCase - - -class TestDocumentUrls(BaseTestCase): - PROJECT_NAME = "document attach urls" - PATH_TO_URLS = "csv_files/text_urls.csv" - PATH_TO_50K_URLS = "501_urls.csv" - PROJECT_DESCRIPTION = "desc" - PROJECT_TYPE = "Document" - - @property - def csv_path(self): - return os.path.join(dirname(dirname(__file__)), "data_set") - - def test_attach_documents_urls(self): - uploaded, could_not_upload, existing_images = sa.attach_document_urls_to_project( - self.PROJECT_NAME, - join(self.csv_path, self.PATH_TO_URLS) - ) - self.assertEqual(len(uploaded), 11) - self.assertEqual(len(could_not_upload), 0) - self.assertEqual(len(existing_images), 1) - - uploaded, could_not_upload, existing_images = sa.attach_document_urls_to_project( - self.PROJECT_NAME, - join(self.csv_path, self.PATH_TO_URLS), - ) - self.assertEqual(len(uploaded), 2) - self.assertEqual(len(could_not_upload), 0) - self.assertEqual(len(existing_images), 10) - - def test_attach_video_urls_to_vector_project(self): - try: - sa.create_project("1", "!", "vector") - with self.assertRaisesRegexp(AppException, 
constances.INVALID_PROJECT_TYPE_TO_PROCESS.format("Vector")): - sa.attach_document_urls_to_project("1", join(self.csv_path, self.PATH_TO_URLS),) - except AssertionError: - raise - except Exception: - sa.delete_project("1") - - def test_limitation(self): - self.assertRaises( - Exception, - sa.attach_document_urls_to_project, - self.PROJECT_NAME, - join(self.csv_path, self.PATH_TO_50K_URLS) - ) \ No newline at end of file diff --git a/tests/integration/test_attach_video_urls.py b/tests/integration/test_attach_video_urls.py deleted file mode 100644 index 96a84f4e7..000000000 --- a/tests/integration/test_attach_video_urls.py +++ /dev/null @@ -1,85 +0,0 @@ -import os -from os.path import dirname - -import src.superannotate as sa -from src.superannotate import AppException -import src.superannotate.lib.core as constances -from tests.integration.base import BaseTestCase - - -class TestVideoUrls(BaseTestCase): - PROJECT_NAME = "test attach video urls" - PATH_TO_URLS = "data_set/attach_urls.csv" - PATH_TO_URLS_WITHOUT_NAMES = "data_set/attach_urls_with_no_name.csv" - PATH_TO_50K_URLS = "data_set/501_urls.csv" - PROJECT_DESCRIPTION = "desc" - PROJECT_TYPE = "Video" - - @property - def csv_path(self): - return os.path.join(dirname(dirname(__file__)), self.PATH_TO_URLS) - - @property - def csv_path_without_name_column(self): - return os.path.join(dirname(dirname(__file__)), self.PATH_TO_URLS_WITHOUT_NAMES) - - def test_attach_video_urls(self): - uploaded, could_not_upload, existing_images = sa.attach_video_urls_to_project( - self.PROJECT_NAME, - self.csv_path, - ) - self.assertEqual(len(uploaded), 7) - self.assertEqual(len(could_not_upload), 0) - self.assertEqual(len(existing_images), 1) - - def test_attach_video_urls_to_vector_project(self): - try: - sa.create_project("1", "!", "vector") - with self.assertRaisesRegexp(AppException, constances.INVALID_PROJECT_TYPE_TO_PROCESS.format("Vector")): - sa.attach_video_urls_to_project("1", self.csv_path) - except AssertionError: - 
raise - except Exception: - sa.delete_project("1") - - def test_attach_video_urls_without_name_column(self): - uploaded, could_not_upload, existing_images = sa.attach_video_urls_to_project( - self.PROJECT_NAME, - self.csv_path_without_name_column - ) - self.assertEqual(len(uploaded), 8) - self.assertEqual(len(could_not_upload), 0) - self.assertEqual(len(existing_images), 0) - - def test_get_exports(self): - sa.attach_video_urls_to_project( - self.PROJECT_NAME, - self.csv_path_without_name_column - ) - sa.prepare_export(self.PROJECT_NAME) - self.assertEqual(len(sa.get_exports(self.PROJECT_NAME)),1) - - def test_double_attach_image_urls(self): - uploaded, could_not_upload, existing_images = sa.attach_video_urls_to_project( - self.PROJECT_NAME, - os.path.join(dirname(dirname(__file__)), self.PATH_TO_URLS), - ) - self.assertEqual(len(uploaded), 7) - self.assertEqual(len(could_not_upload), 0) - self.assertEqual(len(existing_images), 1) - - uploaded, could_not_upload, existing_images = sa.attach_video_urls_to_project( - self.PROJECT_NAME, - os.path.join(dirname(dirname(__file__)), self.PATH_TO_URLS), - ) - self.assertEqual(len(uploaded), 2) - self.assertEqual(len(could_not_upload), 0) - self.assertEqual(len(existing_images), 6) - - def test_limitation(self): - self.assertRaises( - Exception, - sa.attach_image_urls_to_project, - self.PROJECT_NAME, - os.path.join(dirname(dirname(__file__)), self.PATH_TO_50K_URLS) - ) diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py index d821c48a9..f02bca960 100644 --- a/tests/integration/test_cli.py +++ b/tests/integration/test_cli.py @@ -1,13 +1,13 @@ import os -import subprocess -import pytest -import pkg_resources import tempfile from os.path import dirname from pathlib import Path from unittest import TestCase +import pkg_resources + import src.superannotate as sa +from src.superannotate.lib.app.interface.cli_interface import CLIFacade try: CLI_VERSION = pkg_resources.get_distribution("superannotate").version 
@@ -30,6 +30,7 @@ class CLITest(TestCase): TEST_VIDEO_CSV_PATH = "data_set/csv_files/image_urls.csv" def setUp(self, *args, **kwargs): + self._cli = CLIFacade() self.tearDown() def tearDown(self) -> None: @@ -89,111 +90,65 @@ def video_csv_path(self): Path(os.path.join(dirname(dirname(__file__)), self.TEST_VIDEO_CSV_PATH)) ) + @staticmethod + def safe_run(method, *args, **kwargs): + try: + method(*args, **kwargs) + except SystemExit: + pass + def _create_project(self, project_type="Vector"): - subprocess.run( - f'superannotatecli create-project --name "{self.PROJECT_NAME}" --description gg --type {project_type}', - check=True, - shell=True, - ) + self.safe_run(self._cli.create_project, name=self.PROJECT_NAME, description="gg", type=project_type) - # @pytest.mark.skipif(CLI_VERSION and CLI_VERSION != sa.__version__, - # reason=f"Updated package version from {CLI_VERSION} to {sa.__version__}") def test_create_folder(self): self._create_project() - subprocess.run( - f'superannotatecli create-folder --project "{self.PROJECT_NAME}" --name {self.FOLDER_NAME}', - check=True, - shell=True, - ) + self.safe_run(self._cli.create_folder, project=self.PROJECT_NAME, name=self.FOLDER_NAME) folder = sa.get_folder_metadata( project=self.PROJECT_NAME, folder_name=self.FOLDER_NAME ) self.assertEqual(self.FOLDER_NAME, folder["name"]) - # @pytest.mark.skipif(CLI_VERSION and CLI_VERSION != sa.__version__, - # reason=f"Updated package version from {CLI_VERSION} to {sa.__version__}") def test_upload_images(self): self._create_project() - subprocess.run( - f'superannotatecli upload-images --project "{self.PROJECT_NAME}"' - f" --folder {self.recursive_folder_path} " - "--extensions=jpg " - "--set-annotation-status QualityCheck", - check=True, - shell=True, - ) + self.safe_run(self._cli.upload_images, project=self.PROJECT_NAME, folder=str(self.recursive_folder_path), + extensions="jpg", + set_annotation_status="QualityCheck") self.assertEqual(1, len(sa.search_items(self.PROJECT_NAME))) - # 
@pytest.mark.skipif(CLI_VERSION and CLI_VERSION != sa.__version__, - # reason=f"Updated package version from {CLI_VERSION} to {sa.__version__}") def test_upload_export(self): self._create_project() with tempfile.TemporaryDirectory() as temp_dir: test_dir = Path(temp_dir) / "test1" test_dir.mkdir() - subprocess.run( - f'superannotatecli export-project --project "{self.PROJECT_NAME}" --folder {test_dir}', - check=True, - shell=True, - ) + self.safe_run(self._cli.export_project, project=self.PROJECT_NAME, folder=test_dir) self.assertEqual(len(list(test_dir.rglob("*.json"))), 1) self.assertEqual(len(list(test_dir.glob("*.jpg"))), 0) self.assertEqual(len(list(test_dir.glob("*.png"))), 0) - # @pytest.mark.skipif(CLI_VERSION and CLI_VERSION != sa.__version__, - # reason=f"Updated package version from {CLI_VERSION} to {sa.__version__}") def test_vector_pre_annotation_folder_upload_download_cli(self): self._create_project() sa.create_annotation_classes_from_classes_json( self.PROJECT_NAME, f"{self.vector_folder_path}/classes/classes.json" ) - subprocess.run( - f'superannotatecli upload-images --project "{self.PROJECT_NAME}"' - f" --folder {self.convertor_data_path} " - "--extensions=jpg " - "--set-annotation-status QualityCheck", - check=True, - shell=True, - ) - - subprocess.run( - f"superannotatecli upload-preannotations " - f'--project "{self.PROJECT_NAME}" ' - f'--folder "{self.convertor_data_path}" ' - f'--format COCO ' - f'--data-set-name "instances_test"', - shell=True, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) + self.safe_run(self._cli.upload_images, self.PROJECT_NAME, folder=str(self.convertor_data_path), + extensions="jpg", + set_annotation_status="QualityCheck") + self.safe_run(self._cli.upload_preannotations, self.PROJECT_NAME, folder=str(self.convertor_data_path), + format="COCO", + dataset_name="instances_test") # tod add test - @pytest.mark.skipif(CLI_VERSION and CLI_VERSION != sa.__version__, - reason=f"Updated package version from 
{CLI_VERSION} to {sa.__version__}") def test_vector_annotation_folder_upload_download_cli(self): self._create_project() sa.create_annotation_classes_from_classes_json( self.PROJECT_NAME, f"{self.vector_folder_path}/classes/classes.json" ) - subprocess.run( - f"superannotatecli upload-images" - f' --project "{self.PROJECT_NAME}"' - f" --folder {self.convertor_data_path} " - "--extensions=jpg " - "--set-annotation-status QualityCheck", - check=True, - shell=True, - ) - subprocess.run( - f"superannotatecli upload-annotations " - f'--project "{self.PROJECT_NAME}" ' - f'--folder "{self.convertor_data_path}" ' - f'--format COCO ' - f'--dataset-name "instances_test"', - check=True, - shell=True, - ) + self.safe_run(self._cli.upload_images, self.PROJECT_NAME, str(self.convertor_data_path), extensions="jpg", + set_annotation_status="QualityCheck") + self.safe_run(self._cli.upload_annotations, self.PROJECT_NAME, str(self.convertor_data_path), format="COCO", + dataset_name="instances_test") + count_in = len(list(self.vector_folder_path.glob("*.json"))) with tempfile.TemporaryDirectory() as temp_dir: for image in sa.search_items(self.PROJECT_NAME): @@ -202,55 +157,22 @@ def test_vector_annotation_folder_upload_download_cli(self): count_out = len(list(Path(temp_dir).glob("*.json"))) self.assertEqual(count_in, count_out) - @pytest.mark.skipif(CLI_VERSION and CLI_VERSION != sa.__version__, - reason=f"Updated package version from {CLI_VERSION} to {sa.__version__}") def test_attach_image_urls(self): self._create_project() - subprocess.run( - f"superannotatecli attach-image-urls " - f'--project "{self.PROJECT_NAME}" ' - f"--attachments {self.image_csv_path}", - check=True, - shell=True, - ) - + self.safe_run(self._cli.attach_image_urls, self.PROJECT_NAME, str(self.video_csv_path)) self.assertEqual(3, len(sa.search_items(self.PROJECT_NAME))) - # @pytest.mark.skipif(CLI_VERSION and CLI_VERSION != sa.__version__, - # reason=f"Updated package version from {CLI_VERSION} to 
{sa.__version__}") def test_attach_video_urls(self): self._create_project("Video") - subprocess.run( - f"superannotatecli attach-video-urls " - f'--project "{self.PROJECT_NAME}" ' - f"--attachments {self.video_csv_path}", - check=True, - shell=True, - ) - # self.assertEqual(3, len(sa.search_items(self.PROJECT_NAME))) + self.safe_run(self._cli.attach_video_urls, self.PROJECT_NAME, str(self.video_csv_path)) + self.assertEqual(3, len(sa.search_items(self.PROJECT_NAME))) - # @pytest.mark.skipif(CLI_VERSION and CLI_VERSION != sa.__version__, - # reason=f"Updated package version from {CLI_VERSION} to {sa.__version__}") def test_upload_videos(self): self._create_project() - subprocess.run( - f"superannotatecli upload-videos " - f'--project "{self.PROJECT_NAME}" ' - f"--folder '{self.video_folder_path}' " - f"--target-fps 1", - check=True, - shell=True, - ) + self.safe_run(self._cli.upload_videos, self.PROJECT_NAME, str(self.video_folder_path)) self.assertEqual(5, len(sa.search_items(self.PROJECT_NAME))) - @pytest.mark.skipif(CLI_VERSION and CLI_VERSION != sa.__version__, - reason=f"Updated package version from {CLI_VERSION} to {sa.__version__}") def test_attach_document_urls(self): - self._create_project("Document") - subprocess.run( - f"superannotatecli attach-document-urls " - f'--project "{self.PROJECT_NAME}" ' - f"--attachments {self.image_csv_path}", - check=True, - shell=True, - ) + self._create_project("Document") + self.safe_run(self._cli.attach_document_urls, self.PROJECT_NAME, str(self.video_csv_path)) + self.assertEqual(3, len(sa.search_items(self.PROJECT_NAME))) diff --git a/tests/integration/test_create_from_full_info.py b/tests/integration/test_create_from_full_info.py deleted file mode 100644 index f76393b04..000000000 --- a/tests/integration/test_create_from_full_info.py +++ /dev/null @@ -1,48 +0,0 @@ -import os -from os.path import dirname -from unittest import TestCase - -import src.superannotate as sa - - -class TestCloneProject(TestCase): - 
PROJECT_NAME_1 = "test create from full info1" - PROJECT_NAME_2 = "test create from full info2" - PROJECT_DESCRIPTION = "desc" - PROJECT_TYPE = "Vector" - - TEST_FOLDER_PATH = "data_set/sample_project_vector" - - @property - def folder_path(self): - return os.path.join(dirname(dirname(__file__)), self.TEST_FOLDER_PATH) - - @property - def classes_json(self): - return f"{self.folder_path}/classes/classes.json" - - def setUp(self, *args, **kwargs): - self.tearDown() - self._project_1 = sa.create_project( - self.PROJECT_NAME_1, self.PROJECT_DESCRIPTION, self.PROJECT_TYPE - ) - - def tearDown(self) -> None: - sa.delete_project(self.PROJECT_NAME_1) - sa.delete_project(self.PROJECT_NAME_2) - - def test_clone_contributors_and_description(self): - team_users = sa.search_team_contributors() - sa.share_project(self.PROJECT_NAME_1, team_users[0], "QA") - first_project_metadata = sa.get_project_metadata( - self.PROJECT_NAME_1, include_contributors=True - ) - first_project_contributors = first_project_metadata["contributors"] - sa.clone_project(self.PROJECT_NAME_2, self.PROJECT_NAME_1, "DESCRIPTION", copy_contributors=True) - second_project_metadata = sa.get_project_metadata( - self.PROJECT_NAME_2, include_contributors=True - ) - second_project_contributors = second_project_metadata["contributors"] - - self.assertEqual(first_project_contributors[0]["user_id"], second_project_contributors[0]["user_id"]) - self.assertEqual("DESCRIPTION", second_project_metadata["description"]) \ No newline at end of file diff --git a/tests/integration/test_depricated_functions_document.py b/tests/integration/test_depricated_functions_document.py index 96c9ec420..4beaf1810 100644 --- a/tests/integration/test_depricated_functions_document.py +++ b/tests/integration/test_depricated_functions_document.py @@ -84,13 +84,6 @@ def test_deprecated_functions(self): sa.set_image_annotation_status(self.PROJECT_NAME, self.UPLOAD_IMAGE_NAME, "Completed") except AppException as e: 
self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.attach_image_urls_to_project( - self.PROJECT_NAME, - os.path.join(dirname(dirname(__file__)), self.PATH_TO_URLS), - ) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE_2.format(self.PROJECT_TYPE), str(e)) try: sa.copy_image(self.PROJECT_NAME, self.UPLOAD_IMAGE_NAME, self.PROJECT_NAME_2) except AppException as e: @@ -123,18 +116,10 @@ def test_deprecated_functions(self): sa.add_annotation_point_to_image(self.PROJECT_NAME, self.UPLOAD_IMAGE_NAME, [1, 2], "some class") except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.copy_images(self.PROJECT_NAME, [self.UPLOAD_IMAGE_NAME], self.PROJECT_NAME) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE, str(e)) try: sa.get_project_workflow(self.PROJECT_NAME) except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.move_images(self.PROJECT_NAME, [self.UPLOAD_IMAGE_NAME], self.PROJECT_NAME_2) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE, str(e)) try: sa.class_distribution(self.video_export_path, [self.PROJECT_NAME]) except AppException as e: @@ -147,13 +132,6 @@ def test_deprecated_functions(self): sa.prepare_export(self.PROJECT_NAME, include_fuse=True, only_pinned=True) except AppException as e: self.assertIn("Include fuse functionality is not supported", str(e)) - try: - sa.attach_video_urls_to_project( - self.PROJECT_NAME, - os.path.join(dirname(dirname(__file__)), self.PATH_TO_URLS), - ) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE_2.format(self.PROJECT_TYPE), str(e)) try: sa.benchmark(self.PROJECT_NAME, "some", ["some folder1"]) except AppException as e: diff --git a/tests/integration/test_depricated_functions_video.py b/tests/integration/test_depricated_functions_video.py index 75bb7f88f..1831554c7 100644 --- a/tests/integration/test_depricated_functions_video.py +++ b/tests/integration/test_depricated_functions_video.py @@ -79,13 
+79,6 @@ def test_deprecated_functions(self): sa.set_image_annotation_status(self.PROJECT_NAME, self.UPLOAD_IMAGE_NAME, "Completed") except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.attach_image_urls_to_project( - self.PROJECT_NAME, - os.path.join(dirname(dirname(__file__)), self.PATH_TO_URLS), - ) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE_2.format(self.PROJECT_TYPE), str(e)) try: sa.copy_image(self.PROJECT_NAME, self.UPLOAD_IMAGE_NAME, self.PROJECT_NAME_2) except AppException as e: @@ -114,18 +107,10 @@ def test_deprecated_functions(self): sa.upload_preannotations_from_folder_to_project(self.PROJECT_NAME, self.folder_path) except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.copy_images(self.PROJECT_NAME, [self.UPLOAD_IMAGE_NAME], self.PROJECT_NAME) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE, str(e)) try: sa.get_project_workflow(self.PROJECT_NAME) except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.move_images(self.PROJECT_NAME, [self.UPLOAD_IMAGE_NAME], self.PROJECT_NAME_2) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE, str(e)) try: sa.set_project_default_image_quality_in_editor(self.PROJECT_NAME, "original") except AppException as e: @@ -142,13 +127,6 @@ def test_deprecated_functions(self): sa.prepare_export(self.PROJECT_NAME, include_fuse=True, only_pinned=True) except AppException as e: self.assertIn("Include fuse functionality is not supported", str(e)) - try: - sa.attach_document_urls_to_project( - self.PROJECT_NAME, - os.path.join(dirname(dirname(__file__)), self.PATH_TO_URLS), - ) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE_2.format(self.PROJECT_TYPE), str(e)) try: sa.benchmark(self.PROJECT_NAME, "some", ["some folder1"]) except AppException as e: diff --git a/tests/integration/test_export_upload_s3.py b/tests/integration/test_export_upload_s3.py index 
14a0414f9..cbd145c88 100644 --- a/tests/integration/test_export_upload_s3.py +++ b/tests/integration/test_export_upload_s3.py @@ -3,9 +3,12 @@ from os.path import dirname import boto3 + import src.superannotate as sa from tests.integration.base import BaseTestCase +s3_client = boto3.client("s3") + class TestExportUploadS3(BaseTestCase): PROJECT_NAME = "export_upload_s3" @@ -14,53 +17,36 @@ class TestExportUploadS3(BaseTestCase): TEST_FOLDER_PTH = "data_set" TEST_FOLDER_PATH = "data_set/sample_project_vector" TEST_S3_BUCKET = "superannotate-python-sdk-test" - S3_PREFIX2 = "frex_temp" + TMP_DIR = "TMP_DIR" + + def tearDown(self) -> None: + super().tearDown() + for object_data in s3_client.list_objects_v2(Bucket=self.TEST_S3_BUCKET, Prefix=self.TMP_DIR).get("Contents", + []): + s3_client.delete_object(Bucket=self.TEST_S3_BUCKET, Key=object_data["Key"]) @property def folder_path(self): return os.path.join(dirname(dirname(__file__)), self.TEST_FOLDER_PATH) def test_export_upload(self): + sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_PTH) sa.upload_images_from_folder_to_project( - project=self.PROJECT_NAME, + project=f"{self.PROJECT_NAME}/{self.TEST_FOLDER_PTH}", folder_path=self.folder_path, annotation_status="QualityCheck", ) - s3_client = boto3.client("s3") - paginator = s3_client.get_paginator("list_objects_v2") files = [] - - with tempfile.TemporaryDirectory() as temp_dir: - download_location = temp_dir - response_iterator = paginator.paginate( - Bucket=self.TEST_S3_BUCKET, Prefix=self.S3_PREFIX2 - ) - for response in response_iterator: - if "Contents" in response: - for object_data in response["Contents"]: - key = object_data["Key"] - s3_client.delete_object(Bucket=self.TEST_S3_BUCKET, Key=key) - new_export = sa.prepare_export(self.PROJECT_NAME, include_fuse=True) - sa.download_export( - self.PROJECT_NAME, - new_export, - download_location, - to_s3_bucket=self.TEST_S3_BUCKET, - ) - response_iterator = paginator.paginate( - Bucket=self.TEST_S3_BUCKET, 
Prefix=download_location - ) - for response in response_iterator: - if "Contents" in response: - for object_data in response["Contents"]: - key = object_data["Key"] - files.append(key) - - with tempfile.TemporaryDirectory() as temp_dir: - - output_path = temp_dir - sa.download_export(self.PROJECT_NAME, new_export, output_path) - local_files = os.listdir(output_path) - - self.assertEqual(len(local_files), len(files)) + new_export = sa.prepare_export(self.PROJECT_NAME, include_fuse=True) + sa.download_export( + project=self.PROJECT_NAME, + export=new_export, + folder_path=self.TMP_DIR, + to_s3_bucket=self.TEST_S3_BUCKET, + extract_zip_contents=True + ) + for object_data in s3_client.list_objects_v2(Bucket=self.TEST_S3_BUCKET, Prefix=self.TMP_DIR).get("Contents", + []): + files.append(object_data["Key"]) + self.assertEqual(13, len(files)) diff --git a/tests/integration/test_interface.py b/tests/integration/test_interface.py index 462321991..96e4b578c 100644 --- a/tests/integration/test_interface.py +++ b/tests/integration/test_interface.py @@ -90,14 +90,6 @@ def test_download_image_annotations(self): with tempfile.TemporaryDirectory() as temp_dir: sa.download_image_annotations(self.PROJECT_NAME, self.EXAMPLE_IMAGE_1, temp_dir) - def test_search_folder(self): - team_users = sa.search_team_contributors() - sa.share_project(self.PROJECT_NAME, team_users[0], "QA") - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME) - data = sa.search_folders(self.PROJECT_NAME, return_metadata=True) - folder_data = sa.search_folders(self.PROJECT_NAME, self.TEST_FOLDER_NAME, return_metadata=True) - self.assertEqual(data, folder_data) - def test_search_project(self): sa.upload_images_from_folder_to_project(self.PROJECT_NAME, self.folder_path) sa.set_image_annotation_status(self.PROJECT_NAME, self.EXAMPLE_IMAGE_1, "Completed") diff --git a/tests/integration/test_validate_upload_state.py b/tests/integration/test_validate_upload_state.py deleted file mode 100644 index 
e67e84106..000000000 --- a/tests/integration/test_validate_upload_state.py +++ /dev/null @@ -1,57 +0,0 @@ -import os -from os.path import dirname - -import src.superannotate as sa -from src.superannotate import AppException -from src.superannotate.lib.core import ATTACHING_UPLOAD_STATE_ERROR -from src.superannotate.lib.core import UPLOADING_UPLOAD_STATE_ERROR -from tests.integration.base import BaseTestCase - - -class TestVectorUploadStateCode(BaseTestCase): - PROJECT_NAME = "TestVectorUploadStateCode" - PROJECT_DESCRIPTION = "Desc" - PROJECT_TYPE = "Vector" - TEST_FOLDER_PATH = "data_set" - TEST_IMAGES_PATH = "sample_project_vector" - PATH_TO_URLS = "attach_urls.csv" - PATH_TO_VIDEOS = "sample_videos" - IMAGE_NAME = "example_image_1.jpg" - - @property - def folder_path(self): - return os.path.join(dirname(dirname(__file__)), self.TEST_FOLDER_PATH) - - @property - def attachments(self): - return os.path.join(dirname(dirname(__file__)), self.PATH_TO_URLS) - - def test_attach_wrong_upload_state(self): - sa.upload_image_to_project(self.PROJECT_NAME, - os.path.join(self.folder_path, self.TEST_IMAGES_PATH, self.IMAGE_NAME)) - with self.assertRaisesRegexp(AppException, ATTACHING_UPLOAD_STATE_ERROR): - sa.attach_image_urls_to_project(self.PROJECT_NAME, os.path.join(self.folder_path, self.PATH_TO_URLS)) - - def test_upload_images_wrong_upload_state(self): - sa.attach_image_urls_to_project(self.PROJECT_NAME, os.path.join(self.folder_path, self.PATH_TO_URLS)) - with self.assertRaisesRegexp(AppException, UPLOADING_UPLOAD_STATE_ERROR): - sa.upload_images_from_folder_to_project( - self.PROJECT_NAME, - os.path.join(self.folder_path, self.TEST_IMAGES_PATH) - ) - - def test_upload_image_wrong_upload_state(self): - sa.attach_image_urls_to_project(self.PROJECT_NAME, os.path.join(self.folder_path, self.PATH_TO_URLS)) - with self.assertRaisesRegexp(AppException, UPLOADING_UPLOAD_STATE_ERROR): - sa.upload_image_to_project( - self.PROJECT_NAME, - os.path.join(self.folder_path, 
self.TEST_IMAGES_PATH, self.IMAGE_NAME) - ) - - def test_videos_image_wrong_upload_state(self): - sa.attach_image_urls_to_project(self.PROJECT_NAME, os.path.join(self.folder_path, self.PATH_TO_URLS)) - with self.assertRaisesRegexp(AppException, UPLOADING_UPLOAD_STATE_ERROR): - sa.upload_videos_from_folder_to_project( - self.PROJECT_NAME, - os.path.join(self.folder_path, self.PATH_TO_VIDEOS) - ) \ No newline at end of file From 6f1475882487bf518be5d75c656891f6ad7396e2 Mon Sep 17 00:00:00 2001 From: Vaghinak Basentsyan Date: Tue, 17 May 2022 12:07:44 +0400 Subject: [PATCH 3/3] fixed voc convertor --- requirements_dev.txt | 2 +- .../converters/voc_converters/voc_helper.py | 11 + .../voc_converters/voc_to_sa_vector.py | 18 +- src/superannotate/lib/core/video_convertor.py | 137 +-- .../lib/infrastructure/controller.py | 7 +- .../lib/infrastructure/services.py | 5 +- src/superannotate/version.py | 2 +- tests/convertors/test_voc.py | 5 +- ...0000_000019_leftImg8bit.png___objects.json | 789 +++++++++++++++++- .../VOC2012/Annotations/2007_000032.xml | 2 +- .../VOC2012/Annotations/2008_000009.xml | 2 +- .../VOC2012/Annotations/2009_000006.xml | 2 +- .../VOC2012/Annotations/2010_000002.xml | 2 +- .../VOC2012/Annotations/2011_000003.xml | 2 +- .../{2007_000032.jpg => 2007_000032.jpeg} | Bin .../{2008_000009.jpg => 2008_000009.jpeg} | Bin .../{2009_000006.jpg => 2009_000006.jpeg} | Bin .../{2010_000002.jpg => 2010_000002.jpeg} | Bin .../{2011_000003.jpg => 2011_000003.jpeg} | Bin .../classes/classes.json | 32 + .../video.mp4.json | 161 ++++ .../test_get_annotations_per_frame.py | 2 +- 22 files changed, 1094 insertions(+), 87 deletions(-) rename tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/{2007_000032.jpg => 2007_000032.jpeg} (100%) rename tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/{2008_000009.jpg => 2008_000009.jpeg} (100%) rename 
tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/{2009_000006.jpg => 2009_000006.jpeg} (100%) rename tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/{2010_000002.jpg => 2010_000002.jpeg} (100%) rename tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/{2011_000003.jpg => 2011_000003.jpeg} (100%) create mode 100644 tests/data_set/video_convertor_annotations/classes/classes.json create mode 100644 tests/data_set/video_convertor_annotations/video.mp4.json diff --git a/requirements_dev.txt b/requirements_dev.txt index 85a0bfdee..f965a59b3 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -1,2 +1,2 @@ -superannotate_schemas>=v1.0.43dev1 +superannotate_schemas>=v1.0.43dev3 diff --git a/src/superannotate/lib/app/input_converters/converters/voc_converters/voc_helper.py b/src/superannotate/lib/app/input_converters/converters/voc_converters/voc_helper.py index a7f9c354f..7c8795ffb 100644 --- a/src/superannotate/lib/app/input_converters/converters/voc_converters/voc_helper.py +++ b/src/superannotate/lib/app/input_converters/converters/voc_converters/voc_helper.py @@ -47,3 +47,14 @@ def _get_image_shape_from_xml(file_path): height = int(size.find("height").text) return height, width + + +def _get_image_metadata(file_path): + with open(os.path.splitext(file_path)[0] + ".xml") as f: + tree = ET.parse(f) + + size = tree.find("size") + width = int(size.find("width").text) + height = int(size.find("height").text) + + return tree.find("filename").text, height, width diff --git a/src/superannotate/lib/app/input_converters/converters/voc_converters/voc_to_sa_vector.py b/src/superannotate/lib/app/input_converters/converters/voc_converters/voc_to_sa_vector.py index 63c2b5954..6781b15f8 100644 --- a/src/superannotate/lib/app/input_converters/converters/voc_converters/voc_to_sa_vector.py +++ 
b/src/superannotate/lib/app/input_converters/converters/voc_converters/voc_to_sa_vector.py @@ -11,7 +11,7 @@ from ....common import write_to_json from ..sa_json_helper import _create_sa_json from ..sa_json_helper import _create_vector_instance -from .voc_helper import _get_image_shape_from_xml +from .voc_helper import _get_image_metadata from .voc_helper import _get_voc_instances_from_xml from .voc_helper import _iou @@ -118,11 +118,11 @@ def voc_instance_segmentation_to_sa_vector(voc_root, output_dir): sa_instances.append(sa_obj) images_converted.append(filename) - file_name = "%s.jpg___objects.json" % filename.stem - height, width = _get_image_shape_from_xml(annotation_dir / filename.name) - sa_metadata = {"name": filename.stem, "height": height, "width": width} + file_name, height, width = _get_image_metadata(annotation_dir / filename.name) + file_path = f"{file_name}___objects.json" + sa_metadata = {"name": str(filename), "height": height, "width": width} sa_json = _create_sa_json(sa_instances, sa_metadata) - write_to_json(output_dir / file_name, sa_json) + write_to_json(output_dir / file_path, sa_json) finish_event.set() tqdm_thread.join() @@ -161,11 +161,11 @@ def voc_object_detection_to_sa_vector(voc_root, output_dir): sa_instances.append(sa_obj) images_converted.append(filename) - file_name = "%s.jpg___objects.json" % filename.stem - height, width = _get_image_shape_from_xml(annotation_dir / filename.name) - sa_metadata = {"name": filename.stem, "height": height, "width": width} + file_name, height, width = _get_image_metadata(annotation_dir / filename.name) + file_path = f"{file_name}___objects.json" + sa_metadata = {"name": str(filename), "height": height, "width": width} sa_json = _create_sa_json(sa_instances, sa_metadata) - write_to_json(output_dir / file_name, sa_json) + write_to_json(output_dir / file_path, sa_json) finish_event.set() tqdm_thread.join() diff --git a/src/superannotate/lib/core/video_convertor.py 
b/src/superannotate/lib/core/video_convertor.py index 0b3726c25..ed9731e66 100644 --- a/src/superannotate/lib/core/video_convertor.py +++ b/src/superannotate/lib/core/video_convertor.py @@ -6,13 +6,17 @@ from typing import List from typing import Optional +from lib.core.enums import AnnotationTypes +from lib.core.exceptions import AppException from pydantic import BaseModel class Annotation(BaseModel): instanceId: int type: str - className: str + className: Optional[str] + x: Optional[Any] + y: Optional[Any] points: Optional[Dict] attributes: Optional[List[Any]] = [] keyframe: bool = False @@ -25,6 +29,7 @@ class FrameAnnotation(BaseModel): class VideoFrameGenerator: def __init__(self, annotation_data: dict, fps: int): + self.validate_annotations(annotation_data) self.id_generator = iter(itertools.count(0)) self._annotation_data = annotation_data self.duration = annotation_data["metadata"]["duration"] / (1000 * 1000) @@ -36,6 +41,13 @@ def __init__(self, annotation_data: dict, fps: int): self._mapping = {} self._process() + @staticmethod + def validate_annotations(annotation_data: dict): + try: + annotation_data["metadata"]["duration"] + except KeyError: + raise AppException("Video not annotated yet") + def get_frame(self, frame_no: int): try: return self.annotations[frame_no] @@ -43,33 +55,40 @@ def get_frame(self, frame_no: int): self.annotations[frame_no] = FrameAnnotation(frame=frame_no) return self.annotations[frame_no] - def interpolate_annotations( - self, - class_name: str, - from_frame: int, - to_frame: int, - data: dict, - instance_id: int, - steps: dict = None, - annotation_type: str = "bbox", + def _interpolate( + self, + class_name: str, + from_frame: int, + to_frame: int, + data: dict, + instance_id: int, + steps: dict = None, + annotation_type: str = "bbox", ) -> dict: annotations = {} for idx, frame_idx in enumerate(range(from_frame + 1, to_frame), 1): - points = None - if annotation_type == "bbox" and data.get("points") and steps: - points = { + 
tmp_data = {} + if annotation_type == AnnotationTypes.BBOX and data.get("points") and steps: + tmp_data["points"] = { "x1": round(data["points"]["x1"] + steps["x1"] * idx, 2), "y1": round(data["points"]["y1"] + steps["y1"] * idx, 2), "x2": round(data["points"]["x2"] + steps["x2"] * idx, 2), "y2": round(data["points"]["y2"] + steps["y2"] * idx, 2), } + elif annotation_type == AnnotationTypes.POINT: + tmp_data = { + "x": round(data["x"] + steps["x"] * idx, 2), + "y": round(data["y"] + steps["y"] * idx, 2) + } + elif annotation_type in (AnnotationTypes.POLYGON, AnnotationTypes.POLYLINE): + tmp_data["points"] = [point + steps[idx] * 2 for idx, point in enumerate(data["points"])] annotations[frame_idx] = Annotation( instanceId=instance_id, type=annotation_type, className=class_name, - points=points, attributes=data["attributes"], keyframe=False, + **tmp_data ) return annotations @@ -98,6 +117,9 @@ def get_median(self, annotations: List[dict]) -> dict: median_annotation = annotation return median_annotation + def calculate_sped(self, from_frame, to_frame): + pass + @staticmethod def merge_first_frame(frames_mapping): try: @@ -108,11 +130,37 @@ def merge_first_frame(frames_mapping): finally: return frames_mapping + def _interpolate_frames( + self, from_frame, from_frame_no, to_frame, to_frame_no, annotation_type, class_name, instance_id + ): + steps = None + frames_diff = to_frame_no - from_frame_no + if annotation_type == AnnotationTypes.BBOX and from_frame.get("points") and to_frame.get("points"): + steps = {} + for point in "x1", "x2", "y1", "y2": + steps[point] = round( + (to_frame["points"][point] - from_frame["points"][point]) / frames_diff, 2 + ) + elif annotation_type == AnnotationTypes.POINT: + steps = { + "x": (to_frame["x"] - from_frame["x"]) / frames_diff, + "y": (to_frame["y"] - from_frame["y"]) / frames_diff + } + elif annotation_type in (AnnotationTypes.POLYGON, AnnotationTypes.POLYLINE): + steps = [ + (to_point - from_point) / frames_diff + for from_point, 
to_point in zip(from_frame["points"], to_frame["points"]) + ] + return self._interpolate( + class_name=class_name, from_frame=from_frame_no, to_frame=to_frame_no, data=from_frame, + instance_id=instance_id, steps=steps, annotation_type=annotation_type + ) + def _process(self): for instance in self._annotation_data["instances"]: instance_id = next(self.id_generator) annotation_type = instance["meta"]["type"] - class_name = instance["meta"]["className"] + class_name = instance["meta"].get("className") for parameter in instance["parameters"]: frames_mapping = defaultdict(list) last_frame_no = None @@ -131,55 +179,15 @@ def _process(self): ) frames_diff = to_frame_no - from_frame_no if frames_diff > 1: - steps = None - if ( - annotation_type == "bbox" - and from_frame.get("points") - and to_frame.get("points") - ): - steps = { - "y1": round( - ( - to_frame["points"]["y1"] - - from_frame["points"]["y1"] - ) - / frames_diff, - 2, - ), - "x2": round( - ( - to_frame["points"]["x2"] - - from_frame["points"]["x2"] - ) - / frames_diff, - 2, - ), - "x1": round( - ( - to_frame["points"]["x1"] - - from_frame["points"]["x1"] - ) - / frames_diff, - 2, - ), - "y2": round( - ( - to_frame["points"]["y2"] - - from_frame["points"]["y2"] - ) - / frames_diff, - 2, - ), - } interpolated_frames.update( - self.interpolate_annotations( + self._interpolate_frames( + from_frame=from_frame, + from_frame_no=from_frame_no, + to_frame=to_frame, + to_frame_no=to_frame_no, class_name=class_name, - from_frame=from_frame_no, - to_frame=to_frame_no, - data=from_frame, - instance_id=instance_id, - steps=steps, annotation_type=annotation_type, + instance_id=instance_id ) ) start_median_frame = self.get_median(frames_mapping[from_frame_no]) @@ -188,6 +196,8 @@ def _process(self): instanceId=instance_id, type=annotation_type, className=class_name, + x=start_median_frame.get("x"), + y=start_median_frame.get("y"), points=start_median_frame.get("points"), attributes=start_median_frame["attributes"], 
keyframe=True, @@ -196,6 +206,8 @@ def _process(self): instanceId=instance_id, type=annotation_type, className=class_name, + x=start_median_frame.get("x"), + y=start_median_frame.get("y"), points=end_median_frame.get("points"), attributes=end_median_frame["attributes"], keyframe=True, @@ -208,4 +220,5 @@ def _process(self): def __iter__(self): for frame_no in range(1, int(self.frames_count) + 1): - yield self.get_frame(frame_no).dict() + frame = self.get_frame(frame_no) + yield {**frame.dict(exclude_unset=True), **frame.dict(exclude_none=True)} diff --git a/src/superannotate/lib/infrastructure/controller.py b/src/superannotate/lib/infrastructure/controller.py index 0a28148a4..c3b4466c6 100644 --- a/src/superannotate/lib/infrastructure/controller.py +++ b/src/superannotate/lib/infrastructure/controller.py @@ -1174,7 +1174,7 @@ def benchmark( if export_response.errors: return export_response - download_use_case = usecases.DownloadExportUseCase( + usecases.DownloadExportUseCase( service=self._backend_client, project=project, export_name=export_response.data["name"], @@ -1182,10 +1182,7 @@ def benchmark( extract_zip_contents=True, to_s3_bucket=False, reporter=self.default_reporter, - ) - if download_use_case.is_valid(): - for _ in download_use_case.execute(): - pass + ).execute() use_case = usecases.BenchmarkUseCase( project=project, diff --git a/src/superannotate/lib/infrastructure/services.py b/src/superannotate/lib/infrastructure/services.py index 51bc674b6..8b97e1925 100644 --- a/src/superannotate/lib/infrastructure/services.py +++ b/src/superannotate/lib/infrastructure/services.py @@ -1,6 +1,7 @@ import asyncio import datetime import json +import platform import time from contextlib import contextmanager from typing import Callable @@ -24,6 +25,7 @@ from lib.infrastructure.helpers import timed_lru_cache from lib.infrastructure.stream_data_handler import StreamedAnnotations from requests.exceptions import HTTPError +from superannotate.version import __version__ 
requests.packages.urllib3.disable_warnings() @@ -83,7 +85,8 @@ def default_headers(self): "Authorization": self._auth_token, "authtype": self.AUTH_TYPE, "Content-Type": "application/json", - # "User-Agent": constance.__version__, + "User-Agent": f"Python-SDK-Version: {__version__}; Python: {platform.python_version()}; " + f"OS: {platform.system()}; Team: {self.team_id}", } @property diff --git a/src/superannotate/version.py b/src/superannotate/version.py index 6914911d9..0da1ae0a8 100644 --- a/src/superannotate/version.py +++ b/src/superannotate/version.py @@ -1 +1 @@ -__version__ = "4.3.5dev1" +__version__ = "4.3.5dev2" diff --git a/tests/convertors/test_voc.py b/tests/convertors/test_voc.py index ce226b16c..24e23a236 100644 --- a/tests/convertors/test_voc.py +++ b/tests/convertors/test_voc.py @@ -1,7 +1,10 @@ +import os +from os.path import dirname from pathlib import Path import pytest import superannotate as sa +from tests import DATA_SET_PATH @pytest.mark.skip(reason="Need to adjust") @@ -10,7 +13,7 @@ def test_voc_vector_instance(tmpdir): input_dir = ( Path("tests") - / "converter_test" + / f"{DATA_SET_PATH}/converter_test" / "VOC" / "input" / "fromPascalVOCToSuperAnnotate" diff --git a/tests/data_set/consensus_benchmark/consensus_test_data/berlin_000000_000019_leftImg8bit.png___objects.json b/tests/data_set/consensus_benchmark/consensus_test_data/berlin_000000_000019_leftImg8bit.png___objects.json index 705e7b971..b56ceaa2f 100644 --- a/tests/data_set/consensus_benchmark/consensus_test_data/berlin_000000_000019_leftImg8bit.png___objects.json +++ b/tests/data_set/consensus_benchmark/consensus_test_data/berlin_000000_000019_leftImg8bit.png___objects.json @@ -1 +1,788 @@ 
-{"metadata":{"name":"berlin_000000_000019_leftImg8bit.png","width":2048,"height":1024,"status":"NotStarted","pinned":false,"isPredicted":false,"projectId":61619,"annotatorEmail":null,"qaEmail":null},"instances":[{"type":"bbox","classId":466281,"probability":100,"points":{"x1":1378.09,"x2":1399.48,"y1":421.93,"y2":471.05},"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-23T12:28:02.767Z","createdBy":{"email":"user3@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-23T12:28:08.609Z","updatedBy":{"email":"user3@mail.com","role":"Annotator"},"className":"person"},{"type":"polygon","classId":466281,"probability":100,"points":[1338.48,421.14,1342.44,423.52,1349.57,433.03,1345.61,440.95,1345.61,443.32,1344.81,443.32,1340.06,462.34,1338.48,453.62,1334.52,451.25,1332.93,444.91,1329.76,444.91,1331.35,432.23,1337.68,421.93],"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-23T12:27:14.272Z","createdBy":{"email":"user2@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-23T12:27:40.004Z","updatedBy":{"email":"user2@mail.com","role":"Annotator"},"className":"person"},{"type":"point","classId":466281,"probability":100,"x":1640.14,"y":441.85,"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-23T12:23:55.471Z","createdBy":{"email":"user1@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-23T12:23:58.830Z","updatedBy":{"email":"user1@mail.com","role":"Annotator"},"className":"person"},{"type":"bbox","classId":466282,"probability":100,"points":{"x1":0,"x2":243.84,"y1":289.93,"y2":519.26},"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-27T14:57:16.319Z","createdBy":{"email":"user2@mail.com","role":"Annotator"
},"creationType":"Manual","updatedAt":"2020-10-27T14:57:28.986Z","updatedBy":{"email":"user2@mail.com","role":"Annotator"},"className":"car"},{"type":"polygon","classId":466282,"probability":100,"points":[956.29,425.86,1000.65,425.07,1011.75,452.79,1013.33,492.4,1006.99,492.4,1003.82,482.9,968.17,484.48,947.58,441.7,954.71,425.07],"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-23T12:26:00.773Z","createdBy":{"email":"user2@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-23T12:26:32.274Z","updatedBy":{"email":"user2@mail.com","role":"Annotator"},"className":"car"},{"type":"polygon","classId":466282,"probability":100,"points":[332.81,440.12,293.2,475.77,292.41,502.7,383.51,499.53,442.14,493.99,433.42,453.59,412.03,433.78,332.02,437.74],"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-23T12:24:57.599Z","createdBy":{"email":"user3@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-23T12:25:19.520Z","updatedBy":{"email":"user3@mail.com","role":"Annotator"},"className":"car"},{"type":"polygon","classId":466282,"probability":100,"points":[834.53,525.74,853.33,527.19,870.69,518.51,873.58,509.83,934.33,509.83,940.11,519.96,969.04,517.07,966.15,475.12,941.56,423.05,862.01,423.05,851.89,433.18,837.42,466.44,831.64,521.4],"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[{"id":853754,"groupId":219243,"name":"yellow","groupName":"color"}],"trackingId":null,"error":null,"createdAt":"2020-10-23T12:24:11.204Z","createdBy":{"email":"user1@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-27T14:55:23.938Z","updatedBy":{"email":"user1@mail.com","role":"Annotator"},"className":"car"},{"type":"bbox","classId":466282,"probability":100,"points":{"x1":1356.66,"x2":1375.46,"y1":343.5,"y2":386.89},"groupId":0,"pointLabels":{},"locked":false
,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-23T12:21:40.139Z","createdBy":{"email":"user1@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-23T12:23:04.139Z","updatedBy":{"email":"user1@mail.com","role":"Annotator"},"className":"car"},{"type":"bbox","classId":466282,"probability":100,"points":{"x1":914.08,"x2":928.54,"y1":252.38,"y2":294.33},"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-23T12:21:59.421Z","createdBy":{"email":"user1@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-23T12:23:04.139Z","updatedBy":{"email":"user1@mail.com","role":"Annotator"},"className":"car"},{"type":"point","classId":466282,"probability":100,"x":1994.49,"y":423.05,"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-23T12:22:25.842Z","createdBy":{"email":"user1@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-23T12:23:04.139Z","updatedBy":{"email":"user1@mail.com","role":"Annotator"},"className":"car"},{"type":"bbox","classId":466282,"probability":100,"points":{"x1":1081.85,"x2":1203.34,"y1":418.71,"y2":499.71},"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-23T12:21:07.878Z","createdBy":{"email":"user3@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-23T12:23:04.138Z","updatedBy":{"email":"user3@mail.com","role":"Annotator"},"className":"car"},{"type":"bbox","classId":466282,"probability":100,"points":{"x1":0,"x2":209.72,"y1":447.64,"y2":898.89},"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-23T12:20:30.263Z","createdBy":{"email":"user1@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-23T12:23:04.138Z","update
dBy":{"email":"user1@mail.com","role":"Annotator"},"className":"car"},{"type":"polygon","classId":466283,"probability":100,"points":[1236.74,355.35,1281.89,354.56,1285.06,399.71,1242.28,399.71,1237.53,353.77],"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-23T12:26:45.464Z","createdBy":{"email":"user3@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-23T12:26:58.357Z","updatedBy":{"email":"user3@mail.com","role":"Annotator"},"className":"sign"},{"type":"polygon","classId":466283,"probability":100,"points":[559.38,357.73,573.64,361.69,568.1,370.4,570.48,387.04,568.89,407.64,560.97,410.01,548.29,410.01,548.29,380.7,557.01,378.32,550.67,367.23,553.05,359.31,558.59,356.93],"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-23T12:25:29.070Z","createdBy":{"email":"user3@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-23T12:25:53.017Z","updatedBy":{"email":"user3@mail.com","role":"Annotator"},"className":"sign"},{"type":"point","classId":466283,"probability":100,"x":1368.23,"y":313.13,"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-23T12:22:15.578Z","createdBy":{"email":"user3@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-23T12:23:36.967Z","updatedBy":{"email":"user3@mail.com","role":"Annotator"},"className":"sign"},{"type":"polygon","classId":466284,"probability":100,"points":[1844.8,581.67,1841.89,558.45,1840.44,519.26,1837.54,506.19,1836.09,478.62,1828.83,435.07,1828.83,424.91,1825.93,408.95,1825.93,394.43,1824.48,390.08,1824.48,362.5,1834.64,318.96,1834.64,295.73,1828.83,263.8,1827.38,237.68,1824.48,224.61,1824.48,186.87,1827.38,175.26,1827.38,157.85,1828.83,153.49,1828.83,80.92,1830.28,78.02,1830.28,64.95,1831.73,63.5,1831.73,54.79,1834.64,38.83,1834.64,0,194
6.4,0,1991.59,0,2048,0,2048,72.21,1982.68,131.72,1944.95,157.85,1923.18,213,1995.75,597.64],"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-27T14:57:43.847Z","createdBy":{"email":"user3@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-27T16:31:42.745Z","updatedBy":{"email":"user3@mail.com","role":"Annotator"},"className":"tree"},{"type":"polygon","classId":466284,"probability":100,"points":[1508.06,506.19,1509.51,506.19,1506.61,504.74,1502.25,488.78,1497.9,459.75,1497.9,330.57,1505.16,305.89,1505.16,301.54,1506.61,300.09,1508.06,272.51,1496.45,253.64,1484.84,240.58,1463.06,228.97,1435.49,220.26,1309.21,201.39,1275.83,194.13,1235.19,179.62,1181.48,166.55,1152.45,153.49,1142.29,146.23,1124.88,137.53,1101.65,133.17,1082.78,125.91,1065.37,121.56,1052.3,114.3,1027.63,105.59,1014.57,96.88,992.79,72.21,973.92,37.37,966.67,15.6,966.67,0,965.22,0,1802.7,0,1823.02,85.27,1657.56,115.75,1661.91,162.2,1624.18,217.36,1564.67,282.67,1580.63,516.35],"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-27T14:56:35.824Z","createdBy":{"email":"user2@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-27T14:57:08.890Z","updatedBy":{"email":"user2@mail.com","role":"Annotator"},"className":"tree"},{"type":"bbox","classId":466284,"probability":100,"points":{"x1":436.79,"x2":833.08,"y1":122.21,"y2":479.46},"groupId":0,"pointLabels":{},"locked":false,"visible":true,"attributes":[],"trackingId":null,"error":null,"createdAt":"2020-10-23T12:21:21.856Z","createdBy":{"email":"user2@mail.com","role":"Annotator"},"creationType":"Manual","updatedAt":"2020-10-23T12:21:29.756Z","updatedBy":{"email":"user2@mail.com","role":"Annotator"},"className":"tree"}],"tags":[],"comments":[]} +{ + "metadata": { + "name": "berlin_000000_000019_leftImg8bit.png", + "width": 2048, + "height": 1024, + "status": 
"NotStarted", + "pinned": false, + "isPredicted": false, + "projectId": 61619, + "annotatorEmail": null, + "qaEmail": null + }, + "instances": [ + { + "type": "bbox", + "classId": 466281, + "probability": 100, + "points": { + "x1": 1378.09, + "x2": 1399.48, + "y1": 421.93, + "y2": 471.05 + }, + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-23T12:28:02.767Z", + "createdBy": { + "email": "user3@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-23T12:28:08.609Z", + "updatedBy": { + "email": "user3@mail.com", + "role": "Annotator" + }, + "className": "person" + }, + { + "type": "polygon", + "classId": 466281, + "probability": 100, + "points": [ + 1338.48, + 421.14, + 1342.44, + 423.52, + 1349.57, + 433.03, + 1345.61, + 440.95, + 1345.61, + 443.32, + 1344.81, + 443.32, + 1340.06, + 462.34, + 1338.48, + 453.62, + 1334.52, + 451.25, + 1332.93, + 444.91, + 1329.76, + 444.91, + 1331.35, + 432.23, + 1337.68, + 421.93 + ], + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-23T12:27:14.272Z", + "createdBy": { + "email": "user2@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-23T12:27:40.004Z", + "updatedBy": { + "email": "user2@mail.com", + "role": "Annotator" + }, + "className": "person" + }, + { + "type": "point", + "classId": 466281, + "probability": 100, + "x": 1640.14, + "y": 441.85, + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-23T12:23:55.471Z", + "createdBy": { + "email": "user1@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-23T12:23:58.830Z", + "updatedBy": { + "email": "user1@mail.com", + "role": "Annotator" + }, + 
"className": "person" + }, + { + "type": "bbox", + "classId": 466282, + "probability": 100, + "points": { + "x1": 0, + "x2": 243.84, + "y1": 289.93, + "y2": 519.26 + }, + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-27T14:57:16.319Z", + "createdBy": { + "email": "user2@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-27T14:57:28.986Z", + "updatedBy": { + "email": "user2@mail.com", + "role": "Annotator" + }, + "className": "car" + }, + { + "type": "polygon", + "classId": 466282, + "probability": 100, + "points": [ + 956.29, + 425.86, + 1000.65, + 425.07, + 1011.75, + 452.79, + 1013.33, + 492.4, + 1006.99, + 492.4, + 1003.82, + 482.9, + 968.17, + 484.48, + 947.58, + 441.7, + 954.71, + 425.07 + ], + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-23T12:26:00.773Z", + "createdBy": { + "email": "user2@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-23T12:26:32.274Z", + "updatedBy": { + "email": "user2@mail.com", + "role": "Annotator" + }, + "className": "car" + }, + { + "type": "polygon", + "classId": 466282, + "probability": 100, + "points": [ + 332.81, + 440.12, + 293.2, + 475.77, + 292.41, + 502.7, + 383.51, + 499.53, + 442.14, + 493.99, + 433.42, + 453.59, + 412.03, + 433.78, + 332.02, + 437.74 + ], + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-23T12:24:57.599Z", + "createdBy": { + "email": "user3@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-23T12:25:19.520Z", + "updatedBy": { + "email": "user3@mail.com", + "role": "Annotator" + }, + "className": "car" + }, + { + "type": "polygon", + "classId": 466282, + 
"probability": 100, + "points": [ + 834.53, + 525.74, + 853.33, + 527.19, + 870.69, + 518.51, + 873.58, + 509.83, + 934.33, + 509.83, + 940.11, + 519.96, + 969.04, + 517.07, + 966.15, + 475.12, + 941.56, + 423.05, + 862.01, + 423.05, + 851.89, + 433.18, + 837.42, + 466.44, + 831.64, + 521.4 + ], + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [ + { + "id": 853754, + "groupId": 219243, + "name": "yellow", + "groupName": "color" + } + ], + "trackingId": null, + "error": null, + "createdAt": "2020-10-23T12:24:11.204Z", + "createdBy": { + "email": "user1@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-27T14:55:23.938Z", + "updatedBy": { + "email": "user1@mail.com", + "role": "Annotator" + }, + "className": "car" + }, + { + "type": "bbox", + "classId": 466282, + "probability": 100, + "points": { + "x1": 1356.66, + "x2": 1375.46, + "y1": 343.5, + "y2": 386.89 + }, + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-23T12:21:40.139Z", + "createdBy": { + "email": "user1@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-23T12:23:04.139Z", + "updatedBy": { + "email": "user1@mail.com", + "role": "Annotator" + }, + "className": "car" + }, + { + "type": "bbox", + "classId": 466282, + "probability": 100, + "points": { + "x1": 914.08, + "x2": 928.54, + "y1": 252.38, + "y2": 294.33 + }, + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-23T12:21:59.421Z", + "createdBy": { + "email": "user1@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-23T12:23:04.139Z", + "updatedBy": { + "email": "user1@mail.com", + "role": "Annotator" + }, + "className": "car" + }, + { + "type": "point", + "classId": 466282, + 
"probability": 100, + "x": 1994.49, + "y": 423.05, + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-23T12:22:25.842Z", + "createdBy": { + "email": "user1@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-23T12:23:04.139Z", + "updatedBy": { + "email": "user1@mail.com", + "role": "Annotator" + }, + "className": "car" + }, + { + "type": "bbox", + "classId": 466282, + "probability": 100, + "points": { + "x1": 1081.85, + "x2": 1203.34, + "y1": 418.71, + "y2": 499.71 + }, + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-23T12:21:07.878Z", + "createdBy": { + "email": "user3@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-23T12:23:04.138Z", + "updatedBy": { + "email": "user3@mail.com", + "role": "Annotator" + }, + "className": "car" + }, + { + "type": "bbox", + "classId": 466282, + "probability": 100, + "points": { + "x1": 0, + "x2": 209.72, + "y1": 447.64, + "y2": 898.89 + }, + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-23T12:20:30.263Z", + "createdBy": { + "email": "user1@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-23T12:23:04.138Z", + "updatedBy": { + "email": "user1@mail.com", + "role": "Annotator" + }, + "className": "car" + }, + { + "type": "polygon", + "classId": 466283, + "probability": 100, + "points": [ + 1236.74, + 355.35, + 1281.89, + 354.56, + 1285.06, + 399.71, + 1242.28, + 399.71, + 1237.53, + 353.77 + ], + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-23T12:26:45.464Z", + "createdBy": { + "email": 
"user3@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-23T12:26:58.357Z", + "updatedBy": { + "email": "user3@mail.com", + "role": "Annotator" + }, + "className": "sign" + }, + { + "type": "polygon", + "classId": 466283, + "probability": 100, + "points": [ + 559.38, + 357.73, + 573.64, + 361.69, + 568.1, + 370.4, + 570.48, + 387.04, + 568.89, + 407.64, + 560.97, + 410.01, + 548.29, + 410.01, + 548.29, + 380.7, + 557.01, + 378.32, + 550.67, + 367.23, + 553.05, + 359.31, + 558.59, + 356.93 + ], + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-23T12:25:29.070Z", + "createdBy": { + "email": "user3@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-23T12:25:53.017Z", + "updatedBy": { + "email": "user3@mail.com", + "role": "Annotator" + }, + "className": "sign" + }, + { + "type": "point", + "classId": 466283, + "probability": 100, + "x": 1368.23, + "y": 313.13, + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-23T12:22:15.578Z", + "createdBy": { + "email": "user3@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-23T12:23:36.967Z", + "updatedBy": { + "email": "user3@mail.com", + "role": "Annotator" + }, + "className": "sign" + }, + { + "type": "polygon", + "classId": 466284, + "probability": 100, + "points": [ + 1844.8, + 581.67, + 1841.89, + 558.45, + 1840.44, + 519.26, + 1837.54, + 506.19, + 1836.09, + 478.62, + 1828.83, + 435.07, + 1828.83, + 424.91, + 1825.93, + 408.95, + 1825.93, + 394.43, + 1824.48, + 390.08, + 1824.48, + 362.5, + 1834.64, + 318.96, + 1834.64, + 295.73, + 1828.83, + 263.8, + 1827.38, + 237.68, + 1824.48, + 224.61, + 1824.48, + 186.87, + 1827.38, + 175.26, + 1827.38, + 157.85, + 1828.83, + 153.49, + 1828.83, + 80.92, + 
1830.28, + 78.02, + 1830.28, + 64.95, + 1831.73, + 63.5, + 1831.73, + 54.79, + 1834.64, + 38.83, + 1834.64, + 0, + 1946.4, + 0, + 1991.59, + 0, + 2048, + 0, + 2048, + 72.21, + 1982.68, + 131.72, + 1944.95, + 157.85, + 1923.18, + 213, + 1995.75, + 597.64 + ], + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-27T14:57:43.847Z", + "createdBy": { + "email": "user3@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-27T16:31:42.745Z", + "updatedBy": { + "email": "user3@mail.com", + "role": "Annotator" + }, + "className": "tree" + }, + { + "type": "polygon", + "classId": 466284, + "probability": 100, + "points": [ + 1508.06, + 506.19, + 1509.51, + 506.19, + 1506.61, + 504.74, + 1502.25, + 488.78, + 1497.9, + 459.75, + 1497.9, + 330.57, + 1505.16, + 305.89, + 1505.16, + 301.54, + 1506.61, + 300.09, + 1508.06, + 272.51, + 1496.45, + 253.64, + 1484.84, + 240.58, + 1463.06, + 228.97, + 1435.49, + 220.26, + 1309.21, + 201.39, + 1275.83, + 194.13, + 1235.19, + 179.62, + 1181.48, + 166.55, + 1152.45, + 153.49, + 1142.29, + 146.23, + 1124.88, + 137.53, + 1101.65, + 133.17, + 1082.78, + 125.91, + 1065.37, + 121.56, + 1052.3, + 114.3, + 1027.63, + 105.59, + 1014.57, + 96.88, + 992.79, + 72.21, + 973.92, + 37.37, + 966.67, + 15.6, + 966.67, + 0, + 965.22, + 0, + 1802.7, + 0, + 1823.02, + 85.27, + 1657.56, + 115.75, + 1661.91, + 162.2, + 1624.18, + 217.36, + 1564.67, + 282.67, + 1580.63, + 516.35 + ], + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-27T14:56:35.824Z", + "createdBy": { + "email": "user2@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-27T14:57:08.890Z", + "updatedBy": { + "email": "user2@mail.com", + "role": "Annotator" + }, + "className": "tree" + }, + { + "type": "bbox", + 
"classId": 466284, + "probability": 100, + "points": { + "x1": 436.79, + "x2": 833.08, + "y1": 122.21, + "y2": 479.46 + }, + "groupId": 0, + "pointLabels": {}, + "locked": false, + "visible": true, + "attributes": [], + "trackingId": null, + "error": null, + "createdAt": "2020-10-23T12:21:21.856Z", + "createdBy": { + "email": "user2@mail.com", + "role": "Annotator" + }, + "creationType": "Manual", + "updatedAt": "2020-10-23T12:21:29.756Z", + "updatedBy": { + "email": "user2@mail.com", + "role": "Annotator" + }, + "className": "tree" + } + ], + "tags": [], + "comments": [] +} diff --git a/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2007_000032.xml b/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2007_000032.xml index 779abb630..37627d5f0 100755 --- a/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2007_000032.xml +++ b/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2007_000032.xml @@ -1,6 +1,6 @@ VOC2012 - 2007_000032.jpg + 2007_000032.jpeg The VOC2007 Database PASCAL VOC2007 diff --git a/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2008_000009.xml b/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2008_000009.xml index 2df77db6c..ecdeda36a 100755 --- a/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2008_000009.xml +++ b/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2008_000009.xml @@ -1,6 +1,6 @@ VOC2012 - 2008_000009.jpg + 2008_000009.jpeg The VOC2008 Database PASCAL VOC2008 diff --git a/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2009_000006.xml b/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2009_000006.xml index 3cbfad667..29b3c1fa7 100755 
--- a/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2009_000006.xml +++ b/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2009_000006.xml @@ -1,5 +1,5 @@ - 2009_000006.jpg + 2009_000006.jpeg VOC2012 bus diff --git a/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2010_000002.xml b/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2010_000002.xml index e8726a108..5bafd2144 100755 --- a/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2010_000002.xml +++ b/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2010_000002.xml @@ -1,5 +1,5 @@ - 2010_000002.jpg + 2010_000002.jpeg VOC2012 sheep diff --git a/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2011_000003.xml b/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2011_000003.xml index 11b1f60bd..65bb233d0 100755 --- a/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2011_000003.xml +++ b/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/Annotations/2011_000003.xml @@ -1,5 +1,5 @@ - 2011_000003.jpg + 2011_000003.jpeg VOC2012 bottle diff --git a/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2007_000032.jpg b/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2007_000032.jpeg similarity index 100% rename from tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2007_000032.jpg rename to tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2007_000032.jpeg diff --git a/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2008_000009.jpg 
b/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2008_000009.jpeg similarity index 100% rename from tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2008_000009.jpg rename to tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2008_000009.jpeg diff --git a/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2009_000006.jpg b/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2009_000006.jpeg similarity index 100% rename from tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2009_000006.jpg rename to tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2009_000006.jpeg diff --git a/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2010_000002.jpg b/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2010_000002.jpeg similarity index 100% rename from tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2010_000002.jpg rename to tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2010_000002.jpeg diff --git a/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2011_000003.jpg b/tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2011_000003.jpeg similarity index 100% rename from tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2011_000003.jpg rename to tests/data_set/converter_test/VOC/input/fromPascalVOCToSuperAnnotate/VOC2012/JPEGImages/2011_000003.jpeg diff --git a/tests/data_set/video_convertor_annotations/classes/classes.json b/tests/data_set/video_convertor_annotations/classes/classes.json new file mode 100644 index 000000000..7ee912787 --- /dev/null +++ 
b/tests/data_set/video_convertor_annotations/classes/classes.json @@ -0,0 +1,32 @@ +[ + { + "id": 857627, + "project_id": 150845, + "name": "vid", + "color": "#0fc1c9", + "count": 0, + "createdAt": "2021-10-01T13:03:51.000Z", + "updatedAt": "2021-10-01T13:03:51.000Z", + "attribute_groups": [ + { + "id": 337487, + "class_id": 857627, + "name": "attr g", + "is_multiselect": 0, + "createdAt": "2021-10-04T07:01:29.000Z", + "updatedAt": "2021-10-04T07:01:29.000Z", + "attributes": [ + { + "id": 1174520, + "group_id": 337487, + "project_id": 150845, + "name": "attr", + "count": 0, + "createdAt": "2021-10-04T07:01:31.000Z", + "updatedAt": "2021-10-04T07:01:31.000Z" + } + ] + } + ] + } +] \ No newline at end of file diff --git a/tests/data_set/video_convertor_annotations/video.mp4.json b/tests/data_set/video_convertor_annotations/video.mp4.json new file mode 100644 index 000000000..0b26bd541 --- /dev/null +++ b/tests/data_set/video_convertor_annotations/video.mp4.json @@ -0,0 +1,161 @@ +{ + "metadata": { + "name": "video.mp4", + "width": 480, + "height": 270, + "status": "NotStarted", + "url": "https://file-examples-com.github.io/uploads/2017/04/file_example_MP4_480_1_5MG.mp4", + "duration": 30526667, + "projectId": 152038, + "error": null, + "annotatorEmail": null, + "qaEmail": null + }, + "instances": [ + { + "meta": { + "type": "point", + "classId": 859496, + "className": "vid", + "start": 29713736, + "end": 30526667 + }, + "parameters": [ + { + "start": 29713736, + "end": 30526667, + "timestamps": [ + { + "x": 100.82, + "y": 50.12, + "timestamp": 17271058, + "attributes": [] + }, + { + "points": { + "x1": 132.82, + "y1": 129.12, + "x2": 175.16, + "y2": 188 + }, + "x": 200.82, + "y": 100.12, + "timestamp": 30526667, + "attributes": [] + } + ] + } + ] + }, + { + "meta": { + "type": "bbox", + "classId": 859496, + "className": "vid", + "pointLabels": { + "3": "point label bro" + }, + "start": 0, + "end": 30526667 + }, + "parameters": [ + { + "start": 0, + "end": 30526667, + 
"timestamps": [ + { + "points": { + "x1": 223.32, + "y1": 78.45, + "x2": 312.31, + "y2": 176.66 + }, + "timestamp": 0, + "attributes": [] + }, + { + "points": { + "x1": 182.08, + "y1": 33.18, + "x2": 283.45, + "y2": 131.39 + }, + "timestamp": 17271058, + "attributes": [ + { + "id": 1175876, + "groupId": 338357, + "name": "attr", + "groupName": "attr g" + } + ] + }, + { + "points": { + "x1": 182.42, + "y1": 97.19, + "x2": 284.11, + "y2": 195.4 + }, + "timestamp": 30526667, + "attributes": [ + { + "id": 1175876, + "groupId": 338357, + "name": "attr", + "groupName": "attr g" + } + ] + } + ] + } + ] + }, + { + "meta": { + "type": "event", + "classId": 859496, + "className": "vid", + "start": 5528212, + "end": 7083022 + }, + "parameters": [ + { + "start": 5528212, + "end": 7083022, + "timestamps": [ + { + "timestamp": 5528212, + "attributes": [] + }, + { + "timestamp": 6702957, + "attributes": [ + { + "id": 1175876, + "groupId": 338357, + "name": "attr", + "groupName": "attr g" + } + ] + }, + { + "timestamp": 7083022, + "attributes": [ + { + "id": 1175876, + "groupId": 338357, + "name": "attr", + "groupName": "attr g" + } + ] + } + ] + } + ] + } + ], + "tags": [ + "some tag" + ] +} \ No newline at end of file diff --git a/tests/integration/annotations/test_get_annotations_per_frame.py b/tests/integration/annotations/test_get_annotations_per_frame.py index 1624a1d73..ae005ab03 100644 --- a/tests/integration/annotations/test_get_annotations_per_frame.py +++ b/tests/integration/annotations/test_get_annotations_per_frame.py @@ -13,7 +13,7 @@ class TestGetAnnotations(BaseTestCase): PATH_TO_URLS_WITHOUT_NAMES = "data_set/attach_urls_with_no_name.csv" PATH_TO_50K_URLS = "data_set/501_urls.csv" PROJECT_DESCRIPTION = "desc" - ANNOTATIONS_PATH = "data_set/video_annotation" + ANNOTATIONS_PATH = "data_set/video_convertor_annotations" VIDEO_NAME = "video.mp4" CLASSES_PATH = "data_set/video_annotation/classes/classes.json" PROJECT_TYPE = "Video"