diff --git a/.github/workflows/pre_release.yml b/.github/workflows/pre_release.yml index f8055d254..e5ed528e7 100644 --- a/.github/workflows/pre_release.yml +++ b/.github/workflows/pre_release.yml @@ -16,7 +16,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install setuptools wheel twine + pip install setuptools wheel twine packaging - name: Create distribution files run: python setup.py sdist - name: Publish distribution to PyPI diff --git a/docs/source/superannotate.sdk.rst b/docs/source/superannotate.sdk.rst index 16771f2e2..588af30ec 100644 --- a/docs/source/superannotate.sdk.rst +++ b/docs/source/superannotate.sdk.rst @@ -29,7 +29,6 @@ ________ .. _ref_get_project_metadata: .. autofunction:: superannotate.get_project_metadata .. autofunction:: superannotate.get_project_image_count -.. autofunction:: superannotate.get_project_and_folder_metadata .. autofunction:: superannotate.search_folders .. autofunction:: superannotate.get_folder_metadata .. autofunction:: superannotate.create_folder @@ -74,7 +73,11 @@ ______ .. autofunction:: superannotate.query .. autofunction:: superannotate.search_items +.. autofunction:: superannotate.attach_items +.. autofunction:: superannotate.copy_items +.. autofunction:: superannotate.move_items .. autofunction:: superannotate.get_item_metadata +.. autofunction:: superannotate.set_annotation_statuses ---------- @@ -83,9 +86,6 @@ ______ .. _ref_search_images: -.. autofunction:: superannotate.search_images -.. autofunction:: superannotate.search_images_all_folders -.. autofunction:: superannotate.get_image_metadata .. autofunction:: superannotate.download_image .. autofunction:: superannotate.set_image_annotation_status .. autofunction:: superannotate.set_images_annotation_statuses diff --git a/pytest.ini b/pytest.ini index d9ab3b434..86c2d4c63 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,5 +1,5 @@ [pytest] -minversion = 3.0 +minversion = 3.7 log_cli=true python_files = test_*.py -;addopts = -n auto --dist=loadscope \ No newline at end of file +addopts = -n auto --dist=loadscope \ No newline at end of file diff --git a/requirements_dev.txt b/requirements_dev.txt index 1476814ab..02595ac5b 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -1,2 +1,2 @@ -superannotate_schemas>=1.0.41b1 +superannotate_schemas>=v1.0.42dev2 diff --git a/src/superannotate/__init__.py b/src/superannotate/__init__.py index e0b6ff051..d55efa2b0 100644 --- a/src/superannotate/__init__.py +++ b/src/superannotate/__init__.py @@ -25,6 +25,7 @@ attach_document_urls_to_project, ) from superannotate.lib.app.interface.sdk_interface import attach_image_urls_to_project +from superannotate.lib.app.interface.sdk_interface import attach_items from superannotate.lib.app.interface.sdk_interface import ( attach_items_from_integrated_storage, ) @@ -34,6 +35,7 @@ from superannotate.lib.app.interface.sdk_interface import consensus from superannotate.lib.app.interface.sdk_interface import copy_image from superannotate.lib.app.interface.sdk_interface import copy_images +from superannotate.lib.app.interface.sdk_interface import copy_items from superannotate.lib.app.interface.sdk_interface import create_annotation_class from superannotate.lib.app.interface.sdk_interface import ( create_annotation_classes_from_classes_json, @@ -57,12 +59,8 @@ from superannotate.lib.app.interface.sdk_interface import get_annotations_per_frame from superannotate.lib.app.interface.sdk_interface import get_exports from superannotate.lib.app.interface.sdk_interface import 
get_folder_metadata -from superannotate.lib.app.interface.sdk_interface import get_image_metadata from superannotate.lib.app.interface.sdk_interface import get_integrations from superannotate.lib.app.interface.sdk_interface import get_item_metadata -from superannotate.lib.app.interface.sdk_interface import ( - get_project_and_folder_metadata, -) from superannotate.lib.app.interface.sdk_interface import get_project_image_count from superannotate.lib.app.interface.sdk_interface import get_project_metadata from superannotate.lib.app.interface.sdk_interface import get_project_settings @@ -71,6 +69,7 @@ from superannotate.lib.app.interface.sdk_interface import init from superannotate.lib.app.interface.sdk_interface import invite_contributors_to_team from superannotate.lib.app.interface.sdk_interface import move_images +from superannotate.lib.app.interface.sdk_interface import move_items from superannotate.lib.app.interface.sdk_interface import pin_image from superannotate.lib.app.interface.sdk_interface import prepare_export from superannotate.lib.app.interface.sdk_interface import query @@ -78,12 +77,11 @@ from superannotate.lib.app.interface.sdk_interface import run_prediction from superannotate.lib.app.interface.sdk_interface import search_annotation_classes from superannotate.lib.app.interface.sdk_interface import search_folders -from superannotate.lib.app.interface.sdk_interface import search_images -from superannotate.lib.app.interface.sdk_interface import search_images_all_folders from superannotate.lib.app.interface.sdk_interface import search_items from superannotate.lib.app.interface.sdk_interface import search_models from superannotate.lib.app.interface.sdk_interface import search_projects from superannotate.lib.app.interface.sdk_interface import search_team_contributors +from superannotate.lib.app.interface.sdk_interface import set_annotation_statuses from superannotate.lib.app.interface.sdk_interface import set_auth_token from superannotate.lib.app.interface.sdk_interface import set_image_annotation_status from superannotate.lib.app.interface.sdk_interface import set_images_annotation_statuses @@ -161,13 +159,11 @@ "rename_project", "upload_priority_scores", # Images Section - "search_images", "copy_image", # Folders Section "create_folder", "get_folder_metadata", "delete_folders", - "get_project_and_folder_metadata", "search_folders", "assign_folder", "unassign_folder", @@ -175,15 +171,17 @@ "get_item_metadata", "search_items", "query", + "attach_items", + "copy_items", + "move_items", + "set_annotation_statuses", # Image Section "copy_images", "move_images", "delete_images", "download_image", "pin_image", - "get_image_metadata", "get_project_image_count", - "search_images_all_folders", "assign_images", "unassign_images", "download_image_annotations", diff --git a/src/superannotate/lib/app/analytics/aggregators.py b/src/superannotate/lib/app/analytics/aggregators.py index 5b75650c0..e9f18b47b 100644 --- a/src/superannotate/lib/app/analytics/aggregators.py +++ b/src/superannotate/lib/app/analytics/aggregators.py @@ -56,6 +56,34 @@ class VideoRawData: attributeName: str = None +class DocumentRawData: + docName: str = None + folderName: str = None + docStatus: str = None + docUrl: str = None + docAnnotator: str = None + docQA: str = None + # tag + tagId: int = None + tag: str = None + # instance + instanceId: int = None + instanceStart: int = None + instanceEnd: int = None + type: str = None + className: str = None + createdAt: str = None + createdBy: str = None + creatorRole: str = 
None + updatedAt: str = None + updatedBy: str = None + updatorRole: str = None + # attribute + attributeId: int = None + attributeGroupName: str = None + attributeName: str = None + + class DataAggregator: def __init__( self, @@ -124,6 +152,8 @@ def aggregate_annotations_as_df(self): return self.aggregate_image_annotations_as_df(annotation_paths) elif self.project_type == constances.ProjectType.VIDEO.name: return self.aggregate_video_annotations_as_df(annotation_paths) + elif self.project_type == constances.ProjectType.DOCUMENT.name: + return self.aggregate_document_annotations_as_df(annotation_paths) def aggregate_video_annotations_as_df(self, annotation_paths: List[str]): raws = [] @@ -205,7 +235,61 @@ def aggregate_video_annotations_as_df(self, annotation_paths: List[str]): raws.append(instance_raw) if not instances: raws.append(raw_data) - return pd.DataFrame([raw.__dict__ for raw in raws], dtype=object) + df = pd.DataFrame([raw.__dict__ for raw in raws], dtype=object) + return df.where(pd.notnull(df), None) + + def aggregate_document_annotations_as_df(self, annotation_paths: List[str]): + raws = [] + for annotation_path in annotation_paths: + annotation_path = Path(annotation_path) + annotation_data = json.load(open(annotation_path)) + raw_data = DocumentRawData() + # metadata + raw_data.docName = annotation_data["metadata"]["name"] + raw_data.folderName = ( + annotation_path.parent.name + if annotation_path.parent != self.project_root + else None + ) + raw_data.docStatus = annotation_data["metadata"].get("status") + raw_data.docUrl = annotation_data["metadata"].get("url") + raw_data.docAnnotator = annotation_data["metadata"].get("annotatorEmail") + raw_data.docQA = annotation_data["metadata"].get("qaEmail") + # append tags + for idx, tag in enumerate(annotation_data.get("tags", [])): + tag_row = copy.copy(raw_data) + tag_row.tagId = idx + tag_row.tag = tag + raws.append(tag_row) + # append instances + instances = annotation_data.get("instances", []) + for idx, instance in enumerate(instances): + instance_raw = copy.copy(raw_data) + instance_raw.instanceId = int(idx) + instance_raw.instanceStart = instance.get("start") + instance_raw.instanceEnd = instance.get("end") + instance_raw.type = instance.get("type") + instance_raw.className = instance.get("className") + instance_raw.createdAt = instance.get("createdAt") + instance_raw.createdBy = instance.get("createdBy", {}).get("email") + instance_raw.creatorRole = instance.get("createdBy", {}).get("role") + instance_raw.updatedAt = instance.get("updatedAt") + instance_raw.updatedBy = instance.get("updatedBy", {}).get("email") + instance_raw.updatorRole = instance.get("updatedBy", {}).get("role") + attributes = instance.get("attributes", []) + # append attributes + for attribute_id, attribute in enumerate(attributes): + attribute_raw = copy.copy(instance_raw) + attribute_raw.attributeId = attribute_id + attribute_raw.attributeGroupName = attribute.get("groupName") + attribute_raw.attributeName = attribute.get("name") + raws.append(attribute_raw) + if not attributes: + raws.append(instance_raw) + if not instances: + raws.append(raw_data) + df = pd.DataFrame([raw.__dict__ for raw in raws], dtype=object) + return df.where(pd.notnull(df), None) def aggregate_image_annotations_as_df(self, annotations_paths: List[str]): annotation_data = { diff --git a/src/superannotate/lib/app/helpers.py b/src/superannotate/lib/app/helpers.py index f50d0ec74..39c2e12ab 100644 --- a/src/superannotate/lib/app/helpers.py +++ b/src/superannotate/lib/app/helpers.py 
@@ -8,6 +8,7 @@ import boto3 import pandas as pd +from superannotate.lib.app.exceptions import AppException from superannotate.lib.app.exceptions import PathError from superannotate.lib.core import ATTACHED_VIDEO_ANNOTATION_POSTFIX from superannotate.lib.core import PIXEL_ANNOTATION_POSTFIX @@ -168,3 +169,33 @@ def get_paths_and_duplicated_from_csv(csv_path): else: duplicate_images.append(temp) return images_to_upload, duplicate_images + + +def get_name_url_duplicated_from_csv(csv_path): + image_data = pd.read_csv(csv_path, dtype=str) + if "url" not in image_data.columns: + raise AppException("Column 'url' is required") + image_data = image_data[~image_data["url"].isnull()] + if "name" in image_data.columns: + image_data["name"] = ( + image_data["name"] + .fillna("") + .apply(lambda cell: cell if str(cell).strip() else str(uuid.uuid4())) + ) + else: + image_data["name"] = [str(uuid.uuid4()) for _ in range(len(image_data.index))] + + image_data = pd.DataFrame(image_data, columns=["name", "url"]) + img_names_urls = image_data.to_dict(orient="records") + duplicate_images = [] + seen = [] + images_to_upload = [] + for i in img_names_urls: + temp = i["name"] + i["name"] = i["name"].strip() + if i["name"] not in seen: + seen.append(i["name"]) + images_to_upload.append(i) + else: + duplicate_images.append(temp) + return images_to_upload, duplicate_images diff --git a/src/superannotate/lib/app/interface/cli_interface.py b/src/superannotate/lib/app/interface/cli_interface.py index fabd1a2f6..793517d02 100644 --- a/src/superannotate/lib/app/interface/cli_interface.py +++ b/src/superannotate/lib/app/interface/cli_interface.py @@ -207,7 +207,7 @@ def _upload_annotations( dataset_format=format, dataset_name=dataset_name, project_type=constances.ProjectType.get_name( - project["project"].project_type + project["project"].type ), task=task, ) diff --git a/src/superannotate/lib/app/interface/sdk_interface.py b/src/superannotate/lib/app/interface/sdk_interface.py index bb3d51592..84a3236bc 100644 --- a/src/superannotate/lib/app/interface/sdk_interface.py +++ b/src/superannotate/lib/app/interface/sdk_interface.py @@ -1,3 +1,4 @@ +import collections import io import json import os @@ -17,16 +18,20 @@ from lib.app.annotation_helpers import add_annotation_point_to_json from lib.app.helpers import extract_project_folder from lib.app.helpers import get_annotation_paths +from lib.app.helpers import get_name_url_duplicated_from_csv from lib.app.helpers import get_paths_and_duplicated_from_csv from lib.app.interface.types import AnnotationStatuses from lib.app.interface.types import AnnotationType from lib.app.interface.types import AnnotatorRole +from lib.app.interface.types import AttachmentArg +from lib.app.interface.types import AttachmentDict from lib.app.interface.types import ClassType from lib.app.interface.types import EmailStr from lib.app.interface.types import ImageQualityChoices from lib.app.interface.types import NotEmptyStr from lib.app.interface.types import ProjectStatusEnum from lib.app.interface.types import ProjectTypes +from lib.app.interface.types import Setting from lib.app.interface.types import validate_arguments from lib.app.mixp.decorators import Trackable from lib.app.serializers import BaseSerializer @@ -36,6 +41,8 @@ from lib.app.serializers import SettingsSerializer from lib.app.serializers import TeamSerializer from lib.core import LIMITED_FUNCTIONS +from lib.core.entities import AttachmentEntity +from lib.core.entities import SettingEntity from lib.core.entities.integrations import 
IntegrationEntity from lib.core.entities.project_entities import AnnotationClassEntity from lib.core.enums import ImageQuality @@ -90,10 +97,10 @@ def get_team_metadata(): @Trackable @validate_arguments def search_team_contributors( - email: EmailStr = None, - first_name: NotEmptyStr = None, - last_name: NotEmptyStr = None, - return_metadata: bool = True, + email: EmailStr = None, + first_name: NotEmptyStr = None, + last_name: NotEmptyStr = None, + return_metadata: bool = True, ): """Search for contributors in the team @@ -112,10 +119,10 @@ def search_team_contributors( contributors = ( Controller.get_default() - .search_team_contributors( + .search_team_contributors( email=email, first_name=first_name, last_name=last_name ) - .data + .data ) if not return_metadata: return [contributor["email"] for contributor in contributors] @@ -125,10 +132,10 @@ def search_team_contributors( @Trackable @validate_arguments def search_projects( - name: Optional[NotEmptyStr] = None, - return_metadata: bool = False, - include_complete_image_count: bool = False, - status: Optional[Union[ProjectStatusEnum, List[ProjectStatusEnum]]] = None, + name: Optional[NotEmptyStr] = None, + return_metadata: bool = False, + include_complete_image_count: bool = False, + status: Optional[Union[ProjectStatusEnum, List[ProjectStatusEnum]]] = None, ): """ Project name based case-insensitive search for projects. @@ -157,15 +164,27 @@ def search_projects( statuses = [status] result = ( Controller.get_default() - .search_project( + .search_project( name=name, include_complete_image_count=include_complete_image_count, statuses=statuses, ) - .data + .data ) + if return_metadata: - return [ProjectSerializer(project).serialize() for project in result] + return [ + ProjectSerializer(project).serialize( + exclude={ + "annotation_classes", + "workflows", + "settings", + "contributors", + "classes", + } + ) + for project in result + ] else: return [project.name for project in result] @@ -173,24 +192,37 @@ def search_projects( @Trackable @validate_arguments def create_project( - project_name: NotEmptyStr, - project_description: NotEmptyStr, - project_type: NotEmptyStr, + project_name: NotEmptyStr, + project_description: NotEmptyStr, + project_type: NotEmptyStr, + settings: List[Setting] = None, ): """Create a new project in the team. :param project_name: the new project's name :type project_name: str + :param project_description: the new project's description :type project_description: str + :param project_type: the new project type, Vector or Pixel. 
:type project_type: str + :param settings: list of settings objects + :type settings: list of dicts + :return: dict object metadata the new project :rtype: dict """ + if settings: + settings = parse_obj_as(List[SettingEntity], settings) + else: + settings = [] response = Controller.get_default().create_project( - name=project_name, description=project_description, project_type=project_type + name=project_name, + description=project_description, + project_type=project_type, + settings=settings, ) if response.errors: raise AppException(response.errors) @@ -213,8 +245,10 @@ def create_project_from_metadata(project_metadata: Project): name=project_metadata["name"], description=project_metadata.get("description"), project_type=project_metadata["type"], - settings=project_metadata.get("settings", []), - annotation_classes=project_metadata.get("classes", []), + settings=parse_obj_as( + List[SettingEntity], project_metadata.get("settings", []) + ), + classes=project_metadata.get("classes", []), workflows=project_metadata.get("workflows", []), instructions_link=project_metadata.get("instructions_link"), ) @@ -226,13 +260,13 @@ def create_project_from_metadata(project_metadata: Project): @Trackable @validate_arguments def clone_project( - project_name: Union[NotEmptyStr, dict], - from_project: Union[NotEmptyStr, dict], - project_description: Optional[NotEmptyStr] = None, - copy_annotation_classes: Optional[StrictBool] = True, - copy_settings: Optional[StrictBool] = True, - copy_workflow: Optional[StrictBool] = True, - copy_contributors: Optional[StrictBool] = False, + project_name: Union[NotEmptyStr, dict], + from_project: Union[NotEmptyStr, dict], + project_description: Optional[NotEmptyStr] = None, + copy_annotation_classes: Optional[StrictBool] = True, + copy_settings: Optional[StrictBool] = True, + copy_workflow: Optional[StrictBool] = True, + copy_contributors: Optional[StrictBool] = False, ): """Create a new project in the team using annotation classes and settings from from_project. @@ -269,56 +303,6 @@ def clone_project( return ProjectSerializer(response.data).serialize() -@Trackable -@validate_arguments -def search_images( - project: Union[NotEmptyStr, dict], - image_name_prefix: Optional[NotEmptyStr] = None, - annotation_status: Optional[AnnotationStatuses] = None, - return_metadata: Optional[StrictBool] = False, -): - """Search images by name_prefix (case-insensitive) and annotation status - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param image_name_prefix: image name prefix for search - :type image_name_prefix: str - :param annotation_status: if not None, annotation statuses of images to filter, - should be one of NotStarted InProgress QualityCheck Returned Completed Skipped - :type annotation_status: str - - :param return_metadata: return metadata of images instead of names - :type return_metadata: bool - - :return: metadata of found images or image names - :rtype: list of dicts or strs - """ - warning_msg = ( - "We're deprecating the search_images function. Please use search_items instead. Learn more. 
\n" - "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.search_items" - ) - logger.warning(warning_msg) - warnings.warn(warning_msg, DeprecationWarning) - project_name, folder_name = extract_project_folder(project) - project = Controller.get_default()._get_project(project_name) - - response = Controller.get_default().search_images( - project_name=project_name, - folder_path=folder_name, - annotation_status=annotation_status, - image_name_prefix=image_name_prefix, - ) - if response.errors: - raise AppException(response.errors) - - if return_metadata: - return [ - ImageSerializer(image).serialize_by_project(project) - for image in response.data - ] - return [image.name for image in response.data] - - @Trackable @validate_arguments def create_folder(project: NotEmptyStr, folder_name: NotEmptyStr): @@ -374,10 +358,8 @@ def rename_project(project: NotEmptyStr, new_name: NotEmptyStr): ) if response.errors: raise AppException(response.errors) - - logger.info( - "Successfully renamed project %s to %s.", project, response.data["name"] - ) + logger.info("Successfully renamed project %s to %s.", project, response.data.name) + return ProjectSerializer(response.data).serialize() @Trackable @@ -395,8 +377,8 @@ def get_folder_metadata(project: NotEmptyStr, folder_name: NotEmptyStr): """ result = ( Controller.get_default() - .get_folder(project_name=project, folder_name=folder_name) - .data + .get_folder(project_name=project, folder_name=folder_name) + .data ) if not result: raise AppException("Folder not found.") @@ -422,40 +404,12 @@ def delete_folders(project: NotEmptyStr, folder_names: List[NotEmptyStr]): logger.info(f"Folders {folder_names} deleted in project {project}") -@Trackable -@validate_arguments -def get_project_and_folder_metadata(project: Union[NotEmptyStr, dict]): - """Returns project and folder metadata tuple. If folder part is empty, - than returned folder part is set to None. - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - - :return: tuple of project and folder - :rtype: tuple - """ - warning_msg = ( - "The get_project_and_folder_metadata function is deprecated and will be removed with the coming release, " - "please use get_folder_metadata instead." - ) - logger.warning(warning_msg) - warnings.warn(warning_msg, DeprecationWarning) - project_name, folder_name = extract_project_folder(project) - project = ProjectSerializer( - Controller.get_default().search_project(project_name).data[0] - ).serialize() - folder = None - if folder_name: - folder = get_folder_metadata(project_name, folder_name) - return project, folder - - @Trackable @validate_arguments def search_folders( - project: NotEmptyStr, - folder_name: Optional[NotEmptyStr] = None, - return_metadata: Optional[StrictBool] = False, + project: NotEmptyStr, + folder_name: Optional[NotEmptyStr] = None, + return_metadata: Optional[StrictBool] = False, ): """Folder name based case-insensitive search for folders in project. 
@@ -484,12 +438,12 @@ def search_folders( @Trackable @validate_arguments def copy_image( - source_project: Union[NotEmptyStr, dict], - image_name: NotEmptyStr, - destination_project: Union[NotEmptyStr, dict], - include_annotations: Optional[StrictBool] = False, - copy_annotation_status: Optional[StrictBool] = False, - copy_pin: Optional[StrictBool] = False, + source_project: Union[NotEmptyStr, dict], + image_name: NotEmptyStr, + destination_project: Union[NotEmptyStr, dict], + include_annotations: Optional[StrictBool] = False, + copy_annotation_status: Optional[StrictBool] = False, + copy_pin: Optional[StrictBool] = False, ): """Copy image to a project. The image's project is the same as destination project then the name will be changed to _()., @@ -521,16 +475,14 @@ def copy_image( Controller.get_default().get_project_metadata(destination_project).data ) - if destination_project_metadata["project"].project_type in [ + if destination_project_metadata["project"].type in [ constances.ProjectType.VIDEO.value, constances.ProjectType.DOCUMENT.value, - ] or source_project_metadata["project"].project_type in [ + ] or source_project_metadata["project"].type in [ constances.ProjectType.VIDEO.value, constances.ProjectType.DOCUMENT.value, ]: - raise AppException( - LIMITED_FUNCTIONS[source_project_metadata["project"].project_type] - ) + raise AppException(LIMITED_FUNCTIONS[source_project_metadata["project"].type]) response = Controller.get_default().copy_image( from_project_name=source_project_name, @@ -567,11 +519,11 @@ def copy_image( @Trackable @validate_arguments def copy_images( - source_project: Union[NotEmptyStr, dict], - image_names: Optional[List[NotEmptyStr]], - destination_project: Union[NotEmptyStr, dict], - include_annotations: Optional[StrictBool] = True, - copy_pin: Optional[StrictBool] = True, + source_project: Union[NotEmptyStr, dict], + image_names: Optional[List[NotEmptyStr]], + destination_project: Union[NotEmptyStr, dict], + include_annotations: Optional[StrictBool] = True, + copy_pin: Optional[StrictBool] = True, ): """Copy images in bulk between folders in a project @@ -588,21 +540,24 @@ def copy_images( :return: list of skipped image names :rtype: list of strs """ - + warning_msg = ( + "We're deprecating the copy_images function. Please use copy_items instead. Learn more. 
\n" + "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.copy_items" + ) + logger.warning(warning_msg) + warnings.warn(warning_msg, DeprecationWarning) project_name, source_folder_name = extract_project_folder(source_project) to_project_name, destination_folder_name = extract_project_folder( destination_project ) if project_name != to_project_name: - raise AppException( - "Source and destination projects should be the same for copy_images" - ) + raise AppException("Source and destination projects should be the same") if not image_names: images = ( Controller.get_default() - .search_images(project_name=project_name, folder_path=source_folder_name) - .data + .search_images(project_name=project_name, folder_path=source_folder_name) + .data ) image_names = [image.name for image in images] @@ -635,11 +590,11 @@ def copy_images( @Trackable @validate_arguments def move_images( - source_project: Union[NotEmptyStr, dict], - image_names: Optional[List[NotEmptyStr]], - destination_project: Union[NotEmptyStr, dict], - *args, - **kwargs, + source_project: Union[NotEmptyStr, dict], + image_names: Optional[List[NotEmptyStr]], + destination_project: Union[NotEmptyStr, dict], + *args, + **kwargs, ): """Move images in bulk between folders in a project @@ -652,14 +607,20 @@ def move_images( :return: list of skipped image names :rtype: list of strs """ + warning_msg = ( + "We're deprecating the move_images function. Please use move_items instead. Learn more." + "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.move_items" + ) + logger.warning(warning_msg) + warnings.warn(warning_msg, DeprecationWarning) project_name, source_folder_name = extract_project_folder(source_project) project = Controller.get_default().get_project_metadata(project_name).data - if project["project"].project_type in [ + if project["project"].type in [ constances.ProjectType.VIDEO.value, constances.ProjectType.DOCUMENT.value, ]: - raise AppException(LIMITED_FUNCTIONS[project["project"].project_type]) + raise AppException(LIMITED_FUNCTIONS[project["project"].type]) to_project_name, destination_folder_name = extract_project_folder( destination_project @@ -705,12 +666,12 @@ def move_images( @Trackable @validate_arguments def get_project_metadata( - project: Union[NotEmptyStr, dict], - include_annotation_classes: Optional[StrictBool] = False, - include_settings: Optional[StrictBool] = False, - include_workflow: Optional[StrictBool] = False, - include_contributors: Optional[StrictBool] = False, - include_complete_image_count: Optional[StrictBool] = False, + project: Union[NotEmptyStr, dict], + include_annotation_classes: Optional[StrictBool] = False, + include_settings: Optional[StrictBool] = False, + include_workflow: Optional[StrictBool] = False, + include_contributors: Optional[StrictBool] = False, + include_complete_image_count: Optional[StrictBool] = False, ): """Returns project metadata @@ -739,7 +700,7 @@ def get_project_metadata( project_name, folder_name = extract_project_folder(project) response = ( Controller.get_default() - .get_project_metadata( + .get_project_metadata( project_name, include_annotation_classes, include_settings, @@ -747,22 +708,16 @@ def get_project_metadata( include_contributors, include_complete_image_count, ) - .data + .data ) metadata = ProjectSerializer(response["project"]).serialize() - metadata["settings"] = [ - SettingsSerializer(setting).serialize() - for setting in response.get("settings", []) - ] for elem in "classes", "workflows", 
"contributors": if response.get(elem): metadata[elem] = [ BaseSerializer(attribute).serialize() for attribute in response[elem] ] - else: - metadata[elem] = [] return metadata @@ -810,7 +765,7 @@ def get_project_workflow(project: Union[str, dict]): @Trackable @validate_arguments def search_annotation_classes( - project: Union[NotEmptyStr, dict], name_contains: Optional[str] = None + project: Union[NotEmptyStr, dict], name_contains: Optional[str] = None ): """Searches annotation classes by name_prefix (case-insensitive) @@ -834,7 +789,7 @@ def search_annotation_classes( @Trackable @validate_arguments def set_project_default_image_quality_in_editor( - project: Union[NotEmptyStr, dict], image_quality_in_editor: Optional[str], + project: Union[NotEmptyStr, dict], image_quality_in_editor: Optional[str], ): """Sets project's default image quality in editor setting. @@ -858,7 +813,7 @@ def set_project_default_image_quality_in_editor( @Trackable @validate_arguments def pin_image( - project: Union[NotEmptyStr, dict], image_name: str, pin: Optional[StrictBool] = True + project: Union[NotEmptyStr, dict], image_name: str, pin: Optional[StrictBool] = True ): """Pins (or unpins) image @@ -878,43 +833,12 @@ def pin_image( ) -@Trackable -@validate_arguments -def get_image_metadata( - project: Union[NotEmptyStr, dict], image_name: str, *args, **kwargs -): - """Returns image metadata - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param image_name: image name - :type image_name: str - - :return: metadata of image - :rtype: dict - """ - warning_msg = ( - "We're deprecating the get_image_metadata function. Please use get_item_metadata instead. Learn more. \n" - "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.get_item_metadata" - ) - logger.warning(warning_msg) - warnings.warn(warning_msg, DeprecationWarning) - project_name, folder_name = extract_project_folder(project) - project = Controller.get_default()._get_project(project_name) - response = Controller.get_default().get_image_metadata( - project_name, folder_name, image_name - ) - if response.errors: - raise AppException(response.errors) - return ImageSerializer(response.data).serialize_by_project(project) - - @Trackable @validate_arguments def set_images_annotation_statuses( - project: Union[NotEmptyStr, dict], - annotation_status: NotEmptyStr, - image_names: Optional[List[NotEmptyStr]] = None, + project: Union[NotEmptyStr, dict], + annotation_status: NotEmptyStr, + image_names: Optional[List[NotEmptyStr]] = None, ): """Sets annotation statuses of images @@ -938,7 +862,7 @@ def set_images_annotation_statuses( @Trackable @validate_arguments def delete_images( - project: Union[NotEmptyStr, dict], image_names: Optional[List[str]] = None + project: Union[NotEmptyStr, dict], image_names: Optional[List[str]] = None ): """Delete images in project. 
@@ -980,17 +904,17 @@ def assign_images(project: Union[NotEmptyStr, dict], image_names: List[str], use project_name, folder_name = extract_project_folder(project) project = Controller.get_default().get_project_metadata(project_name).data - if project["project"].project_type in [ + if project["project"].type in [ constances.ProjectType.VIDEO.value, constances.ProjectType.DOCUMENT.value, ]: - raise AppException(LIMITED_FUNCTIONS[project["project"].project_type]) + raise AppException(LIMITED_FUNCTIONS[project["project"].type]) contributors = ( Controller.get_default() - .get_project_metadata(project_name=project_name, include_contributors=True) - .data["project"] - .users + .get_project_metadata(project_name=project_name, include_contributors=True) + .data["project"] + .users ) contributor = None for c in contributors: @@ -1055,7 +979,7 @@ def unassign_folder(project_name: NotEmptyStr, folder_name: NotEmptyStr): @Trackable @validate_arguments def assign_folder( - project_name: NotEmptyStr, folder_name: NotEmptyStr, users: List[NotEmptyStr] + project_name: NotEmptyStr, folder_name: NotEmptyStr, users: List[NotEmptyStr] ): """Assigns folder to users. With SDK, the user can be assigned to a role in the project with the share_project function. @@ -1070,9 +994,9 @@ def assign_folder( contributors = ( Controller.get_default() - .get_project_metadata(project_name=project_name, include_contributors=True) - .data["project"] - .users + .get_project_metadata(project_name=project_name, include_contributors=True) + .data["project"] + .users ) verified_users = [i["user_id"] for i in contributors] verified_users = set(users).intersection(set(verified_users)) @@ -1097,7 +1021,7 @@ def assign_folder( @Trackable @validate_arguments def share_project( - project_name: NotEmptyStr, user: Union[str, dict], user_role: NotEmptyStr + project_name: NotEmptyStr, user: Union[str, dict], user_role: NotEmptyStr ): """Share project with user. @@ -1130,18 +1054,18 @@ def share_project( @validate_arguments def upload_images_from_folder_to_project( - project: Union[NotEmptyStr, dict], - folder_path: Union[NotEmptyStr, Path], - extensions: Optional[ - Union[List[NotEmptyStr], Tuple[NotEmptyStr]] - ] = constances.DEFAULT_IMAGE_EXTENSIONS, - annotation_status="NotStarted", - from_s3_bucket=None, - exclude_file_patterns: Optional[ - Iterable[NotEmptyStr] - ] = constances.DEFAULT_FILE_EXCLUDE_PATTERNS, - recursive_subfolders: Optional[StrictBool] = False, - image_quality_in_editor: Optional[str] = None, + project: Union[NotEmptyStr, dict], + folder_path: Union[NotEmptyStr, Path], + extensions: Optional[ + Union[List[NotEmptyStr], Tuple[NotEmptyStr]] + ] = constances.DEFAULT_IMAGE_EXTENSIONS, + annotation_status="NotStarted", + from_s3_bucket=None, + exclude_file_patterns: Optional[ + Iterable[NotEmptyStr] + ] = constances.DEFAULT_FILE_EXCLUDE_PATTERNS, + recursive_subfolders: Optional[StrictBool] = False, + image_quality_in_editor: Optional[str] = None, ): """Uploads all images with given extensions from folder_path to the project. Sets status of all the uploaded images to set_status if it is not None. @@ -1242,7 +1166,7 @@ def upload_images_from_folder_to_project( @Trackable @validate_arguments def get_project_image_count( - project: Union[NotEmptyStr, dict], with_all_subfolders: Optional[StrictBool] = False + project: Union[NotEmptyStr, dict], with_all_subfolders: Optional[StrictBool] = False ): """Returns number of images in the project. 
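The newly exported set_annotation_statuses complements the image-level set_images_annotation_statuses shown above. A sketch under the assumption that it takes a project/folder path, a target status, and an optional list of item names:

    import superannotate as sa

    # Assumed signature; only the function name is introduced by this diff.
    sa.set_annotation_statuses(
        "Project 1/Folder A",
        "Completed",
        items=["image_1.jpg", "image_2.jpg"],
    )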
@@ -1270,9 +1194,9 @@ def get_project_image_count( @Trackable @validate_arguments def download_image_annotations( - project: Union[NotEmptyStr, dict], - image_name: NotEmptyStr, - local_dir_path: Union[str, Path], + project: Union[NotEmptyStr, dict], + image_name: NotEmptyStr, + local_dir_path: Union[str, Path], ): """Downloads annotations of the image (JSON and mask if pixel type project) to local_dir_path. @@ -1321,11 +1245,11 @@ def get_exports(project: NotEmptyStr, return_metadata: Optional[StrictBool] = Fa @Trackable @validate_arguments def prepare_export( - project: Union[NotEmptyStr, dict], - folder_names: Optional[List[NotEmptyStr]] = None, - annotation_statuses: Optional[List[AnnotationStatuses]] = None, - include_fuse: Optional[StrictBool] = False, - only_pinned=False, + project: Union[NotEmptyStr, dict], + folder_names: Optional[List[NotEmptyStr]] = None, + annotation_statuses: Optional[List[AnnotationStatuses]] = None, + include_fuse: Optional[StrictBool] = False, + only_pinned=False, ): """Prepare annotations and classes.json for export. Original and fused images for images with annotations can be included with include_fuse flag. @@ -1375,18 +1299,18 @@ def prepare_export( @Trackable @validate_arguments def upload_videos_from_folder_to_project( - project: Union[NotEmptyStr, dict], - folder_path: Union[NotEmptyStr, Path], - extensions: Optional[ - Union[Tuple[NotEmptyStr], List[NotEmptyStr]] - ] = constances.DEFAULT_VIDEO_EXTENSIONS, - exclude_file_patterns: Optional[List[NotEmptyStr]] = (), - recursive_subfolders: Optional[StrictBool] = False, - target_fps: Optional[int] = None, - start_time: Optional[float] = 0.0, - end_time: Optional[float] = None, - annotation_status: Optional[AnnotationStatuses] = "NotStarted", - image_quality_in_editor: Optional[ImageQualityChoices] = None, + project: Union[NotEmptyStr, dict], + folder_path: Union[NotEmptyStr, Path], + extensions: Optional[ + Union[Tuple[NotEmptyStr], List[NotEmptyStr]] + ] = constances.DEFAULT_VIDEO_EXTENSIONS, + exclude_file_patterns: Optional[List[NotEmptyStr]] = (), + recursive_subfolders: Optional[StrictBool] = False, + target_fps: Optional[int] = None, + start_time: Optional[float] = 0.0, + end_time: Optional[float] = None, + annotation_status: Optional[AnnotationStatuses] = "NotStarted", + image_quality_in_editor: Optional[ImageQualityChoices] = None, ): """Uploads image frames from all videos with given extensions from folder_path to the project. Sets status of all the uploaded images to set_status if it is not None. @@ -1456,13 +1380,13 @@ def upload_videos_from_folder_to_project( @Trackable @validate_arguments def upload_video_to_project( - project: Union[NotEmptyStr, dict], - video_path: Union[NotEmptyStr, Path], - target_fps: Optional[int] = None, - start_time: Optional[float] = 0.0, - end_time: Optional[float] = None, - annotation_status: Optional[AnnotationStatuses] = "NotStarted", - image_quality_in_editor: Optional[ImageQualityChoices] = None, + project: Union[NotEmptyStr, dict], + video_path: Union[NotEmptyStr, Path], + target_fps: Optional[int] = None, + start_time: Optional[float] = 0.0, + end_time: Optional[float] = None, + annotation_status: Optional[AnnotationStatuses] = "NotStarted", + image_quality_in_editor: Optional[ImageQualityChoices] = None, ): """Uploads image frames from video to platform. Uploaded images will have names "_.jpg". 
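For reference, a short call sketch for upload_videos_from_folder_to_project based on the signature shown above; the folder path and frame rate are illustrative values only.

    import superannotate as sa

    # Extracts frames from videos under ./videos and uploads them to the project.
    sa.upload_videos_from_folder_to_project(
        "Project 1",
        "./videos",
        target_fps=2,
        annotation_status="NotStarted",
    )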
@@ -1509,11 +1433,11 @@ def upload_video_to_project( @Trackable @validate_arguments def create_annotation_class( - project: Union[Project, NotEmptyStr], - name: NotEmptyStr, - color: NotEmptyStr, - attribute_groups: Optional[List[AttributeGroup]] = None, - class_type: ClassType = "object", + project: Union[Project, NotEmptyStr], + name: NotEmptyStr, + color: NotEmptyStr, + attribute_groups: Optional[List[AttributeGroup]] = None, + class_type: ClassType = "object", ): """Create annotation class in project @@ -1553,7 +1477,7 @@ def create_annotation_class( @Trackable @validate_arguments def delete_annotation_class( - project: NotEmptyStr, annotation_class: Union[dict, NotEmptyStr] + project: NotEmptyStr, annotation_class: Union[dict, NotEmptyStr] ): """Deletes annotation class from project @@ -1591,9 +1515,9 @@ def download_annotation_classes_json(project: NotEmptyStr, folder: Union[str, Pa @Trackable @validate_arguments def create_annotation_classes_from_classes_json( - project: Union[NotEmptyStr, dict], - classes_json: Union[List[AnnotationClassEntity], str, Path], - from_s3_bucket=False, + project: Union[NotEmptyStr, dict], + classes_json: Union[List[AnnotationClassEntity], str, Path], + from_s3_bucket=False, ): """Creates annotation classes in project from a SuperAnnotate format annotation classes.json. @@ -1636,11 +1560,11 @@ def create_annotation_classes_from_classes_json( @Trackable @validate_arguments def download_export( - project: Union[NotEmptyStr, dict], - export: Union[NotEmptyStr, dict], - folder_path: Union[str, Path], - extract_zip_contents: Optional[StrictBool] = True, - to_s3_bucket=None, + project: Union[NotEmptyStr, dict], + export: Union[NotEmptyStr, dict], + folder_path: Union[str, Path], + extract_zip_contents: Optional[StrictBool] = True, + to_s3_bucket=None, ): """Download prepared export. 
@@ -1672,7 +1596,7 @@ def download_export( if use_case.is_valid(): if to_s3_bucket: with tqdm( - total=use_case.get_upload_files_count(), desc="Uploading" + total=use_case.get_upload_files_count(), desc="Uploading" ) as progress_bar: for _ in use_case.execute(): progress_bar.update() @@ -1688,9 +1612,9 @@ def download_export( @Trackable @validate_arguments def set_image_annotation_status( - project: Union[NotEmptyStr, dict], - image_name: NotEmptyStr, - annotation_status: NotEmptyStr, + project: Union[NotEmptyStr, dict], + image_name: NotEmptyStr, + annotation_status: NotEmptyStr, ): """Sets the image annotation status @@ -1706,18 +1630,15 @@ def set_image_annotation_status( :rtype: dict """ project_name, folder_name = extract_project_folder(project) - project_entity = Controller.get_default()._get_project(project_name) response = Controller.get_default().set_images_annotation_statuses( project_name, folder_name, [image_name], annotation_status ) if response.errors: raise AppException(response.errors) image = ( - Controller.get_default() - .get_image_metadata(project_name, folder_name, image_name) - .data + Controller.get_default().get_item(project_name, folder_name, image_name).data ) - return ImageSerializer(image).serialize_by_project(project=project_entity) + return BaseSerializer(image).serialize() @Trackable @@ -1746,13 +1667,13 @@ def set_project_workflow(project: Union[NotEmptyStr, dict], new_workflow: List[d @Trackable @validate_arguments def download_image( - project: Union[NotEmptyStr, dict], - image_name: NotEmptyStr, - local_dir_path: Optional[Union[str, Path]] = "./", - include_annotations: Optional[StrictBool] = False, - include_fuse: Optional[StrictBool] = False, - include_overlay: Optional[StrictBool] = False, - variant: Optional[str] = "original", + project: Union[NotEmptyStr, dict], + image_name: NotEmptyStr, + local_dir_path: Optional[Union[str, Path]] = "./", + include_annotations: Optional[StrictBool] = False, + include_fuse: Optional[StrictBool] = False, + include_overlay: Optional[StrictBool] = False, + variant: Optional[str] = "original", ): """Downloads the image (and annotation if not None) to local_dir_path @@ -1795,9 +1716,9 @@ def download_image( @Trackable @validate_arguments def attach_image_urls_to_project( - project: Union[NotEmptyStr, dict], - attachments: Union[str, Path], - annotation_status: Optional[AnnotationStatuses] = "NotStarted", + project: Union[NotEmptyStr, dict], + attachments: Union[str, Path], + annotation_status: Optional[AnnotationStatuses] = "NotStarted", ): """Link images on external storage to SuperAnnotate. @@ -1811,17 +1732,23 @@ def attach_image_urls_to_project( :return: list of linked image names, list of failed image names, list of duplicate image names :rtype: tuple """ + warning_msg = ( + "We're deprecating the attach_image_urls_to_project function. Please use attach_items instead. Learn more." 
+ "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.attach_items" + ) + logger.warning(warning_msg) + warnings.warn(warning_msg, DeprecationWarning) project_name, folder_name = extract_project_folder(project) project = Controller.get_default().get_project_metadata(project_name).data project_folder_name = project_name + (f"/{folder_name}" if folder_name else "") - if project["project"].project_type in [ + if project["project"].type in [ constances.ProjectType.VIDEO.value, constances.ProjectType.DOCUMENT.value, ]: raise AppException( constances.INVALID_PROJECT_TYPE_TO_PROCESS.format( - constances.ProjectType.get_name(project["project"].project_type) + constances.ProjectType.get_name(project["project"].type) ) ) images_to_upload, duplicate_images = get_paths_and_duplicated_from_csv(attachments) @@ -1843,7 +1770,7 @@ def attach_image_urls_to_project( ) ) with tqdm( - total=use_case.attachments_count, desc="Attaching urls" + total=use_case.attachments_count, desc="Attaching urls" ) as progress_bar: for attached in use_case.execute(): progress_bar.update(attached) @@ -1862,9 +1789,9 @@ def attach_image_urls_to_project( @Trackable @validate_arguments def attach_video_urls_to_project( - project: Union[NotEmptyStr, dict], - attachments: Union[str, Path], - annotation_status: Optional[AnnotationStatuses] = "NotStarted", + project: Union[NotEmptyStr, dict], + attachments: Union[str, Path], + annotation_status: Optional[AnnotationStatuses] = "NotStarted", ): """Link videos on external storage to SuperAnnotate. @@ -1878,14 +1805,20 @@ def attach_video_urls_to_project( :return: attached videos, failed videos, skipped videos :rtype: (list, list, list) """ + warning_msg = ( + "We're deprecating the attach_video_urls_to_project function. Please use attach_items instead. Learn more." + "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.attach_items" + ) + logger.warning(warning_msg) + warnings.warn(warning_msg, DeprecationWarning) project_name, folder_name = extract_project_folder(project) project = Controller.get_default().get_project_metadata(project_name).data project_folder_name = project_name + (f"/{folder_name}" if folder_name else "") - if project["project"].project_type != constances.ProjectType.VIDEO.value: + if project["project"].type != constances.ProjectType.VIDEO.value: raise AppException( constances.INVALID_PROJECT_TYPE_TO_PROCESS.format( - constances.ProjectType.get_name(project["project"].project_type) + constances.ProjectType.get_name(project["project"].type) ) ) @@ -1908,7 +1841,7 @@ def attach_video_urls_to_project( ) ) with tqdm( - total=use_case.attachments_count, desc="Attaching urls" + total=use_case.attachments_count, desc="Attaching urls" ) as progress_bar: for attached in use_case.execute(): progress_bar.update(attached) @@ -1927,10 +1860,10 @@ def attach_video_urls_to_project( @Trackable @validate_arguments def upload_annotations_from_folder_to_project( - project: Union[NotEmptyStr, dict], - folder_path: Union[str, Path], - from_s3_bucket=None, - recursive_subfolders: Optional[StrictBool] = False, + project: Union[NotEmptyStr, dict], + folder_path: Union[str, Path], + from_s3_bucket=None, + recursive_subfolders: Optional[StrictBool] = False, ): """Finds and uploads all JSON files in the folder_path as annotations to the project. 
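Both attach_image_urls_to_project and attach_video_urls_to_project now warn that attach_items is the replacement. A hedged sketch, assuming attach_items accepts the same CSV of name/url pairs and keeps the (attached, failed, duplicated) return shape described in the deprecated functions' docstrings:

    import superannotate as sa

    # attachments.csv is an illustrative path with "name" and "url" columns.
    uploaded, failed, duplicated = sa.attach_items(
        "Project 1/Folder A",
        "attachments.csv",
        annotation_status="NotStarted",
    )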
@@ -1991,10 +1924,10 @@ def upload_annotations_from_folder_to_project( @Trackable @validate_arguments def upload_preannotations_from_folder_to_project( - project: Union[NotEmptyStr, dict], - folder_path: Union[str, Path], - from_s3_bucket=None, - recursive_subfolders: Optional[StrictBool] = False, + project: Union[NotEmptyStr, dict], + folder_path: Union[str, Path], + from_s3_bucket=None, + recursive_subfolders: Optional[StrictBool] = False, ): """Finds and uploads all JSON files in the folder_path as pre-annotations to the project. @@ -2022,11 +1955,11 @@ def upload_preannotations_from_folder_to_project( project_name, folder_name = extract_project_folder(project) project_folder_name = project_name + (f"/{folder_name}" if folder_name else "") project = Controller.get_default().get_project_metadata(project_name).data - if project["project"].project_type in [ + if project["project"].type in [ constances.ProjectType.VIDEO.value, constances.ProjectType.DOCUMENT.value, ]: - raise AppException(LIMITED_FUNCTIONS[project["project"].project_type]) + raise AppException(LIMITED_FUNCTIONS[project["project"].type]) if recursive_subfolders: logger.info( "When using recursive subfolder parsing same name annotations in different " @@ -2058,11 +1991,11 @@ def upload_preannotations_from_folder_to_project( @Trackable @validate_arguments def upload_image_annotations( - project: Union[NotEmptyStr, dict], - image_name: str, - annotation_json: Union[str, Path, dict], - mask: Optional[Union[str, Path, bytes]] = None, - verbose: Optional[StrictBool] = True, + project: Union[NotEmptyStr, dict], + image_name: str, + annotation_json: Union[str, Path, dict], + mask: Optional[Union[str, Path, bytes]] = None, + verbose: Optional[StrictBool] = True, ): """Upload annotations from JSON (also mask for pixel annotations) to the image. 
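A brief usage sketch for upload_image_annotations based on the signature above; the "___objects.json" file name follows the common SuperAnnotate export convention and is illustrative, not taken from this diff.

    import superannotate as sa

    # Uploads a single SuperAnnotate-format annotation JSON for one image.
    sa.upload_image_annotations(
        "Project 1/Folder A",
        "image_1.jpg",
        "image_1.jpg___objects.json",
    )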
@@ -2080,11 +2013,11 @@ def upload_image_annotations( project_name, folder_name = extract_project_folder(project) project = Controller.get_default().get_project_metadata(project_name).data - if project["project"].project_type in [ + if project["project"].type in [ constances.ProjectType.VIDEO.value, constances.ProjectType.DOCUMENT.value, ]: - raise AppException(LIMITED_FUNCTIONS[project["project"].project_type]) + raise AppException(LIMITED_FUNCTIONS[project["project"].type]) if not mask: if not isinstance(annotation_json, dict): @@ -2138,13 +2071,13 @@ def download_model(model: MLModel, output_dir: Union[str, Path]): @Trackable @validate_arguments def benchmark( - project: Union[NotEmptyStr, dict], - gt_folder: str, - folder_names: List[NotEmptyStr], - export_root: Optional[Union[str, Path]] = None, - image_list=None, - annot_type: Optional[AnnotationType] = "bbox", - show_plots=False, + project: Union[NotEmptyStr, dict], + gt_folder: str, + folder_names: List[NotEmptyStr], + export_root: Optional[Union[str, Path]] = None, + image_list=None, + annot_type: Optional[AnnotationType] = "bbox", + show_plots=False, ): """Computes benchmark score for each instance of given images that are present both gt_project_name project and projects in folder_names list: @@ -2171,11 +2104,11 @@ def benchmark( project_name = project["name"] project = Controller.get_default().get_project_metadata(project_name).data - if project["project"].project_type in [ + if project["project"].type in [ constances.ProjectType.VIDEO.value, constances.ProjectType.DOCUMENT.value, ]: - raise AppException(LIMITED_FUNCTIONS[project["project"].project_type]) + raise AppException(LIMITED_FUNCTIONS[project["project"].type]) if not export_root: with tempfile.TemporaryDirectory() as temp_dir: @@ -2207,12 +2140,12 @@ def benchmark( @Trackable @validate_arguments def consensus( - project: NotEmptyStr, - folder_names: List[NotEmptyStr], - export_root: Optional[Union[NotEmptyStr, Path]] = None, - image_list: Optional[List[NotEmptyStr]] = None, - annot_type: Optional[AnnotationType] = "bbox", - show_plots: Optional[StrictBool] = False, + project: NotEmptyStr, + folder_names: List[NotEmptyStr], + export_root: Optional[Union[NotEmptyStr, Path]] = None, + image_list: Optional[List[NotEmptyStr]] = None, + annot_type: Optional[AnnotationType] = "bbox", + show_plots: Optional[StrictBool] = False, ): """Computes consensus score for each instance of given images that are present in at least 2 of the given projects: @@ -2262,9 +2195,9 @@ def consensus( @Trackable @validate_arguments def run_prediction( - project: Union[NotEmptyStr, dict], - images_list: List[NotEmptyStr], - model: Union[NotEmptyStr, dict], + project: Union[NotEmptyStr, dict], + images_list: List[NotEmptyStr], + model: Union[NotEmptyStr, dict], ): """This function runs smart prediction on given list of images from a given project using the neural network of your choice @@ -2302,12 +2235,12 @@ def run_prediction( @Trackable @validate_arguments def add_annotation_bbox_to_image( - project: NotEmptyStr, - image_name: NotEmptyStr, - bbox: List[float], - annotation_class_name: NotEmptyStr, - annotation_class_attributes: Optional[List[dict]] = None, - error: Optional[StrictBool] = None, + project: NotEmptyStr, + image_name: NotEmptyStr, + bbox: List[float], + annotation_class_name: NotEmptyStr, + annotation_class_attributes: Optional[List[dict]] = None, + error: Optional[StrictBool] = None, ): """Add a bounding box annotation to image annotations @@ -2329,13 +2262,16 @@ def 
add_annotation_bbox_to_image( """ project_name, folder_name = extract_project_folder(project) project = Controller.get_default().get_project_metadata(project_name).data - if project["project"].project_type in [ + if project["project"].type in [ constances.ProjectType.VIDEO.value, constances.ProjectType.DOCUMENT.value, ]: - raise AppException(LIMITED_FUNCTIONS[project["project"].project_type]) + raise AppException(LIMITED_FUNCTIONS[project["project"].type]) response = Controller.get_default().get_annotations( - project_name=project_name, folder_name=folder_name, item_names=[image_name], logging=False + project_name=project_name, + folder_name=folder_name, + item_names=[image_name], + logging=False, ) if response.errors: raise AppException(response.errors) @@ -2360,12 +2296,12 @@ def add_annotation_bbox_to_image( @Trackable @validate_arguments def add_annotation_point_to_image( - project: NotEmptyStr, - image_name: NotEmptyStr, - point: List[float], - annotation_class_name: NotEmptyStr, - annotation_class_attributes: Optional[List[dict]] = None, - error: Optional[StrictBool] = None, + project: NotEmptyStr, + image_name: NotEmptyStr, + point: List[float], + annotation_class_name: NotEmptyStr, + annotation_class_attributes: Optional[List[dict]] = None, + error: Optional[StrictBool] = None, ): """Add a point annotation to image annotations @@ -2386,13 +2322,16 @@ def add_annotation_point_to_image( """ project_name, folder_name = extract_project_folder(project) project = Controller.get_default().get_project_metadata(project_name).data - if project["project"].project_type in [ + if project["project"].type in [ constances.ProjectType.VIDEO.value, constances.ProjectType.DOCUMENT.value, ]: - raise AppException(LIMITED_FUNCTIONS[project["project"].project_type]) + raise AppException(LIMITED_FUNCTIONS[project["project"].type]) response = Controller.get_default().get_annotations( - project_name=project_name, folder_name=folder_name, item_names=[image_name], logging=False + project_name=project_name, + folder_name=folder_name, + item_names=[image_name], + logging=False, ) if response.errors: raise AppException(response.errors) @@ -2416,12 +2355,12 @@ def add_annotation_point_to_image( @Trackable @validate_arguments def add_annotation_comment_to_image( - project: NotEmptyStr, - image_name: NotEmptyStr, - comment_text: NotEmptyStr, - comment_coords: List[float], - comment_author: EmailStr, - resolved: Optional[StrictBool] = False, + project: NotEmptyStr, + image_name: NotEmptyStr, + comment_text: NotEmptyStr, + comment_coords: List[float], + comment_author: EmailStr, + resolved: Optional[StrictBool] = False, ): """Add a comment to SuperAnnotate format annotation JSON @@ -2440,13 +2379,16 @@ def add_annotation_comment_to_image( """ project_name, folder_name = extract_project_folder(project) project = Controller.get_default().get_project_metadata(project_name).data - if project["project"].project_type in [ + if project["project"].type in [ constances.ProjectType.VIDEO.value, constances.ProjectType.DOCUMENT.value, ]: - raise AppException(LIMITED_FUNCTIONS[project["project"].project_type]) + raise AppException(LIMITED_FUNCTIONS[project["project"].type]) response = Controller.get_default().get_annotations( - project_name=project_name, folder_name=folder_name, item_names=[image_name], logging=False + project_name=project_name, + folder_name=folder_name, + item_names=[image_name], + logging=False, ) if response.errors: raise AppException(response.errors) @@ -2467,60 +2409,15 @@ def 
add_annotation_comment_to_image( ) -@Trackable -@validate_arguments -def search_images_all_folders( - project: NotEmptyStr, - image_name_prefix: Optional[NotEmptyStr] = None, - annotation_status: Optional[NotEmptyStr] = None, - return_metadata: Optional[StrictBool] = False, -): - """Search images by name_prefix (case-insensitive) and annotation status in - project and all of its folders - - :param project: project name - :type project: str - :param image_name_prefix: image name prefix for search - :type image_name_prefix: str - :param annotation_status: if not None, annotation statuses of images to filter, - should be one of NotStarted InProgress QualityCheck Returned Completed Skipped - :type annotation_status: str - - :param return_metadata: return metadata of images instead of names - :type return_metadata: bool - - :return: metadata of found images or image names - :rtype: list of dicts or strs - """ - warning_msg = ( - "We're deprecating the search_images function. Please use search_items instead. Learn more. \n" - "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.search_items" - ) - logger.warning(warning_msg) - warnings.warn(warning_msg, DeprecationWarning) - project_entity = Controller.get_default()._get_project(project) - res = Controller.get_default().list_images( - project_name=project, - name_prefix=image_name_prefix, - annotation_status=annotation_status, - ) - if return_metadata: - return [ - ImageSerializer(image).serialize_by_project(project=project_entity) - for image in res.data - ] - return [image.name for image in res.data] - - @Trackable @validate_arguments def upload_image_to_project( - project: NotEmptyStr, - img, - image_name: Optional[NotEmptyStr] = None, - annotation_status: Optional[AnnotationStatuses] = "NotStarted", - from_s3_bucket=None, - image_quality_in_editor: Optional[NotEmptyStr] = None, + project: NotEmptyStr, + img, + image_name: Optional[NotEmptyStr] = None, + annotation_status: Optional[AnnotationStatuses] = "NotStarted", + from_s3_bucket=None, + image_quality_in_editor: Optional[NotEmptyStr] = None, ): """Uploads image (io.BytesIO() or filepath to image) to project. Sets status of the uploaded image to set_status if it is not None. @@ -2556,11 +2453,11 @@ def upload_image_to_project( def search_models( - name: Optional[NotEmptyStr] = None, - type_: Optional[NotEmptyStr] = None, - project_id: Optional[int] = None, - task: Optional[NotEmptyStr] = None, - include_global: Optional[StrictBool] = True, + name: Optional[NotEmptyStr] = None, + type_: Optional[NotEmptyStr] = None, + project_id: Optional[int] = None, + task: Optional[NotEmptyStr] = None, + include_global: Optional[StrictBool] = True, ): """Search for ML models. @@ -2591,11 +2488,11 @@ def search_models( @Trackable @validate_arguments def upload_images_to_project( - project: NotEmptyStr, - img_paths: List[NotEmptyStr], - annotation_status: Optional[AnnotationStatuses] = "NotStarted", - from_s3_bucket=None, - image_quality_in_editor: Optional[ImageQualityChoices] = None, + project: NotEmptyStr, + img_paths: List[NotEmptyStr], + annotation_status: Optional[AnnotationStatuses] = "NotStarted", + from_s3_bucket=None, + image_quality_in_editor: Optional[ImageQualityChoices] = None, ): """Uploads all images given in list of path objects in img_paths to the project. Sets status of all the uploaded images to set_status if it is not None. 
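With search_images_all_folders removed above in favor of search_items, a migration sketch; the name_contains and recursive parameters, and the dict-shaped return items, are assumptions about the new function rather than details shown in this diff:

    import superannotate as sa

    # Search the whole project, including folders, for items whose names contain "frame_".
    items = sa.search_items("Project 1", name_contains="frame_", recursive=True)
    names = [item["name"] for item in items]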
@@ -2653,16 +2550,16 @@ def upload_images_to_project( @Trackable @validate_arguments def aggregate_annotations_as_df( - project_root: Union[NotEmptyStr, Path], - project_type: ProjectTypes, - folder_names: Optional[List[Union[Path, NotEmptyStr]]] = None, + project_root: Union[NotEmptyStr, Path], + project_type: ProjectTypes, + folder_names: Optional[List[Union[Path, NotEmptyStr]]] = None, ): """Aggregate annotations as pandas dataframe from project root. :param project_root: the export path of the project - :type project_root: Pathlike (str or Path) + :type project_root: Path-like (str or Path) - :param project_type: the project type, Vector/Pixel or Video + :param project_type: the project type, Vector/Pixel, Video or Document :type project_type: str :param folder_names: Aggregate the specified folders from project_root. @@ -2673,8 +2570,8 @@ def aggregate_annotations_as_df( :rtype: pandas DataFrame """ if project_type in ( - constances.ProjectType.VECTOR.name, - constances.ProjectType.PIXEL.name, + constances.ProjectType.VECTOR.name, + constances.ProjectType.PIXEL.name, ): from superannotate.lib.app.analytics.common import ( aggregate_image_annotations_as_df, @@ -2687,7 +2584,10 @@ def aggregate_annotations_as_df( include_tags=True, folder_names=folder_names, ) - elif project_type == constances.ProjectType.VIDEO.name: + elif project_type in ( + constances.ProjectType.VIDEO.name, + constances.ProjectType.DOCUMENT.name, + ): from superannotate.lib.app.analytics.aggregators import DataAggregator return DataAggregator( @@ -2695,14 +2595,12 @@ def aggregate_annotations_as_df( project_root=project_root, folder_names=folder_names, ).aggregate_annotations_as_df() - else: - raise AppException(constances.DEPRECATED_DOCUMENT_PROJECTS_MESSAGE) @Trackable @validate_arguments def delete_annotations( - project: NotEmptyStr, image_names: Optional[List[NotEmptyStr]] = None + project: NotEmptyStr, image_names: Optional[List[NotEmptyStr]] = None ): """ Delete image annotations from a given list of images. @@ -2725,9 +2623,9 @@ def delete_annotations( @Trackable @validate_arguments def attach_document_urls_to_project( - project: Union[NotEmptyStr, dict], - attachments: Union[Path, NotEmptyStr], - annotation_status: Optional[AnnotationStatuses] = "NotStarted", + project: Union[NotEmptyStr, dict], + attachments: Union[Path, NotEmptyStr], + annotation_status: Optional[AnnotationStatuses] = "NotStarted", ): """Link documents on external storage to SuperAnnotate. @@ -2741,14 +2639,20 @@ def attach_document_urls_to_project( :return: list of attached documents, list of not attached documents, list of skipped documents :rtype: tuple """ + warning_msg = ( + "We're deprecating the attach_document_urls_to_project function. Please use attach_items instead. Learn more." 
+ "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.attach_items" + ) + logger.warning(warning_msg) + warnings.warn(warning_msg, DeprecationWarning) project_name, folder_name = extract_project_folder(project) project = Controller.get_default().get_project_metadata(project_name).data project_folder_name = project_name + (f"/{folder_name}" if folder_name else "") - if project["project"].project_type != constances.ProjectType.DOCUMENT.value: + if project["project"].type != constances.ProjectType.DOCUMENT.value: raise AppException( constances.INVALID_PROJECT_TYPE_TO_PROCESS.format( - constances.ProjectType.get_name(project["project"].project_type) + constances.ProjectType.get_name(project["project"].type) ) ) @@ -2771,7 +2675,7 @@ def attach_document_urls_to_project( ) ) with tqdm( - total=use_case.attachments_count, desc="Attaching urls" + total=use_case.attachments_count, desc="Attaching urls" ) as progress_bar: for attached in use_case.execute(): progress_bar.update(attached) @@ -2790,7 +2694,7 @@ def attach_document_urls_to_project( @Trackable @validate_arguments def validate_annotations( - project_type: ProjectTypes, annotations_json: Union[NotEmptyStr, Path] + project_type: ProjectTypes, annotations_json: Union[NotEmptyStr, Path] ): """Validates given annotation JSON. @@ -2820,7 +2724,7 @@ def validate_annotations( @Trackable @validate_arguments def add_contributors_to_project( - project: NotEmptyStr, emails: conlist(EmailStr, min_items=1), role: AnnotatorRole + project: NotEmptyStr, emails: conlist(EmailStr, min_items=1), role: AnnotatorRole ) -> Tuple[List[str], List[str]]: """Add contributors to project. @@ -2847,7 +2751,7 @@ def add_contributors_to_project( @Trackable @validate_arguments def invite_contributors_to_team( - emails: conlist(EmailStr, min_items=1), admin: StrictBool = False + emails: conlist(EmailStr, min_items=1), admin: StrictBool = False ) -> Tuple[List[str], List[str]]: """Invites contributors to the team. @@ -2961,9 +2865,9 @@ def get_integrations(): @Trackable @validate_arguments def attach_items_from_integrated_storage( - project: NotEmptyStr, - integration: Union[NotEmptyStr, IntegrationEntity], - folder_path: Optional[NotEmptyStr] = None, + project: NotEmptyStr, + integration: Union[NotEmptyStr, IntegrationEntity], + folder_path: Optional[NotEmptyStr] = None, ): """Link images from integrated external storage to SuperAnnotate. @@ -2991,7 +2895,8 @@ def attach_items_from_integrated_storage( @Trackable @validate_arguments def query(project: NotEmptyStr, query: Optional[NotEmptyStr]): - """Return items + """Return items that satisfy the given query. + Query syntax should be in SuperAnnotate query language(https://doc.superannotate.com/docs/query-search-1). 
:param project: project name or folder path (e.g., “project1/folder1”) :type project: str @@ -3012,7 +2917,7 @@ def query(project: NotEmptyStr, query: Optional[NotEmptyStr]): @Trackable @validate_arguments def get_item_metadata( - project: NotEmptyStr, item_name: NotEmptyStr, + project: NotEmptyStr, item_name: NotEmptyStr, ): """Returns item metadata @@ -3035,12 +2940,12 @@ def get_item_metadata( @Trackable @validate_arguments def search_items( - project: NotEmptyStr, - name_contains: NotEmptyStr = None, - annotation_status: Optional[AnnotationStatuses] = None, - annotator_email: Optional[NotEmptyStr] = None, - qa_email: Optional[NotEmptyStr] = None, - recursive: bool = False, + project: NotEmptyStr, + name_contains: NotEmptyStr = None, + annotation_status: Optional[AnnotationStatuses] = None, + annotator_email: Optional[NotEmptyStr] = None, + qa_email: Optional[NotEmptyStr] = None, + recursive: bool = False, ): """Search items by filtering criteria. @@ -3092,3 +2997,185 @@ def search_items( if response.errors: raise AppException(response.errors) return BaseSerializer.serialize_iterable(response.data) + + +@Trackable +@validate_arguments +def attach_items( + project: Union[NotEmptyStr, dict], + attachments: AttachmentArg, + annotation_status: Optional[AnnotationStatuses] = "NotStarted", +): + """Link items from external storage to SuperAnnotate using URLs. + + :param project: project name or folder path (e.g., “project1/folder1”) + :type project: str + + :param attachments: path to CSV file or list of dicts containing attachments URLs. + :type attachments: path-like (str or Path) or list of dicts + + :param annotation_status: value to set the annotation statuses of the linked items + “NotStarted” + “InProgress” + “QualityCheck” + “Returned” + “Completed” + “Skipped” + :type annotation_status: str + """ + attachments = attachments.data + project_name, folder_name = extract_project_folder(project) + if attachments and isinstance(attachments[0], AttachmentDict): + unique_attachments = set(attachments) + duplicate_attachments = [ + item + for item, count in collections.Counter(attachments).items() + if count > 1 + ] + else: + unique_attachments, duplicate_attachments = get_name_url_duplicated_from_csv( + attachments + ) + if duplicate_attachments: + logger.info("Dropping duplicates.") + unique_attachments = parse_obj_as(List[AttachmentEntity], unique_attachments) + uploaded, fails, duplicated = [], [], [] + if unique_attachments: + logger.info( + f"Attaching {len(unique_attachments)} file(s) to project {project}." + ) + response = Controller.get_default().attach_items( + project_name=project_name, + folder_name=folder_name, + attachments=unique_attachments, + annotation_status=annotation_status, + ) + if response.errors: + raise AppException(response.errors) + uploaded, duplicated = response.data + uploaded = [i["name"] for i in uploaded] + fails = [ + attachment.name + for attachment in unique_attachments + if attachment.name not in uploaded and attachment.name not in duplicated + ] + return uploaded, fails, duplicated + + +@Trackable +@validate_arguments +def copy_items( + source: Union[NotEmptyStr, dict], + destination: Union[NotEmptyStr, dict], + items: Optional[List[NotEmptyStr]] = None, + include_annotations: Optional[StrictBool] = True, +): + """Copy images in bulk between folders in a project + + :param source: project name or folder path to select items from (e.g., “project1/folder1”). 
+ :type source: str + + :param destination: project name (root) or folder path to place copied items. + :type destination: str + + :param items: names of items to copy. If None, all items from the source directory will be copied. + :type items: list of str + + :param include_annotations: enables annotations copy + :type include_annotations: bool + + :return: list of skipped item names + :rtype: list of strs + """ + + project_name, source_folder = extract_project_folder(source) + + to_project_name, destination_folder = extract_project_folder(destination) + if project_name != to_project_name: + raise AppException("Source and destination projects should be the same") + + response = Controller.get_default().copy_items( + project_name=project_name, + from_folder=source_folder, + to_folder=destination_folder, + items=items, + include_annotations=include_annotations, + ) + if response.errors: + raise AppException(response.errors) + + return response.data + + +@Trackable +@validate_arguments +def move_items( + source: Union[NotEmptyStr, dict], + destination: Union[NotEmptyStr, dict], + items: Optional[List[NotEmptyStr]] = None, +): + """Move items in bulk between folders in a project + + :param source: project name or folder path to pick items from (e.g., “project1/folder1”). + :type source: str + + :param destination: project name (root) or folder path to move items to. + :type destination: str + + :param items: names of items to move. If None, all items from the source directory will be moved. + :type items: list of str + + :return: list of skipped item names + :rtype: list of strs + """ + + project_name, source_folder = extract_project_folder(source) + to_project_name, destination_folder = extract_project_folder(destination) + if project_name != to_project_name: + raise AppException("Source and destination projects should be the same") + response = Controller.get_default().move_items( + project_name=project_name, + from_folder=source_folder, + to_folder=destination_folder, + items=items, + ) + if response.errors: + raise AppException(response.errors) + return response.data + + +@Trackable +@validate_arguments +def set_annotation_statuses( + project: Union[NotEmptyStr, dict], + annotation_status: AnnotationStatuses, + items: Optional[List[NotEmptyStr]] = None, +): + """Sets annotation statuses of items + + :param project: project name or folder path (e.g., “project1/folder1”). + :type project: str + + :param annotation_status: annotation status to set, should be one of: + “NotStarted” + “InProgress” + “QualityCheck” + “Returned” + “Completed” + “Skipped” + :type annotation_status: str + + :param items: item names to set the mentioned status for. If None, all the items in the project will be used. 
+ :type items: list of strs + """ + + project_name, folder_name = extract_project_folder(project) + response = Controller.get_default().set_annotation_statuses( + project_name=project_name, + folder_name=folder_name, + annotation_status=annotation_status, + item_names=items, + ) + if response.errors: + raise AppException(response.errors) + return response.data diff --git a/src/superannotate/lib/app/interface/types.py b/src/superannotate/lib/app/interface/types.py index 3d571d0ca..c262d0776 100644 --- a/src/superannotate/lib/app/interface/types.py +++ b/src/superannotate/lib/app/interface/types.py @@ -1,4 +1,7 @@ +import uuid from functools import wraps +from pathlib import Path +from typing import Optional from typing import Union from lib.core.enums import AnnotationStatus @@ -8,7 +11,13 @@ from lib.core.enums import UserRole from lib.core.exceptions import AppException from lib.infrastructure.validators import wrap_error +from pydantic import BaseModel +from pydantic import conlist from pydantic import constr +from pydantic import Extra +from pydantic import Field +from pydantic import parse_obj_as +from pydantic import root_validator from pydantic import StrictStr from pydantic import validate_arguments as pydantic_validate_arguments from pydantic import ValidationError @@ -22,7 +31,9 @@ class EmailStr(StrictStr): def validate(cls, value: Union[str]) -> Union[str]: try: constr( - regex=r"^(?=.{1,254}$)(?=.{1,64}@)[a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+)*@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$" + regex=r"^(?=.{1,254}$)(?=.{1,64}@)[a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+)" + r"*@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}" + r"[a-zA-Z0-9])?)*$" ).validate(value) except StrRegexError: raise ValueError("Invalid email") @@ -79,6 +90,52 @@ def validate(cls, value: Union[str]) -> Union[str]: return value +class AttachmentDict(BaseModel): + url: StrictStr + name: Optional[StrictStr] = Field(default_factory=lambda: str(uuid.uuid4())) + + class Config: + extra = Extra.ignore + + def __hash__(self): + return hash(self.name) + + def __eq__(self, other): + return self.url == other.url and self.name.strip() == other.name.strip() + + +AttachmentArgType = Union[NotEmptyStr, Path, conlist(AttachmentDict, min_items=1)] + + +class Setting(BaseModel): + attribute: NotEmptyStr + value: Union[NotEmptyStr, float, int] + + class Config: + extra = Extra.ignore + + +class AttachmentArg(BaseModel): + __root__: AttachmentArgType + + def __getitem__(self, index): + return self.__root__[index] + + @property + def data(self): + return self.__root__ + + @root_validator(pre=True) + def validate_root(cls, values): + try: + parse_obj_as(AttachmentArgType, values["__root__"]) + except ValidationError: + raise ValueError( + "The value must be str, path, or list of dicts with the required 'url' and optional 'name' keys" + ) + return values + + class ImageQualityChoices(StrictStr): VALID_CHOICES = ["compressed", "original"] @@ -118,7 +175,7 @@ class AnnotationStatuses(StrictStr): def validate(cls, value: Union[str]) -> Union[str]: if value.lower() not in AnnotationStatus.values(): raise TypeError( - f"Available annotation_statuses are {', '.join(AnnotationStatus.titles())}. " + f"Available annotation_statuses are {', '.join(AnnotationStatus.titles())}. 
" ) return value diff --git a/src/superannotate/lib/app/mixp/decorators.py b/src/superannotate/lib/app/mixp/decorators.py index 7f186369a..8a0f86a64 100644 --- a/src/superannotate/lib/app/mixp/decorators.py +++ b/src/superannotate/lib/app/mixp/decorators.py @@ -67,10 +67,7 @@ def default_parser(function_name: str, kwargs: dict): properties[key] = len(value) else: properties[key] = str(value) - return { - "event_name": function_name, - "properties": properties - } + return {"event_name": function_name, "properties": properties} def track(self, *args, **kwargs): try: @@ -89,7 +86,7 @@ def track(self, *args, **kwargs): pass else: data = self.default_parser(function_name, arguments) - event_name = data.get("event_name", ) + event_name = data.get("event_name",) properties = data.get("properties", {}) team_data = self.team.data user_id = team_data.creator_id diff --git a/src/superannotate/lib/app/mixp/utils/parsers.py b/src/superannotate/lib/app/mixp/utils/parsers.py index 20c3cf62c..b2b91537f 100644 --- a/src/superannotate/lib/app/mixp/utils/parsers.py +++ b/src/superannotate/lib/app/mixp/utils/parsers.py @@ -84,8 +84,10 @@ def create_project_from_metadata(**kwargs): def clone_project(**kwargs): project = kwargs.get("project_name") - project_metadata = Controller.get_default().get_project_metadata(project).data["project"] - project_type = ProjectType.get_name(project_metadata.project_type) + project_metadata = ( + Controller.get_default().get_project_metadata(project).data["project"] + ) + project_type = ProjectType.get_name(project_metadata.type) return { "event_name": "clone_project", @@ -94,26 +96,10 @@ def clone_project(**kwargs): project_metadata.upload_state == constances.UploadState.EXTERNAL.value ), "Project Type": project_type, - "Copy Classes": bool(kwargs.get("copy_annotation_classes") - ), + "Copy Classes": bool(kwargs.get("copy_annotation_classes")), "Copy Settings": bool(kwargs.get("copy_settings")), "Copy Workflow": bool(kwargs.get("copy_workflow")), - "Copy Contributors": bool(kwargs.get("copy_contributors") - ), - "project_name": get_project_name(project), - }, - } - - -def search_images(**kwargs): - project = kwargs["project"] - - return { - "event_name": "search_images", - "properties": { - "Annotation Status": bool(kwargs.get("annotation_status") - ), - "Metadata": bool(kwargs.get("return_metadata")), + "Copy Contributors": bool(kwargs.get("copy_contributors")), "project_name": get_project_name(project), }, } @@ -141,8 +127,7 @@ def upload_image_to_project(**kwargs): "event_name": "upload_image_to_project", "properties": { "Image Name": bool(kwargs.get("image_name")), - "Annotation Status": bool(kwargs.get("annotation_status") - ), + "Annotation Status": bool(kwargs.get("annotation_status")), "project_name": get_project_name(project), }, } @@ -169,8 +154,7 @@ def attach_image_urls_to_project(**kwargs): "event_name": "attach_image_urls_to_project", "properties": { "project_name": get_project_name(project), - "Annotation Status": bool(kwargs.get("annotation_status") - ), + "Annotation Status": bool(kwargs.get("annotation_status")), }, } @@ -198,15 +182,6 @@ def download_image_annotations(**kwargs): } -def get_image_metadata(**kwargs): - project = kwargs["project"] - - return { - "event_name": "get_image_metadata", - "properties": {"project_name": get_project_name(project)}, - } - - def add_annotation_comment_to_image(**kwargs): project = kwargs["project"] @@ -324,28 +299,6 @@ def get_folder_metadata(**kwargs): } -def get_project_and_folder_metadata(**kwargs): - project = 
kwargs["project"] - return { - "event_name": "get_project_and_folder_metadata", - "properties": {"project_name": get_project_name(project)}, - } - - -def search_images_all_folders(**kwargs): - project = kwargs["project"] - - return { - "event_name": "search_images_all_folders", - "properties": { - "Annotation Status": bool(kwargs.get("annotation_status") - ), - "Metadata": bool(kwargs.get("return_metadata")), - "project_name": get_project_name(project), - }, - } - - def download_model(**kwargs): model = kwargs["model"] return { @@ -410,13 +363,13 @@ def run_prediction(**kwargs): project_name = get_project_name(project) res = Controller.get_default().get_project_metadata(project_name) project_metadata = res.data["project"] - project_type = ProjectType.get_name(project_metadata.project_type) + project_type = ProjectType.get_name(project_metadata.type) image_list = kwargs["images_list"] return { "event_name": "run_prediction", "properties": { "Project Type": project_type, - "Image Count": len(image_list) if image_list else None + "Image Count": len(image_list) if image_list else None, }, } @@ -551,7 +504,7 @@ def upload_annotations_from_folder_to_project(**kwargs): project_name = get_project_name(project) res = Controller.get_default().get_project_metadata(project_name) project_metadata = res.data["project"] - project_type = ProjectType.get_name(project_metadata.project_type) + project_type = ProjectType.get_name(project_metadata.type) folder_path = kwargs["folder_path"] glob_iterator = Path(folder_path).glob("*.json") @@ -571,7 +524,7 @@ def upload_preannotations_from_folder_to_project(**kwargs): project_name = get_project_name(project) res = Controller.get_default().get_project_metadata(project_name) project_metadata = res.data["project"] - project_type = ProjectType.get_name(project_metadata.project_type) + project_type = ProjectType.get_name(project_metadata.type) folder_path = kwargs["folder_path"] glob_iterator = Path(folder_path).glob("*.json") return { @@ -616,8 +569,7 @@ def upload_images_from_folder_to_project(**kwargs): "properties": { "Image Count": len(filtered_paths), "Custom Extentions": bool(kwargs["extensions"]), - "Annotation Status": bool(kwargs.get("annotation_status") - ), + "Annotation Status": bool(kwargs.get("annotation_status")), "From S3": bool(kwargs.get("from_s3_bucket")), "Custom Exclude Patters": bool(kwargs["exclude_file_patterns"]), }, @@ -631,8 +583,7 @@ def prepare_export(**kwargs): project = kwargs["project"] return { "event_name": "prepare_export", "properties": { "project_name": get_project_name(project), "Folder Count": bool(kwargs.get("folder_names")), - "Annotation Statuses": bool(kwargs.get("annotation_statuses") - ), + "Annotation Statuses": bool(kwargs.get("annotation_statuses")), "Include Fuse": bool(kwargs.get("include_fuse")), "Only Pinned": bool(kwargs.get("only_pinned")), }, @@ -658,8 +609,9 @@ def assign_images(**kwargs): user = kwargs.get("user") contributors = ( - Controller.get_default().get_project_metadata(project_name=project_name, include_contributors=True) - .data["contributors"] + Controller.get_default() + .get_project_metadata(project_name=project_name, include_contributors=True) + .data["contributors"] ) contributor = None for c in contributors: @@ -854,10 +806,7 @@ def delete_images(**kwargs): image_names = res.data return { "event_name": "delete_images", - "properties": { - "project_name": project_name, - "Image Count": len(image_names), - }, + "properties": {"project_name": project_name, "Image Count": len(image_names)}, } @@ -937,7 +886,10 @@ def get_annotations(**kwargs): return { "event_name": 
"get_annotations", - "properties": {"Project": project, "items_count": len(items) if items else None}, + "properties": { + "Project": project, + "items_count": len(items) if items else None, + }, } @@ -975,11 +927,13 @@ def attach_items_from_integrated_storage(**kwargs): if isinstance(integration, str): integration = IntegrationEntity(name=integration) - project = Controller.get_default().get_project_metadata(project_name).data["project"] + project = ( + Controller.get_default().get_project_metadata(project_name).data["project"] + ) return { "event_name": "attach_items_from_integrated_storage", "properties": { - "project_type": ProjectType.get_name(project.project_type), + "project_type": ProjectType.get_name(project.type), "integration_name": integration.name, "folder_path": bool(folder_path), }, @@ -990,11 +944,13 @@ def query(**kwargs): project = kwargs["project"] query_str = kwargs["query"] project_name, folder_name = extract_project_folder(project) - project = Controller.get_default().get_project_metadata(project_name).data["project"] + project = ( + Controller.get_default().get_project_metadata(project_name).data["project"] + ) return { "event_name": "query", "properties": { - "project_type": ProjectType.get_name(project.project_type), + "project_type": ProjectType.get_name(project.type), "query": query_str, }, } @@ -1003,10 +959,12 @@ def query(**kwargs): def get_item_metadata(**kwargs): project = kwargs["project"] project_name, _ = extract_project_folder(project) - project = Controller.get_default().get_project_metadata(project_name).data["project"] + project = ( + Controller.get_default().get_project_metadata(project_name).data["project"] + ) return { "event_name": "get_item_metadata", - "properties": {"project_type": ProjectType.get_name(project.project_type)}, + "properties": {"project_type": ProjectType.get_name(project.type)}, } @@ -1018,11 +976,14 @@ def search_items(**kwargs): qa_email = kwargs["qa_email"] recursive = kwargs["recursive"] project_name, folder_name = extract_project_folder(project) - project = Controller.get_default().get_project_metadata(project_name).data["project"] + project = ( + Controller.get_default().get_project_metadata(project_name).data["project"] + ) return { "event_name": "search_items", "properties": { - "project_type": ProjectType.get_name(project.project_type), + "project_type": ProjectType.get_name(project.type), + "query": query, "name_contains": len(name_contains) if name_contains else False, "annotation_status": annotation_status if annotation_status else False, "annotator_email": bool(annotator_email), @@ -1030,3 +991,66 @@ def search_items(**kwargs): "recursive": bool(recursive), }, } + + +def move_items(**kwargs): + project = kwargs["source"] + project_name, _ = extract_project_folder(project) + project = ( + Controller.get_default().get_project_metadata(project_name).data["project"] + ) + items = kwargs["items"] + return { + "event_name": "move_items", + "properties": { + "project_type": ProjectType.get_name(project.type), + "items_count": len(items) if items else None, + }, + } + + +def copy_items(**kwargs): + project = kwargs["source"] + project_name, _ = extract_project_folder(project) + project = ( + Controller.get_default().get_project_metadata(project_name).data["project"] + ) + items = kwargs["items"] + return { + "event_name": "copy_items", + "properties": { + "project_type": ProjectType.get_name(project.type), + "items_count": len(items) if items else None, + "include_annotations": kwargs["include_annotations"], + }, + } + + 
+def attach_items(**kwargs): + project = kwargs["project"] + project_name, _ = extract_project_folder(project) + project = ( + Controller.get_default().get_project_metadata(project_name).data["project"] + ) + attachments = kwargs["attachments"] + return { + "event_name": "attach_items", + "properties": { + "project_type": ProjectType.get_name(project.type), + "attachments": "csv" if isinstance(attachments, (str, Path)) else "dict", + "annotation_status": kwargs["annotation_status"], + }, + } + + +def set_annotation_statuses(**kwargs): + project = kwargs["project"] + project_name, folder_name = extract_project_folder(project) + return { + "event_name": "set_annotation_statuses", + "properties": { + "item_count": len(kwargs.get("item_names", [])), + "annotation_status": kwargs["annotation_status"], + "root": folder_name == "root", + }, + } diff --git a/src/superannotate/lib/app/serializers.py b/src/superannotate/lib/app/serializers.py index 55d46ebe2..14686fad1 100644 --- a/src/superannotate/lib/app/serializers.py +++ b/src/superannotate/lib/app/serializers.py @@ -24,10 +24,14 @@ def _fill_enum_values(data: dict): return data def serialize( - self, fields: List[str] = None, by_alias: bool = True, flat: bool = False + self, + fields: List[str] = None, + by_alias: bool = True, + flat: bool = False, + exclude: Set[str] = None, ): return self._fill_enum_values( - self._serialize(self._entity, fields, by_alias, flat) + self._serialize(self._entity, fields, by_alias, flat, exclude=exclude) ) def serialize_item( @@ -45,6 +49,7 @@ def _serialize( fields: List[str] = None, by_alias: bool = False, flat: bool = False, + exclude: Set[str] = None, ): if isinstance(entity, dict): return entity @@ -53,12 +58,14 @@ def _serialize( fields = set(fields) if len(fields) == 1: if flat: - return entity.dict(include=fields, by_alias=by_alias)[ - next(iter(fields)) - ] - return entity.dict(include=fields, by_alias=by_alias) - return entity.dict(include=fields, by_alias=by_alias) - return entity.dict(by_alias=by_alias) + return entity.dict( + include=fields, by_alias=by_alias, exclude=exclude + )[next(iter(fields))] + return entity.dict( + include=fields, by_alias=by_alias, exclude=exclude + ) + return entity.dict(include=fields, by_alias=by_alias, exclude=exclude) + return entity.dict(by_alias=by_alias, exclude=exclude) return entity.to_dict() @classmethod @@ -98,8 +105,23 @@ def serialize(self): class ProjectSerializer(BaseSerializer): - def serialize(self): - data = super().serialize() + DEFAULT_EXCLUDE_SET = {"sync_status", "unverified_users"} + + def serialize( + self, + fields: List[str] = None, + by_alias: bool = False, + flat: bool = False, + exclude: Set[str] = None, + ): + to_exclude = self.DEFAULT_EXCLUDE_SET + if exclude: + to_exclude = exclude.union(self.DEFAULT_EXCLUDE_SET) + data = super().serialize(fields, by_alias, flat, to_exclude) + if data.get("settings"): + data["settings"] = [ + SettingsSerializer(setting).serialize() for setting in data["settings"] + ] data["type"] = constance.ProjectType.get_name(data["type"]) if data.get("status"): data["status"] = constance.ProjectStatus.get_name(data["status"]) @@ -155,12 +177,12 @@ def serialize_by_project(self, project: ProjectEntity): data["prediction_status"] = None data["segmentation_status"] = None else: - if project.project_type == constance.ProjectType.VECTOR.value: + if project.type == constance.ProjectType.VECTOR.value: data["prediction_status"] = constance.SegmentationStatus.get_name( data["prediction_status"] ) data["segmentation_status"] = 
None - if project.project_type == constance.ProjectType.PIXEL.value: + if project.type == constance.ProjectType.PIXEL.value: data["prediction_status"] = constance.SegmentationStatus.get_name( data["prediction_status"] ) @@ -178,8 +200,14 @@ def deserialize(data): class SettingsSerializer(BaseSerializer): - def serialize(self): - data = super().serialize() + def serialize( + self, + fields: List[str] = None, + by_alias: bool = True, + flat: bool = False, + exclude=None, + ): + data = super().serialize(fields, by_alias, flat, exclude) if data["attribute"] == "ImageQuality": data["value"] = constance.ImageQuality.get_name(data["value"]) return data diff --git a/src/superannotate/lib/core/__init__.py b/src/superannotate/lib/core/__init__.py index 5a9a8e87e..1b1fed04c 100644 --- a/src/superannotate/lib/core/__init__.py +++ b/src/superannotate/lib/core/__init__.py @@ -61,6 +61,7 @@ ALREADY_EXISTING_FILES_WARNING = ( "{} already existing file(s) found that won't be uploaded." ) + ATTACHING_FILES_MESSAGE = "Attaching {} file(s) to project {}." ATTACHING_UPLOAD_STATE_ERROR = "You cannot attach URLs in this type of project. Please attach it in an external storage project." diff --git a/src/superannotate/lib/core/conditions.py b/src/superannotate/lib/core/conditions.py index 4f6c8d855..7eea76480 100644 --- a/src/superannotate/lib/core/conditions.py +++ b/src/superannotate/lib/core/conditions.py @@ -12,6 +12,17 @@ CONDITION_LE = "<=" +class EmptyCondition: + def __or__(self, other): + return other + + def __and__(self, other): + return other + + def build_query(self): + return "" + + class Condition: def __init__(self, key: str, value: Any, condition_type: str): self._key = key @@ -21,16 +32,6 @@ def __init__(self, key: str, value: Any, condition_type: str): @staticmethod def get_empty_condition(): - class EmptyCondition: - def __or__(self, other): - return other - - def __and__(self, other): - return other - - def build_query(self): - return "" - return EmptyCondition() def __str__(self): @@ -46,10 +47,12 @@ def __or__(self, other): def __and__(self, other): if isinstance(other, tuple) or isinstance(other, list): for elem in other: + if isinstance(other, EmptyCondition): + continue if not isinstance(other, Condition): raise Exception("Support the only Condition types") - return self.__and__(elem) - elif not isinstance(other, Condition): + return self.__and__(elem) + elif not isinstance(other, (Condition, EmptyCondition)): raise Exception("Support the only Condition types") QueryCondition = namedtuple("QueryCondition", ("condition", "query")) self._condition_set.append(QueryCondition(CONDITION_AND, other.build_query())) diff --git a/src/superannotate/lib/core/entities/__init__.py b/src/superannotate/lib/core/entities/__init__.py index 88425761a..d05a2bec7 100644 --- a/src/superannotate/lib/core/entities/__init__.py +++ b/src/superannotate/lib/core/entities/__init__.py @@ -1,4 +1,7 @@ +from lib.core.entities.base import AttachmentEntity from lib.core.entities.base import BaseEntity as TmpBaseEntity +from lib.core.entities.base import ProjectEntity +from lib.core.entities.base import SettingEntity from lib.core.entities.integrations import IntegrationEntity from lib.core.entities.items import DocumentEntity from lib.core.entities.items import Entity @@ -11,8 +14,6 @@ from lib.core.entities.project_entities import ImageEntity from lib.core.entities.project_entities import ImageInfoEntity from lib.core.entities.project_entities import MLModelEntity -from lib.core.entities.project_entities import 
ProjectEntity -from lib.core.entities.project_entities import ProjectSettingEntity from lib.core.entities.project_entities import S3FileEntity from lib.core.entities.project_entities import TeamEntity from lib.core.entities.project_entities import UserEntity @@ -25,7 +26,11 @@ VideoAnnotation as VideoExportAnnotation, ) +# from lib.core.entities.project_entities import ProjectEntity + __all__ = [ + # base + "SettingEntity", # items "TmpImageEntity", "BaseEntity", @@ -33,9 +38,10 @@ "Entity", "VideoEntity", "DocumentEntity", + # Utils + "AttachmentEntity", # project "ProjectEntity", - "ProjectSettingEntity", "ConfigEntity", "WorkflowEntity", "FolderEntity", diff --git a/src/superannotate/lib/core/entities/base.py b/src/superannotate/lib/core/entities/base.py index b733cda88..52bdc4c51 100644 --- a/src/superannotate/lib/core/entities/base.py +++ b/src/superannotate/lib/core/entities/base.py @@ -1,20 +1,42 @@ +import uuid from datetime import datetime +from typing import Any +from typing import List from typing import Optional +from typing import Union from lib.core.enums import AnnotationStatus from pydantic import BaseModel from pydantic import Extra from pydantic import Field +from pydantic import StrictBool +from pydantic import StrictFloat +from pydantic import StrictInt +from pydantic import StrictStr +from pydantic.datetime_parse import parse_datetime + + +class StringDate(datetime): + @classmethod + def __get_validators__(cls): + yield parse_datetime + yield cls.validate + + @classmethod + def validate(cls, v: datetime): + return v.isoformat() class TimedBaseModel(BaseModel): - createdAt: datetime = Field(None, alias="createdAt") - updatedAt: datetime = Field(None, alias="updatedAt") + createdAt: StringDate = Field(None, alias="createdAt") + updatedAt: StringDate = Field(None, alias="updatedAt") class BaseEntity(TimedBaseModel): name: str - path: Optional[str] = Field(None, description="Item’s path in SuperAnnotate project") + path: Optional[str] = Field( + None, description="Item’s path in SuperAnnotate project" + ) url: Optional[str] = Field(description="Publicly available HTTP address") annotator_email: Optional[str] = Field(description="Annotator email") qa_email: Optional[str] = Field(description="QA email") @@ -25,3 +47,68 @@ class BaseEntity(TimedBaseModel): class Config: extra = Extra.allow + + +class AttachmentEntity(BaseModel): + name: Optional[str] = Field(default_factory=lambda: str(uuid.uuid4())) + url: str + + class Config: + extra = Extra.ignore + + +class SettingEntity(BaseModel): + id: Optional[int] + project_id: Optional[int] + attribute: str + value: Union[StrictStr, StrictInt, StrictFloat, StrictBool] + + class Config: + extra = Extra.ignore + + def __copy__(self): + return SettingEntity(attribute=self.attribute, value=self.value) + + +class ProjectEntity(TimedBaseModel): + id: Optional[int] + team_id: Optional[int] + name: Optional[str] + type: Optional[int] + description: Optional[str] + instructions_link: Optional[str] + creator_id: Optional[str] + entropy_status: Optional[int] + sharing_status: Optional[int] + status: Optional[int] + folder_id: Optional[int] + sync_status: Optional[int] + upload_state: Optional[int] + users: Optional[List[Any]] = [] + unverified_users: Optional[List[Any]] = [] + contributors: Optional[List[Any]] = [] + settings: Optional[List[SettingEntity]] = [] + classes: Optional[List[Any]] = [] + workflows: Optional[List[Any]] = [] + completed_images_count: Optional[int] = Field(None, alias="completedImagesCount") + 
root_folder_completed_images_count: Optional[int] = Field( + None, alias="rootFolderCompletedImagesCount" + ) + + class Config: + extra = Extra.ignore + + def __copy__(self): + return ProjectEntity( + team_id=self.team_id, + name=self.name, + type=self.type, + description=self.description, + instructions_link=self.instructions_link + if self.description + else f"Copy of {self.name}.", + status=self.status, + folder_id=self.folder_id, + users=self.users, + upload_state=self.upload_state, + ) diff --git a/src/superannotate/lib/core/entities/items.py b/src/superannotate/lib/core/entities/items.py index f4a6b5379..599a16b16 100644 --- a/src/superannotate/lib/core/entities/items.py +++ b/src/superannotate/lib/core/entities/items.py @@ -7,7 +7,6 @@ class Entity(BaseEntity): - class Config: extra = Extra.allow @@ -31,7 +30,7 @@ class TmpImageEntity(Entity): segmentation_status: Optional[SegmentationStatus] = Field( SegmentationStatus.NOT_STARTED ) - approval_status: bool = None + approval_status: Optional[int] = Field(None) class Config: extra = Extra.ignore diff --git a/src/superannotate/lib/core/entities/project_entities.py b/src/superannotate/lib/core/entities/project_entities.py index d56dfe38d..4e50080f3 100644 --- a/src/superannotate/lib/core/entities/project_entities.py +++ b/src/superannotate/lib/core/entities/project_entities.py @@ -101,7 +101,7 @@ def __init__( super().__init__(uuid, createdAt, updatedAt) self.team_id = team_id self.name = name - self.project_type = project_type + self.type = project_type self.description = description self.instructions_link = instructions_link self.creator_id = creator_id @@ -124,7 +124,7 @@ def __copy__(self): return ProjectEntity( team_id=self.team_id, name=self.name, - project_type=self.project_type, + project_type=self.type, description=self.description, instructions_link=self.instructions_link if self.description @@ -140,7 +140,7 @@ def to_dict(self): **super().to_dict(), "team_id": self.team_id, "name": self.name, - "type": self.project_type, + "type": self.type, "description": self.description, "status": self.status, "instructions_link": self.instructions_link, @@ -155,31 +155,6 @@ def to_dict(self): } -class ProjectSettingEntity(BaseEntity): - def __init__( - self, - uuid: int = None, - project_id: int = None, - attribute: str = None, - value: Any = None, - ): - super().__init__(uuid) - self.project_id = project_id - self.attribute = attribute - self.value = value - - def __copy__(self): - return ProjectSettingEntity(attribute=self.attribute, value=self.value) - - def to_dict(self): - return { - "id": self.uuid, - "project_id": self.project_id, - "attribute": self.attribute, - "value": self.value, - } - - class WorkflowEntity(BaseEntity): def __init__( self, diff --git a/src/superannotate/lib/core/exceptions.py b/src/superannotate/lib/core/exceptions.py index e6331dc87..4228e5cb2 100644 --- a/src/superannotate/lib/core/exceptions.py +++ b/src/superannotate/lib/core/exceptions.py @@ -7,12 +7,18 @@ class AppException(Exception): def __init__(self, message): super().__init__(message) - self.message = message + self.message = str(message) def __str__(self): return self.message +class BackendError(AppException): + """ + Backend Error + """ + + class AppValidationException(AppException): """ App validation exception diff --git a/src/superannotate/lib/core/repositories.py b/src/superannotate/lib/core/repositories.py index bd11c3eaa..a94258a5c 100644 --- a/src/superannotate/lib/core/repositories.py +++ b/src/superannotate/lib/core/repositories.py @@ 
-1,4 +1,5 @@ from abc import ABC +from abc import ABCMeta from abc import abstractmethod from typing import Any from typing import List @@ -42,7 +43,7 @@ def update(self, entity: BaseEntity) -> BaseEntity: raise NotImplementedError @abstractmethod - def delete(self, uuid: Any, *args): + def delete(self, uuid: Any): raise NotImplementedError def bulk_delete(self, entities: List[BaseEntity]) -> bool: @@ -56,7 +57,9 @@ def _drop_nones(data: dict): return data -class BaseProjectRelatedManageableRepository(BaseManageableRepository): +class BaseProjectRelatedManageableRepository( + BaseManageableRepository, metaclass=ABCMeta +): def __init__(self, service: SuperannotateServiceProvider, project: ProjectEntity): self._service = service self._project = project diff --git a/src/superannotate/lib/core/serviceproviders.py b/src/superannotate/lib/core/serviceproviders.py index 9da0dc83f..c5d92732e 100644 --- a/src/superannotate/lib/core/serviceproviders.py +++ b/src/superannotate/lib/core/serviceproviders.py @@ -116,13 +116,13 @@ def get_upload_token( def update_image(self, image_id: int, team_id: int, project_id: int, data: dict): raise NotImplementedError - def copy_images_between_folders_transaction( + def copy_items_between_folders_transaction( self, team_id: int, project_id: int, from_folder_id: int, to_folder_id: int, - images: List[str], + items: List[str], include_annotations: bool = False, include_pin: bool = False, ) -> int: @@ -151,6 +151,11 @@ def get_progress( ) -> Tuple[int, int]: raise NotImplementedError + def await_progress( + self, project_id: int, team_id: int, poll_id: int, items_count + ) -> Tuple[int, int]: + raise NotImplementedError + def set_images_statuses_bulk( self, image_names: List[str], diff --git a/src/superannotate/lib/core/types.py b/src/superannotate/lib/core/types.py index 31aa7d746..509c24d42 100644 --- a/src/superannotate/lib/core/types.py +++ b/src/superannotate/lib/core/types.py @@ -1,12 +1,8 @@ from typing import Optional -from typing import Union from pydantic import BaseModel from pydantic import constr from pydantic import Extra -from pydantic import StrictStr -from pydantic.error_wrappers import ErrorWrapper -from pydantic.error_wrappers import ValidationError from superannotate_schemas.schemas.classes import AttributeGroup as AttributeGroupSchema NotEmptyStr = constr(strict=True, min_length=1) @@ -14,16 +10,6 @@ AttributeGroup = AttributeGroupSchema -class AnnotationType(StrictStr): - @classmethod - def validate(cls, value: str) -> Union[str]: - if value not in ANNOTATION_TYPES.keys(): - raise ValidationError( - [ErrorWrapper(TypeError(f"invalid value {value}"), "type")], cls - ) - return value - - class Project(BaseModel): name: NotEmptyStr diff --git a/src/superannotate/lib/core/usecases/annotations.py b/src/superannotate/lib/core/usecases/annotations.py index 0a9e6ddca..8f4d7dd81 100644 --- a/src/superannotate/lib/core/usecases/annotations.py +++ b/src/superannotate/lib/core/usecases/annotations.py @@ -80,14 +80,14 @@ def __init__( @property def annotation_postfix(self): - if self._project.project_type in ( + if self._project.type in ( constances.ProjectType.VIDEO.value, constances.ProjectType.DOCUMENT.value, ): return constances.ATTACHED_VIDEO_ANNOTATION_POSTFIX - elif self._project.project_type == constances.ProjectType.VECTOR.value: + elif self._project.type == constances.ProjectType.VECTOR.value: return constances.VECTOR_ANNOTATION_POSTFIX - elif self._project.project_type == constances.ProjectType.PIXEL.value: + elif self._project.type == 
constances.ProjectType.PIXEL.value: return constances.PIXEL_ANNOTATION_POSTFIX @staticmethod @@ -114,7 +114,7 @@ def annotations_to_upload(self): images_data = ( GetBulkImages( service=self._backend_service, - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, folder_id=self._folder.uuid, images=[image.name for image in images_detail], @@ -157,7 +157,7 @@ def get_annotation_upload_data( else: function = self._backend_service.get_annotation_upload_data response = function( - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, folder_id=self._folder.uuid, image_ids=image_ids, @@ -183,7 +183,7 @@ def _upload_annotation( uuid=image_id, name=image_name, team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, ), images=self._images, annotation_classes=self._annotation_classes, @@ -348,7 +348,7 @@ def __init__( def annotation_upload_data(self) -> UploadAnnotationAuthData: if not self._annotation_upload_data: response = self._backend_service.get_annotation_upload_data( - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, folder_id=self._folder.uuid, image_ids=[self._image.uuid], @@ -391,7 +391,7 @@ def set_annotation_json(self): self._annotation_json = json.load( self.get_s3_file(self.from_s3, self._annotation_path) ) - if self._project.project_type == constances.ProjectType.PIXEL.value: + if self._project.type == constances.ProjectType.PIXEL.value: self._mask = self.get_s3_file( self.from_s3, self._annotation_path.replace( @@ -401,7 +401,7 @@ def set_annotation_json(self): ) else: self._annotation_json = json.load(open(self._annotation_path)) - if self._project.project_type == constances.ProjectType.PIXEL.value: + if self._project.type == constances.ProjectType.PIXEL.value: self._mask = open( self._annotation_path.replace( constances.PIXEL_ANNOTATION_POSTFIX, @@ -437,7 +437,7 @@ def prepare_annotations( def clean_json(self, json_data: dict,) -> Tuple[bool, dict]: use_case = ValidateAnnotationUseCase( - constances.ProjectType.get_name(self._project.project_type), + constances.ProjectType.get_name(self._project.type), annotation=json_data, validators=self._validators, ) @@ -451,7 +451,7 @@ def execute(self): self._annotation_json = clean_json bucket = self.s3_bucket annotation_json = self.prepare_annotations( - project_type=self._project.project_type, + project_type=self._project.type, annotations=self._annotation_json, annotation_classes=self._annotation_classes, templates=self._templates, @@ -465,7 +465,7 @@ def execute(self): Body=json.dumps(annotation_json), ) if ( - self._project.project_type == constances.ProjectType.PIXEL.value + self._project.type == constances.ProjectType.PIXEL.value and self._mask ): bucket.put_object( @@ -512,7 +512,7 @@ def __init__( self._item_names_provided = True def validate_project_type(self): - if self._project.project_type == constances.ProjectType.PIXEL.value: + if self._project.type == constances.ProjectType.PIXEL.value: raise AppException("The function is not supported for Pixel projects.") def validate_item_names(self): @@ -528,7 +528,7 @@ def validate_item_names(self): self._item_names_provided = False condition = ( Condition("team_id", self._project.team_id, EQ) - & Condition("project_id", self._project.uuid, EQ) + & Condition("project_id", self._project.id, EQ) & Condition("folder_id", self._folder.uuid, EQ) ) @@ -560,7 +560,7 @@ def execute(self): self.reporter.start_progress(items_count, 
disable=not self._show_process) annotations = self._client.get_annotations( team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, folder_id=self._folder.uuid, items=self._item_names, reporter=self.reporter, @@ -595,10 +595,10 @@ def __init__( self._client = backend_service_provider def validate_project_type(self): - if self._project.project_type != constances.ProjectType.VIDEO.value: + if self._project.type != constances.ProjectType.VIDEO.value: raise AppException( "The function is not supported for" - f" {constances.ProjectType.get_name(self._project.project_type)} projects." + f" {constances.ProjectType.get_name(self._project.type)} projects." ) def execute(self): @@ -707,7 +707,7 @@ def execute(self): ] # noqa: E203 res = self._client.upload_priority_scores( team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, folder_id=self._folder.uuid, priorities=priorities_to_upload, ) diff --git a/src/superannotate/lib/core/usecases/folders.py b/src/superannotate/lib/core/usecases/folders.py index b64f26cd0..a9733652f 100644 --- a/src/superannotate/lib/core/usecases/folders.py +++ b/src/superannotate/lib/core/usecases/folders.py @@ -52,7 +52,7 @@ def validate_folder(self): def execute(self): if self.is_valid(): - self._folder.project_id = self._project.uuid + self._folder.project_id = self._project.id self._response.data = self._folders.insert(self._folder) if self._response.data.name not in (self._origin_name, self._folder.name): logger.warning( @@ -80,7 +80,7 @@ def execute(self): condition = ( Condition("name", self._folder_name, EQ) & Condition("team_id", self._team_id, EQ) - & Condition("project_id", self._project.uuid, EQ) + & Condition("project_id", self._project.id, EQ) ) try: self._response.data = self._folders.get_one(condition) @@ -108,7 +108,7 @@ def __init__( def execute(self): condition = ( self._condition - & Condition("project_id", self._project.uuid, EQ) + & Condition("project_id", self._project.id, EQ) & Condition("team_id", self._project.team_id, EQ) & Condition("includeUsers", self._include_users, EQ) ) @@ -195,7 +195,7 @@ def __init__( def execute(self): is_assigned = self._service.assign_folder( team_id=self._project_entity.team_id, - project_id=self._project_entity.uuid, + project_id=self._project_entity.id, folder_name=self._folder.name, users=self._users, ) diff --git a/src/superannotate/lib/core/usecases/images.py b/src/superannotate/lib/core/usecases/images.py index a35a835e4..faf8422da 100644 --- a/src/superannotate/lib/core/usecases/images.py +++ b/src/superannotate/lib/core/usecases/images.py @@ -26,8 +26,8 @@ from lib.core.entities import ImageEntity from lib.core.entities import ImageInfoEntity from lib.core.entities import ProjectEntity -from lib.core.entities import ProjectSettingEntity from lib.core.entities import S3FileEntity +from lib.core.entities import SettingEntity from lib.core.enums import ImageQuality from lib.core.enums import ProjectType from lib.core.exceptions import AppException @@ -69,9 +69,9 @@ def __init__( self._image_name_prefix = image_name_prefix def validate_project_type(self): - if self._project.project_type in constances.LIMITED_FUNCTIONS: + if self._project.type in constances.LIMITED_FUNCTIONS: raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.project_type] + constances.LIMITED_FUNCTIONS[self._project.type] ) def validate_annotation_status(self): @@ -86,7 +86,7 @@ def execute(self): if self.is_valid(): condition = ( 
Condition("team_id", self._project.team_id, EQ) - & Condition("project_id", self._project.uuid, EQ) + & Condition("project_id", self._project.id, EQ) & Condition("folder_id", self._folder.uuid, EQ) ) if self._image_name_prefix: @@ -121,7 +121,7 @@ def execute(self): images = ( GetBulkImages( service=self._service, - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, folder_id=self._folder.uuid, images=[self._image_name], @@ -157,7 +157,7 @@ def annotation_status(self): def execute(self): condition = ( Condition("team_id", self._project.team_id, EQ) - & Condition("project_id", self._project.uuid, EQ) + & Condition("project_id", self._project.id, EQ) & Condition("folder_id", 0, EQ) ) if self._annotation_status: @@ -225,7 +225,7 @@ def __init__( def _validate_limitations(self, to_upload_count): response = self._backend_service.get_limitations( team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, folder_id=self._folder.uuid, ) if not response.ok: @@ -256,7 +256,7 @@ def upload_state_code(self) -> int: def execute(self): response = self._backend_service.get_bulk_images( - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, folder_id=self._folder.uuid, images=[image.name for image in self._attachments], @@ -282,7 +282,7 @@ def execute(self): return self._response if to_upload: backend_response = self._backend_service.attach_files( - project_id=self._project.uuid, + project_id=self._project.id, folder_id=self._folder.uuid, team_id=self._project.team_id, files=to_upload, @@ -367,9 +367,9 @@ def default_annotation(self): @property def annotation_json_name(self): - if self._project.project_type == constances.ProjectType.VECTOR.value: + if self._project.type == constances.ProjectType.VECTOR.value: return f"{self._image.name}___objects.json" - elif self._project.project_type == constances.ProjectType.PIXEL.value: + elif self._project.type == constances.ProjectType.PIXEL.value: return f"{self._image.name}___pixel.json" @property @@ -392,7 +392,7 @@ def upload_auth_data(self): ) def validate_project_type(self): - if self._from_project.project_type != self._to_project.project_type: + if self._from_project.type != self._to_project.type: raise AppValidationException("Projects are different.") def execute(self): @@ -487,7 +487,7 @@ def execute(self): self.to_project_s3_repo.insert(file) if ( - self._to_project.project_type == constances.ProjectType.PIXEL.value + self._to_project.type == constances.ProjectType.PIXEL.value and annotations.get("annotation_bluemap_path") and annotations["annotation_bluemap_path"]["exist"] ): @@ -545,7 +545,7 @@ def __init__( def _validate_limitations(self, images_to_copy_count): response = self._backend_service.get_limitations( team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, folder_id=self._to_folder.uuid, ) if not response.ok: @@ -556,15 +556,15 @@ def _validate_limitations(self, images_to_copy_count): raise AppValidationException(constances.COPY_PROJECT_LIMIT_ERROR_MESSAGE) def validate_project_type(self): - if self._project.project_type in constances.LIMITED_FUNCTIONS: + if self._project.type in constances.LIMITED_FUNCTIONS: raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.project_type] + constances.LIMITED_FUNCTIONS[self._project.type] ) def execute(self): if self.is_valid(): images = self._backend_service.get_bulk_images( - project_id=self._project.uuid, + 
project_id=self._project.id, team_id=self._project.team_id, folder_id=self._to_folder.uuid, images=self._image_names, @@ -579,12 +579,12 @@ def execute(self): return self._response for i in range(0, len(images_to_copy), self.CHUNK_SIZE): - poll_id = self._backend_service.copy_images_between_folders_transaction( + poll_id = self._backend_service.copy_items_between_folders_transaction( team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, from_folder_id=self._from_folder.uuid, to_folder_id=self._to_folder.uuid, - images=self._image_names[i : i + self.CHUNK_SIZE], + items=self._image_names[i : i + self.CHUNK_SIZE], include_annotations=self._include_annotations, include_pin=self._include_pin, ) @@ -596,7 +596,7 @@ def execute(self): timeout_start = time.time() while time.time() < timeout_start + await_time: done_count, skipped_count = self._backend_service.get_progress( - self._project.uuid, self._project.team_id, poll_id + self._project.id, self._project.team_id, poll_id ) if done_count + skipped_count == len(images_to_copy): break @@ -606,42 +606,6 @@ def execute(self): return self._response -class GetImageMetadataUseCase(BaseUseCase): - def __init__( - self, - image_name: str, - project: ProjectEntity, - folder: FolderEntity, - service: SuperannotateServiceProvider, - ): - super().__init__() - self._image_name = image_name - self._project = project - self._service = service - self._folder = folder - - def validate_project_type(self): - if self._project.project_type in constances.LIMITED_FUNCTIONS: - raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.project_type] - ) - - def execute(self): - if self.is_valid(): - data = self._service.get_bulk_images( - images=[self._image_name], - team_id=self._project.team_id, - project_id=self._project.uuid, - folder_id=self._folder.uuid, - ) - if data: - image_entity = ImageEntity.from_dict(**data[0]) - self._response.data = image_entity - else: - self._response.errors = AppException("Image not found.") - return self._response - - class ImagesBulkMoveUseCase(BaseUseCase): """ Copy images in bulk between folders in a project. 
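A condensed sketch of the chunk-and-poll pattern the bulk copy use case above relies on: item names are sent in CHUNK_SIZE batches through copy_items_between_folders_transaction, and the returned poll id is checked via get_progress until the done and skipped counts cover the batch. The backend object and the timing constants here are stand-ins, not values taken from this patch:

import time

CHUNK_SIZE = 1000            # assumed batch size
POLL_INTERVAL = 4            # seconds between progress checks (assumed)
AWAIT_BUDGET_PER_ITEM = 0.3  # assumed per-item timeout budget, in seconds


def copy_in_chunks(backend, team_id, project_id, from_folder_id, to_folder_id, names):
    # backend stands in for the SuperannotateServiceProvider used by the use case.
    for start in range(0, len(names), CHUNK_SIZE):
        batch = names[start:start + CHUNK_SIZE]
        poll_id = backend.copy_items_between_folders_transaction(
            team_id=team_id,
            project_id=project_id,
            from_folder_id=from_folder_id,
            to_folder_id=to_folder_id,
            items=batch,
            include_annotations=True,
        )
        # Poll until the backend reports the whole batch as done or skipped,
        # giving up once the per-batch time budget is exhausted.
        deadline = time.time() + len(batch) * AWAIT_BUDGET_PER_ITEM
        while time.time() < deadline:
            done_count, skipped_count = backend.get_progress(project_id, team_id, poll_id)
            if done_count + skipped_count == len(batch):
                break
            time.sleep(POLL_INTERVAL)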
@@ -668,7 +632,7 @@ def __init__( def validate_limitations(self): response = self._backend_service.get_limitations( team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, folder_id=self._to_folder.uuid, ) to_upload_count = len(self._image_names) @@ -686,7 +650,7 @@ def execute(self): moved_images.extend( self._backend_service.move_images_between_folders( team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, from_folder_id=self._from_folder.uuid, to_folder_id=self._to_folder.uuid, images=self._image_names[i : i + self.CHUNK_SIZE], # noqa: E203 @@ -958,9 +922,9 @@ def __init__( ) def validate_project_type(self): - if self._project.project_type in constances.LIMITED_FUNCTIONS: + if self._project.type in constances.LIMITED_FUNCTIONS: raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.project_type] + constances.LIMITED_FUNCTIONS[self._project.type] ) def validate_variant_type(self): @@ -1005,7 +969,7 @@ def execute(self): fuse_image = ( CreateFuseImageUseCase( project_type=constances.ProjectType.get_name( - self._project.project_type + self._project.type ), image_path=download_path, classes=[ @@ -1059,7 +1023,7 @@ def __init__( @property def s3_repo(self): self._auth_data = self._backend_client.get_s3_upload_auth_token( - self._project.team_id, self._folder.uuid, self._project.uuid + self._project.team_id, self._folder.uuid, self._project.id ) if "error" in self._auth_data: raise AppException(self._auth_data.get("error")) @@ -1075,16 +1039,16 @@ def validate_project_type(self): raise AppValidationException(constances.UPLOADING_UPLOAD_STATE_ERROR) def validate_deprecation(self): - if self._project.project_type in [ + if self._project.type in [ constances.ProjectType.VIDEO.value, constances.ProjectType.DOCUMENT.value, ]: - raise AppException(constances.LIMITED_FUNCTIONS[self._project.project_type]) + raise AppException(constances.LIMITED_FUNCTIONS[self._project.type]) def validate_limitations(self): response = self._backend_client.get_limitations( team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, folder_id=self._folder.uuid, ) if response.data.folder_limit.remaining_image_count < 1: @@ -1109,7 +1073,7 @@ def validate_image_name_uniqueness(self): image_entities = ( GetBulkImages( service=self._backend_client, - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, folder_id=self._folder.uuid, images=[ @@ -1219,7 +1183,7 @@ def exclude_file_patterns(self): def validate_limitations(self): response = self._backend_client.get_limitations( team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, folder_id=self._folder.uuid, ) if not response.ok: @@ -1257,9 +1221,9 @@ def validate_project_type(self): raise AppValidationException(constances.UPLOADING_UPLOAD_STATE_ERROR) def validate_deprecation(self): - if self._project.project_type in constances.LIMITED_FUNCTIONS: + if self._project.type in constances.LIMITED_FUNCTIONS: raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.project_type] + constances.LIMITED_FUNCTIONS[self._project.type] ) @property @@ -1268,7 +1232,7 @@ def auth_data(self): response = self._backend_client.get_s3_upload_auth_token( team_id=self._project.team_id, folder_id=self._folder.uuid, - project_id=self._project.uuid, + project_id=self._project.id, ) if "error" in response: raise AppException(response.get("error")) @@ -1358,7 +1322,7 @@ 
def filter_paths(self, paths: List[str]): image_entities = ( GetBulkImages( service=self._backend_client, - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, folder_id=self._folder.uuid, images=[image.split("/")[-1] for image in filtered_paths], @@ -1514,7 +1478,7 @@ class UploadImageS3UseCase(BaseUseCase): def __init__( self, project: ProjectEntity, - project_settings: List[ProjectSettingEntity], + project_settings: List[SettingEntity], image_path: str, image: io.BytesIO, s3_repo: BaseManageableRepository, @@ -1532,7 +1496,7 @@ def __init__( @property def max_resolution(self) -> int: - if self._project.project_type == ProjectType.PIXEL.value: + if self._project.type == ProjectType.PIXEL.value: return constances.MAX_PIXEL_RESOLUTION return constances.MAX_VECTOR_RESOLUTION @@ -1644,7 +1608,7 @@ def validate_limitations(self): attachments_count = self.attachments_count response = self._backend_service.get_limitations( team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, folder_id=self._folder.uuid, ) if not response.ok: @@ -1714,7 +1678,7 @@ def __init__( backend_service: SuperannotateServiceProvider, images: BaseManageableRepository, s3_repo, - project_settings: List[ProjectSettingEntity], + project_settings: List[SettingEntity], include_annotations: Optional[bool] = True, copy_annotation_status: Optional[bool] = True, copy_pin: Optional[bool] = True, @@ -1745,24 +1709,24 @@ def validate_copy_path(self): ) def validate_project_type(self): - if self._from_project.project_type in ( + if self._from_project.type in ( constances.ProjectType.VIDEO.value, constances.ProjectType.DOCUMENT.value, ): raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._from_project.project_type] + constances.LIMITED_FUNCTIONS[self._from_project.type] ) def validate_limitations(self): response = self._backend_service.get_limitations( team_id=self._to_project.team_id, - project_id=self._to_project.uuid, + project_id=self._to_project.id, folder_id=self._to_folder.uuid, ) if not response.ok: raise AppValidationException(response.error) - if self._move and self._from_project.uuid == self._to_project.uuid: + if self._move and self._from_project.id == self._to_project.id: if self._from_folder.uuid == self._to_folder.uuid: raise AppValidationException( "Cannot move image if source_project == destination_project." 
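The various validate_limitations methods in this file all follow the same shape: fetch the remaining folder, project and (when present) user image quotas for the destination, and refuse the operation if the batch would exceed any of them. A hedged, self-contained sketch of that check; the Limit/Limits containers below are illustrative stand-ins for the limitation response objects used above:

from dataclasses import dataclass
from typing import Optional

@dataclass
class Limit:
    remaining_image_count: int

@dataclass
class Limits:
    folder_limit: Limit
    project_limit: Limit
    user_limit: Optional[Limit] = None

def check_limits(limits: Limits, to_add: int) -> None:
    # Mirrors the order of checks in the use cases: folder, then project, then user.
    if to_add > limits.folder_limit.remaining_image_count:
        raise ValueError("folder limit reached")
    if to_add > limits.project_limit.remaining_image_count:
        raise ValueError("project limit reached")
    if limits.user_limit and to_add > limits.user_limit.remaining_image_count:
        raise ValueError("user limit reached")

check_limits(Limits(Limit(100), Limit(1000)), to_add=50)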
@@ -1781,7 +1745,7 @@ def validate_limitations(self): @property def s3_repo(self): self._auth_data = self._backend_service.get_s3_upload_auth_token( - self._to_project.team_id, self._to_folder.uuid, self._to_project.uuid + self._to_project.team_id, self._to_folder.uuid, self._to_project.id ) if "error" in self._auth_data: raise AppException(self._auth_data.get("error")) @@ -1818,7 +1782,7 @@ def execute(self) -> Response: auth_data = self._backend_service.get_s3_upload_auth_token( team_id=self._to_project.team_id, folder_id=self._to_folder.uuid, - project_id=self._to_project.uuid, + project_id=self._to_project.id, ) if "error" in auth_data: raise AppException(auth_data["error"]) @@ -1873,7 +1837,7 @@ def execute(self) -> Response: if self._image_names: for idx in range(0, len(self._image_names), self.CHUNK_SIZE): response = self._backend_service.delete_image_annotations( - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, folder_id=self._folder.uuid, image_names=self._image_names[ @@ -1884,7 +1848,7 @@ def execute(self) -> Response: polling_states[response.get("poll_id")] = False else: response = self._backend_service.delete_image_annotations( - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, folder_id=self._folder.uuid, ) @@ -1899,7 +1863,7 @@ def execute(self) -> Response: while time.time() < timeout_start + self.POLL_AWAIT_TIME: progress = int( self._backend_service.get_annotations_delete_progress( - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, poll_id=poll_id, ).get("process", -1) @@ -1949,9 +1913,9 @@ def __init__( self._image_names = image_names def validate_project_type(self): - if self._project.project_type in constances.LIMITED_FUNCTIONS: + if self._project.type in constances.LIMITED_FUNCTIONS: raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.project_type] + constances.LIMITED_FUNCTIONS[self._project.type] ) def execute(self): @@ -1961,7 +1925,7 @@ def execute(self): image.uuid for image in GetBulkImages( service=self._backend_service, - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, folder_id=self._folder.uuid, images=self._image_names, @@ -1972,14 +1936,14 @@ def execute(self): else: condition = ( Condition("team_id", self._project.team_id, EQ) - & Condition("project_id", self._project.uuid, EQ) + & Condition("project_id", self._project.id, EQ) & Condition("folder_id", self._folder.uuid, EQ) ) image_ids = [image.uuid for image in self._images.get_all(condition)] for i in range(0, len(image_ids), self.CHUNK_SIZE): self._backend_service.delete_images( - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, image_ids=image_ids[i : i + self.CHUNK_SIZE], # noqa: E203 ) @@ -2017,9 +1981,9 @@ def image_use_case(self): ) def validate_project_type(self): - if self._project.project_type in constances.LIMITED_FUNCTIONS: + if self._project.type in constances.LIMITED_FUNCTIONS: raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.project_type] + constances.LIMITED_FUNCTIONS[self._project.type] ) @property @@ -2098,7 +2062,7 @@ def execute(self): } image_response = self.image_use_case.execute() token = self._service.get_download_token( - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, folder_id=self._folder.uuid, image_id=image_response.data.uuid, @@ -2106,7 +2070,7 @@ def 
execute(self): credentials = token["annotations"]["MAIN"][0] annotation_json_creds = credentials["annotation_json_path"] - if self._project.project_type == constances.ProjectType.VECTOR.value: + if self._project.type == constances.ProjectType.VECTOR.value: file_postfix = "___objects.json" else: file_postfix = "___pixel.json" @@ -2122,7 +2086,7 @@ def execute(self): data["annotation_json"] = response.json() data["annotation_json_filename"] = f"{self._image_name}{file_postfix}" mask_path = None - if self._project.project_type == constances.ProjectType.PIXEL.value: + if self._project.type == constances.ProjectType.PIXEL.value: annotation_blue_map_creds = credentials["annotation_bluemap_path"] response = requests.get( url=annotation_blue_map_creds["url"], @@ -2177,9 +2141,9 @@ def image_use_case(self): return use_case def validate_project_type(self): - if self._project.project_type in constances.LIMITED_FUNCTIONS: + if self._project.type in constances.LIMITED_FUNCTIONS: raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.project_type] + constances.LIMITED_FUNCTIONS[self._project.type] ) def execute(self): @@ -2192,13 +2156,13 @@ def execute(self): } image_response = self.image_use_case.execute() token = self._service.get_download_token( - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, folder_id=self._folder.uuid, image_id=image_response.data.uuid, ) credentials = token["annotations"]["MAIN"][0] - if self._project.project_type == constances.ProjectType.VECTOR.value: + if self._project.type == constances.ProjectType.VECTOR.value: file_postfix = "___objects.json" else: file_postfix = "___pixel.json" @@ -2215,7 +2179,7 @@ def execute(self): return self._response data["annotation_json"] = response.json() data["annotation_json_filename"] = f"{self._image_name}{file_postfix}" - if self._project.project_type == constances.ProjectType.PIXEL.value: + if self._project.type == constances.ProjectType.PIXEL.value: annotation_blue_map_creds = credentials["annotation_bluemap_path"] response = requests.get( url=annotation_blue_map_creds["url"], @@ -2247,9 +2211,9 @@ def __init__( self._service = service def validate_project_type(self): - if self._project.project_type in constances.LIMITED_FUNCTIONS: + if self._project.type in constances.LIMITED_FUNCTIONS: raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.project_type] + constances.LIMITED_FUNCTIONS[self._project.type] ) def execute(self): @@ -2257,7 +2221,7 @@ def execute(self): for i in range(0, len(self._image_names), self.CHUNK_SIZE): is_assigned = self._service.assign_images( team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, folder_name=self._folder.name, user=self._user, image_names=self._image_names[ @@ -2293,7 +2257,7 @@ def execute(self): for i in range(0, len(self._image_names), self.CHUNK_SIZE): is_un_assigned = self._service.un_assign_images( team_id=self._project_entity.team_id, - project_id=self._project_entity.uuid, + project_id=self._project_entity.id, folder_name=self._folder.name, image_names=self._image_names[i : i + self.CHUNK_SIZE], # noqa: E203 ) @@ -2320,7 +2284,7 @@ def __init__( def execute(self): is_un_assigned = self._service.un_assign_folder( team_id=self._project_entity.team_id, - project_id=self._project_entity.uuid, + project_id=self._project_entity.id, folder_name=self._folder.name, ) if not is_un_assigned: @@ -2354,10 +2318,8 @@ def __init__( def validate_project_type(self): project = 
self._projects.get_one(uuid=self._project_id, team_id=self._team_id) - if project.project_type in constances.LIMITED_FUNCTIONS: - raise AppValidationException( - constances.LIMITED_FUNCTIONS[project.project_type] - ) + if project.type in constances.LIMITED_FUNCTIONS: + raise AppValidationException(constances.LIMITED_FUNCTIONS[project.type]) def execute(self): if self.is_valid(): @@ -2412,12 +2374,11 @@ def validate_uniqueness(self): def validate_project_type(self): if ( - self._project.project_type - in (ProjectType.PIXEL.value, ProjectType.VIDEO.value) + self._project.type in (ProjectType.PIXEL.value, ProjectType.VIDEO.value) and self._annotation_class.type == "tag" ): raise AppException( - f"Predefined tagging functionality is not supported for projects of type {ProjectType.get_name(self._project.project_type)}." + f"Predefined tagging functionality is not supported for projects of type {ProjectType.get_name(self._project.type)}." ) def execute(self): @@ -2527,12 +2488,12 @@ def __init__( self._project = project def validate_project_type(self): - if self._project.project_type in ( + if self._project.type in ( ProjectType.PIXEL.value, ProjectType.VIDEO.value, ) and any([True for i in self._annotation_classes if i.type == "tag"]): raise AppException( - f"Predefined tagging functionality is not supported for projects of type {ProjectType.get_name(self._project.project_type)}." + f"Predefined tagging functionality is not supported for projects of type {ProjectType.get_name(self._project.type)}." ) def execute(self): @@ -2630,7 +2591,7 @@ def limitation_response(self): if not self._limitation_response: self._limitation_response = self._backend_service.get_limitations( team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, folder_id=self._folder.uuid, ) if not self._limitation_response.ok: @@ -2664,9 +2625,9 @@ def limit(self): return min(limits) def validate_project_type(self): - if self._project.project_type in constances.LIMITED_FUNCTIONS: + if self._project.type in constances.LIMITED_FUNCTIONS: raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.project_type] + constances.LIMITED_FUNCTIONS[self._project.type] ) def execute(self): @@ -2771,9 +2732,9 @@ def extensions(self): return self._extensions def validate_project_type(self): - if self._project.project_type in constances.LIMITED_FUNCTIONS: + if self._project.type in constances.LIMITED_FUNCTIONS: raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.project_type] + constances.LIMITED_FUNCTIONS[self._project.type] ) def validate_paths(self): @@ -2802,7 +2763,7 @@ def execute(self) -> Response: duplicate_images = ( GetBulkImages( service=self._service, - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, folder_id=self._folder.uuid, images=frame_names, diff --git a/src/superannotate/lib/core/usecases/integrations.py b/src/superannotate/lib/core/usecases/integrations.py index 7684ca66f..c61379de0 100644 --- a/src/superannotate/lib/core/usecases/integrations.py +++ b/src/superannotate/lib/core/usecases/integrations.py @@ -72,7 +72,7 @@ def execute(self) -> Response: ) attached = self._client.attach_integrations( self._team.uuid, - self._project.uuid, + self._project.id, integration.id, self._folder.uuid, self._folder_path, diff --git a/src/superannotate/lib/core/usecases/items.py b/src/superannotate/lib/core/usecases/items.py index 00bcffdce..0ee456f0a 100644 --- a/src/superannotate/lib/core/usecases/items.py +++ 
b/src/superannotate/lib/core/usecases/items.py @@ -4,6 +4,7 @@ import superannotate.lib.core as constances from lib.core.conditions import Condition from lib.core.conditions import CONDITION_EQ as EQ +from lib.core.entities import AttachmentEntity from lib.core.entities import DocumentEntity from lib.core.entities import Entity from lib.core.entities import FolderEntity @@ -12,6 +13,8 @@ from lib.core.entities import TmpImageEntity from lib.core.entities import VideoEntity from lib.core.exceptions import AppException +from lib.core.exceptions import AppValidationException +from lib.core.exceptions import BackendError from lib.core.reporter import Reporter from lib.core.repositories import BaseReadOnlyRepository from lib.core.response import Response @@ -39,20 +42,20 @@ def __init__( def serialize_entity(entity: Entity, project: ProjectEntity): if project.upload_state != constances.UploadState.EXTERNAL.value: entity.url = None - if project.project_type in ( + if project.type in ( constances.ProjectType.VECTOR.value, constances.ProjectType.PIXEL.value, ): tmp_entity = entity - if project.project_type == constances.ProjectType.VECTOR.value: + if project.type == constances.ProjectType.VECTOR.value: entity.segmentation_status = None if project.upload_state == constances.UploadState.EXTERNAL.value: tmp_entity.prediction_status = None tmp_entity.segmentation_status = None return TmpImageEntity(**tmp_entity.dict(by_alias=True)) - elif project.project_type == constances.ProjectType.VIDEO.value: + elif project.type == constances.ProjectType.VIDEO.value: return VideoEntity(**entity.dict(by_alias=True)) - elif project.project_type == constances.ProjectType.DOCUMENT.value: + elif project.type == constances.ProjectType.DOCUMENT.value: return DocumentEntity(**entity.dict(by_alias=True)) return entity @@ -61,7 +64,7 @@ def execute(self) -> Response: condition = ( Condition("name", self._item_name, EQ) & Condition("team_id", self._project.team_id, EQ) - & Condition("project_id", self._project.uuid, EQ) + & Condition("project_id", self._project.id, EQ) & Condition("folder_id", self._folder.uuid, EQ) ) entity = self._items.get_one(condition) @@ -90,7 +93,7 @@ def __init__( def validate_query(self): response = self._backend_client.validate_saqul_query( - self._project.team_id, self._project.uuid, self._query + self._project.team_id, self._project.id, self._query ) if response.get("error"): raise AppException(response["error"]) @@ -105,12 +108,15 @@ def execute(self) -> Response: if self.is_valid(): service_response = self._backend_client.saqul_query( self._project.team_id, - self._project.uuid, + self._project.id, self._query, folder_id=None if self._folder.name == "root" else self._folder.uuid, ) if service_response.ok: - data = parse_obj_as(List[TmpBaseEntity], [Entity.map_fields(i) for i in service_response.data]) + data = parse_obj_as( + List[TmpBaseEntity], + [Entity.map_fields(i) for i in service_response.data], + ) for i, item in enumerate(data): data[i] = GetItem.serialize_entity(item, self._project) self._response.data = data @@ -145,7 +151,7 @@ def validate_recursive_case(self): def execute(self) -> Response: if self.is_valid(): self._search_condition &= Condition("team_id", self._project.team_id, EQ) - self._search_condition &= Condition("project_id", self._project.uuid, EQ) + self._search_condition &= Condition("project_id", self._project.id, EQ) if not self._recursive: self._search_condition &= Condition("folder_id", self._folder.uuid, EQ) @@ -160,12 +166,13 @@ def execute(self) -> Response: 
items = [] folders = self._folders.get_all( Condition("team_id", self._project.team_id, EQ) - & Condition("project_id", self._project.uuid, EQ), + & Condition("project_id", self._project.id, EQ), ) folders.append(self._folder) for folder in folders: tmp = self._items.get_all( - copy.deepcopy(self._search_condition) & Condition("folder_id", folder.uuid, EQ) + copy.deepcopy(self._search_condition) + & Condition("folder_id", folder.uuid, EQ) ) items.extend( [ @@ -178,3 +185,359 @@ def execute(self) -> Response: ) self._response.data = items return self._response + + +class AttachItems(BaseReportableUseCae): + CHUNK_SIZE = 500 + + def __init__( + self, + reporter: Reporter, + project: ProjectEntity, + folder: FolderEntity, + attachments: List[AttachmentEntity], + annotation_status: str, + backend_service_provider: SuperannotateServiceProvider, + upload_state_code: int = constances.UploadState.EXTERNAL.value, + ): + super().__init__(reporter) + self._project = project + self._folder = folder + self._attachments = attachments + self._annotation_status_code = constances.AnnotationStatus.get_value( + annotation_status + ) + self._upload_state_code = upload_state_code + self._backend_service = backend_service_provider + self._attachments_count = None + + @property + def attachments_count(self): + if not self._attachments_count: + self._attachments_count = len(self._attachments) + return self._attachments_count + + def validate_limitations(self): + attachments_count = self.attachments_count + response = self._backend_service.get_limitations( + team_id=self._project.team_id, + project_id=self._project.id, + folder_id=self._folder.uuid, + ) + if not response.ok: + raise AppValidationException(response.error) + if attachments_count > response.data.folder_limit.remaining_image_count: + raise AppValidationException(constances.ATTACH_FOLDER_LIMIT_ERROR_MESSAGE) + elif attachments_count > response.data.project_limit.remaining_image_count: + raise AppValidationException(constances.ATTACH_PROJECT_LIMIT_ERROR_MESSAGE) + elif ( + response.data.user_limit + and attachments_count > response.data.user_limit.remaining_image_count + ): + raise AppValidationException(constances.ATTACH_USER_LIMIT_ERROR_MESSAGE) + + def validate_upload_state(self): + if self._project.upload_state == constances.UploadState.BASIC.value: + raise AppValidationException(constances.ATTACHING_UPLOAD_STATE_ERROR) + + @staticmethod + def generate_meta(): + return {"width": None, "height": None} + + def execute(self) -> Response: + if self.is_valid(): + duplications = [] + attached = [] + self.reporter.start_progress(self.attachments_count, "Attaching URLs") + for i in range(0, self.attachments_count, self.CHUNK_SIZE): + attachments = self._attachments[i : i + self.CHUNK_SIZE] # noqa: E203 + response = self._backend_service.get_bulk_images( + project_id=self._project.id, + team_id=self._project.team_id, + folder_id=self._folder.uuid, + images=[attachment.name for attachment in attachments], + ) + if isinstance(response, dict) and "error" in response: + raise AppException(response["error"]) + duplications.extend([image["name"] for image in response]) + to_upload = [] + to_upload_meta = {} + for attachment in attachments: + if attachment.name not in duplications: + to_upload.append( + {"name": attachment.name, "path": attachment.url} + ) + to_upload_meta[attachment.name] = self.generate_meta() + if to_upload: + backend_response = self._backend_service.attach_files( + project_id=self._project.id, + folder_id=self._folder.uuid, + 
team_id=self._project.team_id, + files=to_upload, + annotation_status_code=self._annotation_status_code, + upload_state_code=self._upload_state_code, + meta=to_upload_meta, + ) + if "error" in backend_response: + self._response.errors = AppException(backend_response["error"]) + else: + attached.extend(backend_response) + self.reporter.update_progress(len(attachments)) + self.reporter.finish_progress() + self._response.data = attached, duplications + return self._response + + +class CopyItems(BaseReportableUseCae): + """ + Copy items in bulk between folders in a project. + Return skipped item names. + """ + + CHUNK_SIZE = 1000 + + def __init__( + self, + reporter: Reporter, + project: ProjectEntity, + from_folder: FolderEntity, + to_folder: FolderEntity, + item_names: List[str], + items: BaseReadOnlyRepository, + backend_service_provider: SuperannotateServiceProvider, + include_annotations: bool, + ): + super().__init__(reporter) + self._project = project + self._from_folder = from_folder + self._to_folder = to_folder + self._item_names = item_names + self._items = items + self._backend_service = backend_service_provider + self._include_annotations = include_annotations + + def _validate_limitations(self, items_count): + response = self._backend_service.get_limitations( + team_id=self._project.team_id, + project_id=self._project.id, + folder_id=self._to_folder.uuid, + ) + if not response.ok: + raise AppValidationException(response.error) + if items_count > response.data.folder_limit.remaining_image_count: + raise AppValidationException(constances.COPY_FOLDER_LIMIT_ERROR_MESSAGE) + if items_count > response.data.project_limit.remaining_image_count: + raise AppValidationException(constances.COPY_PROJECT_LIMIT_ERROR_MESSAGE) + + def validate_item_names(self): + if self._item_names: + self._item_names = list(set(self._item_names)) + + def execute(self): + if self.is_valid(): + if self._item_names: + items = self._item_names + else: + condition = ( + Condition("team_id", self._project.team_id, EQ) + & Condition("project_id", self._project.id, EQ) + & Condition("folder_id", self._from_folder.uuid, EQ) + ) + items = [item.name for item in self._items.get_all(condition)] + + existing_items = self._backend_service.get_bulk_images( + project_id=self._project.id, + team_id=self._project.team_id, + folder_id=self._to_folder.uuid, + images=items, + ) + duplications = [item["name"] for item in existing_items] + items_to_copy = list(set(items) - set(duplications)) + skipped_items = duplications + try: + self._validate_limitations(len(items_to_copy)) + except AppValidationException as e: + self._response.errors = e + return self._response + if items_to_copy: + for i in range(0, len(items_to_copy), self.CHUNK_SIZE): + chunk_to_copy = items_to_copy[i : i + self.CHUNK_SIZE] # noqa: E203 + poll_id = self._backend_service.copy_items_between_folders_transaction( + team_id=self._project.team_id, + project_id=self._project.id, + from_folder_id=self._from_folder.uuid, + to_folder_id=self._to_folder.uuid, + items=chunk_to_copy, + include_annotations=self._include_annotations, + ) + if not poll_id: + skipped_items.extend(chunk_to_copy) + continue + try: + self._backend_service.await_progress( + self._project.id, + self._project.team_id, + poll_id=poll_id, + items_count=len(chunk_to_copy), + ) + except BackendError as e: + self._response.errors = AppException(e) + return self._response + existing_items = self._backend_service.get_bulk_images( + project_id=self._project.id, + team_id=self._project.team_id, + 
folder_id=self._to_folder.uuid, + images=items, + ) + existing_item_names_set = {item["name"] for item in existing_items} + items_to_copy_names_set = set(items_to_copy) + copied_items = existing_item_names_set.intersection( + items_to_copy_names_set + ) + skipped_items.extend(list(items_to_copy_names_set - copied_items)) + self.reporter.log_info( + f"Copied {len(copied_items)}/{len(items)} item(s) from " + f"{self._project.name}{'' if self._from_folder.is_root else f'/{self._from_folder.name}'} to " + f"{self._project.name}{'' if self._to_folder.is_root else f'/{self._to_folder.name}'}" + ) + self._response.data = skipped_items + return self._response + + +class MoveItems(BaseReportableUseCae): + CHUNK_SIZE = 1000 + + def __init__( + self, + reporter: Reporter, + project: ProjectEntity, + from_folder: FolderEntity, + to_folder: FolderEntity, + item_names: List[str], + items: BaseReadOnlyRepository, + backend_service_provider: SuperannotateServiceProvider, + ): + super().__init__(reporter) + self._project = project + self._from_folder = from_folder + self._to_folder = to_folder + self._item_names = item_names + self._items = items + self._backend_service = backend_service_provider + + def validate_item_names(self): + if self._item_names: + self._item_names = list(set(self._item_names)) + + def _validate_limitations(self, items_count): + response = self._backend_service.get_limitations( + team_id=self._project.team_id, + project_id=self._project.id, + folder_id=self._to_folder.uuid, + ) + if not response.ok: + raise AppValidationException(response.error) + if items_count > response.data.folder_limit.remaining_image_count: + raise AppValidationException(constances.MOVE_FOLDER_LIMIT_ERROR_MESSAGE) + if items_count > response.data.project_limit.remaining_image_count: + raise AppValidationException(constances.MOVE_PROJECT_LIMIT_ERROR_MESSAGE) + + def execute(self): + if self.is_valid(): + if not self._item_names: + condition = ( + Condition("team_id", self._project.team_id, EQ) + & Condition("project_id", self._project.id, EQ) + & Condition("folder_id", self._from_folder.uuid, EQ) + ) + items = [item.name for item in self._items.get_all(condition)] + else: + items = self._item_names + try: + self._validate_limitations(len(items)) + except AppValidationException as e: + self._response.errors = e + return self._response + moved_images = [] + for i in range(0, len(items), self.CHUNK_SIZE): + moved_images.extend( + self._backend_service.move_images_between_folders( + team_id=self._project.team_id, + project_id=self._project.id, + from_folder_id=self._from_folder.uuid, + to_folder_id=self._to_folder.uuid, + images=items[i : i + self.CHUNK_SIZE], # noqa: E203 + ) + ) + self.reporter.log_info( + f"Moved {len(moved_images)}/{len(items)} item(s) from " + f"{self._project.name}{'' if self._from_folder.is_root else f'/{self._from_folder.name}'} to " + f"{self._project.name}{'' if self._to_folder.is_root else f'/{self._to_folder.name}'}" + ) + + self._response.data = list(set(items) - set(moved_images)) + return self._response + + +class SetAnnotationStatues(BaseReportableUseCae): + CHUNK_SIZE = 500 + ERROR_MESSAGE = "Failed to change status" + + def __init__( + self, + reporter: Reporter, + project: ProjectEntity, + folder: FolderEntity, + items: BaseReadOnlyRepository, + annotation_status: str, + backend_service_provider: SuperannotateServiceProvider, + item_names: List[str] = None, + ): + super().__init__(reporter) + self._project = project + self._folder = folder + self._item_names = item_names + 
self._items = items + self._annotation_status_code = constances.AnnotationStatus.get_value( + annotation_status + ) + self._backend_service = backend_service_provider + + def validate_items(self): + if not self._item_names: + condition = ( + Condition("team_id", self._project.team_id, EQ) + & Condition("project_id", self._project.id, EQ) + & Condition("folder_id", self._folder.uuid, EQ) + ) + self._item_names = [item.name for item in self._items.get_all(condition)] + return + existing_items = self._backend_service.get_bulk_images( + project_id=self._project.id, + team_id=self._project.team_id, + folder_id=self._folder.uuid, + images=self._item_names, + ) + if not existing_items: + raise AppValidationException(self.ERROR_MESSAGE) + if existing_items: + self._item_names = list( + {i["name"] for i in existing_items}.intersection(set(self._item_names)) + ) + + def execute(self): + if self.is_valid(): + for i in range(0, len(self._item_names), self.CHUNK_SIZE): + status_changed = self._backend_service.set_images_statuses_bulk( + image_names=self._item_names[ + i : i + self.CHUNK_SIZE + ], # noqa: E203, + team_id=self._project.team_id, + project_id=self._project.id, + folder_id=self._folder.uuid, + annotation_status=self._annotation_status_code, + ) + if not status_changed: + self._response.errors = AppException(self.ERROR_MESSAGE) + break + return self._response diff --git a/src/superannotate/lib/core/usecases/models.py b/src/superannotate/lib/core/usecases/models.py index c60aea5dd..9a2861811 100644 --- a/src/superannotate/lib/core/usecases/models.py +++ b/src/superannotate/lib/core/usecases/models.py @@ -39,6 +39,7 @@ def __init__( project: ProjectEntity, folder_names: List[str], backend_service_provider: SuperannotateServiceProvider, + folders: BaseManageableRepository, include_fuse: bool, only_pinned: bool, annotation_statuses: List[str] = None, @@ -50,6 +51,7 @@ def __init__( self._annotation_statuses = annotation_statuses self._include_fuse = include_fuse self._only_pinned = only_pinned + self._folders = folders def validate_only_pinned(self): if ( @@ -57,7 +59,7 @@ def validate_only_pinned(self): and self._only_pinned ): raise AppValidationException( - f"Pin functionality is not supported for projects containing {self._project.project_type} attached with URLs" + f"Pin functionality is not supported for projects containing {self._project.type} attached with URLs" ) def validate_fuse(self): @@ -66,9 +68,22 @@ def validate_fuse(self): and self._include_fuse ): raise AppValidationException( - f"Include fuse functionality is not supported for projects containing {self._project.project_type} attached with URLs" + f"Include fuse functionality is not supported for projects containing {self._project.type} attached with URLs" ) + def validate_folder_names(self): + if self._folder_names: + condition = ( + Condition("team_id", self._project.team_id, EQ) & + Condition("project_id", self._project.id, EQ) + ) + existing_folders = {folder.name for folder in self._folders.get_all(condition)} + folder_names_set = set(self._folder_names) + if not folder_names_set.issubset(existing_folders): + raise AppException( + f"Folder(s) {', '.join(folder_names_set - existing_folders)} does not exist" + ) + def execute(self): if self.is_valid(): if self._project.upload_state == constances.UploadState.EXTERNAL.value: @@ -85,7 +100,7 @@ def execute(self): ) response = self._backend_service.prepare_export( - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, 
folders=self._folder_names, annotation_statuses=self._annotation_statuses, @@ -100,7 +115,7 @@ def execute(self): report_message = f"[{', '.join(self._folder_names)}] " logger.info( f"Prepared export {response['name']} for project {self._project.name} " - f"{report_message}(project ID {self._project.uuid})." + f"{report_message}(project ID {self._project.id})." ) self._response.data = response @@ -122,7 +137,7 @@ def __init__( def execute(self): if self.is_valid(): data = self._service.get_exports( - team_id=self._project.team_id, project_id=self._project.uuid + team_id=self._project.team_id, project_id=self._project.id ) self._response.data = data if not self._return_metadata: @@ -198,12 +213,12 @@ def _upload_file_to_s3(_to_s3_bucket, _path, _s3_key) -> None: def download_to_local_storage(self, destination: str): exports = self._service.get_exports( - team_id=self._project.team_id, project_id=self._project.uuid + team_id=self._project.team_id, project_id=self._project.id ) export = next(filter(lambda i: i["name"] == self._export_name, exports), None) export = self._service.get_export( team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, export_id=export["id"], ) if not export: @@ -216,7 +231,7 @@ def download_to_local_storage(self, destination: str): export = self._service.get_export( team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, export_id=export["id"], ) if "error" in export: @@ -529,9 +544,9 @@ def __init__( self._folder = folder def validate_project_type(self): - if self._project.project_type in constances.LIMITED_FUNCTIONS: + if self._project.type in constances.LIMITED_FUNCTIONS: raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.project_type] + constances.LIMITED_FUNCTIONS[self._project.type] ) def execute(self): @@ -539,7 +554,7 @@ def execute(self): images = ( GetBulkImages( service=self._service, - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, folder_id=self._folder.uuid, images=self._images_list, @@ -569,7 +584,7 @@ def execute(self): res = self._service.run_prediction( team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, ml_model_id=ml_model.uuid, image_ids=image_ids, ) @@ -582,7 +597,7 @@ def execute(self): images_metadata = ( GetBulkImages( service=self._service, - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, folder_id=self._folder.uuid, images=self._images_list, diff --git a/src/superannotate/lib/core/usecases/projects.py b/src/superannotate/lib/core/usecases/projects.py index 4fee8e060..fa29c5e1d 100644 --- a/src/superannotate/lib/core/usecases/projects.py +++ b/src/superannotate/lib/core/usecases/projects.py @@ -1,5 +1,7 @@ import copy +import decimal from collections import defaultdict +from typing import Iterable from typing import List from typing import Type @@ -9,7 +11,7 @@ from lib.core.entities import AnnotationClassEntity from lib.core.entities import FolderEntity from lib.core.entities import ProjectEntity -from lib.core.entities import ProjectSettingEntity +from lib.core.entities import SettingEntity from lib.core.entities import TeamEntity from lib.core.entities import WorkflowEntity from lib.core.exceptions import AppException @@ -120,12 +122,12 @@ def work_flow_use_case(self): def execute(self): data = {} project = self._projects.get_one( - uuid=self._project.uuid, team_id=self._project.team_id + 
uuid=self._project.id, team_id=self._project.team_id ) data["project"] = project if self._include_complete_image_count: completed_images_data = self._service.bulk_get_folders( - self._project.team_id, [project.uuid] + self._project.team_id, [project.id] ) root_completed_count = 0 total_completed_count = 0 @@ -138,15 +140,12 @@ def execute(self): project.completed_images_count = total_completed_count if self._include_annotation_classes: - self.annotation_classes_use_case.execute() data["classes"] = self.annotation_classes_use_case.execute().data if self._include_settings: - self.settings_use_case.execute() - data["settings"] = self.settings_use_case.execute().data + data["project"].settings = self.settings_use_case.execute().data if self._include_workflow: - self.work_flow_use_case.execute() data["workflows"] = self.work_flow_use_case.execute().data if self._include_contributors: @@ -164,25 +163,55 @@ def __init__( project: ProjectEntity, projects: BaseManageableRepository, backend_service_provider: SuperannotateServiceProvider, - settings_repo: Type[BaseManageableRepository], annotation_classes_repo: Type[BaseManageableRepository], workflows_repo: Type[BaseManageableRepository], - settings: List[ProjectSettingEntity] = None, - workflows: List[WorkflowEntity] = None, - annotation_classes: List[AnnotationClassEntity] = None, + workflows: Iterable[WorkflowEntity] = None, + classes: List[AnnotationClassEntity] = None, ): super().__init__() self._project = project self._projects = projects - self._settings = settings - self._settings_repo = settings_repo self._annotation_classes_repo = annotation_classes_repo self._workflows_repo = workflows_repo self._workflows = workflows - self._annotation_classes = annotation_classes + self._classes = classes self._backend_service = backend_service_provider + def validate_settings(self): + for setting in self._project.settings[:]: + if setting.attribute == "WorkflowType": + self._project.settings.remove(setting) + if setting.attribute == "ImageQuality" and isinstance(setting.value, str): + setting.value = constances.ImageQuality.get_value(setting.value) + elif setting.attribute == "FrameRate": + if not self._project.type == constances.ProjectType.VIDEO.value: + raise AppValidationException( + "FrameRate is available only for Video projects" + ) + if isinstance(setting.value, (float, int)): + if ( + not (0.0001 < setting.value < 120) + or decimal.Decimal(str(setting.value)).as_tuple().exponent < -3 + ): + raise AppValidationException( + "The FrameRate value range is between 0.001 - 120" + ) + frame_mode = next( + filter( + lambda x: x.attribute == "FrameMode", self._project.settings + ), + None, + ) + if not frame_mode: + self._project.settings.append( + SettingEntity(attribute="FrameMode", value=1) + ) + else: + frame_mode.value = 1 + else: + raise AppValidationException("The FrameRate value should be float") + def validate_project_name(self): if ( len( @@ -224,26 +253,27 @@ def execute(self): self._projects.update(entity) self._response.data = entity data = {} - if self._settings: - settings_repo = self._settings_repo(self._backend_service, entity) - for setting in self._settings: - for new_setting in settings_repo.get_all(): - if new_setting.attribute == setting.attribute: - setting_copy = copy.copy(setting) - setting_copy.uuid = new_setting.uuid - setting_copy.project_id = entity.uuid - settings_repo.update(setting_copy) - data["settings"] = self._settings + # TODO delete + # if self._settings: + # settings_repo = 
self._settings_repo(self._backend_service, entity) + # for setting in self._settings: + # for new_setting in settings_repo.get_all(): + # if new_setting.attribute == setting.attribute: + # setting_copy = copy.copy(setting) + # setting_copy.id = new_setting.id + # setting_copy.project_id = entity.uuid + # settings_repo.update(setting_copy) + # data["settings"] = self._settings annotation_classes_mapping = {} - if self._annotation_classes: + if self._classes: annotation_repo = self._annotation_classes_repo( self._backend_service, entity ) - for annotation_class in self._annotation_classes: + for annotation_class in self._classes: annotation_classes_mapping[ annotation_class.id ] = annotation_repo.insert(annotation_class) - self._response.data.annotation_classes = self._annotation_classes + self._response.data.classes = self._classes if self._workflows: set_workflow_use_case = SetWorkflowUseCase( service=self._backend_service, @@ -262,8 +292,8 @@ def execute(self): logger.info( "Created project %s (ID %s) with type %s", self._response.data.name, - self._response.data.uuid, - constances.ProjectType.get_name(self._response.data.project_type), + self._response.data.id, + constances.ProjectType.get_name(self._response.data.type), ) return self._response @@ -339,7 +369,7 @@ def execute(self): for field, value in self._project_data.items(): setattr(self._project, field, value) new_project = self._projects.update(self._project) - self._response.data = new_project.to_dict() + self._response.data = new_project return self._response @@ -429,7 +459,7 @@ def _copy_annotation_classes( def _copy_include_contributors(self, to_project: ProjectEntity): from_project = self._projects.get_one( - uuid=self._project.uuid, team_id=self._project.team_id + uuid=self._project.id, team_id=self._project.team_id ) users = [] for user in from_project.users: @@ -443,7 +473,7 @@ def _copy_include_contributors(self, to_project: ProjectEntity): ) if users: self._backend_service.share_project_bulk( - to_project.uuid, to_project.team_id, users + to_project.id, to_project.team_id, users ) def _copy_settings(self, to_project: ProjectEntity): @@ -452,8 +482,8 @@ def _copy_settings(self, to_project: ProjectEntity): for new_setting in new_settings.get_all(): if new_setting.attribute == setting.attribute: setting_copy = copy.copy(setting) - setting_copy.uuid = new_setting.uuid - setting_copy.project_id = to_project.uuid + setting_copy.id = new_setting.id + setting_copy.project_id = to_project.id new_settings.update(setting_copy) def _copy_workflow( @@ -463,7 +493,7 @@ def _copy_workflow( for workflow in self.workflows.get_all(): existing_workflow_ids = list(map(lambda i: i.uuid, new_workflows.get_all())) workflow_data = copy.copy(workflow) - workflow_data.project_id = to_project.uuid + workflow_data.project_id = to_project.id workflow_data.class_id = annotation_classes_entity_mapping[ workflow.class_id ].id @@ -502,14 +532,14 @@ def _copy_workflow( break if workflow_attributes: self._backend_service.set_project_workflow_attributes_bulk( - project_id=to_project.uuid, + project_id=to_project.id, team_id=to_project.team_id, attributes=workflow_attributes, ) def execute(self): if self.is_valid(): - if self._project_to_create.project_type in ( + if self._project_to_create.type in ( constances.ProjectType.PIXEL.value, constances.ProjectType.VECTOR.value, ): @@ -519,7 +549,7 @@ def execute(self): project = self._projects.insert(self._project_to_create) self.reporter.log_info( f"Created project {self._project_to_create.name} with type" - f" 
{constances.ProjectType.get_name(self._project_to_create.project_type)}." + f" {constances.ProjectType.get_name(self._project_to_create.type)}." ) annotation_classes_entity_mapping = defaultdict(AnnotationClassEntity) annotation_classes_created = False @@ -551,13 +581,13 @@ def execute(self): self.reporter.log_debug(str(e), exc_info=True) if self._include_workflow: - if self._project.project_type in ( + if self._project.type in ( constances.ProjectType.DOCUMENT.value, constances.ProjectType.VIDEO.value, ): self.reporter.log_warning( "Workflow copy is deprecated for " - f"{constances.ProjectType.get_name(self._project_to_create.project_type)} projects." + f"{constances.ProjectType.get_name(self._project_to_create.type)} projects." ) elif not annotation_classes_created: self.reporter.log_info( @@ -586,7 +616,7 @@ def execute(self): ) self.reporter.log_debug(str(e), exc_info=True) self._response.data = self._projects.get_one( - uuid=project.uuid, team_id=project.team_id + uuid=project.id, team_id=project.team_id ) return self._response @@ -613,7 +643,7 @@ def user_role(self): def execute(self): self._response.data = self._service.share_project_bulk( team_id=self._project_entity.team_id, - project_id=self._project_entity.uuid, + project_id=self._project_entity.id, users=[{"user_id": self._user_id, "user_role": self.user_role}], ) if not self._response.errors: @@ -638,7 +668,7 @@ def __init__( def execute(self): self._response.data = self._service.un_share_project( team_id=self._project_entity.team_id, - project_id=self._project_entity.uuid, + project_id=self._project_entity.id, user_id=self._user_id, ) logger.info( @@ -672,9 +702,9 @@ def __init__( self._fill_classes = fill_classes def validate_project_type(self): - if self._project.project_type in constances.LIMITED_FUNCTIONS: + if self._project.type in constances.LIMITED_FUNCTIONS: raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.project_type] + constances.LIMITED_FUNCTIONS[self._project.type] ) def execute(self): @@ -736,9 +766,7 @@ def validate_image_quality(self): def validate_project_type(self): project = self._projects.get_one(uuid=self._project_id, team_id=self._team_id) for attribute in self._to_update: - if attribute.get( - "attribute", "" - ) == "ImageQuality" and project.project_type in [ + if attribute.get("attribute", "") == "ImageQuality" and project.type in [ constances.ProjectType.VIDEO.value, constances.ProjectType.DOCUMENT.value, ]: @@ -751,7 +779,7 @@ def execute(self): old_settings = self._settings.get_all() attr_id_mapping = {} for setting in old_settings: - attr_id_mapping[setting.attribute] = setting.uuid + attr_id_mapping[setting.attribute] = setting.id new_settings_to_update = [] for new_setting in self._to_update: @@ -790,15 +818,15 @@ def validate_user_input(self): raise AppValidationException("The folder does not contain any sub-folders.") def validate_project_type(self): - if self._project.project_type in constances.LIMITED_FUNCTIONS: + if self._project.type in constances.LIMITED_FUNCTIONS: raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.project_type] + constances.LIMITED_FUNCTIONS[self._project.type] ) def execute(self): if self.is_valid(): data = self._service.get_project_images_count( - project_id=self._project.uuid, team_id=self._project.team_id + project_id=self._project.id, team_id=self._project.team_id ) count = 0 if self._folder.name == "root": @@ -832,9 +860,9 @@ def __init__( self._project = project def validate_project_type(self): - if 
self._project.project_type in constances.LIMITED_FUNCTIONS: + if self._project.type in constances.LIMITED_FUNCTIONS: raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.project_type] + constances.LIMITED_FUNCTIONS[self._project.type] ) def execute(self): @@ -861,7 +889,7 @@ def execute(self): self._service.set_project_workflow_bulk( team_id=self._project.team_id, - project_id=self._project.uuid, + project_id=self._project.id, steps=self._steps, ) existing_workflows = self._workflow_repo.get_all() @@ -898,7 +926,7 @@ def execute(self): ) self._service.set_project_workflow_attributes_bulk( - project_id=self._project.uuid, + project_id=self._project.id, team_id=self._project.team_id, attributes=req_data, ) @@ -995,7 +1023,7 @@ def execute(self): if to_add: response = self._service.share_project_bulk( team_id=self._team.uuid, - project_id=self._project.uuid, + project_id=self._project.id, users=[ dict(user_id=user_id, user_role=self.user_role) for user_id in to_add diff --git a/src/superannotate/lib/core/video_convertor.py b/src/superannotate/lib/core/video_convertor.py index 9991082d7..0b3726c25 100644 --- a/src/superannotate/lib/core/video_convertor.py +++ b/src/superannotate/lib/core/video_convertor.py @@ -44,14 +44,14 @@ def get_frame(self, frame_no: int): return self.annotations[frame_no] def interpolate_annotations( - self, - class_name: str, - from_frame: int, - to_frame: int, - data: dict, - instance_id: int, - steps: dict = None, - annotation_type: str = "bbox", + self, + class_name: str, + from_frame: int, + to_frame: int, + data: dict, + instance_id: int, + steps: dict = None, + annotation_type: str = "bbox", ) -> dict: annotations = {} for idx, frame_idx in enumerate(range(from_frame + 1, to_frame), 1): @@ -133,39 +133,39 @@ def _process(self): if frames_diff > 1: steps = None if ( - annotation_type == "bbox" - and from_frame.get("points") - and to_frame.get("points") + annotation_type == "bbox" + and from_frame.get("points") + and to_frame.get("points") ): steps = { "y1": round( ( - to_frame["points"]["y1"] - - from_frame["points"]["y1"] + to_frame["points"]["y1"] + - from_frame["points"]["y1"] ) / frames_diff, 2, ), "x2": round( ( - to_frame["points"]["x2"] - - from_frame["points"]["x2"] + to_frame["points"]["x2"] + - from_frame["points"]["x2"] ) / frames_diff, 2, ), "x1": round( ( - to_frame["points"]["x1"] - - from_frame["points"]["x1"] + to_frame["points"]["x1"] + - from_frame["points"]["x1"] ) / frames_diff, 2, ), "y2": round( ( - to_frame["points"]["y2"] - - from_frame["points"]["y2"] + to_frame["points"]["y2"] + - from_frame["points"]["y2"] ) / frames_diff, 2, diff --git a/src/superannotate/lib/infrastructure/controller.py b/src/superannotate/lib/infrastructure/controller.py index ab8dab295..3ea2835dd 100644 --- a/src/superannotate/lib/infrastructure/controller.py +++ b/src/superannotate/lib/infrastructure/controller.py @@ -15,10 +15,12 @@ from lib.core.conditions import Condition from lib.core.conditions import CONDITION_EQ as EQ from lib.core.entities import AnnotationClassEntity +from lib.core.entities import AttachmentEntity from lib.core.entities import FolderEntity from lib.core.entities import ImageEntity from lib.core.entities import MLModelEntity from lib.core.entities import ProjectEntity +from lib.core.entities import SettingEntity from lib.core.entities.integrations import IntegrationEntity from lib.core.exceptions import AppException from lib.core.reporter import Reporter @@ -41,6 +43,14 @@ from superannotate_schemas.validators import 
AnnotationValidators +def build_condition(**kwargs) -> Condition: + condition = Condition.get_empty_condition() + if any(kwargs.values()): + for key, value in ((key, value) for key, value in kwargs.items() if value): + condition = condition & Condition(key, value, EQ) + return condition + + class BaseController(metaclass=ABCMeta): def __init__(self, config_path: str = None, token: str = None): self._team_data = None @@ -306,9 +316,8 @@ def search_project( condition &= Condition( "status", constances.ProjectStatus.get_value(status), EQ ) - for key, value in kwargs.items(): - if value: - condition &= Condition(key, value, EQ) + + condition &= build_condition(**kwargs) use_case = usecases.GetProjectsUseCase( condition=condition, projects=self.projects, team_id=self.team_id, ) @@ -319,10 +328,10 @@ def create_project( name: str, description: str, project_type: str, - settings: Iterable = tuple(), - annotation_classes: Iterable = tuple(), + settings: Iterable[SettingEntity] = None, + classes: Iterable = tuple(), workflows: Iterable = tuple(), - **extra_kwargs + **extra_kwargs, ) -> Response: try: @@ -334,24 +343,21 @@ def create_project( entity = ProjectEntity( name=name, description=description, - project_type=project_type, + type=project_type, team_id=self.team_id, - **extra_kwargs + settings=settings if settings else [], + **extra_kwargs, ) use_case = usecases.CreateProjectUseCase( project=entity, projects=self.projects, backend_service_provider=self._backend_client, - settings_repo=ProjectSettingsRepository, workflows_repo=WorkflowRepository, annotation_classes_repo=AnnotationClassRepository, - settings=[ - ProjectSettingsRepository.dict2entity(setting) for setting in settings - ], workflows=workflows, - annotation_classes=[ + classes=[ AnnotationClassEntity(**annotation_class) - for annotation_class in annotation_classes + for annotation_class in classes ], ) return use_case.execute() @@ -515,7 +521,7 @@ def interactive_attach_urls( def create_folder(self, project: str, folder_name: str): project = self._get_project(project) folder = FolderEntity( - name=folder_name, project_id=project.uuid, team_id=project.team_id + name=folder_name, project_id=project.id, team_id=project.team_id ) use_case = usecases.CreateFolderUseCase( project=project, folder=folder, folders=self.folders, @@ -535,10 +541,7 @@ def get_folder(self, project_name: str, folder_name: str): def search_folders( self, project_name: str, folder_name: str = None, include_users=False, **kwargs ): - condition = Condition.get_empty_condition() - if kwargs: - for key, val in kwargs: - condition = condition & Condition(key, val, EQ) + condition = build_condition(**kwargs) project = self._get_project(project_name) use_case = usecases.SearchFoldersUseCase( project=project, @@ -575,6 +578,7 @@ def prepare_export( use_case = usecases.PrepareExportUseCase( project=project, folder_names=folder_names, + folders=self.folders, backend_service_provider=self._backend_client, include_fuse=include_fuse, only_pinned=only_pinned, @@ -583,15 +587,7 @@ def prepare_export( return use_case.execute() def search_team_contributors(self, **kwargs): - condition = None - if any(kwargs.values()): - conditions_iter = iter(kwargs) - key = next(conditions_iter) - if kwargs[key]: - condition = Condition(key, kwargs[key], EQ) - for key, val in conditions_iter: - condition = condition & Condition(key, val, EQ) - + condition = build_condition(**kwargs) use_case = usecases.SearchContributorsUseCase( backend_service_provider=self._backend_client, team_id=self.team_id, 
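The new build_condition helper replaces the hand-rolled loops that search_project, search_folders and search_team_contributors used to assemble their query filters: it starts from an empty condition and AND-s in one EQ condition per keyword whose value is truthy. A small illustration (Condition and EQ are the SDK classes imported at the top of controller.py; the keyword values are made up):

# Falsy values are dropped, so only name and team_id end up in the query.
condition = build_condition(name="road-signs", status=None, team_id=42)
# Roughly equivalent to:
# Condition.get_empty_condition() & Condition("name", "road-signs", EQ) & Condition("team_id", 42, EQ)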
@@ -826,7 +822,9 @@ def search_annotation_classes(self, project_name: str, name_contains: str = None project_entity = self._get_project(project_name) condition = None if name_contains: - condition = Condition("name", name_contains, EQ) & Condition("pattern", True, EQ) + condition = Condition("name", name_contains, EQ) & Condition( + "pattern", True, EQ + ) use_case = usecases.GetAnnotationClassesUseCase( classes=AnnotationClassRepository( service=self._backend_client, project=project_entity @@ -844,22 +842,11 @@ def set_project_settings(self, project_name: str, new_settings: List[dict]): ), to_update=new_settings, backend_service_provider=self._backend_client, - project_id=project_entity.uuid, + project_id=project_entity.id, team_id=project_entity.team_id, ) return use_case.execute() - def get_image_metadata(self, project_name: str, folder_name: str, image_name: str): - project = self._get_project(project_name) - folder = self._get_folder(project, folder_name) - use_case = usecases.GetImageMetadataUseCase( - image_name=image_name, - project=project, - folder=folder, - service=self._backend_client, - ) - return use_case.execute() - def set_images_annotation_statuses( self, project_name: str, @@ -875,7 +862,7 @@ def set_images_annotation_statuses( projects=self.projects, image_names=image_names, team_id=project_entity.team_id, - project_id=project_entity.uuid, + project_id=project_entity.id, folder_id=folder_entity.uuid, images_repo=images_repo, annotation_status=constances.AnnotationStatus.get_value(annotation_status), @@ -978,14 +965,6 @@ def download_image_annotations( ) return use_case.execute() - @staticmethod - def get_image_from_s3(s3_bucket, image_path: str): - use_case = usecases.GetS3ImageUseCase( - s3_bucket=s3_bucket, image_path=image_path - ) - use_case.execute() - return use_case.execute() - def get_exports(self, project_name: str, return_metadata: bool): project = self._get_project(project_name) @@ -1332,7 +1311,7 @@ def run_prediction( project = self._get_project(project_name) folder = self._get_folder(project, folder_name) ml_model_repo = MLModelRepository( - team_id=project.uuid, service=self._backend_client + team_id=project.id, service=self._backend_client ) use_case = usecases.RunPredictionUseCase( project=project, @@ -1604,8 +1583,8 @@ def list_items( search_condition &= Condition("qa_id", qa_email, EQ) if annotator_email: search_condition &= Condition("annotator_id", annotator_email, EQ) - for key, value in kwargs.items(): - search_condition &= Condition(key, value, EQ) + search_condition &= build_condition(**kwargs) + use_case = usecases.ListItems( reporter=self.default_reporter, project=project, @@ -1617,3 +1596,90 @@ def list_items( ) return use_case.execute() + + def attach_items( + self, + project_name: str, + folder_name: str, + attachments: List[AttachmentEntity], + annotation_status: str, + ): + project = self._get_project(project_name) + folder = self._get_folder(project, folder_name) + + use_case = usecases.AttachItems( + reporter=self.default_reporter, + project=project, + folder=folder, + attachments=attachments, + annotation_status=annotation_status, + backend_service_provider=self.backend_client, + ) + return use_case.execute() + + def copy_items( + self, + project_name: str, + from_folder: str, + to_folder: str, + items: List[str] = None, + include_annotations: bool = False, + ): + project = self._get_project(project_name) + from_folder = self._get_folder(project, from_folder) + to_folder = self._get_folder(project, to_folder) + + use_case = 
usecases.CopyItems( + self.default_reporter, + project=project, + from_folder=from_folder, + to_folder=to_folder, + item_names=items, + items=self.items, + backend_service_provider=self.backend_client, + include_annotations=include_annotations, + ) + return use_case.execute() + + def move_items( + self, + project_name: str, + from_folder: str, + to_folder: str, + items: List[str] = None, + ): + project = self._get_project(project_name) + from_folder = self._get_folder(project, from_folder) + to_folder = self._get_folder(project, to_folder) + + use_case = usecases.MoveItems( + self.default_reporter, + project=project, + from_folder=from_folder, + to_folder=to_folder, + item_names=items, + items=self.items, + backend_service_provider=self.backend_client, + ) + return use_case.execute() + + def set_annotation_statuses( + self, + project_name: str, + folder_name: str, + annotation_status: str, + item_names: List[str] = None, + ): + project = self._get_project(project_name) + folder = self._get_folder(project, folder_name) + + use_case = usecases.SetAnnotationStatues( + self.default_reporter, + project=project, + folder=folder, + annotation_status=annotation_status, + item_names=item_names, + items=self.items, + backend_service_provider=self.backend_client, + ) + return use_case.execute() diff --git a/src/superannotate/lib/infrastructure/repositories.py b/src/superannotate/lib/infrastructure/repositories.py index 31486b96a..427187e73 100644 --- a/src/superannotate/lib/infrastructure/repositories.py +++ b/src/superannotate/lib/infrastructure/repositories.py @@ -16,8 +16,8 @@ from lib.core.entities import IntegrationEntity from lib.core.entities import MLModelEntity from lib.core.entities import ProjectEntity -from lib.core.entities import ProjectSettingEntity from lib.core.entities import S3FileEntity +from lib.core.entities import SettingEntity from lib.core.entities import TeamEntity from lib.core.entities import UserEntity from lib.core.entities import WorkflowEntity @@ -90,64 +90,33 @@ def __init__(self, service: SuperannotateBackendService): self._service = service def get_one(self, uuid: int, team_id: int) -> ProjectEntity: - return self.dict2entity(self._service.get_project(uuid, team_id)) + return ProjectEntity(**self._service.get_project(uuid, team_id)) def get_all(self, condition: Condition = None) -> List[ProjectEntity]: condition = condition.build_query() if condition else None - return [ - self.dict2entity(project_data) - for project_data in self._service.get_projects(condition) - ] + return parse_obj_as(List[ProjectEntity], self._service.get_projects(condition)) def insert(self, entity: ProjectEntity) -> ProjectEntity: - project_data = self._drop_nones(entity.to_dict()) - result = self._service.create_project(project_data) - return self.dict2entity(result) + result = self._service.create_project(entity.dict(exclude_none=True)) + if "error" in result: + raise AppException(result["error"]) + return ProjectEntity(**result) def update(self, entity: ProjectEntity): condition = Condition("team_id", entity.team_id, EQ) result = self._service.update_project( - entity.to_dict(), query_string=condition.build_query() + entity.dict(exclude_none=True), query_string=condition.build_query() ) - return self.dict2entity(result) + return ProjectEntity(**result) def delete(self, entity: ProjectEntity): team_id = entity.team_id - uuid = entity.uuid + uuid = entity.id condition = Condition("team_id", team_id, EQ) return self._service.delete_project( uuid=uuid, query_string=condition.build_query() ) - 
@staticmethod - def dict2entity(data: dict) -> ProjectEntity: - try: - return ProjectEntity( - uuid=data["id"], - team_id=data["team_id"], - name=data["name"], - project_type=data["type"], - status=data.get("status"), - instructions_link=data.get("instructions_link"), - entropy_status=data.get("entropy_status"), - sharing_status=data.get("sharing_status"), - creator_id=data["creator_id"], - upload_state=data["upload_state"], - description=data.get("description"), - sync_status=data.get("sync_status"), - folder_id=data.get("folder_id"), - users=data.get("users", ()), - unverified_users=data.get("unverified_users", ()), - completed_images_count=data.get("completedImagesCount"), - root_folder_completed_images_count=data.get( - "rootFolderCompletedImagesCount" - ), - createdAt=data.get("createdAt"), - updatedAt=data.get("updatedAt"), - ) - except KeyError: - raise AppException("Cant serialize project data") - class S3Repository(BaseS3Repository): def get_one(self, uuid: str) -> S3FileEntity: @@ -176,47 +145,35 @@ def get_all(self, condition: Condition = None) -> List[ProjectEntity]: class ProjectSettingsRepository(BaseProjectRelatedManageableRepository): - def get_one(self, uuid: int) -> ProjectEntity: + def get_one(self, uuid: int) -> SettingEntity: raise NotImplementedError - def get_all( - self, condition: Optional[Condition] = None - ) -> List[ProjectSettingEntity]: + def delete(self, uuid: int): + raise NotImplementedError + + def get_all(self, condition: Optional[Condition] = None) -> List[SettingEntity]: data = self._service.get_project_settings( - self._project.uuid, self._project.team_id + self._project.id, self._project.team_id ) if data: - return [self.dict2entity(setting) for setting in data] + return parse_obj_as(List[SettingEntity], data) return [] - def insert(self, entity: ProjectSettingEntity) -> ProjectSettingEntity: - entity = entity.to_dict() - entity.pop("key", None) + def insert(self, entity: SettingEntity) -> SettingEntity: + entity = entity.dict() res = self._service.set_project_settings( - self._project.uuid, self._project.team_id, [entity] + self._project.id, self._project.team_id, [entity] ) - return self.dict2entity(res[0]) - - def delete(self, uuid: int): - raise NotImplementedError + return SettingEntity(**res[0]) - def update(self, entity: ProjectSettingEntity): + def update(self, entity: SettingEntity): if entity.attribute == "ImageQuality" and isinstance(entity.value, str): entity.value = ImageQuality.get_value(entity.value) self._service.set_project_settings( - self._project.uuid, self._project.team_id, [entity.to_dict()] + self._project.id, self._project.team_id, [entity.dict()] ) return entity - @staticmethod - def dict2entity(data: dict) -> ProjectSettingEntity: - return ProjectSettingEntity( - uuid=data["id"], - project_id=data["project_id"], - attribute=data["attribute"], - value=data["value"], - ) - class WorkflowRepository(BaseProjectRelatedManageableRepository): def get_one(self, uuid: int) -> WorkflowEntity: @@ -224,7 +181,7 @@ def get_one(self, uuid: int) -> WorkflowEntity: def get_all(self, condition: Optional[Condition] = None) -> List[WorkflowEntity]: data = self._service.get_project_workflows( - self._project.uuid, self._project.team_id + self._project.id, self._project.team_id ) return [self.dict2entity(setting) for setting in data] @@ -319,11 +276,11 @@ def get_one(self, uuid: Condition) -> AnnotationClassEntity: raise NotImplementedError def get_all( - self, condition: Optional[Condition] = None + self, condition: Optional[Condition] = None ) 
-> List[AnnotationClassEntity]: query = condition.build_query() if condition else None data = self._service.get_annotation_classes( - self.project.uuid, self.project.team_id, query + self.project.id, self.project.team_id, query ) if data: return [self.dict2entity(data) for data in data] @@ -331,7 +288,7 @@ def get_all( def insert(self, entity: AnnotationClassEntity): res = self._service.set_annotation_classes( - self.project.uuid, self.project.team_id, [entity] + self.project.id, self.project.team_id, [entity] ) if "error" in res: raise AppException(res["error"]) @@ -340,13 +297,13 @@ def insert(self, entity: AnnotationClassEntity): def delete(self, uuid: int): self._service.delete_annotation_class( team_id=self.project.team_id, - project_id=self.project.uuid, + project_id=self.project.id, annotation_class_id=uuid, ) def bulk_insert(self, entities: List[AnnotationClassEntity]): res = self._service.set_annotation_classes( - self.project.uuid, self.project.team_id, entities + self.project.id, self.project.team_id, entities ) if "error" in res: raise AppException(res["error"]) diff --git a/src/superannotate/lib/infrastructure/services.py b/src/superannotate/lib/infrastructure/services.py index 98e467da5..9f5cf8552 100644 --- a/src/superannotate/lib/infrastructure/services.py +++ b/src/superannotate/lib/infrastructure/services.py @@ -1,7 +1,8 @@ import asyncio +import datetime import json +import time from contextlib import contextmanager -from datetime import datetime from typing import Dict from typing import Iterable from typing import List @@ -12,6 +13,7 @@ import lib.core as constance import requests.packages.urllib3 from lib.core.exceptions import AppException +from lib.core.exceptions import BackendError from lib.core.reporter import Reporter from lib.core.service_types import DownloadMLModelAuthData from lib.core.service_types import ServiceResponse @@ -22,7 +24,6 @@ from lib.infrastructure.stream_data_handler import StreamedAnnotations from requests.exceptions import HTTPError - requests.packages.urllib3.disable_warnings() @@ -30,6 +31,8 @@ class PydanticEncoder(json.JSONEncoder): def default(self, obj): if hasattr(obj, "deserialize"): return obj.deserialize() + if isinstance(obj, (datetime.date, datetime.datetime)): + return obj.isoformat() return json.JSONEncoder.default(self, obj) @@ -538,7 +541,7 @@ def prepare_export( "fuse": int(include_fuse), "is_pinned": int(only_pinned), "coco": 0, - "time": datetime.now().strftime("%b %d %Y %H:%M"), + "time": datetime.datetime.now().strftime("%b %d %Y %H:%M"), } if folders: data["folder_names"] = folders @@ -589,13 +592,13 @@ def update_image(self, image_id: int, team_id: int, project_id: int, data: dict) ) return res.ok - def copy_images_between_folders_transaction( + def copy_items_between_folders_transaction( self, team_id: int, project_id: int, from_folder_id: int, to_folder_id: int, - images: List[str], + items: List[str], include_annotations: bool = False, include_pin: bool = False, ) -> int: @@ -609,7 +612,7 @@ def copy_images_between_folders_transaction( params={"team_id": team_id, "project_id": project_id}, data={ "is_folder_copy": False, - "image_names": images, + "image_names": items, "destination_folder_id": to_folder_id, "source_folder_id": from_folder_id, "include_annotations": include_annotations, @@ -654,6 +657,18 @@ def get_progress( ).json() return res["done"], res["skipped"] + def await_progress(self, project_id: int, team_id: int, poll_id: int, items_count): + try: + await_time = items_count * 0.3 + timeout_start = 
time.time() + while time.time() < timeout_start + await_time: + done_count, skipped = self.get_progress(project_id, team_id, poll_id) + if done_count + skipped == items_count: + break + time.sleep(4) + except (AppException, Exception) as e: + raise BackendError(e) + def get_duplicated_images( self, project_id: int, team_id: int, folder_id: int, images: List[str] ) -> List[str]: diff --git a/src/superannotate/version.py b/src/superannotate/version.py index 1709a571e..af8e92320 100644 --- a/src/superannotate/version.py +++ b/src/superannotate/version.py @@ -1 +1 @@ -__version__ = "4.3.2" +__version__ = "4.3.3" diff --git a/tests/data_set/document_df_data/classes/classes.json b/tests/data_set/document_df_data/classes/classes.json new file mode 100644 index 000000000..0641165ef --- /dev/null +++ b/tests/data_set/document_df_data/classes/classes.json @@ -0,0 +1,32 @@ +[ + { + "id": 873208, + "project_id": 160158, + "name": "vid", + "color": "#0fc1c9", + "count": 0, + "createdAt": "2021-10-22T10:40:03.000Z", + "updatedAt": "2021-10-22T10:40:03.000Z", + "attribute_groups": [ + { + "id": 347588, + "class_id": 873208, + "name": "attr g", + "is_multiselect": 0, + "createdAt": "2021-10-22T10:40:03.000Z", + "updatedAt": "2021-10-22T10:40:03.000Z", + "attributes": [ + { + "id": 1203338, + "group_id": 347588, + "project_id": 160158, + "name": "attr", + "count": 0, + "createdAt": "2021-10-22T10:40:03.000Z", + "updatedAt": "2021-10-22T10:40:03.000Z" + } + ] + } + ] + } +] \ No newline at end of file diff --git a/tests/data_set/document_df_data/folder/text_file_example_1.json b/tests/data_set/document_df_data/folder/text_file_example_1.json new file mode 100644 index 000000000..30085dde2 --- /dev/null +++ b/tests/data_set/document_df_data/folder/text_file_example_1.json @@ -0,0 +1,59 @@ +{ + "metadata": { + "name": "text_file_example_1", + "status": "Completed", + "url": "https://sa-public-files.s3.us-west-2.amazonaws.com/Text+project/text_file_example_1.txt", + "projectId": 160158, + "annotatorEmail": null, + "qaEmail": null, + "lastAction": { + "email": "shab.prog@gmail.com", + "timestamp": 1634899229953 + } + }, + "instances": [ + { + "type": "entity", + "start": 253, + "end": 593, + "classId": 873208, + "createdAt": "2021-10-22T10:40:26.151Z", + "createdBy": { + "email": "shab.prog@gmail.com", + "role": "Admin" + }, + "updatedAt": "2021-10-22T10:40:29.953Z", + "updatedBy": { + "email": "shab.prog@gmail.com", + "role": "Admin" + }, + "attributes": [], + "creationType": "Manual", + "className": "vid" + }, + { + "type": "entity", + "start": 255, + "end": 593, + "classId": 873208, + "createdAt": "2021-10-22T10:40:26.151Z", + "createdBy": { + "email": "shab.prog@gmail.com", + "role": "Admin" + }, + "updatedAt": "2021-10-22T10:40:29.953Z", + "updatedBy": { + "email": "shab.prog@gmail.com", + "role": "Admin" + }, + "attributes": [], + "creationType": "Manual", + "className": "pid" + } + ], + "tags": [ + "vid", + "pid" + ], + "freeText": "" +} \ No newline at end of file diff --git a/tests/data_set/document_df_data/text_file_example_1.json b/tests/data_set/document_df_data/text_file_example_1.json new file mode 100644 index 000000000..06863143a --- /dev/null +++ b/tests/data_set/document_df_data/text_file_example_1.json @@ -0,0 +1,58 @@ +{ + "metadata": { + "name": "text_file_example_1", + "status": "Completed", + "url": "https://sa-public-files.s3.us-west-2.amazonaws.com/Text+project/text_file_example_1.txt", + "projectId": 160158, + "annotatorEmail": null, + "qaEmail": null, + "lastAction": { + "email": 
"shab.prog@gmail.com", + "timestamp": 1634899229953 + } + }, + "instances": [ + { + "type": "entity", + "start": 253, + "end": 593, + "classId": 873208, + "createdAt": "2021-10-22T10:40:26.151Z", + "createdBy": { + "email": "shab.prog@gmail.com", + "role": "Admin" + }, + "updatedAt": "2021-10-22T10:40:29.953Z", + "updatedBy": { + "email": "shab.prog@gmail.com", + "role": "Admin" + }, + "attributes": [], + "creationType": "Manual", + "className": "vid" + }, + { + "type": "entity", + "start": 255, + "end": 593, + "classId": 873208, + "createdAt": "2021-10-22T10:40:26.151Z", + "createdBy": { + "email": "shab.prog@gmail.com", + "role": "Admin" + }, + "updatedAt": "2021-10-22T10:40:29.953Z", + "updatedBy": { + "email": "shab.prog@gmail.com", + "role": "Admin" + }, + "attributes": [], + "creationType": "Manual", + "className": "pid" + } + ], + "tags": [ + "vid" + ], + "freeText": "" +} \ No newline at end of file diff --git a/tests/data_set/sample_project_vector_single_image/example_image_1.jpg___objects.json b/tests/data_set/sample_project_vector_single_image/example_image_1.jpg___objects.json index 1fee7ad53..898d3140e 100644 --- a/tests/data_set/sample_project_vector_single_image/example_image_1.jpg___objects.json +++ b/tests/data_set/sample_project_vector_single_image/example_image_1.jpg___objects.json @@ -132,103 +132,6 @@ "updatedBy": null, "className": "Personal vehicle" }, - { - "type": "template", - "classId": 72274, - "probability": 100, - "points": [ - { - "id": 1, - "x": 800.8311630011381, - "y": 431.7220764160156 - }, - { - "id": 2, - "x": 834.6965942382812, - "y": 431.8820692877566 - }, - { - "id": 3, - "x": 834.6965942382812, - "y": 480.848388671875 - }, - { - "id": 4, - "x": 801.0125574701838, - "y": 480.848388671875 - }, - { - "id": 5, - "x": 702.6083268971072, - "y": 437.5428573337124 - }, - { - "id": 6, - "x": 702.5221557617188, - "y": 474.8859480851478 - } - ], - "connections": [ - { - "id": 1, - "from": 1, - "to": 2 - }, - { - "id": 2, - "from": 2, - "to": 3 - }, - { - "id": 3, - "from": 3, - "to": 4 - }, - { - "id": 4, - "from": 4, - "to": 1 - }, - { - "id": 5, - "from": 1, - "to": 5 - }, - { - "id": 6, - "from": 5, - "to": 6 - }, - { - "id": 7, - "from": 6, - "to": 4 - } - ], - "groupId": 0, - "pointLabels": { - "4": "top_left", - "5": "bottom_left" - }, - "locked": false, - "visible": false, - "attributes": [ - { - "name": "4", - "groupName": "Num doors", - "groupId": 28230, - "id": 117846 - } - ], - "templateId": -1, - "trackingId": "cbde2787e76c41be77c1079e8d090252ad701ea", - "createdAt": null, - "createdBy": null, - "creationType": null, - "updatedAt": null, - "updatedBy": null, - "className": "Personal vehicle" - }, { "type": "polygon", "classId": 72274, @@ -1087,1434 +990,6 @@ "updatedBy": null, "className": "Large vehicle" }, - { - "type": "template", - "classId": 72276, - "probability": 100, - "points": [ - { - "id": 1, - "x": 590.36328125, - "y": 505.471431864795 - }, - { - "id": 2, - "x": 590.2529541686341, - "y": 504.29565523299704 - }, - { - "id": 3, - "x": 590.0863828554258, - "y": 502.0855402722193 - }, - { - "id": 4, - "x": 589.8926669948704, - "y": 500.1575188822054 - }, - { - "id": 5, - "x": 588.2789742606027, - "y": 491.4069519042969 - }, - { - "id": 6, - "x": 591.6578771570227, - "y": 498.7841862403542 - }, - { - "id": 7, - "x": 592.6675015963041, - "y": 497.5725781649412 - }, - { - "id": 8, - "x": 593.4538138253348, - "y": 495.05589353721325 - }, - { - "id": 9, - "x": 591.9352490770948, - "y": 502.2054028345276 - }, - { - "id": 10, - "x": 
591.4315175486134, - "y": 504.8054433249257 - }, - { - "id": 11, - "x": 591.0675032060225, - "y": 506.48433274969244 - }, - { - "id": 12, - "x": 593.6178112658826, - "y": 501.4214392039917 - }, - { - "id": 13, - "x": 592.6682424021291, - "y": 504.65690054240156 - }, - { - "id": 14, - "x": 591.8309557568896, - "y": 507.1707458496094 - }, - { - "id": 15, - "x": 594.685306758671, - "y": 499.50420568423283 - }, - { - "id": 16, - "x": 594.4346668956044, - "y": 503.3523914672602 - }, - { - "id": 17, - "x": 593.4855715573489, - "y": 505.4433191217528 - }, - { - "id": 18, - "x": 592.9555204622038, - "y": 507.0652772868338 - }, - { - "id": 19, - "x": 589.5701713142814, - "y": 496.6512277677259 - }, - { - "id": 20, - "x": 590.8887191604782, - "y": 499.291411604618 - }, - { - "id": 21, - "x": 591.1992693890583, - "y": 501.8345208353304 - }, - { - "id": 22, - "x": 591.0341186523438, - "y": 501.9896778816582 - } - ], - "connections": [ - { - "id": 1, - "from": 5, - "to": 4 - }, - { - "id": 2, - "from": 3, - "to": 4 - }, - { - "id": 3, - "from": 3, - "to": 2 - }, - { - "id": 4, - "from": 2, - "to": 1 - }, - { - "id": 5, - "from": 5, - "to": 6 - }, - { - "id": 6, - "from": 6, - "to": 9 - }, - { - "id": 7, - "from": 9, - "to": 10 - }, - { - "id": 8, - "from": 10, - "to": 11 - }, - { - "id": 9, - "from": 5, - "to": 7 - }, - { - "id": 10, - "from": 7, - "to": 12 - }, - { - "id": 11, - "from": 12, - "to": 13 - }, - { - "id": 12, - "from": 13, - "to": 14 - }, - { - "id": 13, - "from": 5, - "to": 8 - }, - { - "id": 14, - "from": 8, - "to": 15 - }, - { - "id": 15, - "from": 15, - "to": 16 - }, - { - "id": 16, - "from": 16, - "to": 17 - }, - { - "id": 17, - "from": 17, - "to": 18 - }, - { - "id": 18, - "from": 5, - "to": 19 - }, - { - "id": 19, - "from": 19, - "to": 20 - }, - { - "id": 20, - "from": 20, - "to": 21 - }, - { - "id": 21, - "from": 21, - "to": 22 - } - ], - "groupId": 0, - "pointLabels": {}, - "locked": false, - "visible": false, - "attributes": [], - "templateId": -1, - "trackingId": "2c89e809614523cf56c9aeab932e90b87aaf5e4f", - "createdAt": null, - "createdBy": null, - "creationType": null, - "updatedAt": null, - "updatedBy": null, - "className": "Human" - }, - { - "type": "template", - "classId": 72276, - "probability": 100, - "points": [ - { - "id": 1, - "x": 332.9866027832032, - "y": 526.2959883676228 - }, - { - "id": 2, - "x": 332.8439004919032, - "y": 527.5132367654812 - }, - { - "id": 3, - "x": 334.35612353649776, - "y": 527.3324179308058 - }, - { - "id": 4, - "x": 336.2640990372543, - "y": 524.0976645502819 - }, - { - "id": 5, - "x": 337.51601736886164, - "y": 516.1050720214844 - }, - { - "id": 6, - "x": 339.060296362573, - "y": 524.7754271337591 - }, - { - "id": 7, - "x": 341.64884537916925, - "y": 526.5125154522543 - }, - { - "id": 8, - "x": 344.0771833147321, - "y": 527.3880219566797 - }, - { - "id": 9, - "x": 335.88342117477254, - "y": 527.9910814406194 - }, - { - "id": 10, - "x": 334.6968087835627, - "y": 529.0659044885928 - }, - { - "id": 11, - "x": 333.86405081277377, - "y": 527.8757251825314 - }, - { - "id": 12, - "x": 339.9883503337483, - "y": 529.320022177355 - }, - { - "id": 13, - "x": 338.46802612975404, - "y": 530.370269900207 - }, - { - "id": 14, - "x": 337.1430909712236, - "y": 530.7341613769531 - }, - { - "id": 15, - "x": 341.9785882300073, - "y": 531.0127476105173 - }, - { - "id": 16, - "x": 340.85258785708925, - "y": 532.1869901255352 - }, - { - "id": 17, - "x": 339.1688606346047, - "y": 532.8862634202454 - }, - { - "id": 18, - "x": 339.0958418793731, - "y": 
532.8511886128618 - }, - { - "id": 19, - "x": 342.74045026171336, - "y": 523.5337313474565 - }, - { - "id": 20, - "x": 343.0975823874003, - "y": 525.8059083903495 - }, - { - "id": 21, - "x": 341.95265642103254, - "y": 527.6336142573132 - }, - { - "id": 22, - "x": 340.4774169921875, - "y": 527.7661633949826 - } - ], - "connections": [ - { - "id": 1, - "from": 5, - "to": 4 - }, - { - "id": 2, - "from": 3, - "to": 4 - }, - { - "id": 3, - "from": 3, - "to": 2 - }, - { - "id": 4, - "from": 2, - "to": 1 - }, - { - "id": 5, - "from": 5, - "to": 6 - }, - { - "id": 6, - "from": 6, - "to": 9 - }, - { - "id": 7, - "from": 9, - "to": 10 - }, - { - "id": 8, - "from": 10, - "to": 11 - }, - { - "id": 9, - "from": 5, - "to": 7 - }, - { - "id": 10, - "from": 7, - "to": 12 - }, - { - "id": 11, - "from": 12, - "to": 13 - }, - { - "id": 12, - "from": 13, - "to": 14 - }, - { - "id": 13, - "from": 5, - "to": 8 - }, - { - "id": 14, - "from": 8, - "to": 15 - }, - { - "id": 15, - "from": 15, - "to": 16 - }, - { - "id": 16, - "from": 16, - "to": 17 - }, - { - "id": 17, - "from": 17, - "to": 18 - }, - { - "id": 18, - "from": 5, - "to": 19 - }, - { - "id": 19, - "from": 19, - "to": 20 - }, - { - "id": 20, - "from": 20, - "to": 21 - }, - { - "id": 21, - "from": 21, - "to": 22 - } - ], - "groupId": 0, - "pointLabels": {}, - "locked": false, - "visible": false, - "attributes": [], - "templateId": -1, - "trackingId": "bab62dc810b0cee390f8d5fb5fa62fade3c8da7", - "createdAt": null, - "createdBy": null, - "creationType": null, - "updatedAt": null, - "updatedBy": null, - "className": "Human" - }, - { - "type": "template", - "classId": 72276, - "probability": 100, - "points": [ - { - "id": 1, - "x": 500.7473449707031, - "y": 512.2212813363728 - }, - { - "id": 2, - "x": 499.83990268916875, - "y": 511.0267255350125 - }, - { - "id": 3, - "x": 499.35212573376333, - "y": 508.78712984486833 - }, - { - "id": 4, - "x": 499.49539176186363, - "y": 505.6112143549695 - }, - { - "id": 5, - "x": 505.1166338239397, - "y": 498.2973327636719 - }, - { - "id": 6, - "x": 501.5269101321042, - "y": 506.7595579931341 - }, - { - "id": 7, - "x": 503.99778336745044, - "y": 506.673098948348 - }, - { - "id": 8, - "x": 506.9555402483259, - "y": 505.9015717613673 - }, - { - "id": 9, - "x": 501.35003494430373, - "y": 510.62224599140063 - }, - { - "id": 10, - "x": 501.986939398797, - "y": 512.5206164026553 - }, - { - "id": 11, - "x": 503.15418142800803, - "y": 512.9774707880001 - }, - { - "id": 12, - "x": 503.6314472575764, - "y": 510.3629298921987 - }, - { - "id": 13, - "x": 503.9346398992853, - "y": 513.4720155056757 - }, - { - "id": 14, - "x": 506.3155763227861, - "y": 514.4830017089844 - }, - { - "id": 15, - "x": 506.32755673586666, - "y": 510.11449321598604 - }, - { - "id": 16, - "x": 506.78978268130794, - "y": 513.0534452036602 - }, - { - "id": 17, - "x": 508.6354744041359, - "y": 513.6350427171204 - }, - { - "id": 18, - "x": 508.56245564890435, - "y": 512.0705489644243 - }, - { - "id": 19, - "x": 509.736452458979, - "y": 503.5178622068315 - }, - { - "id": 20, - "x": 510.1524224752909, - "y": 508.84887714034943 - }, - { - "id": 21, - "x": 509.8898512452513, - "y": 511.676521972157 - }, - { - "id": 22, - "x": 509.7675476074219, - "y": 511.8091321449826 - } - ], - "connections": [ - { - "id": 1, - "from": 5, - "to": 4 - }, - { - "id": 2, - "from": 3, - "to": 4 - }, - { - "id": 3, - "from": 3, - "to": 2 - }, - { - "id": 4, - "from": 2, - "to": 1 - }, - { - "id": 5, - "from": 5, - "to": 6 - }, - { - "id": 6, - "from": 6, - "to": 9 - }, - { - "id": 7, - 
"from": 9, - "to": 10 - }, - { - "id": 8, - "from": 10, - "to": 11 - }, - { - "id": 9, - "from": 5, - "to": 7 - }, - { - "id": 10, - "from": 7, - "to": 12 - }, - { - "id": 11, - "from": 12, - "to": 13 - }, - { - "id": 12, - "from": 13, - "to": 14 - }, - { - "id": 13, - "from": 5, - "to": 8 - }, - { - "id": 14, - "from": 8, - "to": 15 - }, - { - "id": 15, - "from": 15, - "to": 16 - }, - { - "id": 16, - "from": 16, - "to": 17 - }, - { - "id": 17, - "from": 17, - "to": 18 - }, - { - "id": 18, - "from": 5, - "to": 19 - }, - { - "id": 19, - "from": 19, - "to": 20 - }, - { - "id": 20, - "from": 20, - "to": 21 - }, - { - "id": 21, - "from": 21, - "to": 22 - } - ], - "groupId": 0, - "pointLabels": {}, - "locked": false, - "visible": false, - "attributes": [], - "templateId": -1, - "trackingId": "f8f542a9e9da918d5d5cb8eed9052713302089", - "createdAt": null, - "createdBy": null, - "creationType": null, - "updatedAt": null, - "updatedBy": null, - "className": "Human" - }, - { - "type": "template", - "classId": 72276, - "probability": 100, - "points": [ - { - "id": 1, - "x": 460.2714192848242, - "y": 486.08071083487926 - }, - { - "id": 2, - "x": 454.92882596998356, - "y": 481.9066804669699 - }, - { - "id": 3, - "x": 461.0707178220127, - "y": 481.61528130084 - }, - { - "id": 4, - "x": 462.32680898178, - "y": 482.46856689453125 - }, - { - "id": 5, - "x": 444.8684189242054, - "y": 483.808782080494 - }, - { - "id": 6, - "x": 455.8683091235324, - "y": 497.2664014146353 - }, - { - "id": 7, - "x": 439.86159351357213, - "y": 498.91779556832523 - }, - { - "id": 8, - "x": 432.98627658437374, - "y": 519.4614616257791 - }, - { - "id": 9, - "x": 415.8799309258186, - "y": 515.9119205914317 - }, - { - "id": 10, - "x": 467.5532979208077, - "y": 499.0862192385027 - }, - { - "id": 11, - "x": 479.28433580441475, - "y": 514.1935318132136 - }, - { - "id": 12, - "x": 498.51239013671875, - "y": 512.030284394326 - }, - { - "id": 13, - "x": 454.8632612058889, - "y": 546.5478157765722 - }, - { - "id": 14, - "x": 444.0484270284733, - "y": 546.0017547475499 - }, - { - "id": 15, - "x": 464.16791732413037, - "y": 546.2800095783913 - }, - { - "id": 16, - "x": 468.63255127661785, - "y": 573.6905686937465 - }, - { - "id": 17, - "x": 457.1555372435924, - "y": 577.0907707675425 - }, - { - "id": 18, - "x": 432.2792663574219, - "y": 587.0443088500142 - }, - { - "id": 19, - "x": 429.91821938954894, - "y": 606.0040783618011 - }, - { - "id": 20, - "x": 463.69909188680566, - "y": 602.9990721708784 - }, - { - "id": 21, - "x": 484.317011118421, - "y": 607.0152893066406 - } - ], - "connections": [ - { - "id": 1, - "from": 1, - "to": 6 - }, - { - "id": 2, - "from": 6, - "to": 10 - }, - { - "id": 3, - "from": 10, - "to": 11 - }, - { - "id": 4, - "from": 11, - "to": 12 - }, - { - "id": 5, - "from": 7, - "to": 8 - }, - { - "id": 6, - "from": 8, - "to": 9 - }, - { - "id": 7, - "from": 14, - "to": 7 - }, - { - "id": 8, - "from": 14, - "to": 13 - }, - { - "id": 9, - "from": 13, - "to": 15 - }, - { - "id": 10, - "from": 15, - "to": 10 - }, - { - "id": 11, - "from": 7, - "to": 6 - }, - { - "id": 12, - "from": 14, - "to": 16 - }, - { - "id": 13, - "from": 15, - "to": 17 - }, - { - "id": 14, - "from": 16, - "to": 20 - }, - { - "id": 15, - "from": 20, - "to": 21 - }, - { - "id": 16, - "from": 17, - "to": 18 - }, - { - "id": 17, - "from": 18, - "to": 19 - }, - { - "id": 18, - "from": 5, - "to": 2 - }, - { - "id": 19, - "from": 2, - "to": 1 - }, - { - "id": 20, - "from": 1, - "to": 1 - }, - { - "id": 21, - "from": 3, - "to": 1 - }, - { - "id": 22, - "from": 
3, - "to": 4 - } - ], - "groupId": 0, - "pointLabels": { - "0": "Nose" - }, - "locked": false, - "visible": false, - "attributes": [], - "templateId": -1, - "trackingId": "4fd95b7d6d95b7b84750e65aa89c70b9c86eb3b8", - "createdAt": null, - "createdBy": null, - "creationType": null, - "updatedAt": null, - "updatedBy": null, - "className": "Human" - }, - { - "type": "template", - "classId": 72276, - "probability": 100, - "points": [ - { - "id": 1, - "x": 569.4099335784475, - "y": 411.3099511426366 - }, - { - "id": 2, - "x": 565.2798621579027, - "y": 406.3627038525488 - }, - { - "id": 3, - "x": 567.377754831435, - "y": 405.3775634765625 - }, - { - "id": 4, - "x": 562.1341137290701, - "y": 404.67809199715805 - }, - { - "id": 5, - "x": 554.7715578497942, - "y": 408.0821593507321 - }, - { - "id": 6, - "x": 543.3504267346603, - "y": 422.3509408794715 - }, - { - "id": 7, - "x": 530.5325718803996, - "y": 432.4575436529285 - }, - { - "id": 8, - "x": 513.1264329109782, - "y": 468.5712030528786 - }, - { - "id": 9, - "x": 505.0783099316068, - "y": 498.26488325838557 - }, - { - "id": 10, - "x": 564.5019009957019, - "y": 431.59166109918834 - }, - { - "id": 11, - "x": 572.9879904477306, - "y": 466.0899617391194 - }, - { - "id": 12, - "x": 588.320701407949, - "y": 491.39197319472385 - }, - { - "id": 13, - "x": 547.1874731524312, - "y": 499.0241945917735 - }, - { - "id": 14, - "x": 536.2172232162276, - "y": 499.38451563669537 - }, - { - "id": 15, - "x": 558.2200212079587, - "y": 496.61095606638287 - }, - { - "id": 16, - "x": 565.8375729727319, - "y": 546.3956734358432 - }, - { - "id": 17, - "x": 545.4810409910515, - "y": 549.0779244124057 - }, - { - "id": 18, - "x": 502.6168107549702, - "y": 573.1785073042392 - }, - { - "id": 19, - "x": 506.98697907641065, - "y": 599.8044128417969 - }, - { - "id": 20, - "x": 555.6301612734296, - "y": 594.6135561518564 - }, - { - "id": 21, - "x": 585.93212890625, - "y": 602.2106018066406 - } - ], - "connections": [ - { - "id": 1, - "from": 1, - "to": 6 - }, - { - "id": 2, - "from": 6, - "to": 10 - }, - { - "id": 3, - "from": 10, - "to": 11 - }, - { - "id": 4, - "from": 11, - "to": 12 - }, - { - "id": 5, - "from": 7, - "to": 8 - }, - { - "id": 6, - "from": 8, - "to": 9 - }, - { - "id": 7, - "from": 14, - "to": 7 - }, - { - "id": 8, - "from": 14, - "to": 13 - }, - { - "id": 9, - "from": 13, - "to": 15 - }, - { - "id": 10, - "from": 15, - "to": 10 - }, - { - "id": 11, - "from": 7, - "to": 6 - }, - { - "id": 12, - "from": 14, - "to": 16 - }, - { - "id": 13, - "from": 15, - "to": 17 - }, - { - "id": 14, - "from": 16, - "to": 20 - }, - { - "id": 15, - "from": 20, - "to": 21 - }, - { - "id": 16, - "from": 17, - "to": 18 - }, - { - "id": 17, - "from": 18, - "to": 19 - }, - { - "id": 18, - "from": 5, - "to": 2 - }, - { - "id": 19, - "from": 2, - "to": 1 - }, - { - "id": 20, - "from": 1, - "to": 1 - }, - { - "id": 21, - "from": 3, - "to": 1 - }, - { - "id": 22, - "from": 3, - "to": 4 - } - ], - "groupId": 0, - "pointLabels": { - "0": "Nose" - }, - "locked": false, - "visible": false, - "attributes": [], - "templateId": -1, - "trackingId": "8894b2a1727f62631d26e885a5aaf9bc2ac2a578", - "createdAt": null, - "createdBy": null, - "creationType": null, - "updatedAt": null, - "updatedBy": null, - "className": "Human" - }, - { - "type": "template", - "classId": 72276, - "probability": 100, - "points": [ - { - "id": 1, - "x": 388.9594774956746, - "y": 424.3453820508397 - }, - { - "id": 2, - "x": 383.78257983006284, - "y": 420.2971520947363 - }, - { - "id": 3, - "x": 387.1454388819895, - "y": 
419.5367736816406 - }, - { - "id": 4, - "x": 382.7214935156717, - "y": 418.8373022022362 - }, - { - "id": 5, - "x": 369.81775320578504, - "y": 421.3423522218259 - }, - { - "id": 6, - "x": 368.5353785473912, - "y": 441.4006845318153 - }, - { - "id": 7, - "x": 353.1593986570741, - "y": 443.28386811581913 - }, - { - "id": 8, - "x": 340.9145244608405, - "y": 484.88446599233174 - }, - { - "id": 9, - "x": 337.471170384727, - "y": 516.0647184634637 - }, - { - "id": 10, - "x": 380.0734310110131, - "y": 441.19236910700084 - }, - { - "id": 11, - "x": 392.6590966976267, - "y": 481.59771320396317 - }, - { - "id": 12, - "x": 411.22125244140625, - "y": 510.38843315566135 - }, - { - "id": 13, - "x": 368.27931488725477, - "y": 514.5319460566172 - }, - { - "id": 14, - "x": 361.465192188568, - "y": 515.6977785761485 - }, - { - "id": 15, - "x": 378.7043428557912, - "y": 512.1187075312266 - }, - { - "id": 16, - "x": 393.26020935016874, - "y": 556.5333687483432 - }, - { - "id": 17, - "x": 344.09536524138383, - "y": 562.7657295881869 - }, - { - "id": 18, - "x": 321.86363692684523, - "y": 598.4685463667392 - }, - { - "id": 19, - "x": 345.55514438756916, - "y": 610.3072814941406 - }, - { - "id": 20, - "x": 402.05302902711884, - "y": 603.0690004877939 - }, - { - "id": 21, - "x": 426.8170225465453, - "y": 607.0261535644531 - } - ], - "connections": [ - { - "id": 1, - "from": 1, - "to": 6 - }, - { - "id": 2, - "from": 6, - "to": 10 - }, - { - "id": 3, - "from": 10, - "to": 11 - }, - { - "id": 4, - "from": 11, - "to": 12 - }, - { - "id": 5, - "from": 7, - "to": 8 - }, - { - "id": 6, - "from": 8, - "to": 9 - }, - { - "id": 7, - "from": 14, - "to": 7 - }, - { - "id": 8, - "from": 14, - "to": 13 - }, - { - "id": 9, - "from": 13, - "to": 15 - }, - { - "id": 10, - "from": 15, - "to": 10 - }, - { - "id": 11, - "from": 7, - "to": 6 - }, - { - "id": 12, - "from": 14, - "to": 16 - }, - { - "id": 13, - "from": 15, - "to": 17 - }, - { - "id": 14, - "from": 16, - "to": 20 - }, - { - "id": 15, - "from": 20, - "to": 21 - }, - { - "id": 16, - "from": 17, - "to": 18 - }, - { - "id": 17, - "from": 18, - "to": 19 - }, - { - "id": 18, - "from": 5, - "to": 2 - }, - { - "id": 19, - "from": 2, - "to": 1 - }, - { - "id": 20, - "from": 1, - "to": 1 - }, - { - "id": 21, - "from": 3, - "to": 1 - }, - { - "id": 22, - "from": 3, - "to": 4 - } - ], - "groupId": 0, - "pointLabels": { - "0": "Nose" - }, - "locked": false, - "visible": false, - "attributes": [], - "templateId": -1, - "trackingId": "2fe1f0c6c4af879955d6f19cfcf113a6b929b73", - "createdAt": null, - "createdBy": null, - "creationType": null, - "updatedAt": null, - "updatedBy": null, - "className": "Human" - }, { "type": "polygon", "classId": 72276, diff --git a/tests/data_set/df_data/classes/classes.json b/tests/data_set/video_df_data/classes/classes.json similarity index 100% rename from tests/data_set/df_data/classes/classes.json rename to tests/data_set/video_df_data/classes/classes.json diff --git a/tests/data_set/df_data/folder/video.mp4.json b/tests/data_set/video_df_data/folder/video.mp4.json similarity index 100% rename from tests/data_set/df_data/folder/video.mp4.json rename to tests/data_set/video_df_data/folder/video.mp4.json diff --git a/tests/data_set/df_data/video.mp4.json b/tests/data_set/video_df_data/video.mp4.json similarity index 100% rename from tests/data_set/df_data/video.mp4.json rename to tests/data_set/video_df_data/video.mp4.json diff --git a/tests/integration/aggregations/__init__.py b/tests/integration/aggregations/__init__.py new file mode 100644 index 
000000000..e69de29bb diff --git a/tests/integration/test_df_processing.py b/tests/integration/aggregations/test_df_processing.py similarity index 69% rename from tests/integration/test_df_processing.py rename to tests/integration/aggregations/test_df_processing.py index f96d72013..4be91039d 100644 --- a/tests/integration/test_df_processing.py +++ b/tests/integration/aggregations/test_df_processing.py @@ -12,13 +12,11 @@ class TestDF(BaseTestCase): PROJECT_NAME = "test df processing" PROJECT_DESCRIPTION = "Desc" PROJECT_TYPE = "Vector" - TEST_FOLDER_PATH = "data_set/sample_project_vector" + FOLDER_PATH = "data_set/sample_project_vector" @property def folder_path(self): - return Path( - Path(os.path.join(dirname(dirname(__file__)), self.TEST_FOLDER_PATH)) - ) + return os.path.join(Path(__file__).parent.parent.parent, self.FOLDER_PATH) def test_filter_instances(self): df = sa.aggregate_annotations_as_df(self.folder_path, self.PROJECT_TYPE) @@ -33,13 +31,11 @@ def test_filter_instances(self): class TestDFWithTagInstance(BaseTestCase): PROJECT_TYPE = "Vector" - TEST_FOLDER_PATH = "data_set/sample_project_vector_with_tag" + FOLDER_PATH = "data_set/sample_project_vector_with_tag" @property def folder_path(self): - return Path( - Path(os.path.join(dirname(dirname(__file__)), self.TEST_FOLDER_PATH)) - ) + return os.path.join(Path(__file__).parent.parent.parent, self.FOLDER_PATH) def test_filter_instances(self): df = sa.aggregate_annotations_as_df(self.folder_path, self.PROJECT_TYPE) @@ -48,17 +44,15 @@ def test_filter_instances(self): class TestClassDistributionWithTagInstance(BaseTestCase): PROJECT_TYPE = "Vector" - EXPORT_ROOT_PATH = "data_set" + FOLDER_PATH = "data_set" PROJECT_NAME = "sample_project_vector_with_tag" @property - def root_path(self): - return Path( - Path(os.path.join(dirname(dirname(__file__)), self.EXPORT_ROOT_PATH)) - ) + def folder_path(self): + return os.path.join(Path(__file__).parent.parent.parent, self.FOLDER_PATH) @pytest.mark.skip(reason="Need to adjust") def test_filter_instances(self): - df = sa.class_distribution(export_root=self.root_path, project_names=[self.PROJECT_NAME]) + df = sa.class_distribution(export_root=self.folder_path, project_names=[self.PROJECT_NAME]) self.assertEqual(df.iloc[0]['count'], 1) self.assertEqual(df.iloc[0]['className'], "Weather") diff --git a/tests/integration/aggregations/test_docuement_annotation_to_df.py b/tests/integration/aggregations/test_docuement_annotation_to_df.py new file mode 100644 index 000000000..cab6a5c7d --- /dev/null +++ b/tests/integration/aggregations/test_docuement_annotation_to_df.py @@ -0,0 +1,45 @@ +import os +from pathlib import Path +from distutils.dir_util import copy_tree +import tempfile +from unittest import mock +from unittest import TestCase + +import src.superannotate as sa +from src.superannotate.logger import get_default_logger + + +class TestAggregateDocumentAnnotation(TestCase): + PROJECT_TYPE = "Document" + FOLDER_PATH = "data_set/document_df_data" + + @property + def folder_path(self): + return os.path.join(Path(__file__).parent.parent.parent, self.FOLDER_PATH) + + def test_data_filling(self): + df = sa.aggregate_annotations_as_df(self.folder_path, self.PROJECT_TYPE, None) + instance_ids = {i for i in df.instanceId if i is not None} + tag_ids = {i for i in df.tagId if i is not None} + + self.assertEqual(instance_ids, {0, 1}) + self.assertEqual(tag_ids, {0, 1}) + + def test_nested_folders_data_filling(self): + df = sa.aggregate_annotations_as_df(self.folder_path, self.PROJECT_TYPE) + folder_names = 
{i for i in df.folderName} + + self.assertEqual(folder_names, {"folder", None}) + + def test_nested_folder_data_filling(self): + df = sa.aggregate_annotations_as_df(self.folder_path, self.PROJECT_TYPE, folder_names=["folder"]) + folder_names = {i for i in df.folderName} + self.assertEqual(folder_names, {"folder"}) + + def test_empty_folder_log(self): + with tempfile.TemporaryDirectory() as temp_dir: + copy_tree(f"{self.folder_path}/classes", f"{temp_dir}/classes") + logger = get_default_logger() + with mock.patch.object(logger, 'warning') as mock_log: + _ = sa.aggregate_annotations_as_df(temp_dir, self.PROJECT_TYPE) + mock_log.assert_called_with(f"Could not find annotations in {temp_dir}.") diff --git a/tests/integration/aggregations/test_video_annotation_to_df.py b/tests/integration/aggregations/test_video_annotation_to_df.py index 21acf333b..65b8e16b7 100644 --- a/tests/integration/aggregations/test_video_annotation_to_df.py +++ b/tests/integration/aggregations/test_video_annotation_to_df.py @@ -2,7 +2,6 @@ from pathlib import Path from distutils.dir_util import copy_tree import tempfile -import logging from unittest import mock from unittest import TestCase @@ -12,7 +11,7 @@ class TestAggregateVideoAnnotation(TestCase): PROJECT_TYPE = "Video" - FOLDER_PATH = "data_set/df_data" + FOLDER_PATH = "data_set/video_df_data" @property def folder_path(self): diff --git a/tests/integration/annotations/test_annotation_class_new.py b/tests/integration/annotations/test_annotation_class_new.py index 412e2461b..8831874a8 100644 --- a/tests/integration/annotations/test_annotation_class_new.py +++ b/tests/integration/annotations/test_annotation_class_new.py @@ -8,7 +8,6 @@ class TestAnnotationClasses(BaseTestCase): PROJECT_NAME = "test_annotation_class_new" - PROJECT_NAME_JSON = "test_annotation_class_json" PROJECT_DESCRIPTION = "desc" PROJECT_TYPE = "Vector" @@ -34,11 +33,11 @@ def test_annotation_classes_filter(self): def test_create_annotation_class_from_json(self): sa.create_annotation_classes_from_classes_json( - self.PROJECT_NAME_JSON, self.classes_json + self.PROJECT_NAME, self.classes_json ) - self.assertEqual(len(sa.search_annotation_classes(self.PROJECT_NAME_JSON)), 4) + self.assertEqual(len(sa.search_annotation_classes(self.PROJECT_NAME)), 4) sa.create_annotation_classes_from_classes_json( - self.PROJECT_NAME_JSON, self.classes_json + self.PROJECT_NAME, self.classes_json ) - self.assertEqual(len(sa.search_annotation_classes(self.PROJECT_NAME_JSON)), 4) + self.assertEqual(len(sa.search_annotation_classes(self.PROJECT_NAME)), 4) diff --git a/tests/integration/annotations/test_annotation_delete.py b/tests/integration/annotations/test_annotation_delete.py index 441aeabce..9c852fb24 100644 --- a/tests/integration/annotations/test_annotation_delete.py +++ b/tests/integration/annotations/test_annotation_delete.py @@ -1,8 +1,8 @@ import os -from os.path import dirname -import pytest from pathlib import Path +import pytest + import src.superannotate as sa from tests.integration.base import BaseTestCase @@ -23,8 +23,8 @@ def folder_path(self): @property def classes_json(self): return os.path.join(Path(__file__).parent.parent.parent, - "data_set/sample_project_vector/classes/classes.json", - ) + "data_set/sample_project_vector/classes/classes.json", + ) def test_delete_annotations(self): sa.upload_images_from_folder_to_project( @@ -39,7 +39,12 @@ def test_delete_annotations(self): sa.delete_annotations(self.PROJECT_NAME) annotations = sa.get_annotations(self.PROJECT_NAME, [self.EXAMPLE_IMAGE_1]) - 
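The document aggregation tests above (test_docuement_annotation_to_df.py) drive the DataFrame export for Document projects. A minimal sketch of the call they rely on; the export path is a placeholder for a folder laid out like the new document_df_data fixture (classes/classes.json plus per-item annotation JSON files).

import superannotate as sa

# Aggregate a local Document-project export into a pandas DataFrame.
df = sa.aggregate_annotations_as_df("./document_export", "Document")

# Columns checked by the new tests: entity instances and tags get ids,
# and each row carries the folder it came from (None for the project root).
print(df[["instanceId", "tagId", "folderName"]].head())

# Limit the aggregation to specific sub-folders of the export.
df = sa.aggregate_annotations_as_df("./document_export", "Document", folder_names=["folder"])
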
assert annotations == [{'metadata': {'name': 'example_image_1.jpg'}, 'instances': []}] + del annotations[0]["metadata"]["projectId"] + assert annotations == [ + {'metadata': {'name': 'example_image_1.jpg', 'height': 683, 'width': 1024, + 'isPredicted': False, 'status': 'NotStarted', 'pinned': False, 'annotatorEmail': None, + 'qaEmail': None}, 'instances': [], 'tags': [], 'comments': []} + ] def test_delete_annotations_by_name(self): sa.upload_images_from_folder_to_project( @@ -53,7 +58,12 @@ def test_delete_annotations_by_name(self): ) sa.delete_annotations(self.PROJECT_NAME, [self.EXAMPLE_IMAGE_1]) annotations = sa.get_annotations(self.PROJECT_NAME, [self.EXAMPLE_IMAGE_1]) - assert annotations == [{'metadata': {'name': 'example_image_1.jpg'}, 'instances': []}] + del annotations[0]["metadata"]["projectId"] + assert annotations == [ + {'metadata': {'name': 'example_image_1.jpg', 'height': 683, 'width': 1024, + 'isPredicted': False, 'status': 'NotStarted', 'pinned': False, 'annotatorEmail': None, + 'qaEmail': None}, 'instances': [], 'tags': [], 'comments': []} + ] def test_delete_annotations_by_not_existing_name(self): sa.upload_images_from_folder_to_project( @@ -95,4 +105,4 @@ def test_delete_annotations_from_folder(self): ) sa.delete_annotations(f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME}", [self.EXAMPLE_IMAGE_1]) annotations = sa.get_annotations(self.PROJECT_NAME, [self.EXAMPLE_IMAGE_1]) - assert len(annotations) == 0 \ No newline at end of file + assert len(annotations) == 0 diff --git a/tests/integration/annotations/test_annotation_upload_pixel.py b/tests/integration/annotations/test_annotation_upload_pixel.py index d078c7c30..db974e15e 100644 --- a/tests/integration/annotations/test_annotation_upload_pixel.py +++ b/tests/integration/annotations/test_annotation_upload_pixel.py @@ -1,13 +1,11 @@ import os from os.path import join from pathlib import Path -import json from unittest.mock import patch import src.superannotate as sa from tests.integration.base import BaseTestCase -import tempfile import pytest @@ -28,7 +26,7 @@ def inject_fixtures(self, caplog): def folder_path(self): return os.path.join(Path(__file__).parent.parent.parent, self.TEST_FOLDER_PATH) - @pytest.mark.flaky(reruns=2) + @pytest.mark.flaky(reruns=4) @patch("lib.core.usecases.annotations.UploadAnnotationUseCase.s3_bucket") def test_recursive_annotation_upload_pixel(self, s3_bucket): sa.create_folder(self.PROJECT_NAME, self.FOLDER) diff --git a/tests/integration/annotations/test_annotation_upload_vector.py b/tests/integration/annotations/test_annotation_upload_vector.py index ed7b44a2b..e7d0cef74 100644 --- a/tests/integration/annotations/test_annotation_upload_vector.py +++ b/tests/integration/annotations/test_annotation_upload_vector.py @@ -59,9 +59,10 @@ def test_annotation_folder_upload_download(self): _, _, _ = sa.upload_annotations_from_folder_to_project( self.PROJECT_NAME, self.folder_path ) - images = sa.search_images(self.PROJECT_NAME) + images = sa.search_items(self.PROJECT_NAME) with tempfile.TemporaryDirectory() as tmp_dir: - for image_name in images: + for image in images: + image_name = image["name"] annotation_path = join(self.folder_path, f"{image_name}___objects.json") sa.download_image_annotations(self.PROJECT_NAME, image_name, tmp_dir) origin_annotation = json.load(open(annotation_path)) diff --git a/tests/integration/annotations/test_annotations_upload_status_change.py b/tests/integration/annotations/test_annotations_upload_status_change.py index 546105611..f64c3e095 100644 --- 
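Several of the test updates above replace the removed image helpers with the item API; the migration pattern is roughly the following (project and item names are illustrative).

import superannotate as sa

# search_items returns item metadata dicts instead of bare name strings,
# so loops now read the "name" field explicitly.
for item in sa.search_items("Example Project"):
    print(item["name"])

# get_item_metadata stands in for the removed get_image_metadata call.
metadata = sa.get_item_metadata("Example Project", "example_image_1.jpg")
print(metadata["annotation_status"])
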
a/tests/integration/annotations/test_annotations_upload_status_change.py +++ b/tests/integration/annotations/test_annotations_upload_status_change.py @@ -31,7 +31,7 @@ def test_upload_annotations_from_folder_to_project__upload_status(self, reporter sa.upload_annotations_from_folder_to_project(self.PROJECT_NAME, self.folder_path) self.assertEqual( constances.AnnotationStatus.IN_PROGRESS.name, - sa.get_image_metadata(self.PROJECT_NAME, self.IMAGE_NAME)["annotation_status"] + sa.get_item_metadata(self.PROJECT_NAME, self.IMAGE_NAME)["annotation_status"] ) @pytest.mark.flaky(reruns=2) @@ -43,7 +43,7 @@ def test_upload_preannotations_from_folder_to_project__upload_status(self, repor sa.upload_preannotations_from_folder_to_project(self.PROJECT_NAME, self.folder_path) self.assertEqual( constances.AnnotationStatus.IN_PROGRESS.name, - sa.get_image_metadata(self.PROJECT_NAME, self.IMAGE_NAME)["annotation_status"] + sa.get_item_metadata(self.PROJECT_NAME, self.IMAGE_NAME)["annotation_status"] ) @pytest.mark.flaky(reruns=2) @@ -56,7 +56,7 @@ def test_upload_image_annotations__upload_status(self, reporter): sa.upload_image_annotations(self.PROJECT_NAME, self.IMAGE_NAME, annotation_path) self.assertEqual( constances.AnnotationStatus.IN_PROGRESS.name, - sa.get_image_metadata(self.PROJECT_NAME, self.IMAGE_NAME)["annotation_status"] + sa.get_item_metadata(self.PROJECT_NAME, self.IMAGE_NAME)["annotation_status"] ) @pytest.mark.flaky(reruns=2) @@ -68,7 +68,7 @@ def test_add_annotation_bbox_to_image__annotation_status(self, reporter): sa.add_annotation_bbox_to_image(self.PROJECT_NAME, self.IMAGE_NAME, [1, 2, 3, 4], "bbox") self.assertEqual( constances.AnnotationStatus.IN_PROGRESS.name, - sa.get_image_metadata(self.PROJECT_NAME, self.IMAGE_NAME)["annotation_status"] + sa.get_item_metadata(self.PROJECT_NAME, self.IMAGE_NAME)["annotation_status"] ) @pytest.mark.flaky(reruns=2) @@ -85,5 +85,5 @@ def test_add_annotation_comment_to_image__annotation_status(self, reporter): "user@superannoate.com") self.assertEqual( constances.AnnotationStatus.IN_PROGRESS.name, - sa.get_image_metadata(self.PROJECT_NAME, self.IMAGE_NAME)["annotation_status"] + sa.get_item_metadata(self.PROJECT_NAME, self.IMAGE_NAME)["annotation_status"] ) diff --git a/tests/integration/annotations/test_preannotation_upload.py b/tests/integration/annotations/test_preannotation_upload.py index 1d8598b63..6a5e00e44 100644 --- a/tests/integration/annotations/test_preannotation_upload.py +++ b/tests/integration/annotations/test_preannotation_upload.py @@ -27,9 +27,10 @@ def test_pre_annotation_folder_upload_download(self): self.PROJECT_NAME, self.folder_path ) count_in = len(list(Path(self.folder_path).glob("*.json"))) - images = sa.search_images(self.PROJECT_NAME) + images = sa.search_items(self.PROJECT_NAME) with tempfile.TemporaryDirectory() as tmp_dir: - for image_name in images: + for image in images: + image_name = image["name"] sa.download_image_annotations(self.PROJECT_NAME, image_name, tmp_dir) count_out = len(list(Path(tmp_dir).glob("*.json"))) diff --git a/tests/integration/annotations/test_upload_annotations_from_folder_to_project.py b/tests/integration/annotations/test_upload_annotations_from_folder_to_project.py index 20a36a6af..175f95d0f 100644 --- a/tests/integration/annotations/test_upload_annotations_from_folder_to_project.py +++ b/tests/integration/annotations/test_upload_annotations_from_folder_to_project.py @@ -31,9 +31,10 @@ def test_annotation_folder_upload_download(self): _, _, _ = sa.upload_annotations_from_folder_to_project( 
self.PROJECT_NAME, self.folder_path ) - images = sa.search_images(self.PROJECT_NAME) + images = sa.search_items(self.PROJECT_NAME) with tempfile.TemporaryDirectory() as tmp_dir: - for image_name in images: + for image in images: + image_name = image["name"] annotation_path = join(self.folder_path, f"{image_name}___objects.json") sa.download_image_annotations(self.PROJECT_NAME, image_name, tmp_dir) origin_annotation = json.load(open(annotation_path)) diff --git a/tests/integration/base.py b/tests/integration/base.py index 9171ad581..c49899957 100644 --- a/tests/integration/base.py +++ b/tests/integration/base.py @@ -15,6 +15,7 @@ def __init__(self, *args, **kwargs): def setUp(self, *args, **kwargs): self.tearDown() + print(self.PROJECT_NAME) self._project = sa.create_project( self.PROJECT_NAME, self.PROJECT_DESCRIPTION, self.PROJECT_TYPE ) diff --git a/tests/integration/folders/test_folders.py b/tests/integration/folders/test_folders.py index 0094bfd1c..433a17896 100644 --- a/tests/integration/folders/test_folders.py +++ b/tests/integration/folders/test_folders.py @@ -49,7 +49,7 @@ def test_basic_folders(self): sa.upload_images_from_folder_to_project( self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" ) - images = sa.search_images(self.PROJECT_NAME, self.EXAMPLE_IMAGE_1) + images = sa.search_items(self.PROJECT_NAME, self.EXAMPLE_IMAGE_1) self.assertEqual(len(images), 1) folders = sa.search_folders(self.PROJECT_NAME) @@ -68,12 +68,12 @@ def test_basic_folders(self): self.assertEqual(folders[0], self.TEST_FOLDER_NAME_1) - images = sa.search_images( + images = sa.search_items( self.PROJECT_NAME + f"/{self.TEST_FOLDER_NAME_1}", self.EXAMPLE_IMAGE_1 ) self.assertEqual(len(images), 0) - images = sa.search_images_all_folders(self.PROJECT_NAME, self.EXAMPLE_IMAGE_1) + images = sa.search_items(self.PROJECT_NAME, self.EXAMPLE_IMAGE_1) self.assertEqual(len(images), 1) folder = sa.get_folder_metadata(self.PROJECT_NAME, self.TEST_FOLDER_NAME_1) @@ -88,7 +88,7 @@ def test_basic_folders(self): self.folder_path, annotation_status="InProgress", ) - images = sa.search_images( + images = sa.search_items( f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME_1}", self.EXAMPLE_IMAGE_1 ) self.assertEqual(len(images), 1) @@ -98,7 +98,7 @@ def test_basic_folders(self): self.folder_path, annotation_status="InProgress", ) - images = sa.search_images(f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME_1}") + images = sa.search_items(f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME_1}") self.assertEqual(len(images), 4) folder_metadata = sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME_2) @@ -167,7 +167,6 @@ def test_delete_folders(self): self.assertEqual(len(sa.search_folders(self.PROJECT_NAME)), 1) self.assertEqual(sa.search_folders(self.PROJECT_NAME)[0], "folder6") - def test_project_folder_image_count(self): sa.upload_images_from_folder_to_project( self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" @@ -223,198 +222,6 @@ def test_delete_images(self): ) self.assertEqual(num_images, 0) - def test_copy_images3(self): - sa.upload_images_from_folder_to_project( - self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" - ) - sa.create_folder(f"{self.PROJECT_NAME}", self.TEST_FOLDER_NAME_1) - time.sleep(1) - sa.copy_images( - f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME_1}", - [self.EXAMPLE_IMAGE_2, self.EXAMPLE_IMAGE_3], - f"{self.PROJECT_NAME}", - include_annotations=False, - copy_pin=False, - ) - assert ( - "Copied 2/2 images from test copy3 folder images to test copy3 folder 
images/folder_1" - ) - - num_images = sa.get_project_image_count(self.PROJECT_NAME) - assert num_images == 4 - - def test_copy_images4(self): - sa.upload_images_from_folder_to_project( - self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" - ) - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME_1) - project = f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME_1}" - - sa.copy_images( - self.PROJECT_NAME, [self.EXAMPLE_IMAGE_2, self.EXAMPLE_IMAGE_3], project - ) - - num_images = sa.get_project_image_count(project) - self.assertEqual(num_images, 2) - - num_images = sa.get_project_image_count(self.PROJECT_NAME) - self.assertEqual(num_images, 4) - - def test_copy_images(self): - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME_1) - project = f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME_1}" - sa.upload_images_from_folder_to_project( - project, self.folder_path, annotation_status="InProgress" - ) - num_images = sa.get_project_image_count(project) - self.assertEqual(num_images, 4) - - im1 = sa.get_image_metadata(project, self.EXAMPLE_IMAGE_2) - self.assertEqual(im1["annotation_status"], "InProgress") - - sa.create_folder(self.PROJECT_NAME, "folder2") - project2 = self.PROJECT_NAME + "/folder2" - num_images = sa.get_project_image_count(project2) - self.assertEqual(num_images, 0) - - sa.copy_images( - project, - [self.EXAMPLE_IMAGE_2, self.EXAMPLE_IMAGE_3], - project2, - include_annotations=False, - copy_pin=False, - ) - - im1_copied = sa.get_image_metadata(project2, self.EXAMPLE_IMAGE_2) - self.assertEqual(im1_copied["annotation_status"], "NotStarted") - - num_images = sa.get_project_image_count(project2) - self.assertEqual(num_images, 2) - - sa.copy_images(project, None, project2) - - num_images = sa.get_project_image_count(project2) - self.assertEqual(num_images, 4) - - sa.copy_images( - project, - [self.EXAMPLE_IMAGE_2, self.EXAMPLE_IMAGE_3], - self.PROJECT_NAME, - include_annotations=False, - copy_pin=False, - ) - num_images = sa.get_project_image_count(self.PROJECT_NAME) - self.assertEqual(num_images, 2) - - def test_move_images(self): - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME_1) - project = f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME_1}" - sa.upload_images_from_folder_to_project( - project, self.folder_path, annotation_status="InProgress" - ) - num_images = sa.get_project_image_count(project) - self.assertEqual(num_images, 4) - - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME_2) - project2 = self.PROJECT_NAME + "/" + self.TEST_FOLDER_NAME_2 - num_images = sa.get_project_image_count(project2) - self.assertEqual(num_images, 0) - - sa.move_images(project, [self.EXAMPLE_IMAGE_2], project2) - num_images = sa.get_project_image_count(project2) - self.assertEqual(num_images, 1) - - num_images = sa.get_project_image_count(project) - self.assertEqual(num_images, 3) - - num_images = sa.get_project_image_count( - self.PROJECT_NAME, with_all_subfolders=True - ) - self.assertEqual(num_images, 4) - - images = sa.search_images_all_folders(self.PROJECT_NAME) - self.assertEqual( - images, - [ - self.EXAMPLE_IMAGE_1, - self.EXAMPLE_IMAGE_2, - self.EXAMPLE_IMAGE_3, - self.EXAMPLE_IMAGE_4, - ], - ) - - def test_move_images2(self): - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME_1) - project = f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME_1}" - sa.upload_images_from_folder_to_project( - project, self.folder_path, annotation_status="InProgress" - ) - num_images = sa.get_project_image_count(project) - self.assertEqual(num_images, 4) - - 
sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME_2) - project2 = self.PROJECT_NAME + "/" + self.TEST_FOLDER_NAME_2 - num_images = sa.get_project_image_count(project2) - self.assertEqual(num_images, 0) - - sa.move_images(project, None, project2) - num_images = sa.get_project_image_count(project2) - self.assertEqual(num_images, 4) - - num_images = sa.get_project_image_count(project) - self.assertEqual(num_images, 0) - - def test_folder_export(self): - - sa.create_annotation_classes_from_classes_json( - self.PROJECT_NAME, self.classes_json - ) - sa.upload_images_from_folder_to_project( - self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" - ) - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME_1) - project = f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME_1}" - sa.upload_images_from_folder_to_project( - project, self.folder_path, annotation_status="InProgress" - ) - - sa.upload_annotations_from_folder_to_project(project, self.folder_path) - num_images = sa.get_project_image_count(project) - self.assertEqual(num_images, 4) - - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME_2) - project2 = f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME_2}" - num_images = sa.get_project_image_count(project2) - self.assertEqual(num_images, 0) - - sa.copy_images(project, [self.EXAMPLE_IMAGE_2, self.EXAMPLE_IMAGE_3], project2) - - export = sa.prepare_export( - self.PROJECT_NAME, [self.TEST_FOLDER_NAME_2, self.TEST_FOLDER_NAME_1] - ) - with tempfile.TemporaryDirectory() as temp_dir: - temp_dir = pathlib.Path(temp_dir) - sa.download_export(project, export, temp_dir) - self.assertEqual(len(list((temp_dir / "classes").rglob("*"))), 1) - self.assertEqual( - len(list((temp_dir / self.TEST_FOLDER_NAME_1).rglob("*"))), 4 - ) - self.assertEqual( - len(list((temp_dir / self.TEST_FOLDER_NAME_2).rglob("*"))), 2 - ) - self.assertEqual(len(list((temp_dir).glob("*.*"))), 0) - - export = sa.prepare_export(self.PROJECT_NAME) - sa.download_export(project, export, temp_dir) - self.assertEqual(len(list((temp_dir / "classes").rglob("*"))), 1) - self.assertEqual( - len(list((temp_dir / self.TEST_FOLDER_NAME_1).rglob("*"))), 4 - ) - self.assertEqual( - len(list((temp_dir / self.TEST_FOLDER_NAME_2).rglob("*"))), 2 - ) - self.assertEqual(len(list((temp_dir).glob("*.*"))), 4) - @pytest.mark.flaky(reruns=2) def test_project_completed_count(self): sa.upload_images_from_folder_to_project( @@ -427,33 +234,9 @@ def test_project_completed_count(self): ) project_metadata = sa.get_project_metadata(self.PROJECT_NAME, include_complete_image_count=True) self.assertEqual(project_metadata['completed_images_count'], 8) - self.assertEqual(project_metadata['rootFolderCompletedImagesCount'], 4) - - def test_folder_image_annotation_status(self): - sa.upload_images_from_folder_to_project( - self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" - ) - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME_1) - project = f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME_1}" - sa.upload_images_from_folder_to_project( - project, self.folder_path, annotation_status="InProgress" - ) - sa.set_images_annotation_statuses( - project, "QualityCheck", [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2], - ) - for image in [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2]: - metadata = sa.get_image_metadata(project, image) - self.assertEqual(metadata["annotation_status"], "QualityCheck") - - for image in [self.EXAMPLE_IMAGE_3]: - metadata = sa.get_image_metadata(project, image) - self.assertEqual(metadata["annotation_status"], 
"InProgress") + self.assertEqual(project_metadata['root_folder_completed_images_count'], 4) - sa.set_images_annotation_statuses(self.PROJECT_NAME, "QualityCheck", None,) - for image in sa.search_images(self.PROJECT_NAME): - metadata = sa.get_image_metadata(self.PROJECT_NAME, image) - self.assertEqual(metadata["annotation_status"], "QualityCheck") def test_folder_misnamed(self): diff --git a/tests/integration/integrations/test_get_integrations.py b/tests/integration/integrations/test_get_integrations.py index 0dd62d5f0..56673a9cc 100644 --- a/tests/integration/integrations/test_get_integrations.py +++ b/tests/integration/integrations/test_get_integrations.py @@ -12,7 +12,7 @@ class TestGetIntegrations(BaseTestCase): TEST_FOLDER_NAME = "test_folder" PROJECT_DESCRIPTION = "desc" PROJECT_TYPE = "Vector" - EXAMPLE_IMAGE = "example_image_1.jpg" + EXAMPLE_IMAGE = "egit xample_image_1.jpg" @property def folder_path(self): diff --git a/tests/integration/items/__init__.py b/tests/integration/items/__init__.py index c9606671a..95b7f3811 100644 --- a/tests/integration/items/__init__.py +++ b/tests/integration/items/__init__.py @@ -1,6 +1,6 @@ ITEM_EXPECTED_KEYS = [ - "name", "path", "url", "annotation_status", "annotator_name", - "qa_name", "entropy_value", "createdAt", "updatedAt" + "name", "path", "url", "annotation_status", "annotator_email", + "qa_email", "entropy_value", "createdAt", "updatedAt" ] IMAGE_EXPECTED_KEYS = ITEM_EXPECTED_KEYS + ["segmentation_status", "prediction_status", "approval_status"] diff --git a/tests/integration/items/test_attach_items.py b/tests/integration/items/test_attach_items.py new file mode 100644 index 000000000..39adfe579 --- /dev/null +++ b/tests/integration/items/test_attach_items.py @@ -0,0 +1,71 @@ +import os +from pathlib import Path + +import src.superannotate as sa +from tests.integration.base import BaseTestCase + + +class TestAttachItemsVector(BaseTestCase): + PROJECT_NAME = "TestAttachItemsVector" + PROJECT_DESCRIPTION = "TestAttachItemsVector" + PROJECT_TYPE = "Vector" + FOLDER_NAME = "test_folder" + CSV_PATH = "data_set/attach_urls.csv" + PATH_TO_50K_URLS = "data_set/501_urls.csv" + ATTACHED_IMAGE_NAME = "6022a74d5384c50017c366b3" + ATTACHMENT_LIST = [ + { + "url": "https://drive.google.com/uc?export=download&id=1vwfCpTzcjxoEA4hhDxqapPOVvLVeS7ZS", + "name": "6022a74d5384c50017c366b3" + }, + { + "url": "https://drive.google.com/uc?export=download&id=1geS2YtQiTYuiduEirKVYxBujHJaIWA3V", + "name": "6022a74b5384c50017c366ad" + }, + { + "url": "1SfGcn9hdkVM35ZP0S93eStsE7Ti4GtHU", + "path": "123" + }, + { + "url": "https://drive.google.com/uc?export=download&id=1geS2YtQiTYuiduEirKVYxBujHJaIWA3V", + "name": "6022a74b5384c50017c366ad" + }, + ] + + @property + def scv_path(self): + return os.path.join(Path(__file__).parent.parent.parent, self.CSV_PATH) + + @property + def scv_path_50k(self): + return os.path.join(Path(__file__).parent.parent.parent, self.PATH_TO_50K_URLS) + + def test_attached_items_csv(self): + uploaded, _, _ = sa.attach_items(self.PROJECT_NAME, self.scv_path) + assert len(uploaded) == 7 + uploaded, _, duplicated = sa.attach_items(self.PROJECT_NAME, self.scv_path) + assert len(uploaded) == 2 + assert len(duplicated) == 5 + + def test_attached_items_list_of_dict(self): + uploaded, _, _ = sa.attach_items(self.PROJECT_NAME, self.ATTACHMENT_LIST) + assert len(uploaded) == 3 + uploaded, _, duplicated = sa.attach_items(self.PROJECT_NAME, self.ATTACHMENT_LIST) + assert len(uploaded) == 1 + assert len(duplicated) == 2 + + def 
test_attach_items_to_folder(self): + sa.create_folder(self.PROJECT_NAME, self.FOLDER_NAME) + uploaded, _, _ = sa.attach_items(f"{self.PROJECT_NAME}/{self.FOLDER_NAME}", self.ATTACHMENT_LIST) + assert len(uploaded) == 3 + uploaded, _, duplicated = sa.attach_items(f"{self.PROJECT_NAME}/{self.FOLDER_NAME}", self.ATTACHMENT_LIST) + assert len(uploaded) == 1 + assert len(duplicated) == 2 + + def test_limitation(self): + self.assertRaises( + Exception, + sa.attach_items, + self.PROJECT_NAME, + self.scv_path_50k + ) diff --git a/tests/integration/items/test_copy_items.py b/tests/integration/items/test_copy_items.py new file mode 100644 index 000000000..18a01faf2 --- /dev/null +++ b/tests/integration/items/test_copy_items.py @@ -0,0 +1,75 @@ +import os +from collections import Counter +from pathlib import Path + +import src.superannotate as sa +from tests.integration.base import BaseTestCase + + +class TestCopyItems(BaseTestCase): + PROJECT_NAME = "TestCopyItemsVector" + PROJECT_DESCRIPTION = "TestCopyItemsVector" + PROJECT_TYPE = "Vector" + IMAGE_NAME ="test_image" + FOLDER_1 = "folder_1" + FOLDER_2 = "folder_2" + CSV_PATH = "data_set/attach_urls.csv" + + @property + def scv_path(self): + return os.path.join(Path(__file__).parent.parent.parent, self.CSV_PATH) + + def test_copy_items_from_root(self): + uploaded, _, _ = sa.attach_items(self.PROJECT_NAME, self.scv_path) + assert len(uploaded) == 7 + sa.create_folder(self.PROJECT_NAME, self.FOLDER_1) + skipped_items = sa.copy_items(self.PROJECT_NAME, f"{self.PROJECT_NAME}/{self.FOLDER_1}") + assert len(skipped_items) == 0 + assert len(sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}")) == 7 + + def test_copy_items_from_folder(self): + sa.create_folder(self.PROJECT_NAME, self.FOLDER_1) + sa.create_folder(self.PROJECT_NAME, self.FOLDER_2) + uploaded, _, _ = sa.attach_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}", self.scv_path) + assert len(uploaded) == 7 + skipped_items = sa.copy_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}", f"{self.PROJECT_NAME}/{self.FOLDER_2}") + assert len(skipped_items) == 0 + assert len(sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_2}")) == 7 + + def test_skipped_count(self): + sa.create_folder(self.PROJECT_NAME, self.FOLDER_1) + uploaded, _, _ = sa.attach_items(f"{self.PROJECT_NAME}", self.scv_path) + _ = sa.copy_items(f"{self.PROJECT_NAME}", f"{self.PROJECT_NAME}/{self.FOLDER_1}") + skipped_items = sa.copy_items(f"{self.PROJECT_NAME}", f"{self.PROJECT_NAME}/{self.FOLDER_1}") + assert len(skipped_items) == 7 + + def test_copy_item_with_annotations(self): + uploaded, _, _ = sa.attach_items( + self.PROJECT_NAME, [ + {"url": "https://drive.google.com/uc?export=download&id=1vwfCpTzcjxoEA4hhDxqapPOVvLVeS7ZS", + "name": self.IMAGE_NAME} + ] + ) + assert len(uploaded) == 1 + sa.create_annotation_class(self.PROJECT_NAME, "test_class", "#FF0000") + sa.add_annotation_bbox_to_image(self.PROJECT_NAME, self.IMAGE_NAME, [1, 2, 3, 4], "test_class") + sa.create_folder(self.PROJECT_NAME, self.FOLDER_1) + skipped_items = sa.copy_items( + self.PROJECT_NAME, f"{self.PROJECT_NAME}/{self.FOLDER_1}", include_annotations=True + ) + annotations = sa.get_annotations(f"{self.PROJECT_NAME}/{self.FOLDER_1}") + assert len(annotations) == 1 + assert len(skipped_items) == 0 + assert len(sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}")) == 1 + + def test_copy_items_wrong_items_list(self): + uploaded, _, _ = sa.attach_items( + self.PROJECT_NAME, [ + {"url": "https://drive.google.com/uc?export=download&id=1vwfCpTzcjxoEA4hhDxqapPOVvLVeS7ZS", + 
"name": self.IMAGE_NAME} + ] + ) + sa.create_folder(self.PROJECT_NAME, self.FOLDER_1) + skipped_items = sa.copy_items(self.PROJECT_NAME, f"{self.PROJECT_NAME}/{self.FOLDER_1}", items=["as", "asd"]) + assert Counter(skipped_items) == Counter(["as", "asd"]) + assert len(sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}")) == 0 diff --git a/tests/integration/items/test_get_item_metadata.py b/tests/integration/items/test_get_item_metadata.py index bc3789815..1dff00cd9 100644 --- a/tests/integration/items/test_get_item_metadata.py +++ b/tests/integration/items/test_get_item_metadata.py @@ -61,3 +61,29 @@ def test_get_item_metadata(self): assert item_metadata["prediction_status"] == "NotStarted" assert item_metadata["segmentation_status"] == "NotStarted" assert item_metadata["annotation_status"] == "InProgress" + + +class TestGetEntityMetadataVideo(BaseTestCase): + PROJECT_NAME = "TestGetEntityMetadataVideo" + PROJECT_DESCRIPTION = "TestGetEntityMetadataVideo" + PROJECT_TYPE = "Video" + TEST_FOLDER_PATH = "data_set/sample_project_vector" + ITEM_NAME = "example_image_1.jpg" + + @property + def folder_path(self): + return os.path.join(Path(__file__).parent.parent.parent, self.TEST_FOLDER_PATH) + + def test_get_item_metadata(self): + sa.attach_items( + self.PROJECT_NAME, [ + { + "url": "https://drive.google.com/uc?export=download&id=1vwfCpTzcjxoEA4hhDxqapPOVvLVeS7ZS", + "name": self.ITEM_NAME + } + ] + ) + item_metadata = sa.get_item_metadata(self.PROJECT_NAME, self.ITEM_NAME) + assert item_metadata["path"] == f"{self.PROJECT_NAME}/{self.ITEM_NAME}" + assert "prediction_status" not in item_metadata + assert "segmentation_status" not in item_metadata diff --git a/tests/integration/items/test_move_items.py b/tests/integration/items/test_move_items.py new file mode 100644 index 000000000..cf408665f --- /dev/null +++ b/tests/integration/items/test_move_items.py @@ -0,0 +1,56 @@ +import os +from pathlib import Path + +import src.superannotate as sa +from tests.integration.base import BaseTestCase + + +class TestMoveItems(BaseTestCase): + PROJECT_NAME = "TestMoveItemsVector" + PROJECT_DESCRIPTION = "TestCopyItemsVector" + PROJECT_TYPE = "Vector" + IMAGE_NAME = "test_image" + FOLDER_1 = "folder_1" + FOLDER_2 = "folder_2" + CSV_PATH = "data_set/attach_urls.csv" + + @property + def scv_path(self): + return os.path.join(Path(__file__).parent.parent.parent, self.CSV_PATH) + + def test_move_items_from_root(self): + uploaded, _, _ = sa.attach_items(self.PROJECT_NAME, self.scv_path) + assert len(uploaded) == 7 + sa.create_folder(self.PROJECT_NAME, self.FOLDER_1) + skipped_items = sa.move_items(self.PROJECT_NAME, f"{self.PROJECT_NAME}/{self.FOLDER_1}") + assert len(skipped_items) == 0 + assert len(sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}")) == 7 + + def test_move_items_from_folder(self): + sa.create_folder(self.PROJECT_NAME, self.FOLDER_1) + sa.create_folder(self.PROJECT_NAME, self.FOLDER_2) + uploaded, _, _ = sa.attach_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}", self.scv_path) + assert len(uploaded) == 7 + skipped_items = sa.move_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}", f"{self.PROJECT_NAME}/{self.FOLDER_2}") + assert len(skipped_items) == 0 + assert len(sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_2}")) == 7 + assert len(sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}")) == 0 + + def test_move_item_with_annotations(self): + uploaded, _, _ = sa.attach_items( + self.PROJECT_NAME, [ + {"url": "https://drive.google.com/uc?export=download&id=1vwfCpTzcjxoEA4hhDxqapPOVvLVeS7ZS", + 
"name": self.IMAGE_NAME} + ] + ) + assert len(uploaded) == 1 + sa.create_annotation_class(self.PROJECT_NAME, "test_class", "#FF0000") + sa.add_annotation_bbox_to_image(self.PROJECT_NAME, self.IMAGE_NAME, [1, 2, 3, 4], "test_class") + sa.create_folder(self.PROJECT_NAME, self.FOLDER_1) + skipped_items = sa.move_items( + self.PROJECT_NAME, f"{self.PROJECT_NAME}/{self.FOLDER_1}" + ) + annotations = sa.get_annotations(f"{self.PROJECT_NAME}/{self.FOLDER_1}") + assert len(annotations) == 1 + assert len(skipped_items) == 0 + assert len(sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}")) == 1 diff --git a/tests/integration/items/test_set_annotation_statuses.py b/tests/integration/items/test_set_annotation_statuses.py new file mode 100644 index 000000000..d18d4d7f5 --- /dev/null +++ b/tests/integration/items/test_set_annotation_statuses.py @@ -0,0 +1,81 @@ +import os +from pathlib import Path + +import src.superannotate as sa +from src.superannotate import AppException +from src.superannotate.lib.core.usecases import SetAnnotationStatues +from tests.integration.base import BaseTestCase + + +class TestSetAnnotationStatuses(BaseTestCase): + PROJECT_NAME = "TestSetAnnotationStatuses" + PROJECT_DESCRIPTION = "TestSetAnnotationStatuses" + PROJECT_TYPE = "Vector" + FOLDER_NAME = "test_folder" + CSV_PATH = "data_set/attach_urls.csv" + EXAMPLE_IMAGE_1 = "6022a74d5384c50017c366b3" + EXAMPLE_IMAGE_2 = "6022a74b5384c50017c366ad" + ATTACHMENT_LIST = [ + { + "url": "https://drive.google.com/uc?export=download&id=1vwfCpTzcjxoEA4hhDxqapPOVvLVeS7ZS", + "name": "6022a74d5384c50017c366b3" + }, + { + "url": "https://drive.google.com/uc?export=download&id=1geS2YtQiTYuiduEirKVYxBujHJaIWA3V", + "name": "6022a74b5384c50017c366ad" + }, + { + "url": "1SfGcn9hdkVM35ZP0S93eStsE7Ti4GtHU", + "path": "123" + }, + { + "url": "https://drive.google.com/uc?export=download&id=1geS2YtQiTYuiduEirKVYxBujHJaIWA3V", + "name": "6022a74b5384c50017c366ad" + }, + ] + + @property + def scv_path(self): + return os.path.join(Path(__file__).parent.parent.parent, self.CSV_PATH) + + def test_image_annotation_status(self): + sa.attach_items( + self.PROJECT_NAME, self.ATTACHMENT_LIST, annotation_status="InProgress" + ) + + sa.set_annotation_statuses( + self.PROJECT_NAME, "QualityCheck", + ) + for image in sa.search_items(self.PROJECT_NAME): + self.assertEqual(image["annotation_status"], "QualityCheck") + + def test_image_annotation_status_via_names(self): + sa.attach_items( + self.PROJECT_NAME, self.ATTACHMENT_LIST, annotation_status="InProgress" + ) + + sa.set_annotation_statuses( + self.PROJECT_NAME, "QualityCheck", [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2] + ) + + for image_name in [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2]: + metadata = sa.get_item_metadata(self.PROJECT_NAME, image_name) + self.assertEqual(metadata["annotation_status"], "QualityCheck") + + def test_image_annotation_status_via_invalid_names(self): + sa.attach_items( + self.PROJECT_NAME, self.ATTACHMENT_LIST, annotation_status="InProgress" + ) + with self.assertRaisesRegexp(AppException, SetAnnotationStatues.ERROR_MESSAGE): + sa.set_annotation_statuses( + self.PROJECT_NAME, "QualityCheck", ["self.EXAMPLE_IMAGE_1", "self.EXAMPLE_IMAGE_2"] + ) + + def test_set_image_annotation_status(self): + sa.attach_items( + self.PROJECT_NAME, [self.ATTACHMENT_LIST[0]] + ) + data = sa.set_image_annotation_status( + self.PROJECT_NAME, self.ATTACHMENT_LIST[0]["name"], annotation_status="Completed" + ) + assert data["annotation_status"] == "Completed" \ No newline at end of file diff --git 
a/tests/integration/projects/test_add_contributors_to_project.py b/tests/integration/projects/test_add_contributors_to_project.py index f560a1d0d..e5629f183 100644 --- a/tests/integration/projects/test_add_contributors_to_project.py +++ b/tests/integration/projects/test_add_contributors_to_project.py @@ -77,3 +77,6 @@ def test_invite_contributors(self, client, get_team_mock): added, skipped = sa.invite_contributors_to_team(to_add_emails, False) self.assertEqual(len(added), 3) self.assertEqual(len(skipped), 5) + + def test_(self): + sa.search_team_contributors(email="vaghinak@superannotate.com", first_name="Vaghinak") \ No newline at end of file diff --git a/tests/integration/projects/test_basic_project.py b/tests/integration/projects/test_basic_project.py index 3de8eaa24..f7264c196 100644 --- a/tests/integration/projects/test_basic_project.py +++ b/tests/integration/projects/test_basic_project.py @@ -119,6 +119,7 @@ def tearDown(self) -> None: except: pass + @pytest.mark.flaky(reruns=2) def test_create_project_from_metadata(self): sa.create_annotation_class( self.PROJECT_NAME, @@ -223,7 +224,7 @@ def test_basic_project(self): ) count_in_folder -= len(list(self.folder_path.glob(self.FUSE_PNG_POSTFIX))) count_in_folder -= len(list(self.folder_path.glob(self.PNG_POSTFIX))) - images = sa.search_images(self.PROJECT_NAME) + images = sa.search_items(self.PROJECT_NAME) assert count_in_folder == len(images) sa.create_annotation_classes_from_classes_json( diff --git a/tests/integration/projects/test_create_project.py b/tests/integration/projects/test_create_project.py new file mode 100644 index 000000000..decd336da --- /dev/null +++ b/tests/integration/projects/test_create_project.py @@ -0,0 +1,71 @@ +from unittest import TestCase + +import src.superannotate as sa + + +class BaseTestCase(TestCase): + PROJECT_1 = "project_1" + PROJECT_2 = "project_2" + + def setUp(self, *args, **kwargs): + self.tearDown() + + def tearDown(self) -> None: + try: + for project_name in (self.PROJECT_1, self.PROJECT_2): + projects = sa.search_projects(project_name, return_metadata=True) + for project in projects: + try: + sa.delete_project(project) + except Exception: + pass + except Exception as e: + print(str(e)) + + +class TestSearchProjectVector(BaseTestCase): + PROJECT_1 = "project_1TestSearchProject" + PROJECT_2 = "project_2TestSearchProject" + PROJECT_TYPE = "Vector" + + @property + def projects(self): + return self.PROJECT_2, self.PROJECT_1 + + def test_create_project_without_settings(self): + project = sa.create_project(self.PROJECT_1, "desc", self.PROJECT_TYPE) + assert project["name"] == self.PROJECT_1 + + def test_create_project_with_settings(self): + sa.create_project( + self.PROJECT_1, "desc", self.PROJECT_TYPE, + [{"attribute": "ImageQuality", "value": "original"}] + ) + project = sa.get_project_metadata(self.PROJECT_1, include_settings=True) + for setting in project["settings"]: + if setting["attribute"] == "ImageQuality": + assert setting["value"] == "original" + + +class TestSearchProjectVideo(BaseTestCase): + PROJECT_1 = "project_1TestSearchProjectVideo" + PROJECT_2 = "project_2TestSearchProjectVideo" + PROJECT_TYPE = "Video" + + @property + def projects(self): + return self.PROJECT_2, self.PROJECT_1 + + def test_create_project_without_settings(self): + project = sa.create_project(self.PROJECT_1, "desc", self.PROJECT_TYPE) + assert project["name"] == self.PROJECT_1 + + def test_create_project_with_settings(self): + sa.create_project( + self.PROJECT_1, "desc", self.PROJECT_TYPE, + [{"attribute": 
"FrameRate", "value": 1}] + ) + project = sa.get_project_metadata(self.PROJECT_1, include_settings=True) + for setting in project["settings"]: + if setting["attribute"] == "FrameRate": + assert setting["value"] == 1 \ No newline at end of file diff --git a/tests/integration/projects/test_create_project_from_metadata.py b/tests/integration/projects/test_create_project_from_metadata.py deleted file mode 100644 index ffeede5eb..000000000 --- a/tests/integration/projects/test_create_project_from_metadata.py +++ /dev/null @@ -1,24 +0,0 @@ -import src.superannotate as sa -from tests.integration.base import BaseTestCase - - -class TestProjectRename(BaseTestCase): - PROJECT_NAME = "TestProjectRename" - NEW_PROJECT_NAME = "NewTestProjectRename" - NAME_TO_RENAME = "TestPr" - PROJECT_DESCRIPTION = "Desc" - PROJECT_TYPE = "Vector" - - def tearDown(self) -> None: - projects = sa.search_projects(self.NEW_PROJECT_NAME, return_metadata=True) - for project in projects: - sa.delete_project(project) - super().tearDown() - - def test_create_project_from_metadata(self): - project = sa.get_project_metadata(self.PROJECT_NAME, include_settings=True, include_contributors=True) - project["name"] = self.NEW_PROJECT_NAME - project["instructions_link"] = "instructions_link" - new_project = sa.create_project_from_metadata(project) - assert new_project["instructions_link"] == "instructions_link" - diff --git a/tests/integration/projects/test_project_rename.py b/tests/integration/projects/test_project_rename.py index 7829554bf..5bbe501e5 100644 --- a/tests/integration/projects/test_project_rename.py +++ b/tests/integration/projects/test_project_rename.py @@ -9,7 +9,7 @@ class TestProjectRename(BaseTestCase): PROJECT_TYPE = "Vector" NEW_PROJECT_NAME = "new" REPLACED_PROJECT_NAME = "_ _ _ _ _ _ _ _ _ _" - BAD_PROJECT_NAME = '/ \ : * ? " “ < > |' + BAD_PROJECT_NAME = '/ \ : * ? 
" “ < > |' # noqa: w605 def setUp(self, *args, **kwargs): self.tearDown() diff --git a/tests/integration/projects/test_search_project.py b/tests/integration/projects/test_search_project.py deleted file mode 100644 index 3a06faf3d..000000000 --- a/tests/integration/projects/test_search_project.py +++ /dev/null @@ -1,65 +0,0 @@ -from unittest import TestCase - -import src.superannotate as sa -from src.superannotate.lib.core.entities import ProjectEntity - - -class TestSearchProject(TestCase): - PROJECT_1 = "project_1" - PROJECT_2 = "project_2" - - def setUp(self, *args, **kwargs): - self.tearDown() - - def tearDown(self) -> None: - try: - for project_name in (self.PROJECT_1, self.PROJECT_2): - projects = sa.search_projects(project_name, return_metadata=True) - for project in projects: - try: - sa.delete_project(project) - except Exception: - pass - except Exception as e: - print(str(e)) - - @property - def projects(self): - return self.PROJECT_2, self.PROJECT_1 - - def test_search_by_status(self): - controller = sa.get_default_controller() - - project_1 = ProjectEntity( - name=self.PROJECT_1, description="desc", project_type=sa.constances.ProjectType.VECTOR.value, - status=sa.constances.ProjectStatus.Completed.value, team_id=controller.team_id - ) - project_2 = ProjectEntity( - name=self.PROJECT_2, description="desc", project_type=sa.constances.ProjectType.VECTOR.value, - status=sa.constances.ProjectStatus.InProgress.value, team_id=controller.team_id - ) - - controller.projects.insert(project_1) - controller.projects.insert(project_2) - - assert self.PROJECT_1 in sa.search_projects(status=sa.constances.ProjectStatus.Completed.name) - assert self.PROJECT_2 in sa.search_projects(status=sa.constances.ProjectStatus.InProgress.name) - - def test_search_by_multiple_status(self): - controller = sa.get_default_controller() - - project_1 = ProjectEntity( - name=self.PROJECT_1, description="desc", project_type=sa.constances.ProjectType.VECTOR.value, - status=sa.constances.ProjectStatus.OnHold.value, team_id=controller.team_id - ) - project_2 = ProjectEntity( - name=self.PROJECT_2, description="desc", project_type=sa.constances.ProjectType.VECTOR.value, - status=sa.constances.ProjectStatus.OnHold.value, team_id=controller.team_id - ) - - controller.projects.insert(project_1) - controller.projects.insert(project_2) - - assert all( - [project in self.projects for project in sa.search_projects(status=sa.constances.ProjectStatus.OnHold.name)] - ) diff --git a/tests/integration/settings/test_settings.py b/tests/integration/settings/test_settings.py new file mode 100644 index 000000000..420e444b0 --- /dev/null +++ b/tests/integration/settings/test_settings.py @@ -0,0 +1,176 @@ +from unittest import TestCase + +import src.superannotate as sa +from src.superannotate import AppException + + +class BaseTestCase(TestCase): + PROJECT_NAME = "TestSettings" + SECOND_PROJECT_NAME = "SecondTestSettings" + PROJECT_DESCRIPTION = "TestSettings" + + def setUp(self) -> None: + self.tearDown() + + def tearDown(self) -> None: + try: + projects = sa.search_projects(self.PROJECT_NAME, return_metadata=True) + projects.extend(sa.search_projects(self.SECOND_PROJECT_NAME, return_metadata=True)) + for project in projects: + try: + sa.delete_project(project) + except Exception: + pass + except Exception as e: + print(str(e)) + + +class TestSettings(BaseTestCase): + PROJECT_NAME = "TestSettings" + SECOND_PROJECT_NAME = "SecondTestSettings" + PROJECT_TYPE = "Vector" + + def test_create_project_with_empty_settings(self): + 
sa.create_project( + self.PROJECT_NAME, + self.PROJECT_DESCRIPTION, + self.PROJECT_TYPE, + [] + ) + settings = sa.get_project_settings(self.PROJECT_NAME) + for setting in settings: + if setting["attribute"] == "ImageQuality": + assert setting["value"] == "compressed" + break + else: + raise Exception("Test failed") + + def test_create_project_with_settings(self): + sa.create_project( + self.PROJECT_NAME, + self.PROJECT_DESCRIPTION, + self.PROJECT_TYPE, + [{"attribute": "ImageQuality", "value": "original"}]) + + settings = sa.get_project_settings(self.PROJECT_NAME) + for setting in settings: + if setting["attribute"] == "ImageQuality": + assert setting["value"] == "original" + break + else: + raise Exception("Test failed") + + def test_create_from_metadata(self): + sa.create_project( + self.PROJECT_NAME, + self.PROJECT_DESCRIPTION, + self.PROJECT_TYPE, + [{"attribute": "ImageQuality", "value": "original"}] + ) + project_metadata = sa.get_project_metadata(self.PROJECT_NAME, include_settings=True) + project_metadata["name"] = self.SECOND_PROJECT_NAME + sa.create_project_from_metadata(project_metadata) + settings = sa.get_project_settings(self.SECOND_PROJECT_NAME) + for setting in settings: + if setting["attribute"] == "ImageQuality": + assert setting["value"] == "original" + break + else: + raise Exception("Test failed") + + def test_clone_project(self): + sa.create_project( + self.PROJECT_NAME, + self.PROJECT_DESCRIPTION, + self.PROJECT_TYPE, + [{"attribute": "ImageQuality", "value": "original"}]) + sa.clone_project(self.SECOND_PROJECT_NAME, self.PROJECT_NAME, copy_settings=True) + settings = sa.get_project_settings(self.SECOND_PROJECT_NAME) + for setting in settings: + if setting["attribute"] == "ImageQuality": + assert setting["value"] == "original" + break + else: + raise Exception("Test failed") + + def test_frame_rate_invalid_range_value(self): + with self.assertRaisesRegexp(AppException, "FrameRate is available only for Video projects"): + sa.create_project( + self.PROJECT_NAME, + self.PROJECT_DESCRIPTION, + self.PROJECT_TYPE, + [{"attribute": "FrameRate", "value": 1.0}]) + + +class TestVideoSettings(BaseTestCase): + PROJECT_NAME = "TestVideoSettings12" + SECOND_PROJECT_NAME = "TestVideoSettings2" + PROJECT_TYPE = "Video" + + def test_frame_rate(self): + sa.create_project( + self.PROJECT_NAME, + self.PROJECT_DESCRIPTION, + self.PROJECT_TYPE, + [{"attribute": "FrameRate", "value": 1}]) + settings = sa.get_project_settings(self.PROJECT_NAME) + for setting in settings: + if setting["attribute"] == "FrameRate": + assert setting["value"] == 1 + break + elif setting["attribute"] == "FrameMode": + assert setting["value"] + break + else: + raise Exception("Test failed") + + def test_frame_rate_float(self): + sa.create_project( + self.PROJECT_NAME, + self.PROJECT_DESCRIPTION, + self.PROJECT_TYPE, + [{"attribute": "FrameRate", "value": 1.3}]) + settings = sa.get_project_settings(self.PROJECT_NAME) + for setting in settings: + if setting["attribute"] == "FrameRate": + assert setting["value"] == 1.3 + break + elif setting["attribute"] == "FrameMode": + assert setting["value"] + break + else: + raise Exception("Test failed") + + def test_frame_rate_invalid_range_value(self): + with self.assertRaisesRegexp(AppException, "The FrameRate value range is between 0.001 - 120"): + sa.create_project( + self.PROJECT_NAME, + self.PROJECT_DESCRIPTION, + self.PROJECT_TYPE, + [{"attribute": "FrameRate", "value": 1.00003}]) + + def test_frame_rate_invalid_str_value(self): + with 
self.assertRaisesRegexp(AppException, "The FrameRate value should be float"): + sa.create_project( + self.PROJECT_NAME, + self.PROJECT_DESCRIPTION, + self.PROJECT_TYPE, + [{"attribute": "FrameRate", "value": "1"}]) + + def test_frames_reset(self): + sa.create_project( + self.PROJECT_NAME, + self.PROJECT_DESCRIPTION, + self.PROJECT_TYPE, + [{"attribute": "FrameRate", "value": 1.3}]) + sa.rename_project(self.PROJECT_NAME, self.SECOND_PROJECT_NAME) + settings = sa.get_project_settings(self.SECOND_PROJECT_NAME) + for setting in settings: + if setting["attribute"] == "FrameRate": + assert setting["value"] == 1.3 + break + elif setting["attribute"] == "FrameMode": + assert setting["value"] + break + else: + raise Exception("Test failed") diff --git a/tests/integration/test_assign_images.py b/tests/integration/test_assign_images.py index 4e15daa3d..ab0b8e567 100644 --- a/tests/integration/test_assign_images.py +++ b/tests/integration/test_assign_images.py @@ -30,10 +30,10 @@ def test_assign_images(self): sa.assign_images( self._project["name"], [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2], email ) - image_metadata = sa.get_image_metadata( + image_metadata = sa.get_item_metadata( self._project["name"], self.EXAMPLE_IMAGE_1 ) - self.assertIsNotNone(image_metadata["qa_name"]) + self.assertIsNotNone(image_metadata["qa_email"]) def test_assign_images_folder(self): @@ -50,11 +50,11 @@ def test_assign_images_folder(self): project_folder, [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2], email ) - im1_metadata = sa.get_image_metadata(project_folder, self.EXAMPLE_IMAGE_1) - im2_metadata = sa.get_image_metadata(project_folder, self.EXAMPLE_IMAGE_2) + im1_metadata = sa.get_item_metadata(project_folder, self.EXAMPLE_IMAGE_1) + im2_metadata = sa.get_item_metadata(project_folder, self.EXAMPLE_IMAGE_2) - self.assertIsNotNone(im1_metadata["qa_name"]) - self.assertIsNotNone(im2_metadata["qa_name"]) + self.assertIsNotNone(im1_metadata["qa_email"]) + self.assertIsNotNone(im2_metadata["qa_email"]) @pytest.mark.flaky(reruns=4) def test_un_assign_images(self): @@ -69,11 +69,11 @@ def test_un_assign_images(self): self.PROJECT_NAME, [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2], ) - im1_metadata = sa.get_image_metadata(self.PROJECT_NAME, self.EXAMPLE_IMAGE_1) - im2_metadata = sa.get_image_metadata(self.PROJECT_NAME, self.EXAMPLE_IMAGE_2) + im1_metadata = sa.get_item_metadata(self.PROJECT_NAME, self.EXAMPLE_IMAGE_1) + im2_metadata = sa.get_item_metadata(self.PROJECT_NAME, self.EXAMPLE_IMAGE_2) - self.assertIsNone(im1_metadata["qa_name"]) - self.assertIsNone(im2_metadata["qa_name"]) + self.assertIsNone(im1_metadata["qa_email"]) + self.assertIsNone(im2_metadata["qa_email"]) sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME) project = self.PROJECT_NAME + "/" + self.TEST_FOLDER_NAME @@ -86,13 +86,13 @@ def test_un_assign_images(self): project, [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2], ) - sa.search_images(project) - im1_metadata = sa.get_image_metadata(project, self.EXAMPLE_IMAGE_1) + sa.search_items(project) + im1_metadata = sa.get_item_metadata(project, self.EXAMPLE_IMAGE_1) - im2_metadata = sa.get_image_metadata(project, self.EXAMPLE_IMAGE_2) + im2_metadata = sa.get_item_metadata(project, self.EXAMPLE_IMAGE_2) - self.assertIsNone(im1_metadata["qa_name"]) - self.assertIsNone(im2_metadata["qa_name"]) + self.assertIsNone(im1_metadata["qa_email"]) + self.assertIsNone(im2_metadata["qa_email"]) def test_assign_folder(self): sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME) diff --git 
a/tests/integration/test_attach_image_urls.py b/tests/integration/test_attach_image_urls.py deleted file mode 100644 index 987d35bb6..000000000 --- a/tests/integration/test_attach_image_urls.py +++ /dev/null @@ -1,67 +0,0 @@ -import os -import pytest -from os.path import dirname - -import src.superannotate as sa -from src.superannotate.lib.core import UploadState -from src.superannotate.lib.core.exceptions import AppException -from tests.integration.base import BaseTestCase - - -class TestImageUrls(BaseTestCase): - PROJECT_NAME = "test attach image urls" - PATH_TO_URLS = "data_set/attach_urls.csv" - PATH_TO_50K_URLS = "data_set/501_urls.csv" - PROJECT_DESCRIPTION = "desc" - PROJECT_TYPE = "Vector" - - @pytest.mark.flaky(reruns=2) - def test_attach_image_urls(self): - uploaded, could_not_upload, existing_images = sa.attach_image_urls_to_project( - self.PROJECT_NAME, - os.path.join(dirname(dirname(__file__)), self.PATH_TO_URLS), - ) - project_metadata = sa.get_project_metadata(self.PROJECT_NAME) - - self.assertEqual(UploadState.EXTERNAL.name, project_metadata["upload_state"]) - - self.assertEqual(len(uploaded), 7) - self.assertEqual(len(could_not_upload), 0) - self.assertEqual(len(existing_images), 1) - images = sa.search_images(project=self.PROJECT_NAME, return_metadata=True) - self.assertTrue(all([image["name"] for image in images])) - truth = {'name': '', - 'path': 'https://drive.google.com/uc?export=download&id=1geS2YtQiTYuiduEirKVYxBujHJaIWA3V', - 'annotation_status': 'NotStarted', 'prediction_status': None, 'segmentation_status': None, - 'approval_status': None, 'is_pinned': 0, 'annotator_name': None, 'qa_name': None, 'entropy_value': None, - 'createdAt': '', 'updatedAt': ''} - image = images[0] - image['createdAt'] = '' - image['updatedAt'] = '' - image['name'] = '' - self.assertEqual(image, truth) - - def test_double_attach_image_urls(self): - uploaded, could_not_upload, existing_images = sa.attach_image_urls_to_project( - self.PROJECT_NAME, - os.path.join(dirname(dirname(__file__)), self.PATH_TO_URLS), - ) - self.assertEqual(len(uploaded), 7) - self.assertEqual(len(could_not_upload), 0) - self.assertEqual(len(existing_images), 1) - - uploaded, could_not_upload, existing_images = sa.attach_image_urls_to_project( - self.PROJECT_NAME, - os.path.join(dirname(dirname(__file__)), self.PATH_TO_URLS), - ) - self.assertEqual(len(uploaded), 2) - self.assertEqual(len(could_not_upload), 0) - self.assertEqual(len(existing_images), 6) - - def test_limitation(self): - self.assertRaises( - Exception, - sa.attach_image_urls_to_project, - self.PROJECT_NAME, - os.path.join(dirname(dirname(__file__)), self.PATH_TO_50K_URLS) - ) diff --git a/tests/integration/test_basic_images.py b/tests/integration/test_basic_images.py index 0e936cbd8..be4b05eff 100644 --- a/tests/integration/test_basic_images.py +++ b/tests/integration/test_basic_images.py @@ -3,6 +3,8 @@ from os.path import dirname from pathlib import Path +import pytest + import src.superannotate as sa from tests.integration.base import BaseTestCase @@ -55,6 +57,7 @@ def folder_path(self): def classes_json_path(self): return f"{self.folder_path}/classes/classes.json" + @pytest.mark.flaky(reruns=2) def test_vector_annotations_with_tag_folder_upload(self): sa.upload_images_from_folder_to_project( self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" @@ -72,7 +75,7 @@ def test_vector_annotations_with_tag_folder_upload(self): class TestVectorAnnotationsWithTagFolderUploadPreannotation(BaseTestCase): - PROJECT_NAME = 
"TestVectorAnnotationsWithTagFolderUploadPreannotation" + PROJECT_NAME = "PreTestVectorAnnotationsWithTagFolderUpload" PROJECT_TYPE = "Vector" PROJECT_DESCRIPTION = "TestVectorAnnotationsWithTag" TEST_FOLDER_PTH = "data_set/sample_project_vector_with_tag" @@ -122,15 +125,14 @@ def test_basic_images(self): sa.create_annotation_classes_from_classes_json( self.PROJECT_NAME, self.classes_json_path ) - image = sa.get_image_metadata(self.PROJECT_NAME, image_name="example_image_1.jpg") - image['createdAt'] = '' - image['updatedAt'] = '' - truth = {'name': 'example_image_1.jpg', 'path': None, 'annotation_status': 'InProgress', + image = sa.get_item_metadata(self.PROJECT_NAME, "example_image_1.jpg") + del image['createdAt'] + del image['updatedAt'] + truth = {'name': 'example_image_1.jpg', 'annotation_status': 'InProgress', 'prediction_status': 'NotStarted', 'segmentation_status': 'NotStarted', 'approval_status': None, - 'is_pinned': 0, 'annotator_name': None, 'qa_name': None, 'entropy_value': None, 'createdAt': '', - 'updatedAt': ''} + 'annotator_email': None, 'qa_email': None, 'entropy_value': None} - self.assertEqual(image, truth) + assert all([truth[i] == image[i] for i in truth]) sa.upload_image_annotations( project=self.PROJECT_NAME, @@ -150,50 +152,3 @@ def test_basic_images(self): self.PROJECT_NAME, self.EXAMPLE_IMAGE_1, temp_dir ) self.assertEqual(len(list(Path(temp_dir).glob("*"))), 3) - - -class TestVectorImages(BaseTestCase): - PROJECT_NAME = "sample_project_vector" - PROJECT_TYPE = "Vector" - PROJECT_DESCRIPTION = "Example Project test vector basic images" - TEST_FOLDER_PTH = "data_set/sample_project_vector" - - @property - def folder_path(self): - return os.path.join(dirname(dirname(__file__)), self.TEST_FOLDER_PTH) - - @folder_path.setter - def folder_path(self, value): - self._folder_path = value - - @property - def classes_json_path(self): - return f"{self.folder_path}/classes/classes.json" - - def test_basic_images(self): - with tempfile.TemporaryDirectory() as temp_dir: - sa.upload_images_from_folder_to_project( - self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" - ) - sa.create_annotation_classes_from_classes_json( - self.PROJECT_NAME, self.classes_json_path - ) - images = sa.search_images(self.PROJECT_NAME, "example_image_1") - self.assertEqual(len(images), 1) - - image_name = images[0] - - image = sa.get_image_metadata(self.PROJECT_NAME, image_name="example_image_1.jpg") - image['createdAt'] = '' - image['updatedAt'] = '' - truth = {'name': 'example_image_1.jpg', 'path': None, 'annotation_status': 'InProgress', - 'prediction_status': 'NotStarted', 'segmentation_status': None, 'approval_status': None, - 'is_pinned': 0, 'annotator_name': None, 'qa_name': None, 'entropy_value': None, 'createdAt': '', - 'updatedAt': ''} - self.assertEqual(image, truth) - - sa.download_image(self.PROJECT_NAME, image_name, temp_dir, True) - self.assertEqual( - sa.get_annotations(self.PROJECT_NAME, [image_name])[0], - {'metadata': {'name': 'example_image_1.jpg'}, 'instances': []} - ) diff --git a/tests/integration/test_benchmark.py b/tests/integration/test_benchmark.py index 7ec9e7680..5d1df2964 100644 --- a/tests/integration/test_benchmark.py +++ b/tests/integration/test_benchmark.py @@ -1,6 +1,5 @@ import os import tempfile -import time from os.path import dirname import pytest diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py index ecf6dd3dd..d821c48a9 100644 --- a/tests/integration/test_cli.py +++ b/tests/integration/test_cli.py @@ -122,7 +122,7 @@ def 
test_upload_images(self): check=True, shell=True, ) - self.assertEqual(1, len(sa.search_images(self.PROJECT_NAME))) + self.assertEqual(1, len(sa.search_items(self.PROJECT_NAME))) # @pytest.mark.skipif(CLI_VERSION and CLI_VERSION != sa.__version__, # reason=f"Updated package version from {CLI_VERSION} to {sa.__version__}") @@ -196,7 +196,8 @@ def test_vector_annotation_folder_upload_download_cli(self): ) count_in = len(list(self.vector_folder_path.glob("*.json"))) with tempfile.TemporaryDirectory() as temp_dir: - for image_name in sa.search_images(self.PROJECT_NAME): + for image in sa.search_items(self.PROJECT_NAME): + image_name = image["name"] sa.download_image_annotations(self.PROJECT_NAME, image_name, temp_dir) count_out = len(list(Path(temp_dir).glob("*.json"))) self.assertEqual(count_in, count_out) @@ -213,7 +214,7 @@ def test_attach_image_urls(self): shell=True, ) - self.assertEqual(3, len(sa.search_images(self.PROJECT_NAME))) + self.assertEqual(3, len(sa.search_items(self.PROJECT_NAME))) # @pytest.mark.skipif(CLI_VERSION and CLI_VERSION != sa.__version__, # reason=f"Updated package version from {CLI_VERSION} to {sa.__version__}") @@ -226,7 +227,7 @@ def test_attach_video_urls(self): check=True, shell=True, ) - # self.assertEqual(3, len(sa.search_images(self.PROJECT_NAME))) + # self.assertEqual(3, len(sa.search_items(self.PROJECT_NAME))) # @pytest.mark.skipif(CLI_VERSION and CLI_VERSION != sa.__version__, # reason=f"Updated package version from {CLI_VERSION} to {sa.__version__}") @@ -240,7 +241,7 @@ def test_upload_videos(self): check=True, shell=True, ) - self.assertEqual(5, len(sa.search_images(self.PROJECT_NAME))) + self.assertEqual(5, len(sa.search_items(self.PROJECT_NAME))) @pytest.mark.skipif(CLI_VERSION and CLI_VERSION != sa.__version__, reason=f"Updated package version from {CLI_VERSION} to {sa.__version__}") diff --git a/tests/integration/test_depricated_functions_document.py b/tests/integration/test_depricated_functions_document.py index 85e58a916..96c9ec420 100644 --- a/tests/integration/test_depricated_functions_document.py +++ b/tests/integration/test_depricated_functions_document.py @@ -68,14 +68,6 @@ def test_deprecated_functions(self): sa.upload_images_from_folder_to_project(self.PROJECT_NAME, "some") except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.get_image_metadata(self.PROJECT_NAME, "some") - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.search_images(self.PROJECT_NAME) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE, str(e)) try: sa.upload_images_to_project(self.PROJECT_NAME, ["some"]) except AppException as e: @@ -84,11 +76,6 @@ def test_deprecated_functions(self): sa.upload_image_annotations(self.PROJECT_NAME, "some", self.annotation_path) except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - # TODO: - # try: - # sa.download_image(self.PROJECT_NAME, self.UPLOAD_IMAGE_NAME, './') - # except AppException as e: - # self.assertIn(self.EXCEPTION_MESSAGE, str(e)) try: sa.download_image_annotations(self.PROJECT_NAME, self.UPLOAD_IMAGE_NAME, "./") except AppException as e: @@ -136,10 +123,6 @@ def test_deprecated_functions(self): sa.add_annotation_point_to_image(self.PROJECT_NAME, self.UPLOAD_IMAGE_NAME, [1, 2], "some class") except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.consensus(self.PROJECT_NAME, ["some"], self.video_export_path) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE_DOCUMENT_VIDEO, 
str(e)) try: sa.copy_images(self.PROJECT_NAME, [self.UPLOAD_IMAGE_NAME], self.PROJECT_NAME) except AppException as e: @@ -152,10 +135,6 @@ def test_deprecated_functions(self): sa.move_images(self.PROJECT_NAME, [self.UPLOAD_IMAGE_NAME], self.PROJECT_NAME_2) except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.set_images_annotation_statuses(self.PROJECT_NAME, "Completed", [self.UPLOAD_IMAGE_NAME]) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE, str(e)) try: sa.class_distribution(self.video_export_path, [self.PROJECT_NAME]) except AppException as e: diff --git a/tests/integration/test_depricated_functions_video.py b/tests/integration/test_depricated_functions_video.py index 367049531..75bb7f88f 100644 --- a/tests/integration/test_depricated_functions_video.py +++ b/tests/integration/test_depricated_functions_video.py @@ -63,14 +63,6 @@ def test_deprecated_functions(self): sa.upload_images_from_folder_to_project(self.PROJECT_NAME, "some") except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.get_image_metadata(self.PROJECT_NAME, "some") - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.search_images(self.PROJECT_NAME) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE, str(e)) try: sa.upload_images_to_project(self.PROJECT_NAME, ["some"]) except AppException as e: @@ -79,11 +71,6 @@ def test_deprecated_functions(self): sa.upload_image_annotations(self.PROJECT_NAME, "some", self.annotation_path) except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - # TODO: - # try: - # sa.download_image(self.PROJECT_NAME, self.UPLOAD_IMAGE_NAME, './') - # except AppException as e: - # self.assertIn(self.EXCEPTION_MESSAGE, str(e)) try: sa.download_image_annotations(self.PROJECT_NAME, self.UPLOAD_IMAGE_NAME, "./") except AppException as e: @@ -127,10 +114,6 @@ def test_deprecated_functions(self): sa.upload_preannotations_from_folder_to_project(self.PROJECT_NAME, self.folder_path) except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.consensus(self.PROJECT_NAME, ["some"], self.video_export_path) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE_DOCUMENT_VIDEO, str(e)) try: sa.copy_images(self.PROJECT_NAME, [self.UPLOAD_IMAGE_NAME], self.PROJECT_NAME) except AppException as e: @@ -147,10 +130,6 @@ def test_deprecated_functions(self): sa.set_project_default_image_quality_in_editor(self.PROJECT_NAME, "original") except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE_DOCUMENT_VIDEO, str(e)) - try: - sa.set_images_annotation_statuses(self.PROJECT_NAME, "Completed", [self.UPLOAD_IMAGE_NAME]) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE, str(e)) try: sa.class_distribution(self.video_export_path, [self.PROJECT_NAME]) except AppException as e: diff --git a/tests/integration/test_get_exports.py b/tests/integration/test_get_exports.py index ab3c524e6..47a382af5 100644 --- a/tests/integration/test_get_exports.py +++ b/tests/integration/test_get_exports.py @@ -4,6 +4,7 @@ from os.path import dirname import src.superannotate as sa +from src.superannotate import AppException from tests.integration.base import BaseTestCase diff --git a/tests/integration/test_image_copy_move.py b/tests/integration/test_image_copy_move.py deleted file mode 100644 index db650204c..000000000 --- a/tests/integration/test_image_copy_move.py +++ /dev/null @@ -1,183 +0,0 @@ -import os -from os.path import dirname -from pathlib 
import Path -import pytest - -import src.superannotate as sa -from tests.integration.base import BaseTestCase - - -class TestImageCopy(BaseTestCase): - PROJECT_NAME = "test image copy 1" - SECOND_PROJECT_NAME = "test image copy 2" - PROJECT_DESCRIPTION = "Desc" - TEST_FOLDER = "new_folder" - PROJECT_TYPE = "Vector" - TEST_FOLDER_PATH = "data_set/sample_project_vector" - EXAMPLE_IMAGE = "example_image_1.jpg" - - def setUp(self, *args, **kwargs): - self.tearDown() - self._project = sa.create_project( - self.PROJECT_NAME, self.PROJECT_DESCRIPTION, self.PROJECT_TYPE - ) - self._second_project = sa.create_project( - self.SECOND_PROJECT_NAME, self.PROJECT_DESCRIPTION, self.PROJECT_TYPE - ) - - def tearDown(self) -> None: - for project_name in (self.PROJECT_NAME, self.SECOND_PROJECT_NAME): - projects = sa.search_projects(project_name, return_metadata=True) - for project in projects: - sa.delete_project(project) - - @property - def folder_path(self): - return Path( - Path(os.path.join(dirname(dirname(__file__)), self.TEST_FOLDER_PATH)) - ) - - def test_image_copy(self): - - sa.upload_image_to_project( - self.PROJECT_NAME, - f"{self.folder_path}/example_image_1.jpg", - annotation_status="InProgress", - ) - sa.upload_image_to_project( - self.PROJECT_NAME, - f"{self.folder_path}/example_image_2.jpg", - annotation_status="InProgress", - ) - - images = sa.search_images(self.PROJECT_NAME) - self.assertEqual(len(images), 2) - image = images[0] - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER) - sa.copy_image( - self.PROJECT_NAME, image, f"{self.PROJECT_NAME}/{self.TEST_FOLDER}", copy_annotation_status=True - ) - images = sa.search_images(f"{self.PROJECT_NAME}/{self.TEST_FOLDER}") - self.assertEqual(len(images), 1) - - dest_project = sa.create_project( - self.SECOND_PROJECT_NAME + "dif", "test", "Vector" - ) - sa.copy_image(self.PROJECT_NAME, image, dest_project["name"]) - images = sa.search_images(dest_project["name"], image) - self.assertEqual(len(images), 1) - self.assertEqual(images[0], image) - - def test_image_copy_to_other_project(self): - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER) - sa.create_folder(self.SECOND_PROJECT_NAME, self.TEST_FOLDER) - sa.upload_image_to_project( - f"{self.SECOND_PROJECT_NAME}/{self.TEST_FOLDER}", - f"{self.folder_path}/{self.EXAMPLE_IMAGE}", - annotation_status="InProgress", - ) - - sa.copy_image( - f"{self.SECOND_PROJECT_NAME}/{self.TEST_FOLDER}", - self.EXAMPLE_IMAGE, f"{self.PROJECT_NAME}/{self.TEST_FOLDER}", - copy_annotation_status=True - ) - - def test_multiple_image_copy(self): - - sa.upload_image_to_project( - self.PROJECT_NAME, - f"{self.folder_path}/example_image_1.jpg", - annotation_status="InProgress", - ) - sa.create_annotation_classes_from_classes_json( - self.PROJECT_NAME, f"{self.folder_path}/classes/classes.json" - ) - sa.upload_image_annotations( - self.PROJECT_NAME, - "example_image_1.jpg", - f"{self.folder_path}/example_image_1.jpg___objects.json", - ) - sa.upload_image_to_project( - self.PROJECT_NAME, - f"{self.folder_path}/example_image_2.jpg", - annotation_status="InProgress", - ) - sa.pin_image(self.PROJECT_NAME, "example_image_1.jpg") - images = sa.search_images(self.PROJECT_NAME) - self.assertEqual(len(images), 2) - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER) - sa.copy_image( - self.PROJECT_NAME, - "example_image_1.jpg", - f"{self.PROJECT_NAME}/{self.TEST_FOLDER}", - include_annotations=True, - copy_annotation_status=True, - copy_pin=True, - ) - self.assertEqual( - 
len(sa.search_images(f"{self.PROJECT_NAME}/{self.TEST_FOLDER}")), 1 - ) - annotations = sa.get_annotations( - f"{self.PROJECT_NAME}/{self.TEST_FOLDER}", ["example_image_1.jpg"] - ) - self.assertTrue(annotations[0] is not None) - - metadata = sa.get_image_metadata( - f"{self.PROJECT_NAME}/{self.TEST_FOLDER}", "example_image_1.jpg" - ) - self.assertEqual(metadata["is_pinned"], 1) - - @pytest.mark.flaky(reruns=2) - def test_copy_image_with_arguments(self): - sa.upload_image_to_project( - self.PROJECT_NAME, - f"{self.folder_path}/{self.EXAMPLE_IMAGE}", - annotation_status="InProgress", - ) - sa.create_annotation_classes_from_classes_json( - self.PROJECT_NAME, f"{self.folder_path}/classes/classes.json" - ) - sa.upload_image_annotations( - self.PROJECT_NAME, - self.EXAMPLE_IMAGE, - f"{self.folder_path}/{self.EXAMPLE_IMAGE}___objects.json", - ) - sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER) - sa.copy_image( - self.PROJECT_NAME, self.EXAMPLE_IMAGE, f"{self.PROJECT_NAME}/{self.TEST_FOLDER}", - copy_annotation_status=True - ) - -# def test_image_copy_folders(tmpdir): -# tmpdir = Path(tmpdir) - -# projects_found = sa.search_projects( -# PROJECT_NAME_FOLDER, return_metadata=True -# ) -# for pr in projects_found: -# sa.delete_project(pr) - -# project = sa.create_project(PROJECT_NAME_FOLDER, "test", "Vector") - -# sa.upload_image_to_project( -# project, -# "./tests/sample_project_vector/example_image_1.jpg", -# annotation_status="InProgress" -# ) -# sa.upload_image_to_project( -# project, -# "./tests/sample_project_vector/example_image_2.jpg", -# annotation_status="InProgress" -# ) - -# sa.create_folder(project, "folder1") - -# sa.copy_image( -# project, ["example_image_1.jpg", "example_image_2.jpg"], -# project["name"] + "/folder1" -# ) -# sa.copy_image( -# project, ["example_image_1.jpg", "example_image_2.jpg"], -# project["name"] + "/folder1" -# ) diff --git a/tests/integration/test_interface.py b/tests/integration/test_interface.py index 710129dc0..462321991 100644 --- a/tests/integration/test_interface.py +++ b/tests/integration/test_interface.py @@ -149,7 +149,7 @@ def test_image_upload_with_set_name_on_platform(self): self.IMAGE_PATH_IN_S3, self.NEW_IMAGE_NAME, from_s3_bucket=self.TEST_S3_BUCKET_NAME ) - self.assertIn(sa.search_images(self.PROJECT_NAME)[0], self.NEW_IMAGE_NAME) + assert self.NEW_IMAGE_NAME in [i["name"] for i in sa.search_items(self.PROJECT_NAME)] def test_download_fuse_without_classes(self): sa.upload_image_to_project(self.PROJECT_NAME, f"{self.folder_path}/{self.EXAMPLE_IMAGE_1}") @@ -165,7 +165,7 @@ def test_download_fuse_without_classes(self): ) self.assertIsNotNone(result) - def test_validate_log_for_single_uplaod(self): + def test_validate_log_for_single_upload(self): with self.assertLogs() as logs: sa.upload_image_to_project(self.PROJECT_NAME, f"{self.folder_path}/{self.EXAMPLE_IMAGE_1}") sa.upload_image_annotations( diff --git a/tests/integration/test_ml_funcs.py b/tests/integration/test_ml_funcs.py index c342ae35d..f9d7b3536 100644 --- a/tests/integration/test_ml_funcs.py +++ b/tests/integration/test_ml_funcs.py @@ -33,7 +33,7 @@ def test_run_prediction_for_all_images(self): project=self.PROJECT_NAME, folder_path=self.folder_path ) time.sleep(2) - image_names_vector = sa.search_images(self.PROJECT_NAME) + image_names_vector = [i["name"] for i in sa.search_items(self.PROJECT_NAME)] succeeded_images, failed_images = sa.run_prediction( self.PROJECT_NAME, image_names_vector, self.MODEL_NAME ) diff --git a/tests/integration/test_pin_image.py 
b/tests/integration/test_pin_image.py deleted file mode 100644 index 7d47d30cd..000000000 --- a/tests/integration/test_pin_image.py +++ /dev/null @@ -1,78 +0,0 @@ -import os -from os.path import dirname -import pytest - -import src.superannotate as sa -from tests.integration.base import BaseTestCase - - -class TestPinImage(BaseTestCase): - PROJECT_NAME = "TestPinImage" - PROJECT_DESCRIPTION = "Desc" - PROJECT_TYPE = "Vector" - TEST_FOLDER_PTH = "data_set" - TEST_FOLDER_PATH = "data_set/sample_project_vector" - EXAMPLE_IMAGE = "example_image_1.jpg" - - @property - def folder_path(self): - return os.path.join(dirname(dirname(__file__)), self.TEST_FOLDER_PATH) - - @pytest.mark.flaky(reruns=2) - def test_pin_image(self): - sa.upload_images_from_folder_to_project( - project=self.PROJECT_NAME, folder_path=self.folder_path - ) - - img_metadata0 = sa.get_image_metadata(self.PROJECT_NAME, self.EXAMPLE_IMAGE) - assert img_metadata0["is_pinned"] == 0 - - sa.pin_image(self.PROJECT_NAME, self.EXAMPLE_IMAGE) - - img_metadata = sa.get_image_metadata(self.PROJECT_NAME, self.EXAMPLE_IMAGE) - assert img_metadata["is_pinned"] == 1 - - sa.pin_image(self.PROJECT_NAME, "example_image_1.jpg", True) - img_metadata = sa.get_image_metadata(self.PROJECT_NAME, self.EXAMPLE_IMAGE) - assert img_metadata["is_pinned"] == 1 - - sa.pin_image(self.PROJECT_NAME, self.EXAMPLE_IMAGE, False) - - img_metadata = sa.get_image_metadata(self.PROJECT_NAME, self.EXAMPLE_IMAGE) - assert img_metadata["is_pinned"] == 0 - - del img_metadata["updatedAt"] - del img_metadata0["updatedAt"] - - assert img_metadata == img_metadata0 - - def test_pin_image_in_folder(self): - test_folder = "test_folder" - sa.create_folder(self.PROJECT_NAME, test_folder) - project_folder = self.PROJECT_NAME + "/" + test_folder - - sa.upload_images_from_folder_to_project( - project=project_folder, folder_path=self.folder_path - ) - - img_metadata0 = sa.get_image_metadata(project_folder, self.EXAMPLE_IMAGE) - assert img_metadata0["is_pinned"] == 0 - - sa.pin_image(project_folder, self.EXAMPLE_IMAGE) - - img_metadata = sa.get_image_metadata(project_folder, self.EXAMPLE_IMAGE) - assert img_metadata["is_pinned"] == 1 - - sa.pin_image(project_folder, self.EXAMPLE_IMAGE, True) - img_metadata = sa.get_image_metadata(project_folder, self.EXAMPLE_IMAGE) - assert img_metadata["is_pinned"] == 1 - - sa.pin_image(project_folder, self.EXAMPLE_IMAGE, False) - - img_metadata = sa.get_image_metadata(project_folder, self.EXAMPLE_IMAGE) - assert img_metadata["is_pinned"] == 0 - - del img_metadata["updatedAt"] - del img_metadata0["updatedAt"] - - assert img_metadata == img_metadata0 diff --git a/tests/integration/test_recursive_folder.py b/tests/integration/test_recursive_folder.py index 497167e84..d7d29649e 100644 --- a/tests/integration/test_recursive_folder.py +++ b/tests/integration/test_recursive_folder.py @@ -39,7 +39,7 @@ def test_non_recursive_annotations_folder(self): annotation_status="QualityCheck", recursive_subfolders=True, ) - self.assertEqual(len(sa.search_images(self.PROJECT_NAME)), 2) + self.assertEqual(len(sa.search_items(self.PROJECT_NAME)), 2) sa.create_annotation_classes_from_classes_json( self.PROJECT_NAME, f"{self.folder_path}/classes/classes.json" @@ -70,7 +70,7 @@ def test_recursive_annotations_folder(self): recursive_subfolders=True, ) - self.assertEqual(len(sa.search_images(self.PROJECT_NAME)), 2) + self.assertEqual(len(sa.search_items(self.PROJECT_NAME)), 2) sa.create_annotation_classes_from_classes_json( self.PROJECT_NAME, 
f"{self.folder_path}/classes/classes.json" @@ -92,7 +92,7 @@ def test_recursive_annotations_folder_negative_case(self): recursive_subfolders=True, ) - self.assertEqual(len(sa.search_images(self.PROJECT_NAME)), 2) + self.assertEqual(len(sa.search_items(self.PROJECT_NAME)), 2) def test_annotations_recursive_s3_folder(self): @@ -103,7 +103,7 @@ def test_annotations_recursive_s3_folder(self): from_s3_bucket="superannotate-python-sdk-test", recursive_subfolders=True, ) - self.assertEqual(len(sa.search_images(self.PROJECT_NAME)), 2) + self.assertEqual(len(sa.search_items(self.PROJECT_NAME)), 2) sa.create_annotation_classes_from_classes_json( self.PROJECT_NAME, @@ -135,7 +135,7 @@ def test_annotations_non_recursive_s3_folder(self): recursive_subfolders=False, ) - self.assertEqual(len(sa.search_images(self.PROJECT_NAME)), 1) + self.assertEqual(len(sa.search_items(self.PROJECT_NAME)), 1) sa.create_annotation_classes_from_classes_json( self.PROJECT_NAME, @@ -171,14 +171,14 @@ def test_images_non_recursive_s3(self): recursive_subfolders=False, ) - self.assertEqual(len(sa.search_images(self.PROJECT_NAME)), 1) + self.assertEqual(len(sa.search_items(self.PROJECT_NAME)), 1) @pytest.mark.skip(reason="Taking long time.") def test_images_recursive_s3_122(self): sa.upload_images_from_folder_to_project(self.PROJECT_NAME, '8sep', from_s3_bucket="superannotate-python-sdk-test", recursive_subfolders=True) - self.assertEqual(len(sa.search_images(self.PROJECT_NAME)), 122) + self.assertEqual(len(sa.search_items(self.PROJECT_NAME)), 122) @pytest.mark.skip(reason="Taking long time.") def test_annotations_recursive_s3_122(self): @@ -203,4 +203,4 @@ def test_images_non_recursive(self): sa.upload_images_from_folder_to_project( self.PROJECT_NAME, self.folder_path, recursive_subfolders=False ) - self.assertEqual(len(sa.search_images(self.PROJECT_NAME)), 1) + self.assertEqual(len(sa.search_items(self.PROJECT_NAME)), 1) diff --git a/tests/integration/test_recursive_folder_pixel.py b/tests/integration/test_recursive_folder_pixel.py index cb4f38338..308ac882a 100644 --- a/tests/integration/test_recursive_folder_pixel.py +++ b/tests/integration/test_recursive_folder_pixel.py @@ -1,6 +1,3 @@ -import os -from os.path import dirname - import src.superannotate as sa from tests.integration.base import BaseTestCase diff --git a/tests/integration/test_single_annotation_download.py b/tests/integration/test_single_annotation_download.py index 92399723b..97343dd53 100644 --- a/tests/integration/test_single_annotation_download.py +++ b/tests/integration/test_single_annotation_download.py @@ -27,7 +27,6 @@ def classes_path(self): dirname(dirname(__file__)), self.TEST_FOLDER_PATH, "classes/classes.json" ) - # TODO: template name validation error def test_annotation_download_upload_vector(self): sa.upload_images_from_folder_to_project( @@ -39,7 +38,7 @@ def test_annotation_download_upload_vector(self): sa.upload_annotations_from_folder_to_project( self.PROJECT_NAME, self.folder_path ) - image = sa.search_images(self.PROJECT_NAME)[0] + image = sa.search_items(self.PROJECT_NAME)[0]["name"] tempdir = tempfile.TemporaryDirectory() paths = sa.download_image_annotations(self.PROJECT_NAME, image, tempdir.name) @@ -99,7 +98,7 @@ def test_annotation_download_upload_pixel(self): sa.upload_annotations_from_folder_to_project( self.PROJECT_NAME, self.folder_path ) - image = sa.search_images(self.PROJECT_NAME)[0] + image = sa.search_items(self.PROJECT_NAME)[0]["name"] with tempfile.TemporaryDirectory() as tempdir: paths = 
sa.download_image_annotations(self.PROJECT_NAME, image, tempdir) @@ -107,20 +106,26 @@ def test_annotation_download_upload_pixel(self): uploaded_json = json.load( open(self.folder_path + "/example_image_1.jpg___pixel.json") ) - downloaded_json['metadata']['lastAction'] = None uploaded_json['metadata']['lastAction'] = None - for i in downloaded_json["instances"]: - i.pop("classId", None) - for j in i["attributes"]: - j.pop("groupId", None) - j.pop("id", None) - for i in uploaded_json["instances"]: - i.pop("classId", None) - for j in i["attributes"]: - j.pop("groupId", None) - j.pop("id", None) + self._clean_dict(downloaded_json, ["lastAction", "groupId", "classId", "id", "createdAt", "updatedAt"]) + self._clean_dict(uploaded_json, ["lastAction", "groupId", "classId", "id", "createdAt", "updatedAt"]) assert downloaded_json == uploaded_json uploaded_mask = self.folder_path + "/example_image_1.jpg___save.png" download_mask = paths[1] assert filecmp.cmp(download_mask, uploaded_mask, shallow=False) + + @classmethod + def _clean_dict(cls, obj, keys_to_delete: list): + if isinstance(obj, dict): + for key in list(obj.keys()): + if key in keys_to_delete: + del obj[key] + else: + cls._clean_dict(obj[key], keys_to_delete) + elif isinstance(obj, list): + for i in reversed(range(len(obj))): + if obj[i] in keys_to_delete: + del obj[i] + else: + cls._clean_dict(obj[i], keys_to_delete) diff --git a/tests/integration/test_single_image_upload.py b/tests/integration/test_single_image_upload.py index 93621f467..5959497c0 100644 --- a/tests/integration/test_single_image_upload.py +++ b/tests/integration/test_single_image_upload.py @@ -29,7 +29,7 @@ def test_single_image_upload(self): self.folder_path + "/example_image_1.jpg", annotation_status="InProgress", ) - assert len(sa.search_images(self.PROJECT_NAME)) == 1 + assert len(sa.search_items(self.PROJECT_NAME)) == 1 with open(self.folder_path + "/example_image_1.jpg", "rb") as f: img = io.BytesIO(f.read()) @@ -38,4 +38,4 @@ def test_single_image_upload(self): self.PROJECT_NAME, img, image_name="rr.jpg", annotation_status="InProgress" ) - assert len(sa.search_images(self.PROJECT_NAME)) == 2 + assert len(sa.search_items(self.PROJECT_NAME)) == 2 diff --git a/tests/integration/test_upload_images.py b/tests/integration/test_upload_images.py index 187f5e30d..bba4d14a3 100644 --- a/tests/integration/test_upload_images.py +++ b/tests/integration/test_upload_images.py @@ -2,9 +2,6 @@ from os.path import dirname import src.superannotate as sa -from src.superannotate import AppException -from src.superannotate.lib.core import ATTACHING_UPLOAD_STATE_ERROR -from src.superannotate.lib.core import UPLOADING_UPLOAD_STATE_ERROR from tests.integration.base import BaseTestCase diff --git a/tests/integration/test_upload_priority_scores.py b/tests/integration/test_upload_priority_scores.py index 371f742e3..70cb75329 100644 --- a/tests/integration/test_upload_priority_scores.py +++ b/tests/integration/test_upload_priority_scores.py @@ -42,5 +42,5 @@ def test_upload_priority_scores(self): "name": "example_image_4.jpg", "priority": 100000000 }]) - self.assertEqual(sa.get_image_metadata(self.PROJECT_NAME, "example_image_4.jpg")['entropy_value'], 1000000) - self.assertEqual(sa.get_image_metadata(self.PROJECT_NAME, "example_image_3.jpg")['entropy_value'], 1.12345) + self.assertEqual(sa.get_item_metadata(self.PROJECT_NAME, "example_image_4.jpg")['entropy_value'], 1000000) + self.assertEqual(sa.get_item_metadata(self.PROJECT_NAME, "example_image_3.jpg")['entropy_value'], 1.12345) diff 
index ad3d69668..08452ee3a 100644
--- a/tests/integration/test_video.py
+++ b/tests/integration/test_video.py
@@ -52,10 +52,10 @@ def test_video_upload_from_folder(self):
             self.folder_path,
             target_fps=1,
         )
-        self.assertEqual(len(sa.search_images(self.PROJECT_NAME)), 5)
+        self.assertEqual(len(sa.search_items(self.PROJECT_NAME)), 5)
         self.assertEqual(
-            len(sa.search_images(f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME}")),
-            len(sa.search_images(self.PROJECT_NAME)),
+            len(sa.search_items(f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME}")),
+            len(sa.search_items(self.PROJECT_NAME)),
         )
 
     def test_single_video_upload(self):
@@ -64,7 +64,7 @@ def test_single_video_upload(self):
             f"{self.folder_path}/{self.TEST_VIDEO_NAME}",
             target_fps=1,
         )
-        self.assertEqual(len(sa.search_images(self.PROJECT_NAME)), 5)
+        self.assertEqual(len(sa.search_items(self.PROJECT_NAME)), 5)
 
     @pytest.fixture(autouse=True)
     def inject_fixtures(self, caplog):
@@ -77,7 +77,7 @@ def test_video_big(self):
             f"{self.folder_path_big}/earth.mov",
             target_fps=1,
         )
-        self.assertEqual(len(sa.search_images(f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME_BIG_VIDEO}")), 31)
+        self.assertEqual(len(sa.search_items(f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME_BIG_VIDEO}")), 31)
         sa.upload_video_to_project(
             f"{self.PROJECT_NAME}/{self.TEST_FOLDER_NAME_BIG_VIDEO}",
             f"{self.folder_path_big}/earth.mov",