diff --git a/pytest.ini b/pytest.ini
index d9ab3b434..084beff5b 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -2,4 +2,4 @@
 minversion = 3.0
 log_cli=true
 python_files = test_*.py
-;addopts = -n auto --dist=loadscope
\ No newline at end of file
+addopts = -n auto --dist=loadscope
\ No newline at end of file
diff --git a/src/superannotate/__init__.py b/src/superannotate/__init__.py
index 7210b1ab6..3ce70195b 100644
--- a/src/superannotate/__init__.py
+++ b/src/superannotate/__init__.py
@@ -25,7 +25,9 @@
     attach_document_urls_to_project,
 )
 from superannotate.lib.app.interface.sdk_interface import attach_image_urls_to_project
-from superannotate.lib.app.interface.sdk_interface import attach_items_from_integrated_storage
+from superannotate.lib.app.interface.sdk_interface import (
+    attach_items_from_integrated_storage,
+)
 from superannotate.lib.app.interface.sdk_interface import attach_video_urls_to_project
 from superannotate.lib.app.interface.sdk_interface import benchmark
 from superannotate.lib.app.interface.sdk_interface import clone_project
@@ -105,9 +107,7 @@ from superannotate.lib.app.interface.sdk_interface import (
     upload_preannotations_from_folder_to_project,
 )
-from superannotate.lib.app.interface.sdk_interface import (
-    upload_priority_scores,
-)
+from superannotate.lib.app.interface.sdk_interface import upload_priority_scores
 from superannotate.lib.app.interface.sdk_interface import upload_video_to_project
 from superannotate.lib.app.interface.sdk_interface import (
     upload_videos_from_folder_to_project,
 )
@@ -159,6 +159,7 @@
     "clone_project",
     "share_project",
     "delete_project",
+    "rename_project",
     "upload_priority_scores",
     # Images Section
     "search_images",
@@ -215,7 +216,6 @@
     "run_prediction",
     "search_models",
     "download_model",
-    "rename_project",
     "set_image_annotation_status",
     "benchmark",
     "consensus",
diff --git a/src/superannotate/lib/__init__.py b/src/superannotate/lib/__init__.py
index bc765c353..42f7f7243 100644
--- a/src/superannotate/lib/__init__.py
+++ b/src/superannotate/lib/__init__.py
@@ -8,4 +8,5 @@
 
 def get_default_controller():
     from lib.infrastructure.controller import Controller
+
     return Controller.get_default()
diff --git a/src/superannotate/lib/app/annotation_helpers.py b/src/superannotate/lib/app/annotation_helpers.py
index 8ee8e31eb..15d88e328 100644
--- a/src/superannotate/lib/app/annotation_helpers.py
+++ b/src/superannotate/lib/app/annotation_helpers.py
@@ -56,15 +56,18 @@ def add_annotation_comment_to_json(
 ):
     """Add a comment to SuperAnnotate format annotation JSON
-
     :param annotation_json: annotations in SuperAnnotate format JSON or filepath to JSON
     :type annotation_json: dict or Pathlike (str or Path)
+
     :param comment_text: comment text
     :type comment_text: str
+
     :param comment_coords: [x, y] coords
     :type comment_coords: list
+
     :param comment_author: comment author email
     :type comment_author: str
+
     :param resolved: comment resolve status
     :type resolved: bool
     """
@@ -106,12 +109,16 @@
     :param annotation_json: annotations in SuperAnnotate format JSON or filepath to JSON
     :type annotation_json: dict or Pathlike (str or Path)
+
     :param bbox: 4 element list of top-left x,y and bottom-right x, y coordinates
     :type bbox: list of floats
+
     :param annotation_class_name: annotation class name
     :type annotation_class_name: str
+
     :param annotation_class_attributes: list of annotation class attributes
     :type annotation_class_attributes: list of 2 element dicts
+
     :param error: if not None, marks annotation as error (True) or no-error (False)
     :type error: bool
     """
diff --git a/src/superannotate/lib/app/interface/cli_interface.py b/src/superannotate/lib/app/interface/cli_interface.py
index f4aabd9a2..fabd1a2f6 100644
--- a/src/superannotate/lib/app/interface/cli_interface.py
+++ b/src/superannotate/lib/app/interface/cli_interface.py
@@ -185,7 +185,11 @@ def _upload_annotations(
     ):
         project_folder_name = project
         project_name, folder_name = split_project_path(project)
-        project = Controller.get_default().get_project_metadata(project_name=project_name).data
+        project = (
+            Controller.get_default()
+            .get_project_metadata(project_name=project_name)
+            .data
+        )
         if not format:
             format = "SuperAnnotate"
         if not dataset_name and format == "COCO":
diff --git a/src/superannotate/lib/app/interface/sdk_interface.py b/src/superannotate/lib/app/interface/sdk_interface.py
index 366da7b30..922f3302c 100644
--- a/src/superannotate/lib/app/interface/sdk_interface.py
+++ b/src/superannotate/lib/app/interface/sdk_interface.py
@@ -25,6 +25,7 @@
 from lib.app.interface.types import EmailStr
 from lib.app.interface.types import ImageQualityChoices
 from lib.app.interface.types import NotEmptyStr
+from lib.app.interface.types import ProjectStatusEnum
 from lib.app.interface.types import ProjectTypes
 from lib.app.interface.types import validate_arguments
 from lib.app.mixp.decorators import Trackable
@@ -50,7 +51,6 @@
 from superannotate.logger import get_default_logger
 from tqdm import tqdm
 
-
 logger = get_default_logger()
@@ -77,7 +77,6 @@ def set_auth_token(token: str):
 @Trackable
 def get_team_metadata():
-
     """Returns team metadata
 
     :return: team metadata
@@ -110,9 +109,13 @@
     :rtype: list of dicts
     """
-    contributors = Controller.get_default().search_team_contributors(
-        email=email, first_name=first_name, last_name=last_name
-    ).data
+    contributors = (
+        Controller.get_default()
+        .search_team_contributors(
+            email=email, first_name=first_name, last_name=last_name
+        )
+        .data
+    )
     if not return_metadata:
         return [contributor["email"] for contributor in contributors]
     return contributors
@@ -124,6 +127,7 @@ def search_projects(
     name: Optional[NotEmptyStr] = None,
     return_metadata: bool = False,
     include_complete_image_count: bool = False,
+    status: Optional[Union[ProjectStatusEnum, List[ProjectStatusEnum]]] = None,
 ):
     """
     Project name based case-insensitive search for projects.
@@ -138,12 +142,27 @@
     :param include_complete_image_count: return projects that have completed images and
            include the number of completed images in response.
     :type include_complete_image_count: bool
+    :param status: project status to filter the search by
+    :type status: str or list of strs
+
     :return: project names or metadatas
     :rtype: list of strs or dicts
     """
-    result = Controller.get_default().search_project(
-        name=name, include_complete_image_count=include_complete_image_count
-    ).data
+    statuses = []
+    if status:
+        if isinstance(status, (list, tuple, set)):
+            statuses = list(status)
+        else:
+            statuses = [status]
+    result = (
+        Controller.get_default()
+        .search_project(
+            name=name,
+            include_complete_image_count=include_complete_image_count,
+            statuses=statuses,
+        )
+        .data
+    )
     if return_metadata:
         return [ProjectSerializer(project).serialize() for project in result]
     else:
@@ -307,7 +326,9 @@ def create_folder(project: NotEmptyStr, folder_name: NotEmptyStr):
     :rtype: dict
     """
-    res = Controller.get_default().create_folder(project=project, folder_name=folder_name)
+    res = Controller.get_default().create_folder(
+        project=project, folder_name=folder_name
+    )
     if res.data:
         folder = res.data
         logger.info(f"Folder {folder.name} created in project {project}")
@@ -341,7 +362,9 @@ def rename_project(project: NotEmptyStr, new_name: NotEmptyStr):
     :type new_name: str
     """
-    response = Controller.get_default().update_project(name=project, project_data={"name": new_name})
+    response = Controller.get_default().update_project(
+        name=project, project_data={"name": new_name}
+    )
 
     if response.errors:
         raise AppException(response.errors)
@@ -363,7 +386,11 @@ def get_folder_metadata(project: NotEmptyStr, folder_name: NotEmptyStr):
     :return: metadata of folder
     :rtype: dict
     """
-    result = Controller.get_default().get_folder(project_name=project, folder_name=folder_name).data
+    result = (
+        Controller.get_default()
+        .get_folder(project_name=project, folder_name=folder_name)
+        .data
+    )
     if not result:
         raise AppException("Folder not found.")
     return result.to_dict()
@@ -380,7 +407,9 @@ def delete_folders(project: NotEmptyStr, folder_names: List[NotEmptyStr]):
     :type folder_names: list of strs
     """
-    res = Controller.get_default().delete_folders(project_name=project, folder_names=folder_names)
+    res = Controller.get_default().delete_folders(
+        project_name=project, folder_names=folder_names
+    )
     if res.errors:
         raise AppException(res.errors)
     logger.info(f"Folders {folder_names} deleted in project {project}")
@@ -472,10 +501,12 @@ def copy_image(
     destination_project, destination_folder = extract_project_folder(
         destination_project
     )
-    source_project_metadata = Controller.get_default().get_project_metadata(source_project_name).data
-    destination_project_metadata = Controller.get_default().get_project_metadata(
-        destination_project
-    ).data
+    source_project_metadata = (
+        Controller.get_default().get_project_metadata(source_project_name).data
+    )
+    destination_project_metadata = (
+        Controller.get_default().get_project_metadata(destination_project).data
+    )
 
     if destination_project_metadata["project"].project_type in [
         constances.ProjectType.VIDEO.value,
@@ -555,9 +586,11 @@ def copy_images(
             "Source and destination projects should be the same for copy_images"
         )
     if not image_names:
-        images = Controller.get_default().search_images(
-            project_name=project_name, folder_path=source_folder_name
-        ).data
+        images = (
+            Controller.get_default()
+            .search_images(project_name=project_name, folder_path=source_folder_name)
+            .data
+        )
         image_names = [image.name for image in images]
 
     res = Controller.get_default().bulk_copy_images(
@@ -691,14 +724,18 @@ def get_project_metadata(
     :rtype: dict
     """
     project_name,
folder_name = extract_project_folder(project) - response = Controller.get_default().get_project_metadata( - project_name, - include_annotation_classes, - include_settings, - include_workflow, - include_contributors, - include_complete_image_count, - ).data + response = ( + Controller.get_default() + .get_project_metadata( + project_name, + include_annotation_classes, + include_settings, + include_workflow, + include_contributors, + include_complete_image_count, + ) + .data + ) metadata = ProjectSerializer(response["project"]).serialize() metadata["settings"] = [ @@ -774,7 +811,9 @@ def search_annotation_classes( :rtype: list of dicts """ project_name, folder_name = extract_project_folder(project) - classes = Controller.get_default().search_annotation_classes(project_name, name_prefix) + classes = Controller.get_default().search_annotation_classes( + project_name, name_prefix + ) classes = [BaseSerializer(attribute).serialize() for attribute in classes.data] return classes @@ -848,7 +887,9 @@ def get_image_metadata( logger.warning(warning_msg) project_name, folder_name = extract_project_folder(project) project = Controller.get_default()._get_project(project_name) - response = Controller.get_default().get_image_metadata(project_name, folder_name, image_name) + response = Controller.get_default().get_image_metadata( + project_name, folder_name, image_name + ) if response.errors: raise AppException(response.errors) @@ -935,9 +976,8 @@ def assign_images(project: Union[NotEmptyStr, dict], image_names: List[str], use if not folder_name: folder_name = "root" contributors = ( - Controller.get_default().get_project_metadata( - project_name=project_name, include_contributors=True - ) + Controller.get_default() + .get_project_metadata(project_name=project_name, include_contributors=True) .data["project"] .users ) @@ -952,7 +992,9 @@ def assign_images(project: Union[NotEmptyStr, dict], image_names: List[str], use ) return - response = Controller.get_default().assign_images(project_name, folder_name, image_names, user) + response = Controller.get_default().assign_images( + project_name, folder_name, image_names, user + ) if not response.errors: logger.info(f"Assign images to user {user}") else: @@ -1016,9 +1058,8 @@ def assign_folder( """ contributors = ( - Controller.get_default().get_project_metadata( - project_name=project_name, include_contributors=True - ) + Controller.get_default() + .get_project_metadata(project_name=project_name, include_contributors=True) .data["project"] .users ) @@ -1692,7 +1733,11 @@ def set_image_annotation_status( ) if response.errors: raise AppException(response.errors) - image = Controller.get_default().get_image_metadata(project_name, folder_name, image_name).data + image = ( + Controller.get_default() + .get_image_metadata(project_name, folder_name, image_name) + .data + ) return ImageSerializer(image).serialize_by_project(project=project_entity) @@ -2304,12 +2349,15 @@ def add_annotation_bbox_to_image( :type error: bool """ project_name, folder_name = extract_project_folder(project) - response = Controller.get_default().get_image_annotations( - project_name=project_name, folder_name=folder_name, image_name=image_name + response = Controller.get_default().get_annotations( + project_name=project_name, folder_name=folder_name, item_names=[image_name] ) if response.errors: raise AppException(response.errors) - annotations = response.data["annotation_json"] + if response.data: + annotations = response.data[0] + else: + annotations = {} annotations = 
add_annotation_bbox_to_json( annotations, bbox, @@ -2352,12 +2400,15 @@ def add_annotation_point_to_image( :type error: bool """ project_name, folder_name = extract_project_folder(project) - response = Controller.get_default().get_image_annotations( - project_name=project_name, folder_name=folder_name, image_name=image_name + response = Controller.get_default().get_annotations( + project_name=project_name, folder_name=folder_name, item_names=[image_name] ) if response.errors: raise AppException(response.errors) - annotations = response.data["annotation_json"] + if response.data: + annotations = response.data[0] + else: + annotations = {} annotations = add_annotation_point_to_json( annotations, point, @@ -2397,12 +2448,15 @@ def add_annotation_comment_to_image( :type resolved: bool """ project_name, folder_name = extract_project_folder(project) - response = Controller.get_default().get_image_annotations( - project_name=project_name, folder_name=folder_name, image_name=image_name + response = Controller.get_default().get_annotations( + project_name=project_name, folder_name=folder_name, item_names=[image_name] ) if response.errors: raise AppException(response.errors) - annotations = response.data["annotation_json"] + if response.data: + annotations = response.data[0] + else: + annotations = {} annotations = add_annotation_comment_to_json( annotations, comment_text, @@ -2804,7 +2858,9 @@ def invite_contributors_to_team( :return: lists of invited, skipped contributors of the team :rtype: tuple (2 members) of lists of strs """ - response = Controller.get_default().invite_contributors_to_team(emails=emails, set_admin=admin) + response = Controller.get_default().invite_contributors_to_team( + emails=emails, set_admin=admin + ) if response.errors: raise AppException(response.errors) return response.data @@ -2825,7 +2881,9 @@ def get_annotations(project: NotEmptyStr, items: Optional[List[NotEmptyStr]] = N :rtype: list of strs """ project_name, folder_name = extract_project_folder(project) - response = Controller.get_default().get_annotations(project_name, folder_name, items) + response = Controller.get_default().get_annotations( + project_name, folder_name, items + ) if response.errors: raise AppException(response.errors) return response.data @@ -2851,7 +2909,9 @@ def get_annotations_per_frame(project: NotEmptyStr, video: NotEmptyStr, fps: int :rtype: list of dicts """ project_name, folder_name = extract_project_folder(project) - response = Controller.get_default().get_annotations_per_frame(project_name, folder_name, video_name=video, fps=fps) + response = Controller.get_default().get_annotations_per_frame( + project_name, folder_name, video_name=video, fps=fps + ) if response.errors: raise AppException(response.errors) return response.data @@ -2873,7 +2933,9 @@ def upload_priority_scores(project: NotEmptyStr, scores: List[PriorityScore]): """ project_name, folder_name = extract_project_folder(project) project_folder_name = project - response = Controller.get_default().upload_priority_scores(project_name, folder_name, scores, project_folder_name) + response = Controller.get_default().upload_priority_scores( + project_name, folder_name, scores, project_folder_name + ) if response.errors: raise AppException(response.errors) return response.data @@ -2897,9 +2959,9 @@ def get_integrations(): @Trackable @validate_arguments def attach_items_from_integrated_storage( - project: NotEmptyStr, - integration: Union[NotEmptyStr, IntegrationEntity], - folder_path: Optional[NotEmptyStr] = None + project: 
NotEmptyStr,
+    integration: Union[NotEmptyStr, IntegrationEntity],
+    folder_path: Optional[NotEmptyStr] = None,
 ):
     """Link images from integrated external storage to SuperAnnotate.
@@ -2917,7 +2979,9 @@ def attach_items_from_integrated_storage(
     project_name, folder_name = extract_project_folder(project)
     if isinstance(integration, str):
         integration = IntegrationEntity(name=integration)
-    response = Controller.get_default().attach_integrations(project_name, folder_name, integration, folder_path)
+    response = Controller.get_default().attach_integrations(
+        project_name, folder_name, integration, folder_path
+    )
 
     if response.errors:
         raise AppException(response.errors)
@@ -2946,8 +3010,7 @@ def query(project: NotEmptyStr, query: Optional[NotEmptyStr]):
 @Trackable
 @validate_arguments
 def get_item_metadata(
-        project: NotEmptyStr,
-        item_name: NotEmptyStr,
+    project: NotEmptyStr, item_name: NotEmptyStr,
 ):
     """Returns item metadata
 
@@ -2970,13 +3033,12 @@ def get_item_metadata(
 @Trackable
 @validate_arguments
 def search_items(
-        project: NotEmptyStr,
-        name_contains: NotEmptyStr = None,
-        annotation_status: Optional[AnnotationStatuses] = None,
-        annotator_email: Optional[NotEmptyStr] = None,
-        qa_email: Optional[NotEmptyStr] = None,
-        recursive: bool = False
-
+    project: NotEmptyStr,
+    name_contains: NotEmptyStr = None,
+    annotation_status: Optional[AnnotationStatuses] = None,
+    annotator_email: Optional[NotEmptyStr] = None,
+    qa_email: Optional[NotEmptyStr] = None,
+    recursive: bool = False,
 ):
     """Search items by filtering criteria.
 
@@ -3017,12 +3079,13 @@
     """
     project_name, folder_name = extract_project_folder(project)
     response = Controller.get_default().list_items(
-        project_name, folder_name,
+        project_name,
+        folder_name,
         name_contains=name_contains,
         annotation_status=annotation_status,
         annotator_email=annotator_email,
         qa_email=qa_email,
-        recursive=recursive
+        recursive=recursive,
     )
     if response.errors:
         raise AppException(response.errors)
diff --git a/src/superannotate/lib/app/interface/types.py b/src/superannotate/lib/app/interface/types.py
index a0cb33732..076420eb8 100644
--- a/src/superannotate/lib/app/interface/types.py
+++ b/src/superannotate/lib/app/interface/types.py
@@ -3,6 +3,7 @@
 from lib.core.enums import AnnotationStatus
 from lib.core.enums import ClassTypeEnum
+from lib.core.enums import ProjectStatus
 from lib.core.enums import ProjectType
 from lib.core.enums import UserRole
 from lib.core.exceptions import AppException
@@ -13,7 +14,6 @@
 from pydantic import ValidationError
 from pydantic.errors import StrRegexError
 
-
 NotEmptyStr = constr(strict=True, min_length=1)
@@ -41,6 +41,18 @@ def validate(cls, value: Union[str]) -> Union[str]:
         return value
 
 
+class ProjectStatusEnum(StrictStr):
+    @classmethod
+    def validate(cls, value: Union[str]) -> Union[str]:
+        if cls.curtail_length and len(value) > cls.curtail_length:
+            value = value[: cls.curtail_length]
+        if value.lower() not in ProjectStatus.values():
+            raise TypeError(
+                f"Available statuses are {', '.join(ProjectStatus.titles())}. "
+            )
+        return value
+
+
 class AnnotatorRole(StrictStr):
     ANNOTATOR_ROLES = (UserRole.ADMIN.name, UserRole.ANNOTATOR.name, UserRole.QA.name)
@@ -95,7 +107,9 @@ class ClassType(StrictStr):
     def validate(cls, value: Union[str]) -> Union[str]:
         enum_values = [e.name.lower() for e in ClassTypeEnum]
         if value.lower() not in enum_values:
-            raise TypeError(f"Invalid type provided. Please specify one of the {', '.join(enum_values)}. ")
+            raise TypeError(
+                f"Invalid type provided. 
Please specify one of the {', '.join(enum_values)}. " + ) return value.lower() diff --git a/src/superannotate/lib/app/mixp/utils/parsers.py b/src/superannotate/lib/app/mixp/utils/parsers.py index bcb4e73bc..2278d9949 100644 --- a/src/superannotate/lib/app/mixp/utils/parsers.py +++ b/src/superannotate/lib/app/mixp/utils/parsers.py @@ -871,9 +871,11 @@ def assign_images(*args, **kwargs): if not user: user = args[2] - contributors = Controller.get_default().get_project_metadata( - project_name=project, include_contributors=True - ).data["contributors"] + contributors = ( + Controller.get_default() + .get_project_metadata(project_name=project, include_contributors=True) + .data["contributors"] + ) contributor = None for c in contributors: if c["user_id"] == user: @@ -965,7 +967,7 @@ def create_annotation_class(*args, **kwargs): "properties": { "project_name": get_project_name(project), "Attributes": bool(args[3:4] or ("attribute_groups" in kwargs)), - "class_type": class_type if class_type else "object" + "class_type": class_type if class_type else "object", }, } @@ -1241,7 +1243,7 @@ def attach_items_from_integrated_storage(*args, **kwargs): "properties": { "project_type": ProjectType.get_name(project.project_type), "integration_name": integration.name, - "folder_path": bool(folder_path) + "folder_path": bool(folder_path), }, } @@ -1255,8 +1257,7 @@ def query(**kwargs): "event_name": "attach_items_from_integrated_storage", "properties": { "project_type": ProjectType.get_name(project.project_type), - "query": query_str - + "query": query_str, }, } @@ -1267,9 +1268,7 @@ def get_item_metadata(**kwargs): Controller.get_default().get_project_metadata(project_name) return { "event_name": "attach_items_from_integrated_storage", - "properties": { - "project_type": ProjectType.get_name(project.project_type), - }, + "properties": {"project_type": ProjectType.get_name(project.project_type)}, } @@ -1292,6 +1291,5 @@ def search_items(**kwargs): "annotator_email": bool(annotator_email), "qa_email": bool(qa_email), "recursive": bool(recursive), - }, } diff --git a/src/superannotate/lib/app/serializers.py b/src/superannotate/lib/app/serializers.py index 5c466aaa8..3d50d2706 100644 --- a/src/superannotate/lib/app/serializers.py +++ b/src/superannotate/lib/app/serializers.py @@ -23,20 +23,29 @@ def _fill_enum_values(data: dict): data[key] = value.__doc__ return data - def serialize(self, fields: List[str] = None, by_alias: bool = True, flat: bool = False): - return self._fill_enum_values(self._serialize(self._entity, fields, by_alias, flat)) + def serialize( + self, fields: List[str] = None, by_alias: bool = True, flat: bool = False + ): + return self._fill_enum_values( + self._serialize(self._entity, fields, by_alias, flat) + ) def serialize_item( - self, - data: Any, - fields: Union[List[str], Set[str]] = None, - by_alias: bool = False, - flat: bool = False + self, + data: Any, + fields: Union[List[str], Set[str]] = None, + by_alias: bool = False, + flat: bool = False, ): return self._fill_enum_values(self._serialize(data, fields, by_alias, flat)) @staticmethod - def _serialize(entity: Any, fields: List[str] = None, by_alias: bool = False, flat: bool = False): + def _serialize( + entity: Any, + fields: List[str] = None, + by_alias: bool = False, + flat: bool = False, + ): if isinstance(entity, dict): return entity if isinstance(entity, BaseModel): @@ -44,7 +53,9 @@ def _serialize(entity: Any, fields: List[str] = None, by_alias: bool = False, fl fields = set(fields) if len(fields) == 1: if flat: - return 
entity.dict(include=fields, by_alias=by_alias)[next(iter(fields))] + return entity.dict(include=fields, by_alias=by_alias)[ + next(iter(fields)) + ] return entity.dict(include=fields, by_alias=by_alias) return entity.dict(include=fields, by_alias=by_alias) return entity.dict(by_alias=by_alias) @@ -52,11 +63,11 @@ def _serialize(entity: Any, fields: List[str] = None, by_alias: bool = False, fl @classmethod def serialize_iterable( - cls, - data: List[Any], - fields: Union[List[str], Set[str]] = None, - by_alias: bool = False, - flat: bool = False + cls, + data: List[Any], + fields: Union[List[str], Set[str]] = None, + by_alias: bool = False, + flat: bool = False, ) -> List[Any]: serialized_data = [] for i in data: diff --git a/src/superannotate/lib/core/data_handlers.py b/src/superannotate/lib/core/data_handlers.py index 493d1815c..8b352cb23 100644 --- a/src/superannotate/lib/core/data_handlers.py +++ b/src/superannotate/lib/core/data_handlers.py @@ -39,16 +39,14 @@ def __init__(self, annotation_classes: List[AnnotationClass]): self._annotation_classes: List[AnnotationClass] = annotation_classes @lru_cache() - def get_annotation_class( - self, name: str - ) -> AnnotationClass: + def get_annotation_class(self, name: str) -> AnnotationClass: for annotation_class in self._annotation_classes: if annotation_class.name == name: return annotation_class @lru_cache() def get_attribute_group( - self, annotation_class: AnnotationClass, attr_group_name: str + self, annotation_class: AnnotationClass, attr_group_name: str ) -> AttributeGroup: for attr_group in annotation_class.attribute_groups: if attr_group.name == attr_group_name: @@ -115,10 +113,10 @@ def handle(self, annotation: dict): class MissingIDsHandler(BaseAnnotationDateHandler): def __init__( - self, - annotation_classes: List[AnnotationClass], - templates: List[dict], - reporter: Reporter, + self, + annotation_classes: List[AnnotationClass], + templates: List[dict], + reporter: Reporter, ): super().__init__(annotation_classes) self.validate_existing_classes(annotation_classes) @@ -193,7 +191,7 @@ def handle(self, annotation: dict): template["name"]: template["id"] for template in self._templates } for annotation_instance in ( - i for i in annotation["instances"] if i.get("type", None) == "template" + i for i in annotation["instances"] if i.get("type", None) == "template" ): annotation_instance["templateId"] = template_name_id_map.get( annotation_instance.get("templateName", ""), -1 @@ -350,10 +348,10 @@ def convert_timestamp(timestamp): (group_name, attr_name) ) attributes_to_add = ( - existing_attributes_in_current_instance - active_attributes + existing_attributes_in_current_instance - active_attributes ) attributes_to_delete = ( - active_attributes - existing_attributes_in_current_instance + active_attributes - existing_attributes_in_current_instance ) if attributes_to_add or attributes_to_delete: editor_instance["timeline"][timestamp][ diff --git a/src/superannotate/lib/core/entities/__init__.py b/src/superannotate/lib/core/entities/__init__.py index e504f9ee2..88425761a 100644 --- a/src/superannotate/lib/core/entities/__init__.py +++ b/src/superannotate/lib/core/entities/__init__.py @@ -1,7 +1,7 @@ +from lib.core.entities.base import BaseEntity as TmpBaseEntity from lib.core.entities.integrations import IntegrationEntity from lib.core.entities.items import DocumentEntity from lib.core.entities.items import Entity -from lib.core.entities.items import TmpBaseEntity from lib.core.entities.items import TmpImageEntity from 
lib.core.entities.items import VideoEntity
 from lib.core.entities.project_entities import AnnotationClassEntity
@@ -27,6 +27,7 @@
 __all__ = [
     # items
+    "TmpImageEntity",
     "BaseEntity",
     "TmpBaseEntity",
     "Entity",
diff --git a/src/superannotate/lib/core/entities/base.py b/src/superannotate/lib/core/entities/base.py
index 43c132e37..cdd243e4b 100644
--- a/src/superannotate/lib/core/entities/base.py
+++ b/src/superannotate/lib/core/entities/base.py
@@ -1,9 +1,35 @@
 from datetime import datetime
+from typing import Optional
+
+from lib.core.enums import AnnotationStatus
 from pydantic import BaseModel
+from pydantic import Extra
 from pydantic import Field
 
 
 class TimedBaseModel(BaseModel):
-    created_at: datetime = Field(None, alias="createdAt")
-    updated_at: datetime = Field(None, alias="updatedAt")
+    createdAt: datetime = Field(None, alias="createdAt")
+    updatedAt: datetime = Field(None, alias="updatedAt")
+
+
+class BaseEntity(TimedBaseModel):
+    id: int
+    name: str
+    path: Optional[str] = Field(
+        None, description="Item’s path in SuperAnnotate project"
+    )
+    url: Optional[str] = Field(None, description="Publicly available HTTP address")
+    annotation_status: AnnotationStatus = Field(description="Item annotation status")
+    annotator_name: Optional[str] = Field(description="Annotator email")
+    qa_name: Optional[str] = Field(description="QA email")
+    entropy_value: Optional[str] = Field(description="Priority score of given item")
+    createdAt: str = Field(description="Date of creation")
+    updatedAt: str = Field(description="Update date")
+
+    class Config:
+        extra = Extra.allow
+
+    def add_path(self, project_name: str, folder_name: str):
+        path = 
f"{project_name}{f'/{folder_name}' if folder_name != 'root' else ''}/{self.name}" - self.path = path - return self - - -class Entity(TmpBaseEntity): +class Entity(BaseEntity): class Config: extra = Extra.allow class TmpImageEntity(Entity): - prediction_status: Optional[SegmentationStatus] = Field(SegmentationStatus.NOT_STARTED) - segmentation_status: Optional[SegmentationStatus] = Field(SegmentationStatus.NOT_STARTED) + prediction_status: Optional[SegmentationStatus] = Field( + SegmentationStatus.NOT_STARTED + ) + segmentation_status: Optional[SegmentationStatus] = Field( + SegmentationStatus.NOT_STARTED + ) approval_status: bool = None class Config: @@ -45,9 +26,9 @@ class Config: class VideoEntity(Entity): class Config: - ignore_extra = True + extra = Extra.ignore class DocumentEntity(Entity): class Config: - ignore_extra = True + extra = Extra.ignore diff --git a/src/superannotate/lib/core/entities/project_entities.py b/src/superannotate/lib/core/entities/project_entities.py index 93ff503f9..d98977999 100644 --- a/src/superannotate/lib/core/entities/project_entities.py +++ b/src/superannotate/lib/core/entities/project_entities.py @@ -127,7 +127,9 @@ def __copy__(self): team_id=self.team_id, name=self.name, project_type=self.project_type, - description=self.description if self.description else f"Copy of {self.name}.", + description=self.description + if self.description + else f"Copy of {self.name}.", status=self.status, folder_id=self.folder_id, users=self.users, @@ -279,7 +281,7 @@ def __init__( meta: ImageInfoEntity = ImageInfoEntity(), created_at: str = None, updated_at: str = None, - **_ + **_, ): super().__init__(uuid) self.team_id = team_id diff --git a/src/superannotate/lib/core/entities/video.py b/src/superannotate/lib/core/entities/video.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/superannotate/lib/core/enums.py b/src/superannotate/lib/core/enums.py index 8687c627d..dca8ded74 100644 --- a/src/superannotate/lib/core/enums.py +++ b/src/superannotate/lib/core/enums.py @@ -70,6 +70,7 @@ class ImageQuality(BaseTitledEnum): class ProjectStatus(BaseTitledEnum): + Undefined = "Undefined", 0 NotStarted = "NotStarted", 1 InProgress = "InProgress", 2 Completed = "Completed", 3 diff --git a/src/superannotate/lib/core/reporter.py b/src/superannotate/lib/core/reporter.py index 7fead24d8..75193342a 100644 --- a/src/superannotate/lib/core/reporter.py +++ b/src/superannotate/lib/core/reporter.py @@ -52,7 +52,10 @@ def log_debug(self, value: str): self.debug_messages.append(value) def start_progress( - self, iterations: Union[int, range], description: str = "Processing", disable=False + self, + iterations: Union[int, range], + description: str = "Processing", + disable=False, ): self.progress_bar = self.get_progress_bar(iterations, description, disable) diff --git a/src/superannotate/lib/core/repositories.py b/src/superannotate/lib/core/repositories.py index 2c6a0deb9..c9a132372 100644 --- a/src/superannotate/lib/core/repositories.py +++ b/src/superannotate/lib/core/repositories.py @@ -15,11 +15,15 @@ class BaseReadOnlyRepository(ABC): @abstractmethod - def get_one(self, uuid: Union[Condition, int]) -> Optional[Union[BaseEntity, TmpBaseEntity]]: + def get_one( + self, uuid: Union[Condition, int] + ) -> Optional[Union[BaseEntity, TmpBaseEntity]]: raise NotImplementedError @abstractmethod - def get_all(self, condition: Optional[Condition] = None) -> List[Union[BaseEntity, TmpBaseEntity]]: + def get_all( + self, condition: Optional[Condition] = None + ) -> 
List[Union[BaseEntity, TmpBaseEntity]]: raise NotImplementedError @staticmethod diff --git a/src/superannotate/lib/core/serviceproviders.py b/src/superannotate/lib/core/serviceproviders.py index 5ab8469df..9da0dc83f 100644 --- a/src/superannotate/lib/core/serviceproviders.py +++ b/src/superannotate/lib/core/serviceproviders.py @@ -12,20 +12,20 @@ class SuperannotateServiceProvider: @abstractmethod def attach_files( - self, - project_id: int, - folder_id: int, - team_id: int, - files: List[Dict], - annotation_status_code: int, - upload_state_code: int, - meta: Dict, + self, + project_id: int, + folder_id: int, + team_id: int, + files: List[Dict], + annotation_status_code: int, + upload_state_code: int, + meta: Dict, ): raise NotImplementedError @abstractmethod def get_annotation_classes( - self, project_id: int, team_id: int, name_prefix: str = None + self, project_id: int, team_id: int, name_prefix: str = None ): raise NotImplementedError @@ -35,19 +35,19 @@ def share_project_bulk(self, project_id: int, team_id: int, users: Iterable): @abstractmethod def invite_contributors( - self, team_id: int, team_role: int, emails: Iterable + self, team_id: int, team_role: int, emails: Iterable ) -> Tuple[List[str], List[str]]: raise NotImplementedError @abstractmethod def prepare_export( - self, - project_id: int, - team_id: int, - folders: List[str], - annotation_statuses: Iterable[Any], - include_fuse: bool, - only_pinned: bool, + self, + project_id: int, + team_id: int, + folders: List[str], + annotation_statuses: Iterable[Any], + include_fuse: bool, + only_pinned: bool, ): raise NotImplementedError @@ -99,17 +99,17 @@ def update_folder(self, project_id: int, team_id: int, folder_data: dict): raise NotImplementedError def get_download_token( - self, - project_id: int, - team_id: int, - folder_id: int, - image_id: int, - include_original: int = 1, + self, + project_id: int, + team_id: int, + folder_id: int, + image_id: int, + include_original: int = 1, ) -> dict: raise NotImplementedError def get_upload_token( - self, project_id: int, team_id: int, folder_id: int, image_id: int, + self, project_id: int, team_id: int, folder_id: int, image_id: int, ) -> dict: raise NotImplementedError @@ -117,24 +117,24 @@ def update_image(self, image_id: int, team_id: int, project_id: int, data: dict) raise NotImplementedError def copy_images_between_folders_transaction( - self, - team_id: int, - project_id: int, - from_folder_id: int, - to_folder_id: int, - images: List[str], - include_annotations: bool = False, - include_pin: bool = False, + self, + team_id: int, + project_id: int, + from_folder_id: int, + to_folder_id: int, + images: List[str], + include_annotations: bool = False, + include_pin: bool = False, ) -> int: raise NotImplementedError def move_images_between_folders( - self, - team_id: int, - project_id: int, - from_folder_id: int, - to_folder_id: int, - images: List[str], + self, + team_id: int, + project_id: int, + from_folder_id: int, + to_folder_id: int, + images: List[str], ) -> List[str]: """ Returns list of moved images. 
@@ -142,22 +142,22 @@ def move_images_between_folders( raise NotImplementedError def get_duplicated_images( - self, project_id: int, team_id: int, folder_id: int, images: List[str] + self, project_id: int, team_id: int, folder_id: int, images: List[str] ): raise NotImplementedError def get_progress( - self, project_id: int, team_id: int, poll_id: int + self, project_id: int, team_id: int, poll_id: int ) -> Tuple[int, int]: raise NotImplementedError def set_images_statuses_bulk( - self, - image_names: List[str], - team_id: int, - project_id: int, - folder_id: int, - annotation_status: int, + self, + image_names: List[str], + team_id: int, + project_id: int, + folder_id: int, + annotation_status: int, ): raise NotImplementedError @@ -165,49 +165,49 @@ def delete_images(self, project_id: int, team_id: int, image_ids: List[int]): raise NotImplementedError def assign_images( - self, - team_id: int, - project_id: int, - folder_name: str, - user: str, - image_names: list, + self, + team_id: int, + project_id: int, + folder_name: str, + user: str, + image_names: list, ): raise NotImplementedError def get_bulk_images( - self, project_id: int, team_id: int, folder_id: int, images: List[str] + self, project_id: int, team_id: int, folder_id: int, images: List[str] ) -> List[dict]: raise NotImplementedError def un_assign_folder( - self, team_id: int, project_id: int, folder_name: str, + self, team_id: int, project_id: int, folder_name: str, ): raise NotImplementedError def assign_folder( - self, team_id: int, project_id: int, folder_name: str, users: list + self, team_id: int, project_id: int, folder_name: str, users: list ): raise NotImplementedError def un_assign_images( - self, team_id: int, project_id: int, folder_name: str, image_names: list, + self, team_id: int, project_id: int, folder_name: str, image_names: list, ): raise NotImplementedError def un_share_project( - self, team_id: int, project_id: int, user_id: str, + self, team_id: int, project_id: int, user_id: str, ): raise NotImplementedError def upload_form_s3( - self, - project_id: int, - team_id: int, - access_key: str, - secret_key: str, - bucket_name: str, - from_folder_name: str, - to_folder_id: int, + self, + project_id: int, + team_id: int, + access_key: str, + secret_key: str, + bucket_name: str, + from_folder_name: str, + to_folder_id: int, ): raise NotImplementedError @@ -227,7 +227,7 @@ def get_s3_upload_auth_token(self, team_id: int, folder_id: int, project_id: int raise NotImplementedError def delete_annotation_class( - self, team_id: int, project_id: int, annotation_class_id: int + self, team_id: int, project_id: int, annotation_class_id: int ): raise NotImplementedError @@ -238,17 +238,17 @@ def set_project_workflow_bulk(self, project_id: int, team_id: int, steps: list): raise NotImplementedError def set_project_workflow_attributes_bulk( - self, project_id: int, team_id: int, attributes: list + self, project_id: int, team_id: int, attributes: list ): raise NotImplementedError def get_pre_annotation_upload_data( - self, project_id: int, team_id: int, image_ids: List[int], folder_id: int + self, project_id: int, team_id: int, image_ids: List[int], folder_id: int ): raise NotImplementedError def get_annotation_upload_data( - self, project_id: int, team_id: int, image_ids: List[int], folder_id: int + self, project_id: int, team_id: int, image_ids: List[int], folder_id: int ) -> ServiceResponse: raise NotImplementedError @@ -262,7 +262,7 @@ def get_model_metrics(self, team_id: int, model_id: int) -> dict: raise 
NotImplementedError def get_models( - self, name: str, team_id: int, project_id: int, model_type: str + self, name: str, team_id: int, project_id: int, model_type: str ) -> List: raise NotImplementedError @@ -276,58 +276,66 @@ def delete_model(self, team_id: int, model_id: int): raise NotImplementedError def get_ml_model_download_tokens( - self, team_id: int, model_id: int + self, team_id: int, model_id: int ) -> ServiceResponse: raise NotImplementedError def run_prediction( - self, team_id: int, project_id: int, ml_model_id: int, image_ids: list + self, team_id: int, project_id: int, ml_model_id: int, image_ids: list ): raise NotImplementedError def delete_image_annotations( - self, - team_id: int, - project_id: int, - folder_id: int = None, - image_names: List[str] = None, + self, + team_id: int, + project_id: int, + folder_id: int = None, + image_names: List[str] = None, ) -> dict: raise NotImplementedError def get_annotations_delete_progress( - self, team_id: int, project_id: int, poll_id: int + self, team_id: int, project_id: int, poll_id: int ): raise NotImplementedError def get_limitations( - self, team_id: int, project_id: int, folder_id: int = None + self, team_id: int, project_id: int, folder_id: int = None ) -> ServiceResponse: raise NotImplementedError @abstractmethod def get_annotations( - self, - project_id: int, - team_id: int, - folder_id: int, - items: List[str], - reporter: Reporter + self, + project_id: int, + team_id: int, + folder_id: int, + items: List[str], + reporter: Reporter, ) -> List[dict]: raise NotImplementedError def upload_priority_scores( - self, team_id: int, project_id: int, folder_id: int, priorities: list + self, team_id: int, project_id: int, folder_id: int, priorities: list ) -> dict: raise NotImplementedError def get_integrations(self, team_id: int) -> List[dict]: raise NotImplementedError - def attach_integrations(self, team_id: int, project_id: int, integration_id: int, folder_id: int, - folder_name: str) -> bool: + def attach_integrations( + self, + team_id: int, + project_id: int, + integration_id: int, + folder_id: int, + folder_name: str, + ) -> bool: raise NotImplementedError - def saqul_query(self, team_id: int, project_id: int, query: str, folder_id: int) -> ServiceResponse: + def saqul_query( + self, team_id: int, project_id: int, query: str, folder_id: int + ) -> ServiceResponse: raise NotImplementedError def validate_saqul_query(self, team_id: int, project_id: int, query: str) -> dict: diff --git a/src/superannotate/lib/core/usecases/annotations.py b/src/superannotate/lib/core/usecases/annotations.py index ab8583cfe..0a9e6ddca 100644 --- a/src/superannotate/lib/core/usecases/annotations.py +++ b/src/superannotate/lib/core/usecases/annotations.py @@ -44,20 +44,20 @@ class UploadAnnotationsUseCase(BaseReportableUseCae): ImageInfo = namedtuple("ImageInfo", ["path", "name", "id"]) def __init__( - self, - reporter: Reporter, - project: ProjectEntity, - folder: FolderEntity, - team: TeamEntity, - images: BaseManageableRepository, - annotation_classes: List[AnnotationClassEntity], - annotation_paths: List[str], - backend_service_provider: SuperannotateServiceProvider, - templates: List[dict], - validators: AnnotationValidators, - pre_annotation: bool = False, - client_s3_bucket=None, - folder_path: str = None, + self, + reporter: Reporter, + project: ProjectEntity, + folder: FolderEntity, + team: TeamEntity, + images: BaseManageableRepository, + annotation_classes: List[AnnotationClassEntity], + annotation_paths: List[str], + 
backend_service_provider: SuperannotateServiceProvider, + templates: List[dict], + validators: AnnotationValidators, + pre_annotation: bool = False, + client_s3_bucket=None, + folder_path: str = None, ): super().__init__(reporter) self._project = project @@ -81,8 +81,8 @@ def __init__( @property def annotation_postfix(self): if self._project.project_type in ( - constances.ProjectType.VIDEO.value, - constances.ProjectType.DOCUMENT.value, + constances.ProjectType.VIDEO.value, + constances.ProjectType.DOCUMENT.value, ): return constances.ATTACHED_VIDEO_ANNOTATION_POSTFIX elif self._project.project_type == constances.ProjectType.VECTOR.value: @@ -94,8 +94,8 @@ def annotation_postfix(self): def extract_name(value: str): return os.path.basename( value.replace(constances.PIXEL_ANNOTATION_POSTFIX, "") - .replace(constances.VECTOR_ANNOTATION_POSTFIX, "") - .replace(constances.ATTACHED_VIDEO_ANNOTATION_POSTFIX, ""), + .replace(constances.VECTOR_ANNOTATION_POSTFIX, "") + .replace(constances.ATTACHED_VIDEO_ANNOTATION_POSTFIX, ""), ) @property @@ -119,8 +119,8 @@ def annotations_to_upload(self): folder_id=self._folder.uuid, images=[image.name for image in images_detail], ) - .execute() - .data + .execute() + .data ) for image_data in images_data: for idx, detail in enumerate(images_detail): @@ -150,7 +150,7 @@ def missing_annotations(self): return self._missing_annotations def get_annotation_upload_data( - self, image_ids: List[int] + self, image_ids: List[int] ) -> UploadAnnotationAuthData: if self._pre_annotation: function = self._backend_service.get_pre_annotation_upload_data @@ -166,12 +166,12 @@ def get_annotation_upload_data( return response.data def _upload_annotation( - self, - image_id: int, - image_name: str, - upload_data: UploadAnnotationAuthData, - path: str, - bucket, + self, + image_id: int, + image_name: str, + upload_data: UploadAnnotationAuthData, + path: str, + bucket, ): try: self.reporter.disable_warnings() @@ -255,8 +255,8 @@ def execute(self): ) for step in iterations_range: annotations_to_upload = self.annotations_to_upload[ - step: step + self.AUTH_DATA_CHUNK_SIZE # noqa: E203 - ] + step : step + self.AUTH_DATA_CHUNK_SIZE # noqa: E203 + ] upload_data = self.get_annotation_upload_data( [int(image.id) for image in annotations_to_upload] ) @@ -269,11 +269,11 @@ def execute(self): } # dummy progress for _ in range( - len(annotations_to_upload) - len(upload_data.images) + len(annotations_to_upload) - len(upload_data.images) ): self.reporter.update_progress() with concurrent.futures.ThreadPoolExecutor( - max_workers=self.MAX_WORKERS + max_workers=self.MAX_WORKERS ) as executor: results = [ executor.submit( @@ -305,25 +305,25 @@ def execute(self): class UploadAnnotationUseCase(BaseReportableUseCae): def __init__( - self, - project: ProjectEntity, - folder: FolderEntity, - image: ImageEntity, - images: BaseManageableRepository, - team: TeamEntity, - annotation_classes: List[AnnotationClassEntity], - backend_service_provider: SuperannotateServiceProvider, - reporter: Reporter, - templates: List[dict], - validators: AnnotationValidators, - annotation_upload_data: UploadAnnotationAuthData = None, - annotations: dict = None, - s3_bucket=None, - client_s3_bucket=None, - mask=None, - verbose: bool = True, - annotation_path: str = None, - pass_validation: bool = False, + self, + project: ProjectEntity, + folder: FolderEntity, + image: ImageEntity, + images: BaseManageableRepository, + team: TeamEntity, + annotation_classes: List[AnnotationClassEntity], + backend_service_provider: 
SuperannotateServiceProvider, + reporter: Reporter, + templates: List[dict], + validators: AnnotationValidators, + annotation_upload_data: UploadAnnotationAuthData = None, + annotations: dict = None, + s3_bucket=None, + client_s3_bucket=None, + mask=None, + verbose: bool = True, + annotation_path: str = None, + pass_validation: bool = False, ): super().__init__(reporter) self._project = project @@ -412,18 +412,18 @@ def set_annotation_json(self): @staticmethod def prepare_annotations( - project_type: int, - annotations: dict, - annotation_classes: List[AnnotationClassEntity], - templates: List[dict], - reporter: Reporter, - team: TeamEntity, + project_type: int, + annotations: dict, + annotation_classes: List[AnnotationClassEntity], + templates: List[dict], + reporter: Reporter, + team: TeamEntity, ) -> dict: handlers_chain = ChainedAnnotationHandlers() if project_type in ( - constances.ProjectType.VECTOR.value, - constances.ProjectType.PIXEL.value, - constances.ProjectType.DOCUMENT.value, + constances.ProjectType.VECTOR.value, + constances.ProjectType.PIXEL.value, + constances.ProjectType.DOCUMENT.value, ): handlers_chain.attach( MissingIDsHandler(annotation_classes, templates, reporter) @@ -435,7 +435,7 @@ def prepare_annotations( handlers_chain.attach(LastActionHandler(team.creator_id)) return handlers_chain.handle(annotations) - def clean_json(self, json_data: dict, ) -> Tuple[bool, dict]: + def clean_json(self, json_data: dict,) -> Tuple[bool, dict]: use_case = ValidateAnnotationUseCase( constances.ProjectType.get_name(self._project.project_type), annotation=json_data, @@ -465,8 +465,8 @@ def execute(self): Body=json.dumps(annotation_json), ) if ( - self._project.project_type == constances.ProjectType.PIXEL.value - and self._mask + self._project.project_type == constances.ProjectType.PIXEL.value + and self._mask ): bucket.put_object( Key=self.annotation_upload_data.images[self._image.uuid][ @@ -493,14 +493,14 @@ def execute(self): class GetAnnotations(BaseReportableUseCae): def __init__( - self, - reporter: Reporter, - project: ProjectEntity, - folder: FolderEntity, - images: BaseManageableRepository, - item_names: Optional[List[str]], - backend_service_provider: SuperannotateServiceProvider, - show_process: bool = True + self, + reporter: Reporter, + project: ProjectEntity, + folder: FolderEntity, + images: BaseManageableRepository, + item_names: Optional[List[str]], + backend_service_provider: SuperannotateServiceProvider, + show_process: bool = True, ): super().__init__(reporter) self._project = project @@ -527,9 +527,9 @@ def validate_item_names(self): else: self._item_names_provided = False condition = ( - Condition("team_id", self._project.team_id, EQ) - & Condition("project_id", self._project.uuid, EQ) - & Condition("folder_id", self._folder.uuid, EQ) + Condition("team_id", self._project.team_id, EQ) + & Condition("project_id", self._project.uuid, EQ) + & Condition("folder_id", self._folder.uuid, EQ) ) self._item_names = [item.name for item in self._images.get_all(condition)] @@ -539,7 +539,12 @@ def _prettify_annotations(self, annotations: List[dict]): try: data = [] for annotation in annotations: - data.append((self._item_names.index(annotation["metadata"]["name"]), annotation)) + data.append( + ( + self._item_names.index(annotation["metadata"]["name"]), + annotation, + ) + ) return [i[1] for i in sorted(data, key=lambda x: x[0])] except KeyError: raise AppException("Broken data.") @@ -558,7 +563,7 @@ def execute(self): project_id=self._project.uuid, 
folder_id=self._folder.uuid, items=self._item_names, - reporter=self.reporter + reporter=self.reporter, ) received_items_count = len(annotations) self.reporter.finish_progress() @@ -572,14 +577,14 @@ def execute(self): class GetVideoAnnotationsPerFrame(BaseReportableUseCae): def __init__( - self, - reporter: Reporter, - project: ProjectEntity, - folder: FolderEntity, - images: BaseManageableRepository, - video_name: str, - fps: int, - backend_service_provider: SuperannotateServiceProvider + self, + reporter: Reporter, + project: ProjectEntity, + folder: FolderEntity, + images: BaseManageableRepository, + video_name: str, + fps: int, + backend_service_provider: SuperannotateServiceProvider, ): super().__init__(reporter) self._project = project @@ -606,18 +611,22 @@ def execute(self): images=self._images, item_names=[self._video_name], backend_service_provider=self._client, - show_process=False + show_process=False, ).execute() self.reporter.enable_info() if response.data: generator = VideoFrameGenerator(response.data[0], fps=self._fps) - self.reporter.log_info(f"Getting annotations for {generator.frames_count} frames from {self._video_name}.") + self.reporter.log_info( + f"Getting annotations for {generator.frames_count} frames from {self._video_name}." + ) if response.errors: self._response.errors = response.errors return self._response if not response.data: - self._response.errors = AppException(f"Video {self._video_name} not found.") + self._response.errors = AppException( + f"Video {self._video_name} not found." + ) annotations = response.data if annotations: self._response.data = list(generator) @@ -632,13 +641,13 @@ class UploadPriorityScoresUseCase(BaseReportableUseCae): CHUNK_SIZE = 100 def __init__( - self, - reporter, - project: ProjectEntity, - folder: FolderEntity, - scores: List[PriorityScore], - project_folder_name: str, - backend_service_provider: SuperannotateServiceProvider + self, + reporter, + project: ProjectEntity, + folder: FolderEntity, + scores: List[PriorityScore], + project_folder_name: str, + backend_service_provider: SuperannotateServiceProvider, ): super().__init__(reporter) self._project = project @@ -655,9 +664,13 @@ def get_clean_priority(priority): priority = 1000000 if priority < 0: priority = 0 - if str(float(priority)).split('.')[1:2]: - if len(str(float(priority)).split('.')[1]) > 5: - priority = float(str(float(priority)).split('.')[0] + '.' + str(float(priority)).split('.')[1][:5]) + if str(float(priority)).split(".")[1:2]: + if len(str(float(priority)).split(".")[1]) > 5: + priority = float( + str(float(priority)).split(".")[0] + + "." + + str(float(priority)).split(".")[1][:5] + ) return priority @property @@ -667,17 +680,21 @@ def folder_path(self): @property def uploading_info(self): data_len: int = len(self._scores) - return f"Uploading priority scores for {data_len} item(s) to {self.folder_path}." + return ( + f"Uploading priority scores for {data_len} item(s) to {self.folder_path}." 
+ ) def execute(self): if self.is_valid(): priorities = [] initial_scores = [] for i in self._scores: - priorities.append({ - "name": i.name, - "entropy_value": self.get_clean_priority(i.priority) - }) + priorities.append( + { + "name": i.name, + "entropy_value": self.get_clean_priority(i.priority), + } + ) initial_scores.append(i.name) uploaded_score_names = [] self.reporter.log_info(self.uploading_info) @@ -685,17 +702,23 @@ def execute(self): self.reporter.start_progress(iterations, "Uploading priority scores") if iterations: for i in iterations: - priorities_to_upload = priorities[i: i + self.CHUNK_SIZE] # noqa: E203 + priorities_to_upload = priorities[ + i : i + self.CHUNK_SIZE + ] # noqa: E203 res = self._client.upload_priority_scores( team_id=self._project.team_id, project_id=self._project.uuid, folder_id=self._folder.uuid, - priorities=priorities_to_upload + priorities=priorities_to_upload, ) self.reporter.update_progress(len(priorities_to_upload)) - uploaded_score_names.extend(list(map(lambda x: x["name"], res.get("data", [])))) + uploaded_score_names.extend( + list(map(lambda x: x["name"], res.get("data", []))) + ) self.reporter.finish_progress() - skipped_score_names = list(set(initial_scores) - set(uploaded_score_names)) + skipped_score_names = list( + set(initial_scores) - set(uploaded_score_names) + ) self._response.data = (uploaded_score_names, skipped_score_names) else: self.reporter.warning_messages("Empty scores.") diff --git a/src/superannotate/lib/core/usecases/images.py b/src/superannotate/lib/core/usecases/images.py index 7fd07f2a1..a35a835e4 100644 --- a/src/superannotate/lib/core/usecases/images.py +++ b/src/superannotate/lib/core/usecases/images.py @@ -515,51 +515,6 @@ def execute(self): self._images.update(self._image) -class DownloadImageFromPublicUrlUseCase(BaseUseCase): - def __init__( - self, project: ProjectEntity, image_url: str, image_name: str = None, - ): - super().__init__() - self._project = project - self._image_url = image_url - self._image_name = image_name - - def validate_project_type(self): - if self._project.upload_state == constances.UploadState.EXTERNAL.value: - raise AppValidationException( - "The function does not support projects containing images attached with URLs" - ) - - def execute(self): - try: - response = requests.get(url=self._image_url) - if response.ok: - import re - - content_description = response.headers.get( - "Content-Description", response.headers.get("Content-Disposition") - ) - if content_description: - result = re.findall( - r"filename\*?=([^;]+)", content_description, flags=re.IGNORECASE - ) - else: - result = None - self._response.data = ( - io.BytesIO(response.content), - result[0].strip().strip('"') - if result - else str(uuid.uuid4()) + ".jpg", - ) - else: - raise requests.exceptions.RequestException() - except requests.exceptions.RequestException as e: - self._response.errors = AppException( - f"Couldn't download image {self._image_url}, {e}" - ) - return self._response - - class ImagesBulkCopyUseCase(BaseUseCase): """ Copy images in bulk between folders in a project. 
@@ -2193,84 +2148,6 @@ def execute(self): return self._response -class DownloadImagePreAnnotationsUseCase(BaseUseCase): - def __init__( - self, - service: SuperannotateServiceProvider, - project: ProjectEntity, - folder: FolderEntity, - image_name: str, - images: BaseManageableRepository, - destination: str, - ): - super().__init__() - self._service = service - self._project = project - self._folder = folder - self._image_name = image_name - self._image_response = Response() - self._images = images - self._destination = destination - - @property - def image_use_case(self): - return GetImageUseCase( - project=self._project, - folder=self._folder, - image_name=self._image_name, - images=self._images, - service=self._service, - ) - - def execute(self): - data = { - "preannotation_json": None, - "preannotation_json_filename": None, - "preannotation_mask": None, - "preannotation_mask_filename": None, - } - image_response = self.image_use_case.execute() - token = self._service.get_download_token( - project_id=self._project.uuid, - team_id=self._project.team_id, - folder_id=self._folder.uuid, - image_id=image_response.data.uuid, - ) - credentials = token["annotations"]["PREANNOTATION"][0] - annotation_json_creds = credentials["annotation_json_path"] - if self._project.project_type == constances.ProjectType.VECTOR.value: - file_postfix = "___objects.json" - else: - file_postfix = "___pixel.json" - - response = requests.get( - url=annotation_json_creds["url"], headers=annotation_json_creds["headers"], - ) - if not response.ok: - raise AppException("Couldn't load annotations.") - data["preannotation_json"] = response.json() - data["preannotation_json_filename"] = f"{self._image_name}{file_postfix}" - mask_path = None - if self._project.project_type == constances.ProjectType.PIXEL.value: - annotation_blue_map_creds = credentials["annotation_bluemap_path"] - response = requests.get( - url=annotation_blue_map_creds["url"], - headers=annotation_blue_map_creds["headers"], - ) - data["preannotation_mask"] = io.BytesIO(response.content) - data["preannotation_mask_filename"] = f"{self._image_name}___save.png" - mask_path = Path(self._destination) / data["preannotation_mask_filename"] - with open(mask_path, "wb") as f: - f.write(data["preannotation_mask"].getbuffer()) - - json_path = Path(self._destination) / data["preannotation_json_filename"] - with open(json_path, "w") as f: - json.dump(data["preannotation_json"], f, indent=4) - - self._response.data = (str(json_path), str(mask_path)) - return self._response - - class GetImageAnnotationsUseCase(BaseReportableUseCae): def __init__( self, @@ -2535,7 +2412,8 @@ def validate_uniqueness(self): def validate_project_type(self): if ( - self._project.project_type in (ProjectType.PIXEL.value, ProjectType.VIDEO.value) + self._project.project_type + in (ProjectType.PIXEL.value, ProjectType.VIDEO.value) and self._annotation_class.type == "tag" ): raise AppException( @@ -2649,9 +2527,10 @@ def __init__( self._project = project def validate_project_type(self): - if self._project.project_type in (ProjectType.PIXEL.value, ProjectType.VIDEO.value) and any([ - True for i in self._annotation_classes if i.type == "tag" - ]): + if self._project.project_type in ( + ProjectType.PIXEL.value, + ProjectType.VIDEO.value, + ) and any([True for i in self._annotation_classes if i.type == "tag"]): raise AppException( f"Predefined tagging functionality is not supported for projects of type {ProjectType.get_name(self._project.project_type)}." 
) @@ -2673,11 +2552,17 @@ def execute(self): created = [] if len(unique_annotation_classes) > self.CHUNK_SIZE: for i in range(len(unique_annotation_classes), 0, -self.CHUNK_SIZE): - created.extend(self._annotation_classes_repo.bulk_insert( - entities=unique_annotation_classes[i - self.CHUNK_SIZE : i], # noqa: E203 - )) + created.extend( + self._annotation_classes_repo.bulk_insert( + entities=unique_annotation_classes[ + i - self.CHUNK_SIZE : i + ], # noqa: E203 + ) + ) else: - created = self._annotation_classes_repo.bulk_insert(entities=unique_annotation_classes) + created = self._annotation_classes_repo.bulk_insert( + entities=unique_annotation_classes + ) self._response.data = created return self._response @@ -2797,92 +2682,6 @@ def execute(self): yield from frames_generator -class UploadS3ImagesBackendUseCase(BaseUseCase): - def __init__( - self, - backend_service_provider: SuperannotateServiceProvider, - settings: BaseReadOnlyRepository, - project: ProjectEntity, - folder: FolderEntity, - access_key: str, - secret_key: str, - bucket_name: str, - folder_path: str, - image_quality: str, - ): - super().__init__() - self._backend_service = backend_service_provider - self._settings = settings - self._project = project - self._folder = folder - self._access_key = access_key - self._secret_key = secret_key - self._bucket_name = bucket_name - self._folder_path = folder_path - self._image_quality = image_quality - - def validate_image_quality(self): - if self._image_quality and self._image_quality not in ( - "compressed", - "original", - ): - raise AppValidationException("Invalid value for image_quality") - - def execute(self): - old_setting = None - if self._image_quality: - settings = self._settings.get_all() - for setting in settings: - if setting.attribute == "ImageQuality": - if setting.value == "compressed": - setting.value = 60 - else: - setting.value = 100 - self._backend_service.set_project_settings( - project_id=self._project.uuid, - team_id=self._project.team_id, - data=[setting.to_dict()], - ) - break - else: - raise AppException("Cant find settings.") - - response = self._backend_service.upload_form_s3( - project_id=self._project.uuid, - team_id=self._project.team_id, - access_key=self._access_key, - secret_key=self._secret_key, - bucket_name=self._bucket_name, - from_folder_name=self._folder_path, - to_folder_id=self._folder.uuid, - ) - - if not response.ok: - self._response.errors = AppException(response.json()["error"]) - - in_progress = response.ok - if in_progress: - while True: - time.sleep(4) - progress = self._backend_service.get_upload_status( - project_id=self._project.uuid, - team_id=self._project.team_id, - folder_id=self._folder.uuid, - ) - if progress == "2": - break - elif progress != "1": - raise AppException("Couldn't upload to project from S3.") - - if old_setting: - self._backend_service.set_project_settings( - project_id=self._project.uuid, - team_id=self._project.team_id, - data=[old_setting.to_dict()], - ) - return self._response - - class ValidateAnnotationUseCase(BaseUseCase): def __init__( self, diff --git a/src/superannotate/lib/core/usecases/integrations.py b/src/superannotate/lib/core/usecases/integrations.py index 345b734fa..82774e797 100644 --- a/src/superannotate/lib/core/usecases/integrations.py +++ b/src/superannotate/lib/core/usecases/integrations.py @@ -14,10 +14,10 @@ class GetIntegrations(BaseReportableUseCae): def __init__( - self, - reporter: Reporter, - team: TeamEntity, - integrations: BaseReadOnlyRepository, + self, + reporter: Reporter, + 
team: TeamEntity, + integrations: BaseReadOnlyRepository, ): super().__init__(reporter) @@ -34,15 +34,15 @@ def execute(self) -> Response: class AttachIntegrations(BaseReportableUseCae): def __init__( - self, - reporter: Reporter, - team: TeamEntity, - project: ProjectEntity, - folder: FolderEntity, - integrations: BaseReadOnlyRepository, - backend_service: SuperannotateServiceProvider, - integration: IntegrationEntity, - folder_path: str = None + self, + reporter: Reporter, + team: TeamEntity, + project: ProjectEntity, + folder: FolderEntity, + integrations: BaseReadOnlyRepository, + backend_service: SuperannotateServiceProvider, + integration: IntegrationEntity, + folder_path: str = None, ): super().__init__(reporter) @@ -61,7 +61,9 @@ def _upload_path(self): def execute(self) -> Response: integrations: List[IntegrationEntity] = self._integrations.get_all() integration_name_lower = self._integration.name.lower() - integration = next((i for i in integrations if i.name.lower() == integration_name_lower), None) + integration = next( + (i for i in integrations if i.name.lower() == integration_name_lower), None + ) if integration: self.reporter.log_info( "Attaching file(s) from " @@ -73,7 +75,7 @@ def execute(self) -> Response: self._project.uuid, integration.id, self._folder.uuid, - self._folder_path + self._folder_path, ) if not attached: self._response.errors = AppException( diff --git a/src/superannotate/lib/core/usecases/items.py b/src/superannotate/lib/core/usecases/items.py index 7ba4b2490..edc79415c 100644 --- a/src/superannotate/lib/core/usecases/items.py +++ b/src/superannotate/lib/core/usecases/items.py @@ -21,13 +21,12 @@ class GetItem(BaseReportableUseCae): def __init__( - self, - reporter: Reporter, - project: ProjectEntity, - folder: FolderEntity, - items: BaseReadOnlyRepository, - item_name: str - + self, + reporter: Reporter, + project: ProjectEntity, + folder: FolderEntity, + items: BaseReadOnlyRepository, + item_name: str, ): super().__init__(reporter) self._project = project @@ -37,7 +36,10 @@ def __init__( @staticmethod def serialize_entity(entity: Entity, project: ProjectEntity): - if project.project_type in (constances.ProjectType.VECTOR.value, constances.ProjectType.PIXEL.value): + if project.project_type in ( + constances.ProjectType.VECTOR.value, + constances.ProjectType.PIXEL.value, + ): return TmpImageEntity(**entity.dict(by_alias=True)) elif project.project_type == constances.ProjectType.VIDEO.value: return VideoEntity(**entity.dict(by_alias=True)) @@ -48,10 +50,10 @@ def serialize_entity(entity: Entity, project: ProjectEntity): def execute(self) -> Response: if self.is_valid(): condition = ( - Condition("name", self._item_name, EQ) - & Condition("team_id", self._project.team_id, EQ) - & Condition("project_id", self._project.uuid, EQ) - & Condition("folder_id", self._folder.uuid, EQ) + Condition("name", self._item_name, EQ) + & Condition("team_id", self._project.team_id, EQ) + & Condition("project_id", self._project.uuid, EQ) + & Condition("folder_id", self._folder.uuid, EQ) ) entity = self._items.get_one(condition) if entity: @@ -64,13 +66,12 @@ def execute(self) -> Response: class QueryEntities(BaseReportableUseCae): def __init__( - self, - reporter: Reporter, - project: ProjectEntity, - folder: FolderEntity, - backend_service_provider: SuperannotateServiceProvider, - query: str - + self, + reporter: Reporter, + project: ProjectEntity, + folder: FolderEntity, + backend_service_provider: SuperannotateServiceProvider, + query: str, ): super().__init__(reporter) 
self._project = project @@ -83,7 +84,9 @@ def validate_project_state(self): raise AppException("Data is not synced.") def validate_query(self): - response = self._backend_client.validate_saqul_query(self._project.team_id, self._project.uuid, self._query) + response = self._backend_client.validate_saqul_query( + self._project.team_id, self._project.uuid, self._query + ) if response.get("error"): raise AppException(response["error"]) if not response.get("isValidQuery", False): @@ -101,13 +104,17 @@ def execute(self) -> Response: self._project.team_id, self._project.uuid, self._query, - folder_id=None if self._folder.name == "root" else self._folder.uuid + folder_id=None if self._folder.name == "root" else self._folder.uuid, ) if service_response.ok: if self._project.project_type == constances.ProjectType.VECTOR.value: - data = self._drop_paths(parse_obj_as(List[TmpBaseEntity], service_response.data)) + data = self._drop_paths( + parse_obj_as(List[TmpBaseEntity], service_response.data) + ) else: - data = self._drop_paths(parse_obj_as(List[TmpBaseEntity], service_response.data)) + data = self._drop_paths( + parse_obj_as(List[TmpBaseEntity], service_response.data) + ) for i, item in enumerate(data): data[i] = GetItem.serialize_entity(item, self._project) self._response.data = data @@ -118,15 +125,14 @@ def execute(self) -> Response: class ListItems(BaseReportableUseCae): def __init__( - self, - reporter: Reporter, - project: ProjectEntity, - folder: FolderEntity, - items: BaseReadOnlyRepository, - search_condition: Condition, - folders: BaseReadOnlyRepository, - recursive: bool = False - + self, + reporter: Reporter, + project: ProjectEntity, + folder: FolderEntity, + items: BaseReadOnlyRepository, + search_condition: Condition, + folders: BaseReadOnlyRepository, + recursive: bool = False, ): super().__init__(reporter) self._project = project @@ -148,8 +154,11 @@ def execute(self) -> Response: if not self._recursive: self._search_condition &= Condition("folder_id", self._folder.uuid, EQ) items = [ - GetItem.serialize_entity(item.add_path(self._project.name, self._folder.name), self._project) for - item in self._items.get_all(self._search_condition) + GetItem.serialize_entity( + item.add_path(self._project.name, self._folder.name), + self._project, + ) + for item in self._items.get_all(self._search_condition) ] else: items = [] @@ -159,10 +168,15 @@ def execute(self) -> Response: ) folders.append(self._folder) for folder in folders: - tmp = self._items.get_all(self._search_condition & Condition("folder_id", folder.uuid, EQ)) + tmp = self._items.get_all( + self._search_condition & Condition("folder_id", folder.uuid, EQ) + ) items.extend( [ - GetItem.serialize_entity(item.add_path(self._project.name, folder.name), self._project) + GetItem.serialize_entity( + item.add_path(self._project.name, folder.name), + self._project, + ) for item in tmp ] ) diff --git a/src/superannotate/lib/core/usecases/models.py b/src/superannotate/lib/core/usecases/models.py index 7952cfdd2..c60aea5dd 100644 --- a/src/superannotate/lib/core/usecases/models.py +++ b/src/superannotate/lib/core/usecases/models.py @@ -4,7 +4,6 @@ import time import zipfile from pathlib import Path -from typing import Iterable from typing import List import boto3 @@ -24,7 +23,6 @@ from lib.core.exceptions import AppException from lib.core.exceptions import AppValidationException from lib.core.repositories import BaseManageableRepository -from lib.core.repositories import BaseReadOnlyRepository from lib.core.serviceproviders import 
SuperannotateServiceProvider from lib.core.usecases.base import BaseInteractiveUseCase from lib.core.usecases.base import BaseUseCase @@ -132,155 +130,6 @@ def execute(self): return self._response -class CreateModelUseCase(BaseUseCase): - def __init__( - self, - base_model_name: str, - model_name: str, - model_description: str, - task: str, - team_id: int, - train_data_paths: Iterable[str], - test_data_paths: Iterable[str], - backend_service_provider: SuperannotateServiceProvider, - projects: BaseReadOnlyRepository, - folders: BaseReadOnlyRepository, - ml_models: BaseManageableRepository, - hyper_parameters: dict = None, - ): - super().__init__() - self._base_model_name = base_model_name - self._model_name = model_name - self._model_description = model_description - self._task = task - self._team_id = team_id - self._hyper_parameters = hyper_parameters - self._train_data_paths = train_data_paths - self._test_data_paths = test_data_paths - self._backend_service = backend_service_provider - self._ml_models = ml_models - self._projects = projects - self._folders = folders - - @property - def hyper_parameters(self): - if self._hyper_parameters: - for parameter in constances.DEFAULT_HYPER_PARAMETERS: - if parameter not in self._hyper_parameters: - self._hyper_parameters[ - parameter - ] = constances.DEFAULT_HYPER_PARAMETERS[parameter] - else: - self._hyper_parameters = constances.DEFAULT_HYPER_PARAMETERS - return self._hyper_parameters - - @staticmethod - def split_path(path: str): - if "/" in path: - return path.split("/") - return path, "root" - - def execute(self): - train_folder_ids = [] - test_folder_ids = [] - projects = [] - - for path in self._train_data_paths: - project_name, folder_name = self.split_path(path) - projects = self._projects.get_all( - Condition("name", project_name, EQ) - & Condition("team_id", self._team_id, EQ) - ) - - projects.extend(projects) - folders = self._folders.get_all( - Condition("name", folder_name, EQ) - & Condition("team_id", self._team_id, EQ) - & Condition("project_id", projects[0].uuid, EQ) - ) - train_folder_ids.append(folders[0].uuid) - - for path in self._test_data_paths: - project_name, folder_name = self.split_path(path) - projects.extend( - self._projects.get_all( - Condition("name", project_name, EQ) - & Condition("team_id", self._team_id, EQ) - ) - ) - folders = self._folders.get_all( - Condition("name", folder_name, EQ) - & Condition("team_id", self._team_id, EQ) - & Condition("project_id", projects[0].uuid, EQ) - ) - test_folder_ids.append(folders[0].uuid) - - project_types = [project.project_type for project in projects] - - if set(train_folder_ids) & set(test_folder_ids): - self._response.errors = AppException( - "Avoid overlapping between training and test data." - ) - return - if len(set(project_types)) != 1: - self._response.errors = AppException( - "All projects have to be of the same type. 
Either vector or pixel" - ) - return - if any( - { - True - for project in projects - if project.upload_state == constances.UploadState.EXTERNAL.value - } - ): - self._response.errors = AppException( - "The function does not support projects containing images attached with URLs" - ) - return - - base_model = self._ml_models.get_all( - Condition("name", self._base_model_name, EQ) - & Condition("team_id", self._team_id, EQ) - & Condition("task", constances.MODEL_TRAINING_TASKS[self._task], EQ) - & Condition("type", project_types[0], EQ) - & Condition("include_global", True, EQ) - )[0] - - if base_model.model_type != project_types[0]: - self._response.errors = AppException( - f"The type of provided projects is {project_types[0]}, " - "and does not correspond to the type of provided model" - ) - return self._response - - completed_images_data = self._backend_service.bulk_get_folders( - self._team_id, [project.uuid for project in projects] - ) - complete_image_count = sum( - [ - folder["completedCount"] - for folder in completed_images_data["data"] - if folder["id"] in train_folder_ids - ] - ) - ml_model = MLModelEntity( - name=self._model_name, - description=self._model_description, - task=constances.MODEL_TRAINING_TASKS[self._task], - base_model_id=base_model.uuid, - image_count=complete_image_count, - model_type=project_types[0], - train_folder_ids=train_folder_ids, - test_folder_ids=test_folder_ids, - hyper_parameters=self.hyper_parameters, - ) - new_model_data = self._ml_models.insert(ml_model) - - self._response.data = new_model_data - return self._response - - class GetModelMetricsUseCase(BaseUseCase): def __init__( self, @@ -301,20 +150,6 @@ def execute(self): return self._response -class UpdateModelUseCase(BaseUseCase): - def __init__( - self, model: MLModelEntity, models: BaseManageableRepository, - ): - super().__init__() - self._models = models - self._model = model - - def execute(self): - model = self._models.update(self._model) - self._response.data = model - return self._response - - class DeleteMLModel(BaseUseCase): def __init__(self, model_id: int, models: BaseManageableRepository): super().__init__() diff --git a/src/superannotate/lib/core/usecases/projects.py b/src/superannotate/lib/core/usecases/projects.py index 954ee9edf..9758140d2 100644 --- a/src/superannotate/lib/core/usecases/projects.py +++ b/src/superannotate/lib/core/usecases/projects.py @@ -129,10 +129,10 @@ def execute(self): ) root_completed_count = 0 total_completed_count = 0 - for i in completed_images_data['data']: - total_completed_count += i['completedCount'] - if i['is_root']: - root_completed_count = i['completedCount'] + for i in completed_images_data["data"]: + total_completed_count += i["completedCount"] + if i["is_root"]: + root_completed_count = i["completedCount"] project.root_folder_completed_images_count = root_completed_count project.completed_images_count = total_completed_count @@ -214,6 +214,7 @@ def validate_project_name(self): def execute(self): if self.is_valid(): + # new projects can only have the status of NotStarted self._project.status = constances.ProjectStatus.NotStarted.value entity = self._projects.insert(self._project) self._response.data = entity diff --git a/src/superannotate/lib/core/video_convertor.py b/src/superannotate/lib/core/video_convertor.py index 1ee52d977..40ab8fc05 100644 --- a/src/superannotate/lib/core/video_convertor.py +++ b/src/superannotate/lib/core/video_convertor.py @@ -21,22 +21,8 @@ class FrameAnnotation(BaseModel): frame: int annotations: List[Annotation] = 
[] - def append_annotation(self, annotation: Annotation): - self.annotations.append(annotation) - - -class Annotations(BaseModel): - __root__: List[FrameAnnotation] = [] - - def append(self, value: FrameAnnotation): - self.__root__.append(value) - class VideoFrameGenerator: - class DefaultDict(defaultdict): - def __missing__(self, key): - return self.default_factory(key) - def __init__(self, annotation_data: dict, fps: int): self._annotation_data = annotation_data self.duration = annotation_data["metadata"]["duration"] / (1000 * 1000) @@ -56,13 +42,13 @@ def get_frame(self, frame_no: int): return self.annotations[frame_no] def interpolate_annotations( - self, - class_name: str, - from_frame: int, - to_frame: int, - data: dict, - steps: dict = None, - annotation_type: str = "bbox" + self, + class_name: str, + from_frame: int, + to_frame: int, + data: dict, + steps: dict = None, + annotation_type: str = "bbox", ) -> dict: annotations = {} for idx, frame_idx in enumerate(range(from_frame + 1, to_frame), 1): @@ -79,15 +65,11 @@ def interpolate_annotations( className=class_name, points=points, attributes=data["attributes"], - keyframe=False + keyframe=False, ) return annotations - def _add_annotation( - self, - frame_no: int, - annotation: Annotation - ): + def _add_annotation(self, frame_no: int, annotation: Annotation): frame = self.get_frame(frame_no) frame.annotations.append(annotation) @@ -132,37 +114,68 @@ def _process(self): last_annotation = None interpolated_frames = {} for timestamp in parameter["timestamps"]: - frames_mapping[int(math.ceil(timestamp["timestamp"] / self.ratio))].append(timestamp) + frames_mapping[ + int(math.ceil(timestamp["timestamp"] / self.ratio)) + ].append(timestamp) frames_mapping = self.merge_first_frame(frames_mapping) for from_frame_no, to_frame_no in self.pairwise(sorted(frames_mapping)): last_frame_no = to_frame_no - from_frame, to_frame = frames_mapping[from_frame_no][-1], frames_mapping[to_frame_no][0] + from_frame, to_frame = ( + frames_mapping[from_frame_no][-1], + frames_mapping[to_frame_no][0], + ) frames_diff = to_frame_no - from_frame_no if frames_diff > 1: steps = None - if annotation_type == "bbox" and from_frame.get("points") and to_frame.get("points"): + if ( + annotation_type == "bbox" + and from_frame.get("points") + and to_frame.get("points") + ): steps = { "y1": round( - (to_frame["points"]["y1"] - from_frame["points"]["y1"]) / frames_diff, - 2), + ( + to_frame["points"]["y1"] + - from_frame["points"]["y1"] + ) + / frames_diff, + 2, + ), "x2": round( - (to_frame["points"]["x2"] - from_frame["points"]["x2"]) / frames_diff, - 2), + ( + to_frame["points"]["x2"] + - from_frame["points"]["x2"] + ) + / frames_diff, + 2, + ), "x1": round( - (to_frame["points"]["x1"] - from_frame["points"]["x1"]) / frames_diff, - 2), + ( + to_frame["points"]["x1"] + - from_frame["points"]["x1"] + ) + / frames_diff, + 2, + ), "y2": round( - (to_frame["points"]["y2"] - from_frame["points"]["y2"]) / frames_diff, - 2), + ( + to_frame["points"]["y2"] + - from_frame["points"]["y2"] + ) + / frames_diff, + 2, + ), } - interpolated_frames.update(self.interpolate_annotations( - class_name=class_name, - from_frame=from_frame_no, - to_frame=to_frame_no, - data=from_frame, - steps=steps, - annotation_type=annotation_type - )) + interpolated_frames.update( + self.interpolate_annotations( + class_name=class_name, + from_frame=from_frame_no, + to_frame=to_frame_no, + data=from_frame, + steps=steps, + annotation_type=annotation_type, + ) + ) start_median_frame = 
self.get_median(frames_mapping[from_frame_no]) end_median_frame = self.get_median(frames_mapping[to_frame_no]) interpolated_frames[from_frame_no] = Annotation( @@ -170,14 +183,14 @@ def _process(self): className=class_name, points=start_median_frame.get("points"), attributes=start_median_frame["attributes"], - keyframe=True + keyframe=True, ) last_annotation = Annotation( type=annotation_type, className=class_name, points=end_median_frame.get("points"), attributes=end_median_frame["attributes"], - keyframe=True + keyframe=True, ) # interpolated_frames[to_frame_no] = Annotation( # type=annotation_type, @@ -187,7 +200,10 @@ def _process(self): # keyframe=True # ) self._add_annotation(last_frame_no, last_annotation) - [self._add_annotation(frame_no, annotation) for frame_no, annotation in interpolated_frames.items()] + [ + self._add_annotation(frame_no, annotation) + for frame_no, annotation in interpolated_frames.items() + ] def __iter__(self): for frame_no in range(1, int(self.frames_count) + 1): diff --git a/src/superannotate/lib/infrastructure/controller.py b/src/superannotate/lib/infrastructure/controller.py index d53029039..2abd5473f 100644 --- a/src/superannotate/lib/infrastructure/controller.py +++ b/src/superannotate/lib/infrastructure/controller.py @@ -41,7 +41,6 @@ class BaseController(metaclass=ABCMeta): - def __init__(self, config_path: str = None, token: str = None): self._team_data = None self._token = None @@ -61,7 +60,7 @@ def __init__(self, config_path: str = None, token: str = None): self._user_id = None self._team_name = None self._reporter = None - self._testing = os.getenv("SA_TESTING", 'False').lower() in ('true', '1', 't') + self._testing = os.getenv("SA_TESTING", "False").lower() in ("true", "1", "t") self._ssl_verify = not self._testing self._backend_url = os.environ.get("SA_URL", constances.BACKEND_URL) @@ -121,7 +120,7 @@ def initialize_backend_client(self): auth_token=self._token, logger=self._logger, verify_ssl=self._ssl_verify, - testing=self._testing + testing=self._testing, ) self._backend_client.get_session.cache_clear() return self._backend_client @@ -198,10 +197,6 @@ def get_integrations_repo(self, team_id: int): self._integrations = IntegrationRepository(self._backend_client, team_id) return self._integrations - @property - def configs(self): - return ConfigRepository(self._config_path) - @property def team_id(self) -> int: if not self._token: @@ -293,26 +288,39 @@ def get_folder_name(name: str = None): return "root" def search_project( - self, name: str = None, include_complete_image_count=False + self, + name: str = None, + include_complete_image_count=False, + statuses: List[str] = None, + **kwargs, ) -> Response: condition = Condition.get_empty_condition() if name: - condition = condition & (Condition("name", name, EQ)) + condition &= Condition("name", name, EQ) if include_complete_image_count: - condition = condition & Condition("completeImagesCount", "true", EQ) + condition &= Condition( + "completeImagesCount", include_complete_image_count, EQ + ) + for status in statuses: + condition &= Condition( + "status", constances.ProjectStatus.get_value(status), EQ + ) + for key, value in kwargs.items(): + if value: + condition &= Condition(key, value, EQ) use_case = usecases.GetProjectsUseCase( condition=condition, projects=self.projects, team_id=self.team_id, ) return use_case.execute() def create_project( - self, - name: str, - description: str, - project_type: str, - settings: Iterable = tuple(), - annotation_classes: Iterable = tuple(), - workflows: 
Iterable = tuple(), + self, + name: str, + description: str, + project_type: str, + settings: Iterable = tuple(), + annotation_classes: Iterable = tuple(), + workflows: Iterable = tuple(), ) -> Response: try: @@ -341,7 +349,7 @@ def create_project( annotation_classes=[ AnnotationClassEntity(**annotation_class) for annotation_class in annotation_classes - ] + ], ) return use_case.execute() @@ -357,14 +365,14 @@ def update_project(self, name: str, project_data: dict) -> Response: return use_case.execute() def upload_image_to_project( - self, - project_name: str, - folder_name: str, - image_name: str, - image: Union[str, io.BytesIO] = None, - annotation_status: str = None, - image_quality_in_editor: str = None, - from_s3_bucket=None, + self, + project_name: str, + folder_name: str, + image_name: str, + image: Union[str, io.BytesIO] = None, + annotation_status: str = None, + image_quality_in_editor: str = None, + from_s3_bucket=None, ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) @@ -392,13 +400,13 @@ def upload_image_to_project( ).execute() def upload_images_to_project( - self, - project_name: str, - folder_name: str, - paths: List[str], - annotation_status: str = None, - image_quality_in_editor: str = None, - from_s3_bucket=None, + self, + project_name: str, + folder_name: str, + paths: List[str], + annotation_status: str = None, + image_quality_in_editor: str = None, + from_s3_bucket=None, ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) @@ -418,16 +426,16 @@ def upload_images_to_project( ) def upload_images_from_folder_to_project( - self, - project_name: str, - folder_name: str, - folder_path: str, - extensions: Optional[List[str]] = None, - annotation_status: str = None, - exclude_file_patterns: Optional[List[str]] = None, - recursive_sub_folders: Optional[bool] = None, - image_quality_in_editor: str = None, - from_s3_bucket=None, + self, + project_name: str, + folder_name: str, + folder_path: str, + extensions: Optional[List[str]] = None, + annotation_status: str = None, + exclude_file_patterns: Optional[List[str]] = None, + recursive_sub_folders: Optional[bool] = None, + image_quality_in_editor: str = None, + from_s3_bucket=None, ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) @@ -450,14 +458,14 @@ def upload_images_from_folder_to_project( ) def clone_project( - self, - name: str, - from_name: str, - project_description: str, - copy_annotation_classes=True, - copy_settings=True, - copy_workflow=True, - copy_contributors=False, + self, + name: str, + from_name: str, + project_description: str, + copy_annotation_classes=True, + copy_settings=True, + copy_workflow=True, + copy_contributors=False, ): project = self._get_project(from_name) @@ -482,12 +490,12 @@ def clone_project( return use_case.execute() def interactive_attach_urls( - self, - project_name: str, - files: List[ImageEntity], - folder_name: str = None, - annotation_status: str = None, - upload_state_code: int = None, + self, + project_name: str, + files: List[ImageEntity], + folder_name: str = None, + annotation_status: str = None, + upload_state_code: int = None, ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) @@ -522,7 +530,7 @@ def get_folder(self, project_name: str, folder_name: str): return use_case.execute() def search_folders( - self, project_name: str, folder_name: str = None, include_users=False, **kwargs + self, project_name: str, 
folder_name: str = None, include_users=False, **kwargs ): condition = Condition.get_empty_condition() if kwargs: @@ -552,12 +560,12 @@ def delete_folders(self, project_name: str, folder_names: List[str]): return use_case.execute() def prepare_export( - self, - project_name: str, - folder_names: List[str], - include_fuse: bool, - only_pinned: bool, - annotation_statuses: List[str] = None, + self, + project_name: str, + folder_names: List[str], + include_fuse: bool, + only_pinned: bool, + annotation_statuses: List[str] = None, ): project = self._get_project(project_name) @@ -589,11 +597,11 @@ def search_team_contributors(self, **kwargs): return use_case.execute() def search_images( - self, - project_name: str, - folder_path: str = None, - annotation_status: str = None, - image_name_prefix: str = None, + self, + project_name: str, + folder_path: str = None, + annotation_status: str = None, + image_name_prefix: str = None, ): project = self._get_project(project_name) folder = self._get_folder(project, folder_path) @@ -608,7 +616,7 @@ def search_images( return use_case.execute() def _get_image( - self, project: ProjectEntity, image_name: str, folder: FolderEntity = None, + self, project: ProjectEntity, image_name: str, folder: FolderEntity = None, ) -> ImageEntity: response = usecases.GetImageUseCase( service=self._backend_client, @@ -622,7 +630,7 @@ def _get_image( return response.data def get_image( - self, project_name: str, image_name: str, folder_path: str = None + self, project_name: str, image_name: str, folder_path: str = None ) -> ImageEntity: project = self._get_project(project_name) folder = self._get_folder(project, folder_path) @@ -633,18 +641,18 @@ def update_folder(self, project_name: str, folder_name: str, folder_data: dict): folder = self._get_folder(project, folder_name) for field, value in folder_data.items(): setattr(folder, field, value) - use_case = usecases.UpdateFolderUseCase(folders=self.folders, folder=folder, ) + use_case = usecases.UpdateFolderUseCase(folders=self.folders, folder=folder,) return use_case.execute() def copy_image( - self, - from_project_name: str, - from_folder_name: str, - to_project_name: str, - to_folder_name: str, - image_name: str, - copy_annotation_status: bool = False, - move: bool = False, + self, + from_project_name: str, + from_folder_name: str, + to_project_name: str, + to_folder_name: str, + image_name: str, + copy_annotation_status: bool = False, + move: bool = False, ): from_project = self._get_project(from_project_name) to_project = self._get_project(to_project_name) @@ -667,12 +675,12 @@ def copy_image( return use_case.execute() def copy_image_annotation_classes( - self, - from_project_name: str, - from_folder_name: str, - to_project_name: str, - to_folder_name: str, - image_name: str, + self, + from_project_name: str, + from_folder_name: str, + to_project_name: str, + to_folder_name: str, + image_name: str, ): from_project = self._get_project(from_project_name) from_folder = self._get_folder(from_project, from_folder_name) @@ -707,7 +715,7 @@ def copy_image_annotation_classes( return use_case.execute() def update_image( - self, project_name: str, image_name: str, folder_name: str = None, **kwargs + self, project_name: str, image_name: str, folder_name: str = None, **kwargs ): image = self.get_image( project_name=project_name, image_name=image_name, folder_path=folder_name @@ -717,24 +725,14 @@ def update_image( use_case = usecases.UpdateImageUseCase(image=image, images=self.images) return use_case.execute() - def 
download_image_from_public_url( - self, project_name: str, image_url: str, image_name: str = None - ): - use_case = usecases.DownloadImageFromPublicUrlUseCase( - project=self._get_project(project_name), - image_url=image_url, - image_name=image_name, - ) - return use_case.execute() - def bulk_copy_images( - self, - project_name: str, - from_folder_name: str, - to_folder_name: str, - image_names: List[str], - include_annotations: bool, - include_pin: bool, + self, + project_name: str, + from_folder_name: str, + to_folder_name: str, + image_names: List[str], + include_annotations: bool, + include_pin: bool, ): project = self._get_project(project_name) from_folder = self._get_folder(project, from_folder_name) @@ -751,11 +749,11 @@ def bulk_copy_images( return use_case.execute() def bulk_move_images( - self, - project_name: str, - from_folder_name: str, - to_folder_name: str, - image_names: List[str], + self, + project_name: str, + from_folder_name: str, + to_folder_name: str, + image_names: List[str], ): project = self._get_project(project_name) from_folder = self._get_folder(project, from_folder_name) @@ -770,13 +768,13 @@ def bulk_move_images( return use_case.execute() def get_project_metadata( - self, - project_name: str, - include_annotation_classes: bool = False, - include_settings: bool = False, - include_workflow: bool = False, - include_contributors: bool = False, - include_complete_image_count: bool = False, + self, + project_name: str, + include_annotation_classes: bool = False, + include_settings: bool = False, + include_workflow: bool = False, + include_contributors: bool = False, + include_complete_image_count: bool = False, ): project = self._get_project(project_name) @@ -858,11 +856,11 @@ def get_image_metadata(self, project_name: str, folder_name: str, image_name: st return use_case.execute() def set_images_annotation_statuses( - self, - project_name: str, - folder_name: str, - image_names: list, - annotation_status: str, + self, + project_name: str, + folder_name: str, + image_names: list, + annotation_status: str, ): project_entity = self._get_project(project_name) folder_entity = self._get_folder(project_entity, folder_name) @@ -880,7 +878,7 @@ def set_images_annotation_statuses( return use_case.execute() def delete_images( - self, project_name: str, folder_name: str, image_names: List[str] = None, + self, project_name: str, folder_name: str, image_names: List[str] = None, ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) @@ -895,7 +893,7 @@ def delete_images( return use_case.execute() def assign_images( - self, project_name: str, folder_name: str, image_names: list, user: str + self, project_name: str, folder_name: str, image_names: list, user: str ): project_entity = self._get_project(project_name) folder = self._get_folder(project_entity, folder_name) @@ -958,7 +956,7 @@ def un_share_project(self, project_name: str, user_id: str): return use_case.execute() def get_image_annotations( - self, project_name: str, folder_name: str, image_name: str + self, project_name: str, folder_name: str, image_name: str ): project = self._get_project(project_name) folder = self._get_folder(project=project, name=folder_name) @@ -974,7 +972,7 @@ def get_image_annotations( return use_case.execute() def download_image_annotations( - self, project_name: str, folder_name: str, image_name: str, destination: str + self, project_name: str, folder_name: str, image_name: str, destination: str ): project = self._get_project(project_name) folder = 
self._get_folder(project=project, name=folder_name) @@ -991,22 +989,6 @@ def download_image_annotations( ) return use_case.execute() - def download_image_pre_annotations( - self, project_name: str, folder_name: str, image_name: str, destination: str - ): - project = self._get_project(project_name) - folder = self._get_folder(project=project, name=folder_name) - - use_case = usecases.DownloadImagePreAnnotationsUseCase( - service=self._backend_client, - project=project, - folder=folder, - image_name=image_name, - images=ImageRepository(service=self._backend_client), - destination=destination, - ) - return use_case.execute() - @staticmethod def get_image_from_s3(s3_bucket, image_path: str): use_case = usecases.GetS3ImageUseCase( @@ -1025,33 +1007,8 @@ def get_exports(self, project_name: str, return_metadata: bool): ) return use_case.execute() - def backend_upload_from_s3( - self, - project_name: str, - folder_name: str, - access_key: str, - secret_key: str, - bucket_name: str, - folder_path: str, - image_quality: str, - ): - project = self._get_project(project_name) - folder = self._get_folder(project, folder_name) - use_case = usecases.UploadS3ImagesBackendUseCase( - backend_service_provider=self._backend_client, - project=project, - settings=ProjectSettingsRepository(self._backend_client, project), - folder=folder, - access_key=access_key, - secret_key=secret_key, - bucket_name=bucket_name, - folder_path=folder_path, - image_quality=image_quality, - ) - return use_case.execute() - def get_project_image_count( - self, project_name: str, folder_name: str, with_all_subfolders: bool + self, project_name: str, folder_name: str, with_all_subfolders: bool ): project = self._get_project(project_name) @@ -1066,62 +1023,13 @@ def get_project_image_count( return use_case.execute() - def extract_video_frames( - self, - project_name: str, - folder_name: str, - video_path: str, - extract_path: str, - start_time: float, - end_time: float = None, - target_fps: float = None, - annotation_status: str = None, - image_quality_in_editor: str = None, - limit: int = None, - ): - annotation_status_code = ( - constances.AnnotationStatus.get_value(annotation_status) - if annotation_status - else None - ) - project = self._get_project(project_name) - folder = self._get_folder(project, folder_name) - use_case = usecases.ExtractFramesUseCase( - backend_service_provider=self._backend_client, - project=project, - folder=folder, - video_path=video_path, - extract_path=extract_path, - start_time=start_time, - end_time=end_time, - target_fps=target_fps, - annotation_status_code=annotation_status_code, - image_quality_in_editor=image_quality_in_editor, - limit=limit, - ) - if use_case.is_valid(): - yield from use_case.execute() - else: - raise AppException(use_case.response.errors) - - def get_duplicate_images(self, project_name: str, folder_name: str, images: list): - project = self._get_project(project_name) - folder = self._get_folder(project, folder_name) - return usecases.GetBulkImages( - service=self._backend_client, - project_id=project.uuid, - team_id=project.team_id, - folder_id=folder.uuid, - images=images, - ) - def create_annotation_class( - self, - project_name: str, - name: str, - color: str, - attribute_groups: List[dict], - class_type: str, + self, + project_name: str, + name: str, + color: str, + attribute_groups: List[dict], + class_type: str, ): project = self._get_project(project_name) annotation_classes = AnnotationClassRepository( @@ -1183,15 +1091,15 @@ def create_annotation_classes(self, project_name: 
str, annotation_classes: list) return use_case.execute() def download_image( - self, - project_name: str, - image_name: str, - download_path: str, - folder_name: str = None, - image_variant: str = None, - include_annotations: bool = None, - include_fuse: bool = None, - include_overlay: bool = None, + self, + project_name: str, + image_name: str, + download_path: str, + folder_name: str = None, + image_variant: str = None, + include_annotations: bool = None, + include_fuse: bool = None, + include_overlay: bool = None, ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) @@ -1231,13 +1139,13 @@ def set_project_workflow(self, project_name: str, steps: list): return use_case.execute() def upload_annotations_from_folder( - self, - project_name: str, - folder_name: str, - annotation_paths: List[str], - client_s3_bucket=None, - is_pre_annotations: bool = False, - folder_path: str = None, + self, + project_name: str, + folder_name: str, + annotation_paths: List[str], + client_s3_bucket=None, + is_pre_annotations: bool = False, + folder_path: str = None, ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) @@ -1263,13 +1171,13 @@ def upload_annotations_from_folder( return use_case.execute() def upload_image_annotations( - self, - project_name: str, - folder_name: str, - image_name: str, - annotations: dict, - mask: io.BytesIO = None, - verbose: bool = True, + self, + project_name: str, + folder_name: str, + image_name: str, + annotations: dict, + mask: io.BytesIO = None, + verbose: bool = True, ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) @@ -1298,32 +1206,6 @@ def upload_image_annotations( ) return use_case.execute() - def create_model( - self, - model_name: str, - model_description: str, - task: str, - base_model_name: str, - train_data_paths: Iterable[str], - test_data_paths: Iterable[str], - hyper_parameters: dict, - ): - use_case = usecases.CreateModelUseCase( - base_model_name=base_model_name, - model_name=model_name, - model_description=model_description, - task=task, - team_id=self.team_id, - backend_service_provider=self._backend_client, - projects=self.projects, - folders=self.folders, - ml_models=self.ml_models, - train_data_paths=train_data_paths, - test_data_paths=test_data_paths, - hyper_parameters=hyper_parameters, - ) - return use_case.execute() - def get_model_metrics(self, model_id: int): use_case = usecases.GetModelMetricsUseCase( model_id=model_id, @@ -1332,22 +1214,17 @@ def get_model_metrics(self, model_id: int): ) return use_case.execute() - def update_model_status(self, model_id: int, status: int): - model = MLModelEntity(uuid=model_id, training_status=status) - use_case = usecases.UpdateModelUseCase(model=model, models=self.ml_models) - return use_case.execute() - def delete_model(self, model_id: int): use_case = usecases.DeleteMLModel(model_id=model_id, models=self.ml_models) return use_case.execute() def download_export( - self, - project_name: str, - export_name: str, - folder_path: str, - extract_zip_contents: bool, - to_s3_bucket: bool, + self, + project_name: str, + export_name: str, + folder_path: str, + extract_zip_contents: bool, + to_s3_bucket: bool, ): project = self._get_project(project_name) return usecases.DownloadExportUseCase( @@ -1378,14 +1255,14 @@ def download_ml_model(self, model_data: dict, download_path: str): return use_case.execute() def benchmark( - self, - project_name: str, - ground_truth_folder_name: str, - folder_names: 
List[str], - export_root: str, - image_list: List[str], - annot_type: str, - show_plots: bool, + self, + project_name: str, + ground_truth_folder_name: str, + folder_names: List[str], + export_root: str, + image_list: List[str], + annot_type: str, + show_plots: bool, ): project = self._get_project(project_name) @@ -1421,13 +1298,13 @@ def benchmark( return use_case.execute() def consensus( - self, - project_name: str, - folder_names: list, - export_path: str, - image_list: list, - annot_type: str, - show_plots: bool, + self, + project_name: str, + folder_names: list, + export_path: str, + image_list: list, + annot_type: str, + show_plots: bool, ): project = self._get_project(project_name) @@ -1461,7 +1338,7 @@ def consensus( return use_case.execute() def run_prediction( - self, project_name: str, images_list: list, model_name: str, folder_name: str + self, project_name: str, images_list: list, model_name: str, folder_name: str ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) @@ -1479,7 +1356,7 @@ def run_prediction( return use_case.execute() def list_images( - self, project_name: str, annotation_status: str = None, name_prefix: str = None, + self, project_name: str, annotation_status: str = None, name_prefix: str = None, ): project = self._get_project(project_name) @@ -1492,12 +1369,12 @@ def list_images( return use_case.execute() def search_models( - self, - name: str, - model_type: str = None, - project_id: int = None, - task: str = None, - include_global: bool = True, + self, + name: str, + model_type: str = None, + project_id: int = None, + task: str = None, + include_global: bool = True, ): ml_models_repo = MLModelRepository( service=self._backend_client, team_id=self.team_id @@ -1520,10 +1397,10 @@ def search_models( return use_case.execute() def delete_annotations( - self, - project_name: str, - folder_name: str, - image_names: Optional[List[str]] = None, + self, + project_name: str, + folder_name: str, + image_names: Optional[List[str]] = None, ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) @@ -1537,7 +1414,7 @@ def delete_annotations( @staticmethod def validate_annotations( - project_type: str, annotation: dict, allow_extra: bool = False + project_type: str, annotation: dict, allow_extra: bool = False ): use_case = usecases.ValidateAnnotationUseCase( project_type, @@ -1574,17 +1451,17 @@ def invite_contributors_to_team(self, emails: list, set_admin: bool): return use_case.execute() def upload_videos( - self, - project_name: str, - folder_name: str, - paths: List[str], - start_time: float, - extensions: List[str] = None, - exclude_file_patterns: List[str] = None, - end_time: Optional[float] = None, - target_fps: Optional[int] = None, - annotation_status: Optional[str] = None, - image_quality_in_editor: Optional[str] = None, + self, + project_name: str, + folder_name: str, + paths: List[str], + start_time: float, + extensions: List[str] = None, + exclude_file_patterns: List[str] = None, + end_time: Optional[float] = None, + target_fps: Optional[int] = None, + annotation_status: Optional[str] = None, + image_quality_in_editor: Optional[str] = None, ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) @@ -1609,7 +1486,9 @@ def upload_videos( ) return use_case.execute() - def get_annotations(self, project_name: str, folder_name: str, item_names: List[str]): + def get_annotations( + self, project_name: str, folder_name: str, item_names: List[str] + ): project 
= self._get_project(project_name) folder = self._get_folder(project, folder_name) @@ -1619,11 +1498,13 @@ def get_annotations(self, project_name: str, folder_name: str, item_names: List[ folder=folder, images=self.images, item_names=item_names, - backend_service_provider=self.backend_client + backend_service_provider=self.backend_client, ) return use_case.execute() - def get_annotations_per_frame(self, project_name: str, folder_name: str, video_name: str, fps: int): + def get_annotations_per_frame( + self, project_name: str, folder_name: str, video_name: str, fps: int + ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) @@ -1634,11 +1515,13 @@ def get_annotations_per_frame(self, project_name: str, folder_name: str, video_n images=self.images, video_name=video_name, fps=fps, - backend_service_provider=self.backend_client + backend_service_provider=self.backend_client, ) return use_case.execute() - def upload_priority_scores(self, project_name, folder_name, scores, project_folder_name): + def upload_priority_scores( + self, project_name, folder_name, scores, project_folder_name + ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) use_case = usecases.UploadPriorityScoresUseCase( @@ -1647,7 +1530,7 @@ def upload_priority_scores(self, project_name, folder_name, scores, project_fold folder=folder, scores=scores, backend_service_provider=self.backend_client, - project_folder_name=project_folder_name + project_folder_name=project_folder_name, ) return use_case.execute() @@ -1656,12 +1539,17 @@ def get_integrations(self): use_cae = usecases.GetIntegrations( reporter=self.default_reporter, team=self.team_data.data, - integrations=self.get_integrations_repo(team_id=team.uuid) + integrations=self.get_integrations_repo(team_id=team.uuid), ) return use_cae.execute() - def attach_integrations(self, project_name: str, folder_name: str, integration: IntegrationEntity, - folder_path: str): + def attach_integrations( + self, + project_name: str, + folder_name: str, + integration: IntegrationEntity, + folder_path: str, + ): team = self.team_data.data project = self._get_project(project_name) folder = self._get_folder(project, folder_name) @@ -1673,7 +1561,7 @@ def attach_integrations(self, project_name: str, folder_name: str, integration: folder=folder, integrations=self.get_integrations_repo(team_id=team.uuid), integration=integration, - folder_path=folder_path + folder_path=folder_path, ) return use_case.execute() @@ -1686,7 +1574,7 @@ def query_entities(self, project_name: str, folder_name: str, query: str = None) project=project, folder=folder, query=query, - backend_service_provider=self.backend_client + backend_service_provider=self.backend_client, ) return use_case.execute() @@ -1698,19 +1586,20 @@ def get_item(self, project_name: str, folder_name: str, item_name: str): project=project, folder=folder, item_name=item_name, - items=self.items + items=self.items, ) return use_case.execute() def list_items( - self, - project_name: str, - folder_name: str, - name_contains: str = None, - annotation_status: str = None, - annotator_email: str = None, - qa_email: str = None, - recursive: bool = False + self, + project_name: str, + folder_name: str, + name_contains: str = None, + annotation_status: str = None, + annotator_email: str = None, + qa_email: str = None, + recursive: bool = False, + **kwargs, ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) @@ -1719,10 +1608,16 @@ def 
list_items( search_condition &= Condition("name", name_contains, EQ) if annotation_status: search_condition &= Condition( - "annotation_status", constances.AnnotationStatus.get_value(annotation_status), EQ + "annotation_status", + constances.AnnotationStatus.get_value(annotation_status), + EQ, ) + if qa_email: + search_condition &= Condition("qa_id", qa_email, EQ) if annotator_email: - search_condition &= Condition("qa_email", qa_email, EQ) + search_condition &= Condition("annotator_id", qa_email, EQ) + for key, value in kwargs.items(): + search_condition &= Condition(key, value, EQ) use_case = usecases.ListItems( reporter=self.default_reporter, project=project, @@ -1730,7 +1625,7 @@ def list_items( recursive=recursive, items=self.items, folders=self.folders, - search_condition=search_condition + search_condition=search_condition, ) return use_case.execute() diff --git a/src/superannotate/lib/infrastructure/repositories.py b/src/superannotate/lib/infrastructure/repositories.py index a84c35c45..1385cf6bc 100644 --- a/src/superannotate/lib/infrastructure/repositories.py +++ b/src/superannotate/lib/infrastructure/repositories.py @@ -101,8 +101,6 @@ def get_all(self, condition: Condition = None) -> List[ProjectEntity]: def insert(self, entity: ProjectEntity) -> ProjectEntity: project_data = self._drop_nones(entity.to_dict()) - # new projects can only have the status of NotStarted - project_data["status"] = constance.ProjectStatus.NotStarted.value result = self._service.create_project(project_data) return self.dict2entity(result) @@ -145,15 +143,14 @@ def dict2entity(data: dict) -> ProjectEntity: root_folder_completed_images_count=data.get( "rootFolderCompletedImagesCount" ), - createdAt=data["createdAt"], - updatedAt=data["updatedAt"], + createdAt=data.get("createdAt"), + updatedAt=data.get("updatedAt"), ) except KeyError: raise AppException("Cant serialize project data") class S3Repository(BaseS3Repository): - def get_one(self, uuid: str) -> S3FileEntity: file = io.BytesIO() self._resource.Object(self._bucket, uuid).download_fileobj(file) @@ -511,7 +508,9 @@ def get_one(self, uuid: int) -> Optional[TeamEntity]: raise NotImplementedError def get_all(self, condition: Optional[Condition] = None) -> List[IntegrationEntity]: - return parse_obj_as(List[IntegrationEntity], self._service.get_integrations(self._team_id)) + return parse_obj_as( + List[IntegrationEntity], self._service.get_integrations(self._team_id) + ) class ItemRepository(BaseReadOnlyRepository): diff --git a/src/superannotate/lib/infrastructure/services.py b/src/superannotate/lib/infrastructure/services.py index 6337f50f5..f5f9ea4e2 100644 --- a/src/superannotate/lib/infrastructure/services.py +++ b/src/superannotate/lib/infrastructure/services.py @@ -44,7 +44,13 @@ class BaseBackendService(SuperannotateServiceProvider): """ def __init__( - self, api_url: str, auth_token: str, logger, paginate_by=None, verify_ssl=False, testing: bool = False + self, + api_url: str, + auth_token: str, + logger, + paginate_by=None, + verify_ssl=False, + testing: bool = False, ): self.api_url = api_url self._auth_token = auth_token @@ -173,10 +179,10 @@ class SuperannotateBackendService(BaseBackendService): """ Manage projects, images and team in the Superannotate """ + DEFAULT_CHUNK_SIZE = 1000 URL_USERS = "users" - URL_LIST_ALL_IMAGES = "/images/getImagesWithAnnotationPaths" URL_LIST_PROJECTS = "projects" URL_FOLDERS_IMAGES = "images-folders" URL_CREATE_PROJECT = "project" @@ -217,7 +223,6 @@ class SuperannotateBackendService(BaseBackendService): 
URL_BULK_GET_FOLDERS = "foldersByTeam" URL_GET_EXPORT = "export/{}" URL_GET_ML_MODEL_DOWNLOAD_TOKEN = "ml_model/getMyModelDownloadToken/{}" - URL_SEGMENTATION = "images/segmentation" URL_PREDICTION = "images/prediction" URL_SET_IMAGES_STATUSES_BULK = "image/updateAnnotationStatusBulk" URL_DELETE_ANNOTATIONS = "annotations/remove" @@ -231,14 +236,20 @@ class SuperannotateBackendService(BaseBackendService): URL_VALIDATE_SAQUL_QUERY = "/images/validate/advanced" def upload_priority_scores( - self, team_id: int, project_id: int, folder_id: int, priorities: list + self, team_id: int, project_id: int, folder_id: int, priorities: list ) -> dict: - upload_priority_score_url = urljoin(self.api_url, self.URL_UPLOAD_PRIORITY_SCORES) + upload_priority_score_url = urljoin( + self.api_url, self.URL_UPLOAD_PRIORITY_SCORES + ) res = self._request( upload_priority_score_url, "post", - params={"team_id": team_id, "project_id": project_id, "folder_id": folder_id}, - data={"image_entropies": priorities} + params={ + "team_id": team_id, + "project_id": project_id, + "folder_id": folder_id, + }, + data={"image_entropies": priorities}, ) return res.json() @@ -1034,14 +1045,15 @@ def get_limitations( ) def get_annotations( - self, - project_id: int, - team_id: int, - folder_id: int, - items: List[str], - reporter: Reporter + self, + project_id: int, + team_id: int, + folder_id: int, + items: List[str], + reporter: Reporter, ) -> List[dict]: import nest_asyncio + nest_asyncio.apply() query_params = { @@ -1054,50 +1066,53 @@ def get_annotations( handler = StreamedAnnotations(self.default_headers, reporter) loop = asyncio.new_event_loop() - return loop.run_until_complete(handler.get_data( - url=urljoin(self.assets_provider_url, self.URL_GET_ANNOTATIONS), - data=items, - params=query_params, - chunk_size=self.DEFAULT_CHUNK_SIZE, - map_function=lambda x: {"image_names": x} - )) + return loop.run_until_complete( + handler.get_data( + url=urljoin(self.assets_provider_url, self.URL_GET_ANNOTATIONS), + data=items, + params=query_params, + chunk_size=self.DEFAULT_CHUNK_SIZE, + map_function=lambda x: {"image_names": x}, + ) + ) def get_integrations(self, team_id: int) -> List[dict]: - get_integrations_url = urljoin(self.api_url, self.URL_GET_INTEGRATIONS.format(team_id)) + get_integrations_url = urljoin( + self.api_url, self.URL_GET_INTEGRATIONS.format(team_id) + ) response = self._request( - get_integrations_url, - "get", - params={"team_id": team_id} + get_integrations_url, "get", params={"team_id": team_id} ) if response.ok: return response.json().get("integrations", []) return [] def attach_integrations( - self, - team_id: int, - project_id: int, - integration_id: int, - folder_id: int, - folder_name: str = None) -> bool: - attach_integrations_url = urljoin(self.api_url, self.URL_ATTACH_INTEGRATIONS.format(team_id)) + self, + team_id: int, + project_id: int, + integration_id: int, + folder_id: int, + folder_name: str = None, + ) -> bool: + attach_integrations_url = urljoin( + self.api_url, self.URL_ATTACH_INTEGRATIONS.format(team_id) + ) data = { "team_id": team_id, "project_id": project_id, "folder_id": folder_id, - "integration_id": integration_id + "integration_id": integration_id, } if folder_name: data["customer_folder_name"] = folder_name - response = self._request( - attach_integrations_url, - "post", - data=data - ) + response = self._request(attach_integrations_url, "post", data=data) return response.ok - def saqul_query(self, team_id: int, project_id: int, query: str, folder_id: int) -> ServiceResponse: + def 
diff --git a/src/superannotate/lib/infrastructure/stream_data_handler.py b/src/superannotate/lib/infrastructure/stream_data_handler.py
index a28ae0679..4d61e9630 100644
--- a/src/superannotate/lib/infrastructure/stream_data_handler.py
+++ b/src/superannotate/lib/infrastructure/stream_data_handler.py
@@ -1,18 +1,10 @@
 import json
 from typing import Callable
-from typing import List

 import aiohttp
 from lib.core.reporter import Reporter


-def map_image_names_to_fetch_streamed_data(data: List[str]):
-    mapping = {"image_names": []}
-    for image_name in data:
-        mapping["image_names"].append(image_name)
-    return mapping
-
-
 class StreamedAnnotations:
     DELIMITER = b"\\n;)\\n"

@@ -21,8 +13,14 @@ def __init__(self, headers: dict, reporter: Reporter):
         self._annotations = []
         self._reporter = reporter

-    async def fetch(self, method: str, session: aiohttp.ClientSession, url: str, data: dict = None,
-                    params: dict = None):
+    async def fetch(
+        self,
+        method: str,
+        session: aiohttp.ClientSession,
+        url: str,
+        data: dict = None,
+        params: dict = None,
+    ):
         response = await session._request(method, url, json=data, params=params)
         buffer = b""
         async for line in response.content.iter_any():
@@ -43,22 +41,33 @@ async def fetch(self, method: str, session: aiohttp.ClientSession, url: str, dat
         return self._annotations

     async def get_data(
-            self,
-            url: str,
-            data: list,
-            method: str = "post",
-            params=None,
-            chunk_size: int = 100,
-            map_function: Callable = lambda x: x,
-            verify_ssl: bool = False,
+        self,
+        url: str,
+        data: list,
+        method: str = "post",
+        params=None,
+        chunk_size: int = 100,
+        map_function: Callable = lambda x: x,
+        verify_ssl: bool = False,
     ):
-        async with aiohttp.ClientSession(raise_for_status=True, headers=self._headers,
-                                         connector=aiohttp.TCPConnector(ssl=verify_ssl)) as session:
+        async with aiohttp.ClientSession(
+            raise_for_status=True,
+            headers=self._headers,
+            connector=aiohttp.TCPConnector(ssl=verify_ssl),
+        ) as session:
             if chunk_size:
                 for i in range(0, len(data), chunk_size):
-                    data_to_process = data[i:i + chunk_size]
-                    await self.fetch(method, session, url, map_function(data_to_process), params=params)
+                    data_to_process = data[i : i + chunk_size]
+                    await self.fetch(
+                        method,
+                        session,
+                        url,
+                        map_function(data_to_process),
+                        params=params,
+                    )
             else:
-                await self.fetch(method, session, url, map_function(data), params=params)
+                await self.fetch(
+                    method, session, url, map_function(data), params=params
+                )
         return self._annotations
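For reference, a self-contained sketch of the chunked-request pattern that get_data implements above, under two simplifying assumptions: the endpoint URL and headers are placeholders, and the delimiter-based stream parsing done by fetch is swapped for a plain response.json() so the example stays short.

# Sketch only: one aiohttp session, the item list split into chunks, one POST per chunk,
# results accumulated and returned. Assumes the endpoint answers each chunk with a JSON list.
import asyncio
from typing import Callable, List

import aiohttp


async def get_data_sketch(
    url: str,
    items: List[str],
    headers: dict,
    chunk_size: int = 100,
    map_function: Callable[[List[str]], dict] = lambda x: {"image_names": x},
    verify_ssl: bool = False,
) -> List[dict]:
    results: List[dict] = []
    async with aiohttp.ClientSession(
        raise_for_status=True,
        headers=headers,
        connector=aiohttp.TCPConnector(ssl=verify_ssl),
    ) as session:
        for i in range(0, len(items), chunk_size):
            chunk = items[i : i + chunk_size]
            async with session.post(url, json=map_function(chunk)) as response:
                results.extend(await response.json())
    return results


# Example call (placeholder endpoint and token):
# annotations = asyncio.run(
#     get_data_sketch(
#         "https://api.example.com/images/annotations/stream",
#         [f"img_{i}.jpg" for i in range(250)],
#         headers={"Authorization": "<token>"},
#     )
# )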
diff --git a/src/superannotate/lib/infrastructure/validators.py b/src/superannotate/lib/infrastructure/validators.py
index c0aeef6c2..33d48d63d 100644
--- a/src/superannotate/lib/infrastructure/validators.py
+++ b/src/superannotate/lib/infrastructure/validators.py
@@ -1,8 +1,6 @@
 import os
 from collections import defaultdict

-from lib.core.entities import PixelAnnotation
-from lib.core.validators import BaseValidator
 from pydantic import ValidationError


@@ -36,17 +34,3 @@ def wrap_error(e: ValidationError) -> str:
             )
         )
     return "\n".join(texts)
-
-
-class BaseSchemaValidator(BaseValidator):
-    MODEL = PixelAnnotation
-
-    def is_valid(self) -> bool:
-        try:
-            self._validate()
-        except ValidationError as e:
-            self._validation_output = e
-        return not bool(self._validation_output)
-
-    def generate_report(self) -> str:
-        return wrap_error(self._validation_output)
diff --git a/src/superannotate/logger.py b/src/superannotate/logger.py
index ec6aeb49e..3345c636f 100644
--- a/src/superannotate/logger.py
+++ b/src/superannotate/logger.py
@@ -8,7 +8,6 @@

 default_logger = None

-log_path = "/Users/vaghinak.basentsyan/private/log.log"
 logging.config.dictConfig(
     {
         "version": 1,
diff --git a/tests/integration/projects/test_search_project.py b/tests/integration/projects/test_search_project.py
new file mode 100644
index 000000000..b8e174ee3
--- /dev/null
+++ b/tests/integration/projects/test_search_project.py
@@ -0,0 +1,64 @@
+from unittest import TestCase
+
+import src.superannotate as sa
+from src.superannotate.lib.core.entities import ProjectEntity
+
+
+class TestSearchProject(TestCase):
+    PROJECT_1 = "project_1"
+    PROJECT_2 = "project_2"
+
+    def setUp(self, *args, **kwargs):
+        self.tearDown()
+
+    def tearDown(self) -> None:
+        try:
+            for project_name in (self.PROJECT_1, self.PROJECT_2):
+                projects = sa.search_projects(project_name, return_metadata=True)
+                for project in projects:
+                    try:
+                        sa.delete_project(project)
+                    except Exception:
+                        pass
+        except Exception as e:
+            print(str(e))
+
+    @property
+    def projects(self):
+        return self.PROJECT_2, self.PROJECT_1
+
+    def test_search_by_status(self):
+        controller = sa.get_default_controller()
+
+        project_1 = ProjectEntity(
+            name=self.PROJECT_1, description="desc", project_type=sa.constances.ProjectType.VECTOR.value,
+            status=sa.constances.ProjectStatus.Completed.value, team_id=controller.team_id
+        )
+        project_2 = ProjectEntity(
+            name=self.PROJECT_2, description="desc", project_type=sa.constances.ProjectType.VECTOR.value,
+            status=sa.constances.ProjectStatus.InProgress.value, team_id=controller.team_id
+        )
+
+        controller.projects.insert(project_1)
+        controller.projects.insert(project_2)
+
+        assert self.PROJECT_1 in sa.search_projects(status=sa.constances.ProjectStatus.Completed.name)
+        assert self.PROJECT_2 in sa.search_projects(status=sa.constances.ProjectStatus.InProgress.name)
+
+    def test_search_by_multiple_status(self):
+        controller = sa.get_default_controller()
+        project_1 = ProjectEntity(
+            name=self.PROJECT_1, description="desc", project_type=sa.constances.ProjectType.VECTOR.value,
+            status=sa.constances.ProjectStatus.OnHold.value, team_id=controller.team_id
+        )
+        project_2 = ProjectEntity(
+            name=self.PROJECT_2, description="desc", project_type=sa.constances.ProjectType.VECTOR.value,
+            status=sa.constances.ProjectStatus.OnHold.value, team_id=controller.team_id
+        )
+
+        controller.projects.insert(project_1)
+        controller.projects.insert(project_2)
+
+        assert all(
+            [project in self.projects for project in sa.search_projects(status=sa.constances.ProjectStatus.OnHold.name)]
+        )
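The new test above exercises the status filter added to search_projects. A hedged usage sketch that mirrors the test; the import path is the repo-root-relative one the test uses, and the list-of-statuses variant is an assumption taken from the updated signature rather than from the test itself.

# Sketch only: filter project search by status, mirroring test_search_project.py.
import src.superannotate as sa  # same repo-root-relative import the test uses

# Names only (the default return), restricted to completed projects.
completed_names = sa.search_projects(status=sa.constances.ProjectStatus.Completed.name)

# Full metadata for in-progress projects.
in_progress = sa.search_projects(
    return_metadata=True,
    status=sa.constances.ProjectStatus.InProgress.name,
)

print(completed_names, in_progress)

# Assumed, not shown in the test: the updated signature also accepts a list of statuses.
# sa.search_projects(
#     status=[
#         sa.constances.ProjectStatus.InProgress.name,
#         sa.constances.ProjectStatus.OnHold.name,
#     ]
# )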