diff --git a/docs/source/superannotate.sdk.rst b/docs/source/superannotate.sdk.rst index d60574dbe..5d4f8a6fc 100644 --- a/docs/source/superannotate.sdk.rst +++ b/docs/source/superannotate.sdk.rst @@ -94,6 +94,7 @@ Subsets ______ .. automethod:: superannotate.SAClient.get_subsets +.. automethod:: superannotate.SAClient.add_items_to_subset ---------- diff --git a/requirements.txt b/requirements.txt index e1b5b2d2c..11136aa72 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,7 +13,7 @@ plotly==4.1.0 ffmpeg-python>=0.2.0 fire==0.4.0 mixpanel==4.8.3 -pydantic>=1.8.2 +pydantic>=1.10.2 setuptools~=57.4.0 aiohttp==3.8.1 email-validator>=1.0.3 diff --git a/src/superannotate/__init__.py b/src/superannotate/__init__.py index af8a629f9..8c40c6ff1 100644 --- a/src/superannotate/__init__.py +++ b/src/superannotate/__init__.py @@ -1,6 +1,7 @@ import os import sys + __version__ = "4.4.5dev7" sys.path.append(os.path.split(os.path.realpath(__file__))[0]) diff --git a/src/superannotate/lib/app/interface/sdk_interface.py b/src/superannotate/lib/app/interface/sdk_interface.py index fcd3a4a79..c56144e84 100644 --- a/src/superannotate/lib/app/interface/sdk_interface.py +++ b/src/superannotate/lib/app/interface/sdk_interface.py @@ -3,6 +3,7 @@ import json import os import tempfile +import warnings from pathlib import Path from typing import Callable from typing import Dict @@ -13,6 +14,12 @@ from typing import Union import boto3 +from pydantic import StrictBool +from pydantic import conlist +from pydantic import parse_obj_as +from pydantic.error_wrappers import ValidationError +from tqdm import tqdm + import lib.core as constants from lib.app.annotation_helpers import add_annotation_bbox_to_json from lib.app.annotation_helpers import add_annotation_comment_to_json @@ -52,12 +59,7 @@ from lib.core.types import PriorityScore from lib.core.types import Project from lib.infrastructure.validators import wrap_error -from pydantic import conlist -from pydantic import parse_obj_as 
-from pydantic import StrictBool -from pydantic.error_wrappers import ValidationError from superannotate.logger import get_default_logger -from tqdm import tqdm logger = get_default_logger() @@ -76,9 +78,9 @@ class SAClient(BaseInterfaceFacade, metaclass=TrackableMeta): """ def __init__( - self, - token: str = None, - config_path: str = None, + self, + token: str = None, + config_path: str = None, ): super().__init__(token, config_path) @@ -92,11 +94,11 @@ def get_team_metadata(self): return TeamSerializer(response.data).serialize() def search_team_contributors( - self, - email: EmailStr = None, - first_name: NotEmptyStr = None, - last_name: NotEmptyStr = None, - return_metadata: bool = True, + self, + email: EmailStr = None, + first_name: NotEmptyStr = None, + last_name: NotEmptyStr = None, + return_metadata: bool = True, ): """Search for contributors in the team @@ -122,11 +124,11 @@ def search_team_contributors( return contributors def search_projects( - self, - name: Optional[NotEmptyStr] = None, - return_metadata: bool = False, - include_complete_image_count: bool = False, - status: Optional[Union[ProjectStatusEnum, List[ProjectStatusEnum]]] = None, + self, + name: Optional[NotEmptyStr] = None, + return_metadata: bool = False, + include_complete_image_count: bool = False, + status: Optional[Union[ProjectStatusEnum, List[ProjectStatusEnum]]] = None, ): """ Project name based case-insensitive search for projects. @@ -170,11 +172,11 @@ def search_projects( return [project.name for project in result] def create_project( - self, - project_name: NotEmptyStr, - project_description: NotEmptyStr, - project_type: NotEmptyStr, - settings: List[Setting] = None, + self, + project_name: NotEmptyStr, + project_description: NotEmptyStr, + project_type: NotEmptyStr, + settings: List[Setting] = None, ): """Create a new project in the team. 
@@ -233,14 +235,14 @@ def create_project_from_metadata(self, project_metadata: Project): return ProjectSerializer(response.data).serialize() def clone_project( - self, - project_name: Union[NotEmptyStr, dict], - from_project: Union[NotEmptyStr, dict], - project_description: Optional[NotEmptyStr] = None, - copy_annotation_classes: Optional[StrictBool] = True, - copy_settings: Optional[StrictBool] = True, - copy_workflow: Optional[StrictBool] = True, - copy_contributors: Optional[StrictBool] = False, + self, + project_name: Union[NotEmptyStr, dict], + from_project: Union[NotEmptyStr, dict], + project_description: Optional[NotEmptyStr] = None, + copy_annotation_classes: Optional[StrictBool] = True, + copy_settings: Optional[StrictBool] = True, + copy_workflow: Optional[StrictBool] = True, + copy_contributors: Optional[StrictBool] = False, ): """Create a new project in the team using annotation classes and settings from from_project. @@ -361,10 +363,10 @@ def delete_folders(self, project: NotEmptyStr, folder_names: List[NotEmptyStr]): logger.info(f"Folders {folder_names} deleted in project {project}") def search_folders( - self, - project: NotEmptyStr, - folder_name: Optional[NotEmptyStr] = None, - return_metadata: Optional[StrictBool] = False, + self, + project: NotEmptyStr, + folder_name: Optional[NotEmptyStr] = None, + return_metadata: Optional[StrictBool] = False, ): """Folder name based case-insensitive search for folders in project. 
@@ -390,13 +392,13 @@ def search_folders( return [folder.name for folder in data] def copy_image( - self, - source_project: Union[NotEmptyStr, dict], - image_name: NotEmptyStr, - destination_project: Union[NotEmptyStr, dict], - include_annotations: Optional[StrictBool] = False, - copy_annotation_status: Optional[StrictBool] = False, - copy_pin: Optional[StrictBool] = False, + self, + source_project: Union[NotEmptyStr, dict], + image_name: NotEmptyStr, + destination_project: Union[NotEmptyStr, dict], + include_annotations: Optional[StrictBool] = False, + copy_annotation_status: Optional[StrictBool] = False, + copy_pin: Optional[StrictBool] = False, ): """Copy image to a project. The image's project is the same as destination project then the name will be changed to _()., @@ -416,6 +418,9 @@ def copy_image( :param copy_pin: enables image pin status copy :type copy_pin: bool """ + warning_msg = "The SAClient.copy_image method will be deprecated with the Superannotate Python SDK 4.4.6 release" + warnings.warn(warning_msg, DeprecationWarning) + logger.warning(warning_msg) source_project_name, source_folder_name = extract_project_folder(source_project) destination_project, destination_folder = extract_project_folder( @@ -469,13 +474,13 @@ def copy_image( ) def get_project_metadata( - self, - project: Union[NotEmptyStr, dict], - include_annotation_classes: Optional[StrictBool] = False, - include_settings: Optional[StrictBool] = False, - include_workflow: Optional[StrictBool] = False, - include_contributors: Optional[StrictBool] = False, - include_complete_image_count: Optional[StrictBool] = False, + self, + project: Union[NotEmptyStr, dict], + include_annotation_classes: Optional[StrictBool] = False, + include_settings: Optional[StrictBool] = False, + include_workflow: Optional[StrictBool] = False, + include_contributors: Optional[StrictBool] = False, + include_complete_image_count: Optional[StrictBool] = False, ): """Returns project metadata @@ -552,7 +557,7 @@ def 
get_project_workflow(self, project: Union[str, dict]): return workflow.data def search_annotation_classes( - self, project: Union[NotEmptyStr, dict], name_contains: Optional[str] = None + self, project: Union[NotEmptyStr, dict], name_contains: Optional[str] = None ): """Searches annotation classes by name_prefix (case-insensitive) @@ -577,9 +582,9 @@ def search_annotation_classes( ] def set_project_default_image_quality_in_editor( - self, - project: Union[NotEmptyStr, dict], - image_quality_in_editor: Optional[str], + self, + project: Union[NotEmptyStr, dict], + image_quality_in_editor: Optional[str], ): """Sets project's default image quality in editor setting. @@ -602,10 +607,10 @@ def set_project_default_image_quality_in_editor( return response.data def pin_image( - self, - project: Union[NotEmptyStr, dict], - image_name: str, - pin: Optional[StrictBool] = True, + self, + project: Union[NotEmptyStr, dict], + image_name: str, + pin: Optional[StrictBool] = True, ): """Pins (or unpins) image @@ -641,7 +646,7 @@ def delete_items(self, project: str, items: Optional[List[str]] = None): raise AppException(response.errors) def assign_items( - self, project: Union[NotEmptyStr, dict], items: List[str], user: str + self, project: Union[NotEmptyStr, dict], items: List[str], user: str ): """Assigns items to a user. The assignment role, QA or Annotator, will be deduced from the user's role in the project. The type of the objects` image, video or text @@ -666,7 +671,7 @@ def assign_items( raise AppException(response.errors) def unassign_items( - self, project: Union[NotEmptyStr, dict], items: List[NotEmptyStr] + self, project: Union[NotEmptyStr, dict], items: List[NotEmptyStr] ): """Removes assignment of given items for all assignees. 
With SDK, the user can be assigned to a role in the project with the share_project @@ -702,10 +707,10 @@ def unassign_folder(self, project_name: NotEmptyStr, folder_name: NotEmptyStr): raise AppException(response.errors) def assign_folder( - self, - project_name: NotEmptyStr, - folder_name: NotEmptyStr, - users: List[NotEmptyStr], + self, + project_name: NotEmptyStr, + folder_name: NotEmptyStr, + users: List[NotEmptyStr], ): """Assigns folder to users. With SDK, the user can be assigned to a role in the project with the share_project function. @@ -722,8 +727,8 @@ def assign_folder( self.controller.get_project_metadata( project_name=project_name, include_contributors=True ) - .data["project"] - .users + .data["project"] + .users ) verified_users = [i["user_id"] for i in contributors] verified_users = set(users).intersection(set(verified_users)) @@ -747,19 +752,19 @@ def assign_folder( raise AppException(response.errors) def upload_images_from_folder_to_project( - self, - project: Union[NotEmptyStr, dict], - folder_path: Union[NotEmptyStr, Path], - extensions: Optional[ - Union[List[NotEmptyStr], Tuple[NotEmptyStr]] - ] = constants.DEFAULT_IMAGE_EXTENSIONS, - annotation_status="NotStarted", - from_s3_bucket=None, - exclude_file_patterns: Optional[ - Iterable[NotEmptyStr] - ] = constants.DEFAULT_FILE_EXCLUDE_PATTERNS, - recursive_subfolders: Optional[StrictBool] = False, - image_quality_in_editor: Optional[str] = None, + self, + project: Union[NotEmptyStr, dict], + folder_path: Union[NotEmptyStr, Path], + extensions: Optional[ + Union[List[NotEmptyStr], Tuple[NotEmptyStr]] + ] = constants.DEFAULT_IMAGE_EXTENSIONS, + annotation_status="NotStarted", + from_s3_bucket=None, + exclude_file_patterns: Optional[ + Iterable[NotEmptyStr] + ] = constants.DEFAULT_FILE_EXCLUDE_PATTERNS, + recursive_subfolders: Optional[StrictBool] = False, + image_quality_in_editor: Optional[str] = None, ): """Uploads all images with given extensions from folder_path to the project. 
Sets status of all the uploaded images to set_status if it is not None. @@ -854,7 +859,7 @@ def upload_images_from_folder_to_project( return [], [], duplicates if use_case.is_valid(): with tqdm( - total=len(images_to_upload), desc="Uploading images" + total=len(images_to_upload), desc="Uploading images" ) as progress_bar: for _ in use_case.execute(): progress_bar.update(1) @@ -862,9 +867,9 @@ def upload_images_from_folder_to_project( raise AppException(use_case.response.errors) def get_project_image_count( - self, - project: Union[NotEmptyStr, dict], - with_all_subfolders: Optional[StrictBool] = False, + self, + project: Union[NotEmptyStr, dict], + with_all_subfolders: Optional[StrictBool] = False, ): """Returns number of images in the project. @@ -889,10 +894,10 @@ def get_project_image_count( return response.data def download_image_annotations( - self, - project: Union[NotEmptyStr, dict], - image_name: NotEmptyStr, - local_dir_path: Union[str, Path], + self, + project: Union[NotEmptyStr, dict], + image_name: NotEmptyStr, + local_dir_path: Union[str, Path], ): """Downloads annotations of the image (JSON and mask if pixel type project) to local_dir_path. @@ -919,7 +924,7 @@ def download_image_annotations( return res.data def get_exports( - self, project: NotEmptyStr, return_metadata: Optional[StrictBool] = False + self, project: NotEmptyStr, return_metadata: Optional[StrictBool] = False ): """Get all prepared exports of the project. 
@@ -937,12 +942,12 @@ def get_exports( return response.data def prepare_export( - self, - project: Union[NotEmptyStr, dict], - folder_names: Optional[List[NotEmptyStr]] = None, - annotation_statuses: Optional[List[AnnotationStatuses]] = None, - include_fuse: Optional[StrictBool] = False, - only_pinned=False, + self, + project: Union[NotEmptyStr, dict], + folder_names: Optional[List[NotEmptyStr]] = None, + annotation_statuses: Optional[List[AnnotationStatuses]] = None, + include_fuse: Optional[StrictBool] = False, + only_pinned=False, ): """Prepare annotations and classes.json for export. Original and fused images for images with annotations can be included with include_fuse flag. @@ -989,19 +994,19 @@ def prepare_export( return response.data def upload_videos_from_folder_to_project( - self, - project: Union[NotEmptyStr, dict], - folder_path: Union[NotEmptyStr, Path], - extensions: Optional[ - Union[Tuple[NotEmptyStr], List[NotEmptyStr]] - ] = constants.DEFAULT_VIDEO_EXTENSIONS, - exclude_file_patterns: Optional[List[NotEmptyStr]] = (), - recursive_subfolders: Optional[StrictBool] = False, - target_fps: Optional[int] = None, - start_time: Optional[float] = 0.0, - end_time: Optional[float] = None, - annotation_status: Optional[AnnotationStatuses] = "NotStarted", - image_quality_in_editor: Optional[ImageQualityChoices] = None, + self, + project: Union[NotEmptyStr, dict], + folder_path: Union[NotEmptyStr, Path], + extensions: Optional[ + Union[Tuple[NotEmptyStr], List[NotEmptyStr]] + ] = constants.DEFAULT_VIDEO_EXTENSIONS, + exclude_file_patterns: Optional[List[NotEmptyStr]] = (), + recursive_subfolders: Optional[StrictBool] = False, + target_fps: Optional[int] = None, + start_time: Optional[float] = 0.0, + end_time: Optional[float] = None, + annotation_status: Optional[AnnotationStatuses] = "NotStarted", + image_quality_in_editor: Optional[ImageQualityChoices] = None, ): """Uploads image frames from all videos with given extensions from folder_path to the project. 
Sets status of all the uploaded images to set_status if it is not None. @@ -1072,14 +1077,14 @@ def upload_videos_from_folder_to_project( return response.data def upload_video_to_project( - self, - project: Union[NotEmptyStr, dict], - video_path: Union[NotEmptyStr, Path], - target_fps: Optional[int] = None, - start_time: Optional[float] = 0.0, - end_time: Optional[float] = None, - annotation_status: Optional[AnnotationStatuses] = "NotStarted", - image_quality_in_editor: Optional[ImageQualityChoices] = None, + self, + project: Union[NotEmptyStr, dict], + video_path: Union[NotEmptyStr, Path], + target_fps: Optional[int] = None, + start_time: Optional[float] = 0.0, + end_time: Optional[float] = None, + annotation_status: Optional[AnnotationStatuses] = "NotStarted", + image_quality_in_editor: Optional[ImageQualityChoices] = None, ): """Uploads image frames from video to platform. Uploaded images will have names "_.jpg". @@ -1123,12 +1128,12 @@ def upload_video_to_project( return response.data def create_annotation_class( - self, - project: Union[Project, NotEmptyStr], - name: NotEmptyStr, - color: NotEmptyStr, - attribute_groups: Optional[List[AttributeGroup]] = None, - class_type: ClassType = "object", + self, + project: Union[Project, NotEmptyStr], + name: NotEmptyStr, + color: NotEmptyStr, + attribute_groups: Optional[List[AttributeGroup]] = None, + class_type: ClassType = "object", ): """Create annotation class in project @@ -1228,7 +1233,7 @@ def create_annotation_class( return BaseSerializer(response.data).serialize(exclude_unset=True) def delete_annotation_class( - self, project: NotEmptyStr, annotation_class: Union[dict, NotEmptyStr] + self, project: NotEmptyStr, annotation_class: Union[dict, NotEmptyStr] ): """Deletes annotation class from project @@ -1242,7 +1247,7 @@ def delete_annotation_class( ) def download_annotation_classes_json( - self, project: NotEmptyStr, folder: Union[str, Path] + self, project: NotEmptyStr, folder: Union[str, Path] ): """Downloads 
project classes.json to folder @@ -1262,10 +1267,10 @@ def download_annotation_classes_json( return response.data def create_annotation_classes_from_classes_json( - self, - project: Union[NotEmptyStr, dict], - classes_json: Union[List[AnnotationClassEntity], str, Path], - from_s3_bucket=False, + self, + project: Union[NotEmptyStr, dict], + classes_json: Union[List[AnnotationClassEntity], str, Path], + from_s3_bucket=False, ): """Creates annotation classes in project from a SuperAnnotate format annotation classes.json. @@ -1305,12 +1310,12 @@ def create_annotation_classes_from_classes_json( return [BaseSerializer(i).serialize(exclude_unset=True) for i in response.data] def download_export( - self, - project: Union[NotEmptyStr, dict], - export: Union[NotEmptyStr, dict], - folder_path: Union[str, Path], - extract_zip_contents: Optional[StrictBool] = True, - to_s3_bucket=None, + self, + project: Union[NotEmptyStr, dict], + export: Union[NotEmptyStr, dict], + folder_path: Union[str, Path], + extract_zip_contents: Optional[StrictBool] = True, + to_s3_bucket=None, ): """Download prepared export. @@ -1343,7 +1348,7 @@ def download_export( raise AppException(response.errors) def set_project_workflow( - self, project: Union[NotEmptyStr, dict], new_workflow: List[dict] + self, project: Union[NotEmptyStr, dict], new_workflow: List[dict] ): """Sets project's workflow. 
@@ -1365,14 +1370,14 @@ def set_project_workflow( raise AppException(response.errors) def download_image( - self, - project: Union[NotEmptyStr, dict], - image_name: NotEmptyStr, - local_dir_path: Optional[Union[str, Path]] = "./", - include_annotations: Optional[StrictBool] = False, - include_fuse: Optional[StrictBool] = False, - include_overlay: Optional[StrictBool] = False, - variant: Optional[str] = "original", + self, + project: Union[NotEmptyStr, dict], + image_name: NotEmptyStr, + local_dir_path: Optional[Union[str, Path]] = "./", + include_annotations: Optional[StrictBool] = False, + include_fuse: Optional[StrictBool] = False, + include_overlay: Optional[StrictBool] = False, + variant: Optional[str] = "original", ): """Downloads the image (and annotation if not None) to local_dir_path @@ -1412,11 +1417,11 @@ def download_image( return response.data def upload_annotations_from_folder_to_project( - self, - project: Union[NotEmptyStr, dict], - folder_path: Union[str, Path], - from_s3_bucket=None, - recursive_subfolders: Optional[StrictBool] = False, + self, + project: Union[NotEmptyStr, dict], + folder_path: Union[str, Path], + from_s3_bucket=None, + recursive_subfolders: Optional[StrictBool] = False, ): """Finds and uploads all JSON files in the folder_path as annotations to the project. @@ -1474,11 +1479,11 @@ def upload_annotations_from_folder_to_project( return response.data def upload_preannotations_from_folder_to_project( - self, - project: Union[NotEmptyStr, dict], - folder_path: Union[str, Path], - from_s3_bucket=None, - recursive_subfolders: Optional[StrictBool] = False, + self, + project: Union[NotEmptyStr, dict], + folder_path: Union[str, Path], + from_s3_bucket=None, + recursive_subfolders: Optional[StrictBool] = False, ): """Finds and uploads all JSON files in the folder_path as pre-annotations to the project. 
@@ -1539,12 +1544,12 @@ def upload_preannotations_from_folder_to_project( return response.data def upload_image_annotations( - self, - project: Union[NotEmptyStr, dict], - image_name: str, - annotation_json: Union[str, Path, dict], - mask: Optional[Union[str, Path, bytes]] = None, - verbose: Optional[StrictBool] = True, + self, + project: Union[NotEmptyStr, dict], + image_name: str, + annotation_json: Union[str, Path, dict], + mask: Optional[Union[str, Path, bytes]] = None, + verbose: Optional[StrictBool] = True, ): """Upload annotations from JSON (also mask for pixel annotations) to the image. @@ -1621,14 +1626,14 @@ def download_model(self, model: MLModel, output_dir: Union[str, Path]): return BaseSerializer(res.data).serialize() def benchmark( - self, - project: Union[NotEmptyStr, dict], - gt_folder: str, - folder_names: List[NotEmptyStr], - export_root: Optional[Union[str, Path]] = None, - image_list=None, - annot_type: Optional[AnnotationType] = "bbox", - show_plots=False, + self, + project: Union[NotEmptyStr, dict], + gt_folder: str, + folder_names: List[NotEmptyStr], + export_root: Optional[Union[str, Path]] = None, + image_list=None, + annot_type: Optional[AnnotationType] = "bbox", + show_plots=False, ): """Computes benchmark score for each instance of given images that are present both gt_project_name project and projects in folder_names list: @@ -1688,13 +1693,13 @@ def benchmark( return response.data def consensus( - self, - project: NotEmptyStr, - folder_names: List[NotEmptyStr], - export_root: Optional[Union[NotEmptyStr, Path]] = None, - image_list: Optional[List[NotEmptyStr]] = None, - annot_type: Optional[AnnotationType] = "bbox", - show_plots: Optional[StrictBool] = False, + self, + project: NotEmptyStr, + folder_names: List[NotEmptyStr], + export_root: Optional[Union[NotEmptyStr, Path]] = None, + image_list: Optional[List[NotEmptyStr]] = None, + annot_type: Optional[AnnotationType] = "bbox", + show_plots: Optional[StrictBool] = False, ): 
"""Computes consensus score for each instance of given images that are present in at least 2 of the given projects: @@ -1741,10 +1746,10 @@ def consensus( return response.data def run_prediction( - self, - project: Union[NotEmptyStr, dict], - images_list: List[NotEmptyStr], - model: Union[NotEmptyStr, dict], + self, + project: Union[NotEmptyStr, dict], + images_list: List[NotEmptyStr], + model: Union[NotEmptyStr, dict], ): """This function runs smart prediction on given list of images from a given project using the neural network of your choice @@ -1779,13 +1784,13 @@ def run_prediction( return response.data def add_annotation_bbox_to_image( - self, - project: NotEmptyStr, - image_name: NotEmptyStr, - bbox: List[float], - annotation_class_name: NotEmptyStr, - annotation_class_attributes: Optional[List[dict]] = None, - error: Optional[StrictBool] = None, + self, + project: NotEmptyStr, + image_name: NotEmptyStr, + bbox: List[float], + annotation_class_name: NotEmptyStr, + annotation_class_attributes: Optional[List[dict]] = None, + error: Optional[StrictBool] = None, ): """Add a bounding box annotation to image annotations @@ -1838,13 +1843,13 @@ def add_annotation_bbox_to_image( ) def add_annotation_point_to_image( - self, - project: NotEmptyStr, - image_name: NotEmptyStr, - point: List[float], - annotation_class_name: NotEmptyStr, - annotation_class_attributes: Optional[List[dict]] = None, - error: Optional[StrictBool] = None, + self, + project: NotEmptyStr, + image_name: NotEmptyStr, + point: List[float], + annotation_class_name: NotEmptyStr, + annotation_class_attributes: Optional[List[dict]] = None, + error: Optional[StrictBool] = None, ): """Add a point annotation to image annotations @@ -1896,13 +1901,13 @@ def add_annotation_point_to_image( raise AppException(response.errors) def add_annotation_comment_to_image( - self, - project: NotEmptyStr, - image_name: NotEmptyStr, - comment_text: NotEmptyStr, - comment_coords: List[float], - comment_author: EmailStr, - 
resolved: Optional[StrictBool] = False, + self, + project: NotEmptyStr, + image_name: NotEmptyStr, + comment_text: NotEmptyStr, + comment_coords: List[float], + comment_author: EmailStr, + resolved: Optional[StrictBool] = False, ): """Add a comment to SuperAnnotate format annotation JSON @@ -1951,13 +1956,13 @@ def add_annotation_comment_to_image( ) def upload_image_to_project( - self, - project: NotEmptyStr, - img, - image_name: Optional[NotEmptyStr] = None, - annotation_status: Optional[AnnotationStatuses] = "NotStarted", - from_s3_bucket=None, - image_quality_in_editor: Optional[NotEmptyStr] = None, + self, + project: NotEmptyStr, + img, + image_name: Optional[NotEmptyStr] = None, + annotation_status: Optional[AnnotationStatuses] = "NotStarted", + from_s3_bucket=None, + image_quality_in_editor: Optional[NotEmptyStr] = None, ): """Uploads image (io.BytesIO() or filepath to image) to project. Sets status of the uploaded image to set_status if it is not None. @@ -1992,12 +1997,12 @@ def upload_image_to_project( raise AppException(response.errors) def search_models( - self, - name: Optional[NotEmptyStr] = None, - type_: Optional[NotEmptyStr] = None, # noqa - project_id: Optional[int] = None, - task: Optional[NotEmptyStr] = None, - include_global: Optional[StrictBool] = True, + self, + name: Optional[NotEmptyStr] = None, + type_: Optional[NotEmptyStr] = None, # noqa + project_id: Optional[int] = None, + task: Optional[NotEmptyStr] = None, + include_global: Optional[StrictBool] = True, ): r"""Search for ML models. 
@@ -2029,12 +2034,12 @@ def search_models( return res.data def upload_images_to_project( - self, - project: NotEmptyStr, - img_paths: List[NotEmptyStr], - annotation_status: Optional[AnnotationStatuses] = "NotStarted", - from_s3_bucket=None, - image_quality_in_editor: Optional[ImageQualityChoices] = None, + self, + project: NotEmptyStr, + img_paths: List[NotEmptyStr], + annotation_status: Optional[AnnotationStatuses] = "NotStarted", + from_s3_bucket=None, + image_quality_in_editor: Optional[ImageQualityChoices] = None, ): """Uploads all images given in list of path objects in img_paths to the project. Sets status of all the uploaded images to set_status if it is not None. @@ -2081,7 +2086,7 @@ def upload_images_to_project( return uploaded, failed_images, duplications if use_case.is_valid(): with tqdm( - total=len(images_to_upload), desc="Uploading images" + total=len(images_to_upload), desc="Uploading images" ) as progress_bar: for _ in use_case.execute(): progress_bar.update(1) @@ -2093,9 +2098,9 @@ def upload_images_to_project( @staticmethod def aggregate_annotations_as_df( - project_root: Union[NotEmptyStr, Path], - project_type: ProjectTypes, - folder_names: Optional[List[Union[Path, NotEmptyStr]]] = None, + project_root: Union[NotEmptyStr, Path], + project_type: ProjectTypes, + folder_names: Optional[List[Union[Path, NotEmptyStr]]] = None, ): """Aggregate annotations as pandas dataframe from project root. 
@@ -2113,8 +2118,8 @@ def aggregate_annotations_as_df( :rtype: pandas DataFrame """ if project_type in ( - constants.ProjectType.VECTOR.name, - constants.ProjectType.PIXEL.name, + constants.ProjectType.VECTOR.name, + constants.ProjectType.PIXEL.name, ): from superannotate.lib.app.analytics.common import ( aggregate_image_annotations_as_df, @@ -2128,8 +2133,8 @@ def aggregate_annotations_as_df( folder_names=folder_names, ) elif project_type in ( - constants.ProjectType.VIDEO.name, - constants.ProjectType.DOCUMENT.name, + constants.ProjectType.VIDEO.name, + constants.ProjectType.DOCUMENT.name, ): from superannotate.lib.app.analytics.aggregators import DataAggregator @@ -2140,7 +2145,7 @@ def aggregate_annotations_as_df( ).aggregate_annotations_as_df() def delete_annotations( - self, project: NotEmptyStr, item_names: Optional[List[NotEmptyStr]] = None + self, project: NotEmptyStr, item_names: Optional[List[NotEmptyStr]] = None ): """ Delete item annotations from a given list of items. @@ -2160,9 +2165,9 @@ def delete_annotations( raise AppException(response.errors) def validate_annotations( - self, - project_type: ProjectTypes, - annotations_json: Union[NotEmptyStr, Path, dict], + self, + project_type: ProjectTypes, + annotations_json: Union[NotEmptyStr, Path, dict], ): """Validates given annotation JSON. @@ -2189,10 +2194,10 @@ def validate_annotations( return False def add_contributors_to_project( - self, - project: NotEmptyStr, - emails: conlist(EmailStr, min_items=1), - role: AnnotatorRole, + self, + project: NotEmptyStr, + emails: conlist(EmailStr, min_items=1), + role: AnnotatorRole, ) -> Tuple[List[str], List[str]]: """Add contributors to project. 
@@ -2216,7 +2221,7 @@ def add_contributors_to_project( return response.data def invite_contributors_to_team( - self, emails: conlist(EmailStr, min_items=1), admin: StrictBool = False + self, emails: conlist(EmailStr, min_items=1), admin: StrictBool = False ) -> Tuple[List[str], List[str]]: """Invites contributors to the team. @@ -2237,7 +2242,7 @@ def invite_contributors_to_team( return response.data def get_annotations( - self, project: NotEmptyStr, items: Optional[List[NotEmptyStr]] = None + self, project: NotEmptyStr, items: Optional[List[NotEmptyStr]] = None ): """Returns annotations for the given list of items. @@ -2257,7 +2262,7 @@ def get_annotations( return response.data def get_annotations_per_frame( - self, project: NotEmptyStr, video: NotEmptyStr, fps: int = 1 + self, project: NotEmptyStr, video: NotEmptyStr, fps: int = 1 ): """Returns per frame annotations for the given video. @@ -2317,10 +2322,10 @@ def get_integrations(self): return BaseSerializer.serialize_iterable(integrations, ("name", "type", "root")) def attach_items_from_integrated_storage( - self, - project: NotEmptyStr, - integration: Union[NotEmptyStr, IntegrationEntity], - folder_path: Optional[NotEmptyStr] = None, + self, + project: NotEmptyStr, + integration: Union[NotEmptyStr, IntegrationEntity], + folder_path: Optional[NotEmptyStr] = None, ): """Link images from integrated external storage to SuperAnnotate. @@ -2345,10 +2350,10 @@ def attach_items_from_integrated_storage( raise AppException(response.errors) def query( - self, - project: NotEmptyStr, - query: Optional[NotEmptyStr] = None, - subset: Optional[NotEmptyStr] = None, + self, + project: NotEmptyStr, + query: Optional[NotEmptyStr] = None, + subset: Optional[NotEmptyStr] = None, ): """Return items that satisfy the given query. Query syntax should be in SuperAnnotate query language(https://doc.superannotate.com/docs/query-search-1). 
@@ -2375,10 +2380,10 @@ def query( return BaseSerializer.serialize_iterable(response.data) def get_item_metadata( - self, - project: NotEmptyStr, - item_name: NotEmptyStr, - include_custom_metadata: bool = False, + self, + project: NotEmptyStr, + item_name: NotEmptyStr, + include_custom_metadata: bool = False, ): """Returns item metadata @@ -2433,14 +2438,14 @@ def get_item_metadata( return BaseSerializer(response.data).serialize(exclude=exclude) def search_items( - self, - project: NotEmptyStr, - name_contains: NotEmptyStr = None, - annotation_status: Optional[AnnotationStatuses] = None, - annotator_email: Optional[NotEmptyStr] = None, - qa_email: Optional[NotEmptyStr] = None, - recursive: bool = False, - include_custom_metadata: bool = False, + self, + project: NotEmptyStr, + name_contains: NotEmptyStr = None, + annotation_status: Optional[AnnotationStatuses] = None, + annotator_email: Optional[NotEmptyStr] = None, + qa_email: Optional[NotEmptyStr] = None, + recursive: bool = False, + include_custom_metadata: bool = False, ): """Search items by filtering criteria. @@ -2528,10 +2533,10 @@ def search_items( return BaseSerializer.serialize_iterable(response.data, exclude=exclude) def attach_items( - self, - project: Union[NotEmptyStr, dict], - attachments: AttachmentArg, - annotation_status: Optional[AnnotationStatuses] = "NotStarted", + self, + project: Union[NotEmptyStr, dict], + attachments: AttachmentArg, + annotation_status: Optional[AnnotationStatuses] = "NotStarted", ): """Link items from external storage to SuperAnnotate using URLs. 
@@ -2590,11 +2595,11 @@ def attach_items( return uploaded, fails, duplicated def copy_items( - self, - source: Union[NotEmptyStr, dict], - destination: Union[NotEmptyStr, dict], - items: Optional[List[NotEmptyStr]] = None, - include_annotations: Optional[StrictBool] = True, + self, + source: Union[NotEmptyStr, dict], + destination: Union[NotEmptyStr, dict], + items: Optional[List[NotEmptyStr]] = None, + include_annotations: Optional[StrictBool] = True, ): """Copy images in bulk between folders in a project @@ -2633,10 +2638,10 @@ def copy_items( return response.data def move_items( - self, - source: Union[NotEmptyStr, dict], - destination: Union[NotEmptyStr, dict], - items: Optional[List[NotEmptyStr]] = None, + self, + source: Union[NotEmptyStr, dict], + destination: Union[NotEmptyStr, dict], + items: Optional[List[NotEmptyStr]] = None, ): """Move images in bulk between folders in a project @@ -2668,10 +2673,10 @@ def move_items( return response.data def set_annotation_statuses( - self, - project: Union[NotEmptyStr, dict], - annotation_status: AnnotationStatuses, - items: Optional[List[NotEmptyStr]] = None, + self, + project: Union[NotEmptyStr, dict], + annotation_status: AnnotationStatuses, + items: Optional[List[NotEmptyStr]] = None, ): """Sets annotation statuses of items @@ -2704,12 +2709,12 @@ def set_annotation_statuses( logger.info("Annotation statuses of items changed") def download_annotations( - self, - project: Union[NotEmptyStr, dict], - path: Union[str, Path] = None, - items: Optional[List[NotEmptyStr]] = None, - recursive: bool = False, - callback: Callable = None, + self, + project: Union[NotEmptyStr, dict], + path: Union[str, Path] = None, + items: Optional[List[NotEmptyStr]] = None, + recursive: bool = False, + callback: Callable = None, ): """Downloads annotation JSON files of the selected items to the local directory. 
    def add_items_to_subset(
        self, project: NotEmptyStr, subset: NotEmptyStr, items: List[dict]
    ):
        """
        Associates selected items with a given subset. A non-existing subset
        will be created automatically.

        :param project: project name (e.g., "project1")
        :type project: str

        :param subset: name of an existing or new subset to associate items
            with. New subsets will be created automatically.
        :type subset: str

        :param items: list of item metadata dicts. The keys 'name' and 'path'
            are required if the 'id' key is not provided in the dict.
        :type items: list of dicts

        Request Example:
        ::
            client = SAClient()

            # option 1
            queried_items = client.query(
                project="Image Project",
                query="instance(error = true)"
            )

            client.add_items_to_subset(
                project="Medical Annotations",
                subset="Brain Study - Disapproved",
                items=queried_items
            )

            items_list = [
                {
                    'name': 'image_1.jpeg',
                    'path': 'Image Project'
                },
                {
                    'name': 'image_2.jpeg',
                    'path': 'Image Project/Subfolder A'
                }
            ]

            client.add_items_to_subset(
                project="Image Project",
                subset="Subset Name",
                items=items_list
            )

        Response Example:
        ::
            {
                "succeeded": [
                    {
                        'name': 'image_1.jpeg',
                        'path': 'Image Project'
                    },
                    {
                        'name': 'image_2.jpeg',
                        'path': 'Image Project/Subfolder A'
                    }
                ],
                "failed": [],
                "skipped": []
            }
        """

        # Only the project part is forwarded; a folder segment in `project`
        # is ignored here (folders are resolved from each item's 'path').
        project_name, _ = extract_project_folder(project)

        response = self.controller.add_items_to_subset(project_name, subset, items)

        if response.errors:
            raise AppException(response.errors)

        return response.data
class AddItemsToSubsetUseCase(BaseUseCase):
    """Attach a list of items to a subset, creating the subset if needed.

    Items may be addressed either by explicit id (``{"id": ...}``) or by
    ``{"name": ..., "path": "<project>[/<folder>]"}``.  Name/path items are
    resolved to ids via per-folder backend queries; anything that cannot be
    resolved ends up under ``"skipped"`` in the result dict
    ``{"succeeded": [...], "failed": [...], "skipped": [...]}``.
    """

    # Maximum number of item ids the backend accepts per attach request.
    CHUNK_SIZE = 5000

    def __init__(
        self,
        reporter,
        project,
        subset_name,
        items,
        backend_client,
        folder_repo,
        root_folder,
    ):
        self.reporter = reporter
        self.project = project
        self.subset_name = subset_name
        self.items = items
        self.results = {"succeeded": [], "failed": [], "skipped": []}
        self.item_ids = []
        self.path_separated = defaultdict(dict)
        self._backend_client = backend_client
        self.folder_repository = folder_repo
        self.root_folder = root_folder
        super().__init__()

    def __filter_duplicates(self):
        """Return self.items with duplicate ids and duplicate path+name pairs removed."""
        seen = set()

        def is_unique(item):
            unique = True
            if "id" in item:
                if item["id"] in seen:
                    unique = False
                else:
                    seen.add(item["id"])
            if "name" in item and "path" in item:
                key = f"{item['path']}/{item['name']}"
                if key in seen:
                    unique = False
                else:
                    seen.add(key)
            return unique

        return [item for item in self.items if is_unique(item)]

    def __filter_invalid_items(self):
        """Keep items carrying an 'id' or both 'name' and 'path'; skip the rest."""

        def is_valid(item):
            if "id" in item:
                return True
            if "name" in item and "path" in item:
                return True
            self.results["skipped"].append(item)
            return False

        return [item for item in self.items if is_valid(item)]

    def __separate_to_paths(self):
        """Split items into explicit ids and per-path groups, resolving folders.

        Groups whose path does not belong to this project, or whose folder
        cannot be found, are dropped and their items reported as skipped.
        """
        for item in self.items:
            if "id" in item:
                self.item_ids.append(item["id"])
            else:
                self.path_separated[item["path"]].setdefault("items", []).append(item)

        removables = []
        for path, group in self.path_separated.items():
            project_name, folder_name = extract_project_folder(path)

            if project_name != self.project.name:
                removables.append(path)
                continue

            # No folder segment in the path means the project root.
            # NOTE(review): a real folder literally named "root" would clash
            # with this convention — confirm against backend semantics.
            if not folder_name:
                group["folder"] = self.root_folder
                continue

            folder_found = False
            try:
                folder_candidates = SearchFoldersUseCase(
                    project=self.project,
                    folder_name=folder_name,
                    folders=self.folder_repository,
                    condition=Condition.get_empty_condition(),
                ).execute()
                if folder_candidates.errors:
                    raise AppException(folder_candidates.errors)
                for candidate in folder_candidates.data:
                    if candidate.name == folder_name:
                        group["folder"] = candidate
                        folder_found = True
                        break
                if not folder_found:
                    removables.append(path)
            except Exception:
                # Folder resolution is best-effort; any failure skips the path.
                removables.append(path)

        for path in removables:
            self.results["skipped"].extend(self.path_separated[path]["items"])
            self.path_separated.pop(path)

    def __build_query_string(self, path, item_names):
        _, folder = extract_project_folder(path)
        if not folder:
            folder = "root"
        return f"metadata(name IN {str(item_names)}) AND folder={folder}"

    def __query(self, path, items):
        """Resolve ids for the name/path items grouped under ``path``.

        Returns the list of resolved ids, or None when the query failed —
        in that case every item of the group is marked skipped.
        """
        item_names = [entry["name"] for entry in items["items"]]
        query_use_case = QueryEntitiesUseCase(
            reporter=self.reporter,
            project=self.project,
            backend_service_provider=self._backend_client,
            query=self.__build_query_string(path, item_names),
            folder=items["folder"],
            subset=None,
        )

        queried = query_use_case.execute()
        if queried.errors:
            # Whole folder query failed: mark all of its items skipped.
            self.results["skipped"].extend(items["items"])
            # BUGFIX: leave an empty list (not the original dict) behind so
            # the result-distribution loop in execute() does not iterate the
            # dict's string keys and crash on item.pop("id").
            self.path_separated[path] = []
            return None

        queried_items = queried.data
        # Items requested but absent from the folder go to 'skipped'.
        requested = {entry["name"]: entry for entry in items["items"]}
        found_names = {entity.name for entity in queried_items}
        for name, entry in requested.items():
            if name not in found_names:
                self.results["skipped"].append(entry)

        # Re-key the group by resolved metadata so execute() can later sort
        # each item into succeeded/failed/skipped by its id.
        self.path_separated[path] = [
            {"id": entity.id, "name": entity.name, "path": entity.path}
            for entity in queried_items
        ]
        return [entity.id for entity in queried_items]

    def __distribute_to_results(self, item_id, response, item):
        """File ``item`` under succeeded/skipped/failed per the backend response."""
        if item_id in response.data["success"]:
            self.results["succeeded"].append(item)
        elif item_id in response.data["skipped"]:
            self.results["skipped"].append(item)
        else:
            self.results["failed"].append(item)

    def validate_items(self):
        # Name is significant: BaseUseCase.is_valid() collects validate_* methods.
        filtered_items = self.__filter_duplicates()
        if len(filtered_items) != len(self.items):
            self.reporter.log_info(
                f"Dropping duplicates. Found {len(filtered_items)} / {len(self.items)} unique items"
            )
        self.items = filtered_items
        self.items = self.__filter_invalid_items()
        self.__separate_to_paths()

    def validate_project(self):
        # Probe the query endpoint to ensure the project supports SAQuL queries.
        response = self._backend_client.validate_saqul_query(
            self.project.team_id, self.project.id, "_"
        )
        error = response.get("error")
        if error:
            raise AppException(response["error"])

    def execute(self):
        if self.is_valid():
            # Resolve name/path groups to ids concurrently, one query per folder.
            with ThreadPoolExecutor(max_workers=4) as executor:
                futures = [
                    executor.submit(self.__query, path, group)
                    for path, group in self.path_separated.items()
                ]
                for future in as_completed(futures):
                    try:
                        ids = future.result()
                    except Exception:
                        # A crashed query leaves its items unresolved; they are
                        # accounted for during distribution below.
                        continue
                    # BUGFIX: __query returns None on failure; extending with
                    # None raised TypeError in the original.
                    if ids:
                        self.item_ids.extend(ids)

            subset = self._backend_client.get_subset(
                self.project.team_id, self.project.id, self.subset_name
            )
            if not subset:
                subset = self._backend_client.create_subset(
                    self.project.team_id,
                    self.project.id,
                    self.subset_name,
                )
                self.reporter.log_info(
                    f"You've successfully created a new subset - {self.subset_name}."
                )

            subset_id = subset["id"]
            response = None
            # Attach ids in chunks and merge the per-chunk outcome sets.
            for start in range(0, len(self.item_ids), self.CHUNK_SIZE):
                chunk_response = self._backend_client.add_items_to_subset(
                    project_id=self.project.id,
                    team_id=self.project.team_id,
                    item_ids=self.item_ids[start : start + self.CHUNK_SIZE],  # noqa
                    subset_id=subset_id,
                )
                if response is None:
                    response = chunk_response
                else:
                    for key in ("failed", "skipped", "success"):
                        response.data[key] = response.data[key].union(
                            chunk_response.data[key]
                        )

            # BUGFIX: guard against response being None (no ids resolved at
            # all) before distributing outcomes.
            if response is not None:
                # Items resolved from name/path groups: 'id' was added during
                # resolution, so strip it before reporting back to the caller.
                for group in self.path_separated.values():
                    if not isinstance(group, list):
                        # Group never got resolved (query crashed); its items
                        # were already reported as skipped.
                        continue
                    for item in group:
                        self.__distribute_to_results(item.pop("id"), response, item)

                # Items the user supplied by id: keep the id in place.
                for item in self.items:
                    if "id" not in item:
                        continue
                    self.__distribute_to_results(item["id"], response, item)

            self._response.data = self.results
        # Errors, if any, were recorded on self._response by the validators;
        # the caller inspects response.errors.
        return self._response
    def add_items_to_subset(self, project_name: str, subset: str, items: List[dict]):
        """Associate ``items`` with the subset ``subset`` of ``project_name``.

        Builds and runs AddItemsToSubsetUseCase; returns its response, whose
        ``data`` holds the succeeded/failed/skipped breakdown.
        """

        project = self._get_project(project_name)
        # Synthetic entity standing in for the project root, used for items
        # whose path carries no folder segment.
        root_folder = FolderEntity(uuid=project.id, name="root")

        use_case = usecases.AddItemsToSubsetUseCase(
            reporter=self.get_default_reporter(),
            project=project,
            subset_name=subset,
            items=items,
            backend_client=self.backend_client,
            folder_repo=self.folders,
            root_folder=root_folder,
        )

        return use_case.execute()
    def get_subset(self, team_id, project_id, subset_name):
        """Return the subset dict named ``subset_name`` for the project, or None.

        Raises AppException when the listing request itself fails.
        """
        params = {"team_id": team_id}

        get_subset_url = urljoin(
            self.api_url, self.URL_GET_SUBSET.format(project_id=project_id)
        )

        response = self._request(url=get_subset_url, method="GET", params=params)

        if not response.ok:
            raise AppException(response.json().get("errors", "undefined"))

        subsets = response.json()

        # The endpoint has no server-side name filter; match client-side.
        for subset in subsets:
            if subset["name"] == subset_name:
                return subset
        return None

    def create_subset(self, team_id, project_id, subset_name):
        """Create a subset named ``subset_name`` and return its metadata dict.

        Raises AppException when the request fails.
        """

        create_subset_url = urljoin(
            self.api_url, self.URL_CREATE_SUBSET.format(project_id=project_id)
        )
        params = {
            "team_id": team_id,
        }

        data = {"names": [subset_name]}

        response = self._request(
            url=create_subset_url, method="POST", params=params, data=data
        )

        if not response.ok:
            raise AppException(response.json().get("errors", "undefined"))

        # Bulk endpoint returns a list; exactly one subset was requested.
        return response.json()[0]