diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3982e77cc..b65ed2b11 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ repos: - '--application-directories' - app - repo: 'https://github.com/python/black' - rev: 19.10b0 + rev: 22.3.0 hooks: - id: black name: Code Formatter (black) diff --git a/docs/source/index.rst b/docs/source/index.rst index 1d28b8ff6..6b52ceeea 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -29,7 +29,9 @@ SuperAnnotate Python SDK allows access to the platform without web browser: .. code-block:: python - import superannotate as sa + from superannotate import SAClient + + sa = SAClient() sa.create_project("Example Project 1", "example", "Vector") diff --git a/docs/source/superannotate.sdk.rst b/docs/source/superannotate.sdk.rst index 224765118..665f722a7 100644 --- a/docs/source/superannotate.sdk.rst +++ b/docs/source/superannotate.sdk.rst @@ -8,73 +8,80 @@ API Reference Remote functions ---------------- -Initialization and authentication +Instantiation and authentication _________________________________ -.. autofunction:: superannotate.init +.. autoclass:: superannotate.SAClient -.. _ref_projects: - Projects ________ .. _ref_search_projects: -.. autofunction:: superannotate.search_projects -.. autofunction:: superannotate.create_project -.. autofunction:: superannotate.create_project_from_metadata -.. autofunction:: superannotate.clone_project -.. autofunction:: superannotate.delete_project -.. autofunction:: superannotate.rename_project +.. automethod:: superannotate.SAClient.search_projects +.. automethod:: superannotate.SAClient.create_project +.. automethod:: superannotate.SAClient.create_project_from_metadata +.. automethod:: superannotate.SAClient.clone_project +.. automethod:: superannotate.SAClient.delete_project +.. automethod:: superannotate.SAClient.rename_project .. _ref_get_project_metadata: -.. autofunction:: superannotate.get_project_metadata -.. autofunction:: superannotate.get_project_image_count -.. autofunction:: superannotate.search_folders -.. autofunction:: superannotate.get_folder_metadata -.. autofunction:: superannotate.create_folder -.. autofunction:: superannotate.delete_folders -.. autofunction:: superannotate.upload_images_to_project -.. autofunction:: superannotate.attach_items_from_integrated_storage -.. autofunction:: superannotate.upload_image_to_project -.. autofunction:: superannotate.delete_annotations +.. automethod:: superannotate.SAClient.get_project_metadata +.. automethod:: superannotate.SAClient.get_project_image_count +.. automethod:: superannotate.SAClient.search_folders +.. automethod:: superannotate.SAClient.get_folder_metadata +.. automethod:: superannotate.SAClient.create_folder +.. automethod:: superannotate.SAClient.delete_folders +.. automethod:: superannotate.SAClient.upload_images_to_project +.. automethod:: superannotate.SAClient.attach_items_from_integrated_storage +.. automethod:: superannotate.SAClient.upload_image_to_project +.. automethod:: superannotate.SAClient.delete_annotations .. _ref_upload_images_from_folder_to_project: -.. autofunction:: superannotate.upload_images_from_folder_to_project -.. autofunction:: superannotate.upload_video_to_project -.. autofunction:: superannotate.upload_videos_from_folder_to_project +.. automethod:: superannotate.SAClient.upload_images_from_folder_to_project +.. automethod:: superannotate.SAClient.upload_video_to_project +.. automethod:: superannotate.SAClient.upload_videos_from_folder_to_project .. 
_ref_upload_annotations_from_folder_to_project: -.. autofunction:: superannotate.upload_annotations_from_folder_to_project -.. autofunction:: superannotate.upload_preannotations_from_folder_to_project -.. autofunction:: superannotate.add_contributors_to_project -.. autofunction:: superannotate.get_project_settings -.. autofunction:: superannotate.set_project_default_image_quality_in_editor -.. autofunction:: superannotate.get_project_workflow -.. autofunction:: superannotate.set_project_workflow +.. automethod:: superannotate.SAClient.upload_annotations_from_folder_to_project +.. automethod:: superannotate.SAClient.upload_preannotations_from_folder_to_project +.. automethod:: superannotate.SAClient.add_contributors_to_project +.. automethod:: superannotate.SAClient.get_project_settings +.. automethod:: superannotate.SAClient.set_project_default_image_quality_in_editor +.. automethod:: superannotate.SAClient.get_project_workflow +.. automethod:: superannotate.SAClient.set_project_workflow ---------- Exports _______ -.. autofunction:: superannotate.prepare_export -.. autofunction:: superannotate.get_annotations -.. autofunction:: superannotate.get_annotations_per_frame +.. automethod:: superannotate.SAClient.prepare_export +.. automethod:: superannotate.SAClient.get_annotations +.. automethod:: superannotate.SAClient.get_annotations_per_frame .. _ref_download_export: -.. autofunction:: superannotate.download_export -.. autofunction:: superannotate.get_exports +.. automethod:: superannotate.SAClient.download_export +.. automethod:: superannotate.SAClient.get_exports ---------- Items ______ -.. autofunction:: superannotate.query -.. autofunction:: superannotate.search_items -.. autofunction:: superannotate.download_annotations -.. autofunction:: superannotate.attach_items -.. autofunction:: superannotate.copy_items -.. autofunction:: superannotate.move_items -.. autofunction:: superannotate.get_item_metadata -.. autofunction:: superannotate.set_annotation_statuses +.. automethod:: superannotate.SAClient.query +.. automethod:: superannotate.SAClient.search_items +.. automethod:: superannotate.SAClient.download_annotations +.. automethod:: superannotate.SAClient.attach_items +.. automethod:: superannotate.SAClient.copy_items +.. automethod:: superannotate.SAClient.move_items +.. automethod:: superannotate.SAClient.assign_items +.. automethod:: superannotate.SAClient.unassign_items +.. automethod:: superannotate.SAClient.get_item_metadata +.. automethod:: superannotate.SAClient.set_annotation_statuses + +---------- + +Subsets +______ + +.. automethod:: superannotate.SAClient.get_subsets ---------- @@ -83,50 +90,48 @@ ______ .. _ref_search_images: -.. autofunction:: superannotate.download_image -.. autofunction:: superannotate.set_image_annotation_status -.. autofunction:: superannotate.set_images_annotation_statuses -.. autofunction:: superannotate.download_image_annotations -.. autofunction:: superannotate.upload_image_annotations -.. autofunction:: superannotate.copy_image -.. autofunction:: superannotate.pin_image -.. autofunction:: superannotate.assign_images -.. autofunction:: superannotate.delete_images -.. autofunction:: superannotate.add_annotation_bbox_to_image -.. autofunction:: superannotate.add_annotation_point_to_image -.. autofunction:: superannotate.add_annotation_comment_to_image -.. autofunction:: superannotate.upload_priority_scores +.. automethod:: superannotate.SAClient.download_image +.. automethod:: superannotate.SAClient.download_image_annotations +.. 
automethod:: superannotate.SAClient.upload_image_annotations +.. automethod:: superannotate.SAClient.copy_image +.. automethod:: superannotate.SAClient.pin_image +.. automethod:: superannotate.SAClient.assign_images +.. automethod:: superannotate.SAClient.delete_images +.. automethod:: superannotate.SAClient.add_annotation_bbox_to_image +.. automethod:: superannotate.SAClient.add_annotation_point_to_image +.. automethod:: superannotate.SAClient.add_annotation_comment_to_image +.. automethod:: superannotate.SAClient.upload_priority_scores ---------- Annotation Classes __________________ -.. autofunction:: superannotate.create_annotation_class +.. automethod:: superannotate.SAClient.create_annotation_class .. _ref_create_annotation_classes_from_classes_json: -.. autofunction:: superannotate.create_annotation_classes_from_classes_json -.. autofunction:: superannotate.search_annotation_classes -.. autofunction:: superannotate.download_annotation_classes_json -.. autofunction:: superannotate.delete_annotation_class +.. automethod:: superannotate.SAClient.create_annotation_classes_from_classes_json +.. automethod:: superannotate.SAClient.search_annotation_classes +.. automethod:: superannotate.SAClient.download_annotation_classes_json +.. automethod:: superannotate.SAClient.delete_annotation_class ---------- Team _________________ -.. autofunction:: superannotate.get_team_metadata -.. autofunction:: superannotate.get_integrations -.. autofunction:: superannotate.invite_contributors_to_team -.. autofunction:: superannotate.search_team_contributors +.. automethod:: superannotate.SAClient.get_team_metadata +.. automethod:: superannotate.SAClient.get_integrations +.. automethod:: superannotate.SAClient.invite_contributors_to_team +.. automethod:: superannotate.SAClient.search_team_contributors ---------- Neural Network _______________ -.. autofunction:: superannotate.download_model -.. autofunction:: superannotate.run_prediction -.. autofunction:: superannotate.search_models +.. automethod:: superannotate.SAClient.download_model +.. automethod:: superannotate.SAClient.run_prediction +.. automethod:: superannotate.SAClient.search_models ---------- @@ -196,7 +201,7 @@ Export metadata example: Integration metadata -_______________ +______________________ Integration metadata example: @@ -383,8 +388,8 @@ Working with annotations ________________________ .. _ref_aggregate_annotations_as_df: -.. autofunction:: superannotate.validate_annotations -.. autofunction:: superannotate.aggregate_annotations_as_df +.. automethod:: superannotate.SAClient.validate_annotations +.. automethod:: superannotate.SAClient.aggregate_annotations_as_df ---------- @@ -398,5 +403,5 @@ _____________________________________________________________ Utility functions -------------------------------- -.. autofunction:: superannotate.consensus -.. autofunction:: superannotate.benchmark \ No newline at end of file +.. autofunction:: superannotate.SAClient.consensus +.. autofunction:: superannotate.SAClient.benchmark \ No newline at end of file diff --git a/docs/source/tutorial.sdk.rst b/docs/source/tutorial.sdk.rst index ebb52279b..ee66d8880 100644 --- a/docs/source/tutorial.sdk.rst +++ b/docs/source/tutorial.sdk.rst @@ -77,14 +77,14 @@ Include the package in your Python code: .. code-block:: python - import superannotate as sa + from superannotate import SAClient SDK is ready to be used if default location config file was created using the :ref:`CLI init `. Otherwise to authenticate SDK with the :ref:`custom config file `: .. 
code-block:: python - sa.init("") + sa = SAClient(config_path="") Creating a project ____________________________ @@ -273,80 +273,81 @@ You can find more information annotation format conversion :ref:`here =v1.0.43dev5 +superannotate_schemas>=v1.0.45dev1 diff --git a/src/superannotate/__init__.py b/src/superannotate/__init__.py index b62c99f3a..04b84f412 100644 --- a/src/superannotate/__init__.py +++ b/src/superannotate/__init__.py @@ -1,215 +1,46 @@ -import logging.config import os import sys -import requests -from packaging.version import parse -from superannotate.lib import core as constances -from superannotate.lib import get_default_controller -from superannotate.lib.app.analytics.class_analytics import class_distribution -from superannotate.lib.app.exceptions import AppException -from superannotate.lib.app.input_converters.conversion import convert_json_version -from superannotate.lib.app.input_converters.conversion import convert_project_type -from superannotate.lib.app.input_converters.conversion import export_annotation -from superannotate.lib.app.input_converters.conversion import import_annotation -from superannotate.lib.app.interface.sdk_interface import add_annotation_bbox_to_image -from superannotate.lib.app.interface.sdk_interface import ( - add_annotation_comment_to_image, -) -from superannotate.lib.app.interface.sdk_interface import add_annotation_point_to_image -from superannotate.lib.app.interface.sdk_interface import add_contributors_to_project -from superannotate.lib.app.interface.sdk_interface import aggregate_annotations_as_df -from superannotate.lib.app.interface.sdk_interface import assign_folder -from superannotate.lib.app.interface.sdk_interface import assign_images -from superannotate.lib.app.interface.sdk_interface import attach_items -from superannotate.lib.app.interface.sdk_interface import ( - attach_items_from_integrated_storage, -) -from superannotate.lib.app.interface.sdk_interface import benchmark -from superannotate.lib.app.interface.sdk_interface import clone_project -from superannotate.lib.app.interface.sdk_interface import consensus -from superannotate.lib.app.interface.sdk_interface import copy_image -from superannotate.lib.app.interface.sdk_interface import copy_items -from superannotate.lib.app.interface.sdk_interface import create_annotation_class -from superannotate.lib.app.interface.sdk_interface import ( - create_annotation_classes_from_classes_json, -) -from superannotate.lib.app.interface.sdk_interface import create_folder -from superannotate.lib.app.interface.sdk_interface import create_project -from superannotate.lib.app.interface.sdk_interface import create_project_from_metadata -from superannotate.lib.app.interface.sdk_interface import delete_annotation_class -from superannotate.lib.app.interface.sdk_interface import delete_annotations -from superannotate.lib.app.interface.sdk_interface import delete_folders -from superannotate.lib.app.interface.sdk_interface import delete_images -from superannotate.lib.app.interface.sdk_interface import delete_project -from superannotate.lib.app.interface.sdk_interface import ( - download_annotation_classes_json, -) -from superannotate.lib.app.interface.sdk_interface import download_export -from superannotate.lib.app.interface.sdk_interface import download_image -from superannotate.lib.app.interface.sdk_interface import download_image_annotations -from superannotate.lib.app.interface.sdk_interface import download_model -from superannotate.lib.app.interface.sdk_interface import get_annotations -from 
superannotate.lib.app.interface.sdk_interface import get_annotations_per_frame -from superannotate.lib.app.interface.sdk_interface import get_exports -from superannotate.lib.app.interface.sdk_interface import get_folder_metadata -from superannotate.lib.app.interface.sdk_interface import get_integrations -from superannotate.lib.app.interface.sdk_interface import get_item_metadata -from superannotate.lib.app.interface.sdk_interface import get_project_image_count -from superannotate.lib.app.interface.sdk_interface import get_project_metadata -from superannotate.lib.app.interface.sdk_interface import get_project_settings -from superannotate.lib.app.interface.sdk_interface import get_project_workflow -from superannotate.lib.app.interface.sdk_interface import get_team_metadata -from superannotate.lib.app.interface.sdk_interface import init -from superannotate.lib.app.interface.sdk_interface import invite_contributors_to_team -from superannotate.lib.app.interface.sdk_interface import move_items -from superannotate.lib.app.interface.sdk_interface import pin_image -from superannotate.lib.app.interface.sdk_interface import prepare_export -from superannotate.lib.app.interface.sdk_interface import query -from superannotate.lib.app.interface.sdk_interface import rename_project -from superannotate.lib.app.interface.sdk_interface import run_prediction -from superannotate.lib.app.interface.sdk_interface import search_annotation_classes -from superannotate.lib.app.interface.sdk_interface import search_folders -from superannotate.lib.app.interface.sdk_interface import search_items -from superannotate.lib.app.interface.sdk_interface import search_models -from superannotate.lib.app.interface.sdk_interface import search_projects -from superannotate.lib.app.interface.sdk_interface import search_team_contributors -from superannotate.lib.app.interface.sdk_interface import set_annotation_statuses -from superannotate.lib.app.interface.sdk_interface import set_auth_token -from superannotate.lib.app.interface.sdk_interface import set_image_annotation_status -from superannotate.lib.app.interface.sdk_interface import set_images_annotation_statuses -from superannotate.lib.app.interface.sdk_interface import ( - set_project_default_image_quality_in_editor, -) -from superannotate.lib.app.interface.sdk_interface import set_project_workflow -from superannotate.lib.app.interface.sdk_interface import unassign_folder -from superannotate.lib.app.interface.sdk_interface import unassign_images -from superannotate.lib.app.interface.sdk_interface import ( - upload_annotations_from_folder_to_project, -) -from superannotate.lib.app.interface.sdk_interface import upload_image_annotations -from superannotate.lib.app.interface.sdk_interface import upload_image_to_project -from superannotate.lib.app.interface.sdk_interface import ( - upload_images_from_folder_to_project, -) -from superannotate.lib.app.interface.sdk_interface import upload_images_to_project -from superannotate.lib.app.interface.sdk_interface import ( - upload_preannotations_from_folder_to_project, -) -from superannotate.lib.app.interface.sdk_interface import upload_priority_scores -from superannotate.lib.app.interface.sdk_interface import upload_video_to_project -from superannotate.lib.app.interface.sdk_interface import ( - upload_videos_from_folder_to_project, -) -from superannotate.lib.app.interface.sdk_interface import validate_annotations -from superannotate.logger import get_default_logger -from superannotate.version import __version__ - 
+sys.path.append(os.path.split(os.path.realpath(__file__))[0]) -controller = get_default_controller() +import logging.config # noqa +import requests # noqa +from packaging.version import parse # noqa +from superannotate.lib.app.input_converters import convert_json_version # noqa +from superannotate.lib.app.input_converters import convert_project_type # noqa +from superannotate.lib.app.analytics.class_analytics import class_distribution # noqa +from superannotate.lib.app.exceptions import AppException # noqa +from superannotate.lib.app.input_converters import convert_json_version # noqa +from superannotate.lib.app.input_converters import convert_project_type # noqa +from superannotate.lib.app.input_converters import export_annotation # noqa +from superannotate.lib.app.input_converters import import_annotation # noqa +from superannotate.lib.app.interface.sdk_interface import SAClient # noqa +from superannotate.lib.core import PACKAGE_VERSION_INFO_MESSAGE # noqa +from superannotate.lib.core import PACKAGE_VERSION_MAJOR_UPGRADE # noqa +from superannotate.lib.core import PACKAGE_VERSION_UPGRADE # noqa +from superannotate.logger import get_default_logger # noqa +from superannotate.version import __version__ # noqa +import superannotate.lib.core.enums as enums # noqa +SESSIONS = {} __all__ = [ "__version__", - "controller", - "constances", + "SAClient", # Utils + "enums", "AppException", - "validate_annotations", - # - "init", - "set_auth_token", # analytics "class_distribution", - "aggregate_annotations_as_df", - "get_exports", - # annotations - "get_annotations", - "get_annotations_per_frame", - # integrations - "get_integrations", - "attach_items_from_integrated_storage", # converters "convert_json_version", "import_annotation", "export_annotation", "convert_project_type", - # Teams Section - "get_team_metadata", - "search_team_contributors", - # Projects Section - "create_project_from_metadata", - "get_project_settings", - "get_project_metadata", - "get_project_workflow", - "set_project_workflow", - "search_projects", - "create_project", - "clone_project", - "delete_project", - "rename_project", - "upload_priority_scores", - # Images Section - "copy_image", - # Folders Section - "create_folder", - "get_folder_metadata", - "delete_folders", - "search_folders", - "assign_folder", - "unassign_folder", - # Items Section - "get_item_metadata", - "search_items", - "query", - "attach_items", - "copy_items", - "move_items", - "set_annotation_statuses", - # Image Section - "delete_images", - "download_image", - "pin_image", - "get_project_image_count", - "assign_images", - "unassign_images", - "download_image_annotations", - "delete_annotations", - "upload_image_to_project", - "upload_image_annotations", - "upload_images_from_folder_to_project", - # Video Section - "upload_videos_from_folder_to_project", - # Annotation Section - "create_annotation_class", - "delete_annotation_class", - "prepare_export", - "download_export", - "set_images_annotation_statuses", - "add_annotation_bbox_to_image", - "add_annotation_point_to_image", - "add_annotation_comment_to_image", - "search_annotation_classes", - "create_annotation_classes_from_classes_json", - "upload_annotations_from_folder_to_project", - "upload_preannotations_from_folder_to_project", - "download_annotation_classes_json", - "set_project_default_image_quality_in_editor", - "run_prediction", - "search_models", - "download_model", - "set_image_annotation_status", - "benchmark", - "consensus", - "upload_video_to_project", - "upload_images_to_project", - 
"add_contributors_to_project", - "invite_contributors_to_team", ] __author__ = "Superannotate" -sys.path.append(os.path.split(os.path.realpath(__file__))[0]) logging.getLogger("botocore").setLevel(logging.CRITICAL) logger = get_default_logger() @@ -217,7 +48,7 @@ def log_version_info(): local_version = parse(__version__) if local_version.is_prerelease: - logger.info(constances.PACKAGE_VERSION_INFO_MESSAGE.format(__version__)) + logger.info(PACKAGE_VERSION_INFO_MESSAGE.format(__version__)) req = requests.get("https://pypi.python.org/pypi/superannotate/json") if req.ok: releases = req.json().get("releases", []) @@ -228,14 +59,10 @@ def log_version_info(): pip_version = max(pip_version, ver) if pip_version.major > local_version.major: logger.warning( - constances.PACKAGE_VERSION_MAJOR_UPGRADE.format( - local_version, pip_version - ) + PACKAGE_VERSION_MAJOR_UPGRADE.format(local_version, pip_version) ) elif pip_version > local_version: - logger.warning( - constances.PACKAGE_VERSION_UPGRADE.format(local_version, pip_version) - ) + logger.warning(PACKAGE_VERSION_UPGRADE.format(local_version, pip_version)) if not os.environ.get("SA_VERSION_CHECK", "True").lower() == "false": diff --git a/src/superannotate/lib/__init__.py b/src/superannotate/lib/__init__.py index 42f7f7243..e69de29bb 100644 --- a/src/superannotate/lib/__init__.py +++ b/src/superannotate/lib/__init__.py @@ -1,12 +0,0 @@ -import os -import sys - - -sys.path.append(os.path.dirname(os.path.abspath(__file__))) -sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - - -def get_default_controller(): - from lib.infrastructure.controller import Controller - - return Controller.get_default() diff --git a/src/superannotate/lib/app/analytics/class_analytics.py b/src/superannotate/lib/app/analytics/class_analytics.py index 36a50bb45..712e9da58 100644 --- a/src/superannotate/lib/app/analytics/class_analytics.py +++ b/src/superannotate/lib/app/analytics/class_analytics.py @@ -2,7 +2,7 @@ import pandas as pd import plotly.express as px -from lib.app.mixp.decorators import Trackable +from lib.app.interface.base_interface import Tracker from superannotate.lib.app.exceptions import AppException from superannotate.lib.core import DEPRICATED_DOCUMENT_VIDEO_MESSAGE from superannotate.logger import get_default_logger @@ -12,7 +12,7 @@ logger = get_default_logger() -@Trackable +@Tracker def class_distribution(export_root, project_names, visualize=False): """Aggregate distribution of classes across multiple projects. @@ -60,7 +60,11 @@ def class_distribution(export_root, project_names, visualize=False): df = df.sort_values(["count"], ascending=False) if visualize: - fig = px.bar(df, x="className", y="count",) + fig = px.bar( + df, + x="className", + y="count", + ) fig.update_traces(hovertemplate="%{x}: %{y}") fig.update_yaxes(title_text="Instance Count") fig.update_xaxes(title_text="") diff --git a/src/superannotate/lib/app/common.py b/src/superannotate/lib/app/common.py index de05b04f6..23a9b9d45 100644 --- a/src/superannotate/lib/app/common.py +++ b/src/superannotate/lib/app/common.py @@ -8,15 +8,13 @@ def hex_to_rgb(hex_string): - """Converts HEX values to RGB values - """ + """Converts HEX values to RGB values""" h = hex_string.lstrip("#") return tuple(int(h[i : i + 2], 16) for i in (0, 2, 4)) def blue_color_generator(n, hex_values=True): - """ Blue colors generator for SuperAnnotate blue mask. 
- """ + """Blue colors generator for SuperAnnotate blue mask.""" hex_colors = [] for i in range(n + 1): int_color = i * 15 @@ -27,7 +25,9 @@ def blue_color_generator(n, hex_values=True): hex_color = ( "#" + "{:02x}".format(bgr_color[2]) - + "{:02x}".format(bgr_color[1],) + + "{:02x}".format( + bgr_color[1], + ) + "{:02x}".format(bgr_color[0]) ) if hex_values: diff --git a/src/superannotate/lib/app/input_converters/__init__.py b/src/superannotate/lib/app/input_converters/__init__.py index e69de29bb..1acd1e490 100644 --- a/src/superannotate/lib/app/input_converters/__init__.py +++ b/src/superannotate/lib/app/input_converters/__init__.py @@ -0,0 +1,12 @@ +from .conversion import convert_json_version +from .conversion import convert_project_type +from .conversion import export_annotation +from .conversion import import_annotation + + +__all__ = [ + "convert_json_version", + "convert_project_type", + "export_annotation", + "import_annotation", +] diff --git a/src/superannotate/lib/app/input_converters/conversion.py b/src/superannotate/lib/app/input_converters/conversion.py index f9d1a4199..812714089 100644 --- a/src/superannotate/lib/app/input_converters/conversion.py +++ b/src/superannotate/lib/app/input_converters/conversion.py @@ -5,7 +5,7 @@ from pathlib import Path from lib.app.exceptions import AppException -from lib.app.mixp.decorators import Trackable +from lib.app.interface.base_interface import Tracker from lib.core import DEPRICATED_DOCUMENT_VIDEO_MESSAGE from lib.core import LIMITED_FUNCTIONS from lib.core.enums import ProjectType @@ -135,7 +135,7 @@ def _passes_converter_sanity(args, direction): ) -@Trackable +@Tracker def export_annotation( input_dir, output_dir, @@ -145,41 +145,41 @@ def export_annotation( task="object_detection", ): """ - Converts SuperAnnotate annotation format to the other annotation formats. Currently available (project_type, task) combinations for converter - presented below: - - ============== ====================== - From SA to COCO - -------------------------------------- - project_type task - ============== ====================== - Pixel panoptic_segmentation - Pixel instance_segmentation - Vector instance_segmentation - Vector object_detection - Vector keypoint_detection - ============== ====================== - - :param input_dir: Path to the dataset folder that you want to convert. - :type input_dir: Pathlike(str or Path) - :param output_dir: Path to the folder, where you want to have converted dataset. - :type output_dir: Pathlike(str or Path) - :param dataset_format: One of the formats that are possible to convert. Available candidates are: ["COCO"] - :type dataset_format: str - :param dataset_name: Will be used to create json file in the output_dir. - :type dataset_name: str - :param project_type: SuperAnnotate project type is either 'Vector' or 'Pixel' (Default: 'Vector') - 'Vector' project creates ___objects.json for each image. - 'Pixel' project creates ___pixel.jsons and ___save.png annotation mask for each image. - :type project_type: str - :param task: Task can be one of the following: ['panoptic_segmentation', 'instance_segmentation', - 'keypoint_detection', 'object_detection']. (Default: "object_detection"). - 'keypoint_detection' can be used to converts keypoints from/to available annotation format. - 'panoptic_segmentation' will use panoptic mask for each image to generate bluemask for SuperAnnotate annotation format and use bluemask to generate panoptic mask for invert conversion. Panoptic masks should be in the input folder. 
- 'instance_segmentation' 'Pixel' project_type converts instance masks and 'Vector' project_type generates bounding boxes and polygons from instance masks. Masks should be in the input folder if it is 'Pixel' project_type. - 'object_detection' converts objects from/to available annotation format - :type task: str - """ + Converts SuperAnnotate annotation format to the other annotation formats. Currently available (project_type, task) combinations for converter + presented below: + + ============== ====================== + From SA to COCO + -------------------------------------- + project_type task + ============== ====================== + Pixel panoptic_segmentation + Pixel instance_segmentation + Vector instance_segmentation + Vector object_detection + Vector keypoint_detection + ============== ====================== + + :param input_dir: Path to the dataset folder that you want to convert. + :type input_dir: Pathlike(str or Path) + :param output_dir: Path to the folder, where you want to have converted dataset. + :type output_dir: Pathlike(str or Path) + :param dataset_format: One of the formats that are possible to convert. Available candidates are: ["COCO"] + :type dataset_format: str + :param dataset_name: Will be used to create json file in the output_dir. + :type dataset_name: str + :param project_type: SuperAnnotate project type is either 'Vector' or 'Pixel' (Default: 'Vector') + 'Vector' project creates ___objects.json for each image. + 'Pixel' project creates ___pixel.jsons and ___save.png annotation mask for each image. + :type project_type: str + :param task: Task can be one of the following: ['panoptic_segmentation', 'instance_segmentation', + 'keypoint_detection', 'object_detection']. (Default: "object_detection"). + 'keypoint_detection' can be used to converts keypoints from/to available annotation format. + 'panoptic_segmentation' will use panoptic mask for each image to generate bluemask for SuperAnnotate annotation format and use bluemask to generate panoptic mask for invert conversion. Panoptic masks should be in the input folder. + 'instance_segmentation' 'Pixel' project_type converts instance masks and 'Vector' project_type generates bounding boxes and polygons from instance masks. Masks should be in the input folder if it is 'Pixel' project_type. + 'object_detection' converts objects from/to available annotation format + :type task: str + """ if project_type in [ ProjectType.VIDEO.name, @@ -223,7 +223,7 @@ def export_annotation( export_from_sa(args) -@Trackable +@Tracker def import_annotation( input_dir, output_dir, @@ -407,9 +407,9 @@ def import_annotation( import_to_sa(args) -@Trackable +@Tracker def convert_project_type(input_dir, output_dir): - """ Converts SuperAnnotate 'Vector' project type to 'Pixel' or reverse. + """Converts SuperAnnotate 'Vector' project type to 'Pixel' or reverse. :param input_dir: Path to the dataset folder that you want to convert. :type input_dir: Pathlike(str or Path) @@ -435,7 +435,7 @@ def convert_project_type(input_dir, output_dir): sa_convert_project_type(input_dir, output_dir) -@Trackable +@Tracker def convert_json_version(input_dir, output_dir, version=2): """ Converts SuperAnnotate JSON versions. Newest JSON version is 2. 
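The converter entry points in the hunk above (export_annotation, import_annotation, convert_project_type, convert_json_version) remain module-level functions after this change; only the tracking decorator moves from Trackable to Tracker. A minimal usage sketch based on the export_annotation docstring above — the directory and dataset names are placeholders:

.. code-block:: python

    from superannotate import export_annotation

    # Convert a SuperAnnotate "Vector" project to COCO object-detection format.
    # "sa_project_export" and "coco_output" are placeholder directories.
    export_annotation(
        "sa_project_export",
        "coco_output",
        "COCO",
        "my_dataset",
        project_type="Vector",
        task="object_detection",
    )
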
diff --git a/src/superannotate/lib/app/input_converters/converters/coco_converters/coco_converter.py b/src/superannotate/lib/app/input_converters/converters/coco_converters/coco_converter.py index 4c5834986..81fa39461 100644 --- a/src/superannotate/lib/app/input_converters/converters/coco_converters/coco_converter.py +++ b/src/superannotate/lib/app/input_converters/converters/coco_converters/coco_converter.py @@ -137,9 +137,9 @@ def convert_from_old_sa_to_new(self, old_json_data, project_type): def _parse_json_into_common_format(self, sa_annotation_json, fpath): """ - If the annotation format ever changes this function will handle it and - return something optimal for the converters. Additionally, if anything - important is absent from the current json, this function fills it. + If the annotation format ever changes this function will handle it and + return something optimal for the converters. Additionally, if anything + important is absent from the current json, this function fills it. """ if isinstance(sa_annotation_json, list): sa_annotation_json = self.convert_from_old_sa_to_new( diff --git a/src/superannotate/lib/app/interface/base_interface.py b/src/superannotate/lib/app/interface/base_interface.py index ba9fc8e2b..37ab6fa55 100644 --- a/src/superannotate/lib/app/interface/base_interface.py +++ b/src/superannotate/lib/app/interface/base_interface.py @@ -1,14 +1,188 @@ -from lib import get_default_controller +import functools +import os +import sys +from inspect import signature +from pathlib import Path +from types import FunctionType +from typing import Iterable +from typing import Sized +from typing import Tuple + +import lib.core as constants +from lib.app.helpers import extract_project_folder +from lib.app.interface.types import validate_arguments +from lib.core import CONFIG +from lib.core.exceptions import AppException +from lib.infrastructure.controller import Controller from lib.infrastructure.repositories import ConfigRepository +from mixpanel import Mixpanel +from version import __version__ class BaseInterfaceFacade: - def __init__(self): - self._config_path = None - self._controller = get_default_controller() + REGISTRY = [] + + def __init__(self, token: str = None, config_path: str = None): + version = os.environ.get("SA_VERSION", "v1") + _token, _config_path = None, None + _host = os.environ.get("SA_URL", constants.BACKEND_URL) + _ssl_verify = bool(os.environ.get("SA_SSL", True)) + if token: + _token = Controller.validate_token(token=token) + elif config_path: + _token, _host, _ssl_verify = self._retrieve_configs(config_path) + else: + _token = os.environ.get("SA_TOKEN") + if not _token: + _token, _host, _ssl_verify = self._retrieve_configs( + constants.CONFIG_PATH + ) + self._token, self._host = _token, _host + self.controller = Controller(_token, _host, _ssl_verify, version) + BaseInterfaceFacade.REGISTRY.append(self) + + @staticmethod + def _retrieve_configs(path) -> Tuple[str, str, str]: + config_path = os.path.expanduser(str(path)) + if not Path(config_path).is_file() or not os.access(config_path, os.R_OK): + raise AppException( + f"SuperAnnotate config file {str(config_path)} not found." 
+ ) + config_repo = ConfigRepository(config_path) + return ( + Controller.validate_token(config_repo.get_one("token").value), + config_repo.get_one("main_endpoint").value, + config_repo.get_one("ssl_verify").value, + ) + + @property + def host(self): + return self._host @property - def controller(self): - if not ConfigRepository().get_one("token"): - raise Exception("Config does not exists!") - return self._controller + def token(self): + return self._token + + +class Tracker: + def get_mp_instance(self) -> Mixpanel: + client = self.get_client() + mp_token = "ca95ed96f80e8ec3be791e2d3097cf51" + if client: + if client.host != constants.BACKEND_URL: + mp_token = "e741d4863e7e05b1a45833d01865ef0d" + return Mixpanel(mp_token) + + @staticmethod + def get_default_payload(team_name, user_id): + return { + "SDK": True, + "Team": team_name, + "Team Owner": user_id, + "Version": __version__, + } + + def __init__(self, function): + self.function = function + self._client = None + functools.update_wrapper(self, function) + + def get_client(self): + if not self._client: + if BaseInterfaceFacade.REGISTRY: + return BaseInterfaceFacade.REGISTRY[-1] + else: + from lib.app.interface.sdk_interface import SAClient + + try: + return SAClient() + except Exception: + pass + elif hasattr(self._client, "controller"): + return self._client + + @staticmethod + def extract_arguments(function, *args, **kwargs) -> dict: + bound_arguments = signature(function).bind(*args, **kwargs) + bound_arguments.apply_defaults() + return dict(bound_arguments.arguments) + + @staticmethod + def default_parser(function_name: str, kwargs: dict) -> tuple: + properties = {} + for key, value in kwargs.items(): + if key == "self": + continue + elif value is None: + properties[key] = value + elif key == "project": + properties["project_name"], folder_name = extract_project_folder(value) + if folder_name: + properties["folder_name"] = folder_name + elif isinstance(value, (str, int, float, bool)): + properties[key] = value + elif isinstance(value, dict): + properties[key] = list(value.keys()) + elif isinstance(value, Sized): + properties[key] = len(value) + elif isinstance(value, Iterable): + properties[key] = "N/A" + else: + properties[key] = str(value) + return function_name, properties + + def _track(self, user_id: str, event_name: str, data: dict): + if "pytest" not in sys.modules: + self.get_mp_instance().track(user_id, event_name, data) + + def _track_method(self, args, kwargs, success: bool): + try: + client = self.get_client() + if not client: + return + function_name = self.function.__name__ if self.function else "" + arguments = self.extract_arguments(self.function, *args, **kwargs) + event_name, properties = self.default_parser(function_name, arguments) + user_id = client.controller.team_data.creator_id + team_name = client.controller.team_data.name + + properties["Success"] = success + default = self.get_default_payload(team_name=team_name, user_id=user_id) + self._track( + user_id, + event_name, + {**default, **properties, **CONFIG.get_current_session().data}, + ) + except BaseException: + pass + + def __get__(self, obj, owner=None): + if obj is not None: + self._client = obj + tmp = functools.partial(self.__call__, obj) + functools.update_wrapper(tmp, self.function) + return tmp + return self + + def __call__(self, *args, **kwargs): + success = True + try: + result = self.function(*args, **kwargs) + except Exception as e: + success = False + raise e + else: + return result + finally: + self._track_method(args=args, kwargs=kwargs, 
success=success) + + +class TrackableMeta(type): + def __new__(mcs, name, bases, attrs): + for attr_name, attr_value in attrs.items(): + if isinstance( + attr_value, FunctionType + ) and not attr_value.__name__.startswith("_"): + attrs[attr_name] = Tracker(validate_arguments(attr_value)) + tmp = super().__new__(mcs, name, bases, attrs) + return tmp diff --git a/src/superannotate/lib/app/interface/cli_interface.py b/src/superannotate/lib/app/interface/cli_interface.py index 570d49b96..54f6f90e0 100644 --- a/src/superannotate/lib/app/interface/cli_interface.py +++ b/src/superannotate/lib/app/interface/cli_interface.py @@ -8,20 +8,12 @@ from lib import __file__ as lib_path from lib.app.helpers import split_project_path from lib.app.input_converters.conversion import import_annotation -from lib.app.interface.base_interface import BaseInterfaceFacade -from lib.app.interface.sdk_interface import attach_items -from lib.app.interface.sdk_interface import create_folder -from lib.app.interface.sdk_interface import create_project -from lib.app.interface.sdk_interface import upload_annotations_from_folder_to_project -from lib.app.interface.sdk_interface import upload_images_from_folder_to_project -from lib.app.interface.sdk_interface import upload_preannotations_from_folder_to_project -from lib.app.interface.sdk_interface import upload_videos_from_folder_to_project +from lib.app.interface.sdk_interface import SAClient from lib.core.entities import ConfigEntity -from lib.infrastructure.controller import Controller from lib.infrastructure.repositories import ConfigRepository -class CLIFacade(BaseInterfaceFacade): +class CLIFacade: """ With SuperAnnotate CLI, basic tasks can be accomplished using shell commands: superannotatecli <--arg1 val1> <--arg2 val2> [--optional_arg3 val3] [--optional_arg4] ... 
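The CLI commands in the hunks below construct a bare SAClient() per call. That works because BaseInterfaceFacade.__init__ above resolves credentials in order: an explicit token, an explicit config_path, the SA_TOKEN environment variable, and finally the default $HOME/.superannotate/config.json. A hedged sketch of that resolution from the caller's side — the token values are placeholders:

.. code-block:: python

    import os

    from superannotate import SAClient

    # Any one of these is sufficient; the SA_URL and SA_SSL environment
    # variables can additionally override the backend host and SSL verification.
    sa = SAClient(token="<team token>")                        # explicit token
    sa = SAClient(config_path="~/.superannotate/config.json")  # explicit config file
    os.environ["SA_TOKEN"] = "<team token>"                    # environment fallback
    sa = SAClient()                                            # default config path last
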
@@ -66,13 +58,13 @@ def create_project(self, name: str, description: str, type: str): """ To create a new project """ - create_project(name, description, type) + SAClient().create_project(name, description, type) def create_folder(self, project: str, name: str): """ To create a new folder """ - create_folder(project, name) + SAClient().create_folder(project, name) sys.exit(0) def upload_images( @@ -95,7 +87,7 @@ def upload_images( """ if not isinstance(extensions, list): extensions = extensions.split(",") - upload_images_from_folder_to_project( + SAClient().upload_images_from_folder_to_project( project, folder_path=folder, extensions=extensions, @@ -116,16 +108,21 @@ def export_project( ): project_name, folder_name = split_project_path(project) folders = None + if not annotation_statuses: + annotation_statuses = [] if folder_name: folders = [folder_name] - export_res = Controller.get_default().prepare_export( - project_name, folders, include_fuse, False, annotation_statuses + export_res = SAClient().prepare_export( + project=project_name, + folder_names=folders, + include_fuse=include_fuse, + annotation_statuses=annotation_statuses, ) - export_name = export_res.data["name"] + export_name = export_res["name"] - Controller.get_default().download_export( - project_name=project_name, - export_name=export_name, + SAClient().download_export( + project=project_name, + export=export_name, folder_path=folder, extract_zip_contents=not disable_extract_zip_contents, to_s3_bucket=False, @@ -180,9 +177,7 @@ def _upload_annotations( project_folder_name = project project_name, folder_name = split_project_path(project) project = ( - Controller.get_default() - .get_project_metadata(project_name=project_name) - .data + SAClient().controller.get_project_metadata(project_name=project_name).data ) if not format: format = "SuperAnnotate" @@ -207,11 +202,11 @@ def _upload_annotations( ) annotations_path = temp_dir if pre: - upload_preannotations_from_folder_to_project( + SAClient().upload_preannotations_from_folder_to_project( project_folder_name, annotations_path ) else: - upload_annotations_from_folder_to_project( + SAClient().upload_annotations_from_folder_to_project( project_folder_name, annotations_path ) sys.exit(0) @@ -226,7 +221,7 @@ def attach_image_urls( To attach image URLs to project use: """ - attach_items( + SAClient().attach_items( project=project, attachments=attachments, annotation_status=annotation_status, @@ -239,7 +234,7 @@ def attach_video_urls( attachments: str, annotation_status: Optional[Any] = "NotStarted", ): - attach_items( + SAClient().attach_items( project=project, attachments=attachments, annotation_status=annotation_status, @@ -250,7 +245,7 @@ def attach_video_urls( def attach_document_urls( project: str, attachments: str, annotation_status: Optional[Any] = "NotStarted" ): - attach_items( + SAClient().attach_items( project=project, attachments=attachments, annotation_status=annotation_status, @@ -279,7 +274,7 @@ def upload_videos( end-time specifies time (in seconds) up to which to extract frames. If it is not specified, then up to end is assumed. 
""" - upload_videos_from_folder_to_project( + SAClient().upload_videos_from_folder_to_project( project=project, folder_path=folder, extensions=extensions, diff --git a/src/superannotate/lib/app/interface/sdk_interface.py b/src/superannotate/lib/app/interface/sdk_interface.py index 03ac5d6dc..af98a7dd2 100644 --- a/src/superannotate/lib/app/interface/sdk_interface.py +++ b/src/superannotate/lib/app/interface/sdk_interface.py @@ -13,13 +13,15 @@ from typing import Union import boto3 -import lib.core as constances +import lib.core as constants from lib.app.annotation_helpers import add_annotation_bbox_to_json from lib.app.annotation_helpers import add_annotation_comment_to_json from lib.app.annotation_helpers import add_annotation_point_to_json from lib.app.helpers import extract_project_folder from lib.app.helpers import get_annotation_paths from lib.app.helpers import get_name_url_duplicated_from_csv +from lib.app.interface.base_interface import BaseInterfaceFacade +from lib.app.interface.base_interface import TrackableMeta from lib.app.interface.types import AnnotationStatuses from lib.app.interface.types import AnnotationType from lib.app.interface.types import AnnotatorRole @@ -32,8 +34,6 @@ from lib.app.interface.types import ProjectStatusEnum from lib.app.interface.types import ProjectTypes from lib.app.interface.types import Setting -from lib.app.interface.types import validate_arguments -from lib.app.mixp.decorators import Trackable from lib.app.serializers import BaseSerializer from lib.app.serializers import FolderSerializer from lib.app.serializers import ProjectSerializer @@ -61,2269 +61,2159 @@ logger = get_default_logger() -@validate_arguments -def init(path_to_config_json: Optional[str] = None, token: str = None): - """ - Initializes and authenticates to SuperAnnotate platform using the config file. - If not initialized then $HOME/.superannotate/config.json - will be used. - - :param path_to_config_json: Location to config JSON file - :type path_to_config_json: str or Path +class SAClient(BaseInterfaceFacade, metaclass=TrackableMeta): + """Create SAClient instance to authorize SDK in a team scope. + In case of no argument has been provided, SA_TOKEN environmental variable + will be checked or $HOME/.superannotate/config.json will be used. 
- :param token: Team token + :param token: team token :type token: str - """ - Controller.set_default(Controller(config_path=path_to_config_json, token=token)) + :param config_path: path to config file + :type config_path: path-like (str or Path) -@validate_arguments -def set_auth_token(token: str): - Controller.get_default().set_token(token) + """ + def __init__( + self, + token: str = None, + config_path: str = None, + ): + super().__init__(token, config_path) -@Trackable -def get_team_metadata(): - """Returns team metadata + def get_team_metadata(self): + """Returns team metadata - :return: team metadata - :rtype: dict - """ - response = Controller.get_default().get_team() - return TeamSerializer(response.data).serialize() - - -@Trackable -@validate_arguments -def search_team_contributors( - email: EmailStr = None, - first_name: NotEmptyStr = None, - last_name: NotEmptyStr = None, - return_metadata: bool = True, -): - """Search for contributors in the team - - :param email: filter by email - :type email: str - :param first_name: filter by first name - :type first_name: str - :param last_name: filter by last name - :type last_name: str - :param return_metadata: return metadata of contributors instead of names - :type return_metadata: bool - - :return: metadata of found users - :rtype: list of dicts - """ + :return: team metadata + :rtype: dict + """ + response = self.controller.get_team() + return TeamSerializer(response.data).serialize() + + def search_team_contributors( + self, + email: EmailStr = None, + first_name: NotEmptyStr = None, + last_name: NotEmptyStr = None, + return_metadata: bool = True, + ): + """Search for contributors in the team + + :param email: filter by email + :type email: str + :param first_name: filter by first name + :type first_name: str + :param last_name: filter by last name + :type last_name: str + :param return_metadata: return metadata of contributors instead of names + :type return_metadata: bool + + :return: metadata of found users + :rtype: list of dicts + """ - contributors = ( - Controller.get_default() - .search_team_contributors( + contributors = self.controller.search_team_contributors( email=email, first_name=first_name, last_name=last_name - ) - .data - ) - if not return_metadata: - return [contributor["email"] for contributor in contributors] - return contributors - - -@Trackable -@validate_arguments -def search_projects( - name: Optional[NotEmptyStr] = None, - return_metadata: bool = False, - include_complete_image_count: bool = False, - status: Optional[Union[ProjectStatusEnum, List[ProjectStatusEnum]]] = None, -): - """ - Project name based case-insensitive search for projects. - If **name** is None, all the projects will be returned. + ).data + + if not return_metadata: + return [contributor["email"] for contributor in contributors] + return contributors + + def search_projects( + self, + name: Optional[NotEmptyStr] = None, + return_metadata: bool = False, + include_complete_image_count: bool = False, + status: Optional[Union[ProjectStatusEnum, List[ProjectStatusEnum]]] = None, + ): + """ + Project name based case-insensitive search for projects. + If **name** is None, all the projects will be returned. 
- :param name: search string - :type name: str + :param name: search string + :type name: str - :param return_metadata: return metadata of projects instead of names - :type return_metadata: bool + :param return_metadata: return metadata of projects instead of names + :type return_metadata: bool - :param include_complete_image_count: return projects that have completed images and include the number of completed images in response. - :type include_complete_image_count: bool + :param include_complete_image_count: return projects that have completed images and include the number of completed images in response. + :type include_complete_image_count: bool - :param status: search projects via project status - :type status: str + :param status: search projects via project status + :type status: str - :return: project names or metadatas - :rtype: list of strs or dicts - """ - statuses = [] - if status: - if isinstance(status, (list, tuple, set)): - statuses = list(status) - else: - statuses = [status] - result = ( - Controller.get_default() - .search_project( + :return: project names or metadatas + :rtype: list of strs or dicts + """ + statuses = [] + if status: + if isinstance(status, (list, tuple, set)): + statuses = list(status) + else: + statuses = [status] + result = self.controller.search_project( name=name, include_complete_image_count=include_complete_image_count, statuses=statuses, - ) - .data - ) - - if return_metadata: - return [ - ProjectSerializer(project).serialize( - exclude={ - "annotation_classes", - "workflows", - "settings", - "contributors", - "classes", - } - ) - for project in result - ] - else: - return [project.name for project in result] + ).data + + if return_metadata: + return [ + ProjectSerializer(project).serialize( + exclude={ + "annotation_classes", + "workflows", + "settings", + "contributors", + "classes", + } + ) + for project in result + ] + else: + return [project.name for project in result] + + def create_project( + self, + project_name: NotEmptyStr, + project_description: NotEmptyStr, + project_type: NotEmptyStr, + settings: List[Setting] = None, + ): + """Create a new project in the team. + :param project_name: the new project's name + :type project_name: str -@Trackable -@validate_arguments -def create_project( - project_name: NotEmptyStr, - project_description: NotEmptyStr, - project_type: NotEmptyStr, - settings: List[Setting] = None, -): - """Create a new project in the team. + :param project_description: the new project's description + :type project_description: str - :param project_name: the new project's name - :type project_name: str + :param project_type: the new project type, Vector or Pixel. + :type project_type: str - :param project_description: the new project's description - :type project_description: str + :param settings: list of settings objects + :type settings: list of dicts - :param project_type: the new project type, Vector or Pixel. 
- :type project_type: str + :return: dict object metadata the new project + :rtype: dict + """ + if settings: + settings = parse_obj_as(List[SettingEntity], settings) + else: + settings = [] + response = self.controller.create_project( + name=project_name, + description=project_description, + project_type=project_type, + settings=settings, + ) + if response.errors: + raise AppException(response.errors) - :param settings: list of settings objects - :type settings: list of dicts + return ProjectSerializer(response.data).serialize() - :return: dict object metadata the new project - :rtype: dict - """ - if settings: - settings = parse_obj_as(List[SettingEntity], settings) - else: - settings = [] - response = Controller.get_default().create_project( - name=project_name, - description=project_description, - project_type=project_type, - settings=settings, - ) - if response.errors: - raise AppException(response.errors) - - return ProjectSerializer(response.data).serialize() - - -@Trackable -@validate_arguments -def create_project_from_metadata(project_metadata: Project): - """ - Create a new project in the team using project metadata object dict. - Mandatory keys in project_metadata are "name", "description" and "type" (Vector or Pixel) - Non-mandatory keys: "workflow", "settings" and "annotation_classes". + def create_project_from_metadata(self, project_metadata: Project): + """Create a new project in the team using project metadata object dict. + Mandatory keys in project_metadata are "name", "description" and "type" (Vector or Pixel) + Non-mandatory keys: "workflow", "settings" and "annotation_classes". - :param project_metadata: project metadata - :type project_metadata: dict + :return: dict object metadata the new project + :rtype: dict + """ + project_metadata = project_metadata.dict() + response = self.controller.create_project( + name=project_metadata["name"], + description=project_metadata.get("description"), + project_type=project_metadata["type"], + settings=parse_obj_as( + List[SettingEntity], project_metadata.get("settings", []) + ), + classes=project_metadata.get("classes", []), + workflows=project_metadata.get("workflows", []), + instructions_link=project_metadata.get("instructions_link"), + ) + if response.errors: + raise AppException(response.errors) + return ProjectSerializer(response.data).serialize() + + def clone_project( + self, + project_name: Union[NotEmptyStr, dict], + from_project: Union[NotEmptyStr, dict], + project_description: Optional[NotEmptyStr] = None, + copy_annotation_classes: Optional[StrictBool] = True, + copy_settings: Optional[StrictBool] = True, + copy_workflow: Optional[StrictBool] = True, + copy_contributors: Optional[StrictBool] = False, + ): + """Create a new project in the team using annotation classes and settings from from_project. + + :param project_name: new project's name + :type project_name: str + :param from_project: the name of the project being used for duplication + :type from_project: str + :param project_description: the new project's description. 
If None, from_project's + description will be used + :type project_description: str + :param copy_annotation_classes: enables copying annotation classes + :type copy_annotation_classes: bool + :param copy_settings: enables copying project settings + :type copy_settings: bool + :param copy_workflow: enables copying project workflow + :type copy_workflow: bool + :param copy_contributors: enables copying project contributors + :type copy_contributors: bool + + :return: dict object metadata of the new project + :rtype: dict + """ + response = self.controller.clone_project( + name=project_name, + from_name=from_project, + project_description=project_description, + copy_annotation_classes=copy_annotation_classes, + copy_settings=copy_settings, + copy_workflow=copy_workflow, + copy_contributors=copy_contributors, + ) + if response.errors: + raise AppException(response.errors) + return ProjectSerializer(response.data).serialize() - :return: dict object metadata the new project - :rtype: dict - """ - project_metadata = project_metadata.dict() - response = Controller.get_default().create_project( - name=project_metadata["name"], - description=project_metadata.get("description"), - project_type=project_metadata["type"], - settings=parse_obj_as( - List[SettingEntity], project_metadata.get("settings", []) - ), - classes=project_metadata.get("classes", []), - workflows=project_metadata.get("workflows", []), - instructions_link=project_metadata.get("instructions_link"), - ) - if response.errors: - raise AppException(response.errors) - return ProjectSerializer(response.data).serialize() - - -@Trackable -@validate_arguments -def clone_project( - project_name: Union[NotEmptyStr, dict], - from_project: Union[NotEmptyStr, dict], - project_description: Optional[NotEmptyStr] = None, - copy_annotation_classes: Optional[StrictBool] = True, - copy_settings: Optional[StrictBool] = True, - copy_workflow: Optional[StrictBool] = True, - copy_contributors: Optional[StrictBool] = False, -): - """Create a new project in the team using annotation classes and settings from from_project. - - :param project_name: new project's name - :type project_name: str - :param from_project: the name of the project being used for duplication - :type from_project: str - :param project_description: the new project's description. If None, from_project's - description will be used - :type project_description: str - :param copy_annotation_classes: enables copying annotation classes - :type copy_annotation_classes: bool - :param copy_settings: enables copying project settings - :type copy_settings: bool - :param copy_workflow: enables copying project workflow - :type copy_workflow: bool - :param copy_contributors: enables copying project contributors - :type copy_contributors: bool - - :return: dict object metadata of the new project - :rtype: dict - """ - response = Controller.get_default().clone_project( - name=project_name, - from_name=from_project, - project_description=project_description, - copy_annotation_classes=copy_annotation_classes, - copy_settings=copy_settings, - copy_workflow=copy_workflow, - copy_contributors=copy_contributors, - ) - if response.errors: - raise AppException(response.errors) - return ProjectSerializer(response.data).serialize() - - -@Trackable -@validate_arguments -def create_folder(project: NotEmptyStr, folder_name: NotEmptyStr): - """Create a new folder in the project. 
- - :param project: project name - :type project: str - :param folder_name: the new folder's name - :type folder_name: str - - :return: dict object metadata the new folder - :rtype: dict - """ + def create_folder(self, project: NotEmptyStr, folder_name: NotEmptyStr): + """Create a new folder in the project. + + :param project: project name + :type project: str + :param folder_name: the new folder's name + :type folder_name: str - res = Controller.get_default().create_folder( - project=project, folder_name=folder_name - ) - if res.data: - folder = res.data - logger.info(f"Folder {folder.name} created in project {project}") - return folder.to_dict() - if res.errors: - raise AppException(res.errors) + :return: dict object metadata the new folder + :rtype: dict + """ + res = self.controller.create_folder(project=project, folder_name=folder_name) + if res.data: + folder = res.data + logger.info(f"Folder {folder.name} created in project {project}") + return folder.to_dict() + if res.errors: + raise AppException(res.errors) -@Trackable -@validate_arguments -def delete_project(project: Union[NotEmptyStr, dict]): - """Deletes the project + def delete_project(self, project: Union[NotEmptyStr, dict]): + """Deletes the project :param project: project name or folder path (e.g., "project1/folder1") :type project: str - """ - name = project - if isinstance(project, dict): - name = project["name"] - Controller.get_default().delete_project(name=name) + """ + name = project + if isinstance(project, dict): + name = project["name"] + self.controller.delete_project(name=name) + def rename_project(self, project: NotEmptyStr, new_name: NotEmptyStr): + """Renames the project -@Trackable -@validate_arguments -def rename_project(project: NotEmptyStr, new_name: NotEmptyStr): - """Renames the project + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param new_name: project's new name + :type new_name: str + """ - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param new_name: project's new name - :type new_name: str - """ + response = self.controller.update_project( + name=project, project_data={"name": new_name} + ) + if response.errors: + raise AppException(response.errors) + logger.info( + "Successfully renamed project %s to %s.", project, response.data.name + ) + return ProjectSerializer(response.data).serialize() - response = Controller.get_default().update_project( - name=project, project_data={"name": new_name} - ) - if response.errors: - raise AppException(response.errors) - logger.info("Successfully renamed project %s to %s.", project, response.data.name) - return ProjectSerializer(response.data).serialize() + def get_folder_metadata(self, project: NotEmptyStr, folder_name: NotEmptyStr): + """Returns folder metadata + :param project: project name + :type project: str + :param folder_name: folder's name + :type folder_name: str -@Trackable -@validate_arguments -def get_folder_metadata(project: NotEmptyStr, folder_name: NotEmptyStr): - """Returns folder metadata + :return: metadata of folder + :rtype: dict + """ + result = self.controller.get_folder( + project_name=project, folder_name=folder_name + ).data + if not result: + raise AppException("Folder not found.") + return FolderSerializer(result).serialize() - :param project: project name - :type project: str - :param folder_name: folder's name - :type folder_name: str + def delete_folders(self, project: NotEmptyStr, folder_names: List[NotEmptyStr]): + """Delete 
folder in project. - :return: metadata of folder - :rtype: dict - """ - result = ( - Controller.get_default() - .get_folder(project_name=project, folder_name=folder_name) - .data - ) - if not result: - raise AppException("Folder not found.") - return FolderSerializer(result).serialize() - - -@Trackable -@validate_arguments -def delete_folders(project: NotEmptyStr, folder_names: List[NotEmptyStr]): - """Delete folder in project. - - :param project: project name - :type project: str - :param folder_names: to be deleted folders' names - :type folder_names: list of strs - """ + :param project: project name + :type project: str + :param folder_names: to be deleted folders' names + :type folder_names: list of strs + """ - res = Controller.get_default().delete_folders( - project_name=project, folder_names=folder_names - ) - if res.errors: - raise AppException(res.errors) - logger.info(f"Folders {folder_names} deleted in project {project}") - - -@Trackable -@validate_arguments -def search_folders( - project: NotEmptyStr, - folder_name: Optional[NotEmptyStr] = None, - return_metadata: Optional[StrictBool] = False, -): - """Folder name based case-insensitive search for folders in project. - - :param project: project name - :type project: str - :param folder_name: the new folder's name - :type folder_name: str. If None, all the folders in the project will be returned. - :param return_metadata: return metadata of folders instead of names - :type return_metadata: bool - - :return: folder names or metadatas - :rtype: list of strs or dicts - """ + res = self.controller.delete_folders( + project_name=project, folder_names=folder_names + ) + if res.errors: + raise AppException(res.errors) + logger.info(f"Folders {folder_names} deleted in project {project}") + + def search_folders( + self, + project: NotEmptyStr, + folder_name: Optional[NotEmptyStr] = None, + return_metadata: Optional[StrictBool] = False, + ): + """Folder name based case-insensitive search for folders in project. - response = Controller.get_default().search_folders( - project_name=project, folder_name=folder_name, include_users=return_metadata - ) - if response.errors: - raise AppException(response.errors) - data = response.data - if return_metadata: - return [FolderSerializer(folder).serialize() for folder in data] - return [folder.name for folder in data] - - -@Trackable -@validate_arguments -def copy_image( - source_project: Union[NotEmptyStr, dict], - image_name: NotEmptyStr, - destination_project: Union[NotEmptyStr, dict], - include_annotations: Optional[StrictBool] = False, - copy_annotation_status: Optional[StrictBool] = False, - copy_pin: Optional[StrictBool] = False, -): - """Copy image to a project. The image's project is the same as destination - project then the name will be changed to _()., - where is the next available number deducted from project image list. 
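Assuming the same `sa` client, a short sketch of the folder and project helpers converted in the surrounding hunks (create_folder, delete_folders, rename_project); all names are hypothetical:

sa.create_folder("Example Vector Project", "batch_1")
sa.create_folder("Example Vector Project", "batch_2")

# delete_folders always takes a list of folder names
sa.delete_folders("Example Vector Project", ["batch_2"])

# rename_project returns the serialized metadata of the renamed project
renamed = sa.rename_project("Example Vector Project", "Example Vector Project v2")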
- - :param source_project: project name plus optional subfolder in the project (e.g., "project1/folder1") or - metadata of the project of source project - :type source_project: str or dict - :param image_name: image name - :type image_name: str - :param destination_project: project name or metadata of the project of destination project - :type destination_project: str or dict - :param include_annotations: enables annotations copy - :type include_annotations: bool - :param copy_annotation_status: enables annotations status copy - :type copy_annotation_status: bool - :param copy_pin: enables image pin status copy - :type copy_pin: bool - """ - source_project_name, source_folder_name = extract_project_folder(source_project) - - destination_project, destination_folder = extract_project_folder( - destination_project - ) - source_project_metadata = ( - Controller.get_default().get_project_metadata(source_project_name).data - ) - destination_project_metadata = ( - Controller.get_default().get_project_metadata(destination_project).data - ) - - if destination_project_metadata["project"].type in [ - constances.ProjectType.VIDEO.value, - constances.ProjectType.DOCUMENT.value, - ] or source_project_metadata["project"].type in [ - constances.ProjectType.VIDEO.value, - constances.ProjectType.DOCUMENT.value, - ]: - raise AppException(LIMITED_FUNCTIONS[source_project_metadata["project"].type]) - - response = Controller.get_default().copy_image( - from_project_name=source_project_name, - from_folder_name=source_folder_name, - to_project_name=destination_project, - to_folder_name=destination_folder, - image_name=image_name, - copy_annotation_status=copy_annotation_status, - ) - if response.errors: - raise AppException(response.errors) - - if include_annotations: - Controller.get_default().copy_image_annotation_classes( + :param project: project name + :type project: str + :param folder_name: the new folder's name + :type folder_name: str. If None, all the folders in the project will be returned. + :param return_metadata: return metadata of folders instead of names + :type return_metadata: bool + + :return: folder names or metadatas + :rtype: list of strs or dicts + """ + + response = self.controller.search_folders( + project_name=project, folder_name=folder_name, include_users=return_metadata + ) + if response.errors: + raise AppException(response.errors) + data = response.data + if return_metadata: + return [FolderSerializer(folder).serialize() for folder in data] + return [folder.name for folder in data] + + def copy_image( + self, + source_project: Union[NotEmptyStr, dict], + image_name: NotEmptyStr, + destination_project: Union[NotEmptyStr, dict], + include_annotations: Optional[StrictBool] = False, + copy_annotation_status: Optional[StrictBool] = False, + copy_pin: Optional[StrictBool] = False, + ): + """Copy image to a project. The image's project is the same as destination + project then the name will be changed to _()., + where is the next available number deducted from project image list. 
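The new SAClient.search_folders completed here supports both a name-based filter and full metadata output; a small sketch with the same assumed `sa` client and hypothetical names:

all_names = sa.search_folders("Example Vector Project")                       # every folder name
matching = sa.search_folders("Example Vector Project", folder_name="batch")   # case-insensitive name search
detailed = sa.search_folders("Example Vector Project", return_metadata=True)  # list of folder metadata dicts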
+ + :param source_project: project name plus optional subfolder in the project (e.g., "project1/folder1") or + metadata of the project of source project + :type source_project: str or dict + :param image_name: image name + :type image_name: str + :param destination_project: project name or metadata of the project of destination project + :type destination_project: str or dict + :param include_annotations: enables annotations copy + :type include_annotations: bool + :param copy_annotation_status: enables annotations status copy + :type copy_annotation_status: bool + :param copy_pin: enables image pin status copy + :type copy_pin: bool + """ + source_project_name, source_folder_name = extract_project_folder(source_project) + + destination_project, destination_folder = extract_project_folder( + destination_project + ) + source_project_metadata = self.controller.get_project_metadata( + source_project_name + ).data + destination_project_metadata = self.controller.get_project_metadata( + destination_project + ).data + + if destination_project_metadata["project"].type in [ + constants.ProjectType.VIDEO.value, + constants.ProjectType.DOCUMENT.value, + ] or source_project_metadata["project"].type in [ + constants.ProjectType.VIDEO.value, + constants.ProjectType.DOCUMENT.value, + ]: + raise AppException( + LIMITED_FUNCTIONS[source_project_metadata["project"].type] + ) + + response = self.controller.copy_image( from_project_name=source_project_name, from_folder_name=source_folder_name, - to_folder_name=destination_folder, to_project_name=destination_project, + to_folder_name=destination_folder, image_name=image_name, + copy_annotation_status=copy_annotation_status, ) - if copy_pin: - Controller.get_default().update_image( - project_name=destination_project, - folder_name=destination_folder, - image_name=image_name, - is_pinned=1, - ) - logger.info( - f"Copied image {source_project}/{image_name}" - f" to {destination_project}/{destination_folder}." 
- ) - - -@Trackable -@validate_arguments -def get_project_metadata( - project: Union[NotEmptyStr, dict], - include_annotation_classes: Optional[StrictBool] = False, - include_settings: Optional[StrictBool] = False, - include_workflow: Optional[StrictBool] = False, - include_contributors: Optional[StrictBool] = False, - include_complete_image_count: Optional[StrictBool] = False, -): - """Returns project metadata - - :param project: project name - :type project: str - :param include_annotation_classes: enables project annotation classes output under - the key "annotation_classes" - :type include_annotation_classes: bool - :param include_settings: enables project settings output under - the key "settings" - :type include_settings: bool - :param include_workflow: enables project workflow output under - the key "workflow" - :type include_workflow: bool - :param include_contributors: enables project contributors output under - the key "contributors" - :type include_contributors: bool - - :param include_complete_image_count: enables project complete image count output under - the key "completed_images_count" - :type include_complete_image_count: bool - - :return: metadata of project - :rtype: dict - """ - project_name, folder_name = extract_project_folder(project) - response = ( - Controller.get_default() - .get_project_metadata( + if response.errors: + raise AppException(response.errors) + + if include_annotations: + self.controller.copy_image_annotation_classes( + from_project_name=source_project_name, + from_folder_name=source_folder_name, + to_folder_name=destination_folder, + to_project_name=destination_project, + image_name=image_name, + ) + if copy_pin: + self.controller.update_image( + project_name=destination_project, + folder_name=destination_folder, + image_name=image_name, + is_pinned=1, + ) + logger.info( + f"Copied image {source_project}/{image_name}" + f" to {destination_project}/{destination_folder}." 
+ ) + + def get_project_metadata( + self, + project: Union[NotEmptyStr, dict], + include_annotation_classes: Optional[StrictBool] = False, + include_settings: Optional[StrictBool] = False, + include_workflow: Optional[StrictBool] = False, + include_contributors: Optional[StrictBool] = False, + include_complete_image_count: Optional[StrictBool] = False, + ): + """Returns project metadata + + :param project: project name + :type project: str + :param include_annotation_classes: enables project annotation classes output under + the key "annotation_classes" + :type include_annotation_classes: bool + :param include_settings: enables project settings output under + the key "settings" + :type include_settings: bool + :param include_workflow: enables project workflow output under + the key "workflow" + :type include_workflow: bool + :param include_contributors: enables project contributors output under + the key "contributors" + :type include_contributors: bool + + :param include_complete_image_count: enables project complete image count output under + the key "completed_images_count" + :type include_complete_image_count: bool + + :return: metadata of project + :rtype: dict + """ + project_name, folder_name = extract_project_folder(project) + response = self.controller.get_project_metadata( project_name, include_annotation_classes, include_settings, include_workflow, include_contributors, include_complete_image_count, - ) - .data - ) + ).data - metadata = ProjectSerializer(response["project"]).serialize() + metadata = ProjectSerializer(response["project"]).serialize() - for elem in "classes", "workflows", "contributors": - if response.get(elem): - metadata[elem] = [ - BaseSerializer(attribute).serialize() for attribute in response[elem] - ] - return metadata + for elem in "classes", "workflows", "contributors": + if response.get(elem): + metadata[elem] = [ + BaseSerializer(attribute).serialize() + for attribute in response[elem] + ] + return metadata + def get_project_settings(self, project: Union[NotEmptyStr, dict]): + """Gets project's settings. -@Trackable -@validate_arguments -def get_project_settings(project: Union[NotEmptyStr, dict]): - """Gets project's settings. + Return value example: [{ "attribute" : "Brightness", "value" : 10, ...},...] - Return value example: [{ "attribute" : "Brightness", "value" : 10, ...},...] + :param project: project name or metadata + :type project: str or dict - :param project: project name or metadata - :type project: str or dict + :return: project settings + :rtype: list of dicts + """ + project_name, folder_name = extract_project_folder(project) + settings = self.controller.get_project_settings(project_name=project_name) + settings = [ + SettingsSerializer(attribute).serialize() for attribute in settings.data + ] + return settings - :return: project settings - :rtype: list of dicts - """ - project_name, folder_name = extract_project_folder(project) - settings = Controller.get_default().get_project_settings(project_name=project_name) - settings = [ - SettingsSerializer(attribute).serialize() for attribute in settings.data - ] - return settings + def get_project_workflow(self, project: Union[str, dict]): + """Gets project's workflow. + Return value example: [{ "step" : , "className" : , "tool" : , ...},...] -@Trackable -@validate_arguments -def get_project_workflow(project: Union[str, dict]): - """Gets project's workflow. 
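To illustrate the image and metadata helpers in this hunk (copy_image, get_project_metadata, get_project_settings), a hedged sketch; the "project/folder" paths and image name are hypothetical, and copy_image applies only to image (Vector/Pixel) projects per the type check above:

# Copy a single image between folders, carrying annotations and pin status along
sa.copy_image(
    "Example Vector Project/batch_1",   # source "project/folder" path
    "image_001.jpg",                    # image name
    "Example Vector Project/review",    # destination project or "project/folder" path
    include_annotations=True,
    copy_pin=True,
)

# Project metadata with optional sections enabled
meta = sa.get_project_metadata(
    "Example Vector Project",
    include_settings=True,
    include_workflow=True,
    include_contributors=True,
)
settings = sa.get_project_settings("Example Vector Project")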
+ :param project: project name or metadata + :type project: str or dict - Return value example: [{ "step" : , "className" : , "tool" : , ...},...] + :return: project workflow + :rtype: list of dicts + """ + project_name, folder_name = extract_project_folder(project) + workflow = self.controller.get_project_workflow(project_name=project_name) + if workflow.errors: + raise AppException(workflow.errors) + return workflow.data - :param project: project name or metadata - :type project: str or dict + def search_annotation_classes( + self, project: Union[NotEmptyStr, dict], name_contains: Optional[str] = None + ): + """Searches annotation classes by name_prefix (case-insensitive) - :return: project workflow - :rtype: list of dicts - """ - project_name, folder_name = extract_project_folder(project) - workflow = Controller.get_default().get_project_workflow(project_name=project_name) - if workflow.errors: - raise AppException(workflow.errors) - return workflow.data - - -@Trackable -@validate_arguments -def search_annotation_classes( - project: Union[NotEmptyStr, dict], name_contains: Optional[str] = None -): - """ - Searches annotation classes by name_prefix (case-insensitive) + :param project: project name + :type project: str + :param name_contains: search string. Returns those classes, + where the given string is found anywhere within its name. If None, all annotation classes will be returned. + :type name_prefix: str - :param project: project name - :type project: str + :return: annotation classes of the project + :rtype: list of dicts + """ + project_name, folder_name = extract_project_folder(project) + classes = self.controller.search_annotation_classes(project_name, name_contains) + classes = [BaseSerializer(attribute).serialize() for attribute in classes.data] + return classes + + def set_project_default_image_quality_in_editor( + self, + project: Union[NotEmptyStr, dict], + image_quality_in_editor: Optional[str], + ): + """Sets project's default image quality in editor setting. - :param name_contains: search string. Returns those classes, - where the given string is found anywhere within its name. If None, all annotation classes will be returned. - :type name_contains: str + :param project: project name or metadata + :type project: str or dict + :param image_quality_in_editor: new setting value, should be "original" or "compressed" + :type image_quality_in_editor: str + """ + project_name, folder_name = extract_project_folder(project) + image_quality_in_editor = ImageQuality.get_value(image_quality_in_editor) - :return: annotation classes of the project - :rtype: list of dicts - """ - project_name, folder_name = extract_project_folder(project) - classes = Controller.get_default().search_annotation_classes( - project_name, name_contains - ) - classes = [BaseSerializer(attribute).serialize() for attribute in classes.data] - return classes - - -@Trackable -@validate_arguments -def set_project_default_image_quality_in_editor( - project: Union[NotEmptyStr, dict], image_quality_in_editor: Optional[str], -): - """Sets project's default image quality in editor setting. 
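A brief sketch of the read-only helpers converted here; `sa` is the assumed SAClient instance and the class-name filter is hypothetical:

workflow = sa.get_project_workflow("Example Vector Project")   # list of workflow step dicts

# name_contains does a case-insensitive substring match; None returns every class
vehicle_classes = sa.search_annotation_classes("Example Vector Project", name_contains="car")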
- - :param project: project name or metadata - :type project: str or dict - :param image_quality_in_editor: new setting value, should be "original" or "compressed" - :type image_quality_in_editor: str - """ - project_name, folder_name = extract_project_folder(project) - image_quality_in_editor = ImageQuality.get_value(image_quality_in_editor) - - response = Controller.get_default().set_project_settings( - project_name=project_name, - new_settings=[{"attribute": "ImageQuality", "value": image_quality_in_editor}], - ) - if response.errors: - raise AppException(response.errors) - return response.data - - -@Trackable -@validate_arguments -def pin_image( - project: Union[NotEmptyStr, dict], image_name: str, pin: Optional[StrictBool] = True -): - """Pins (or unpins) image - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param image_name: image name - :type image_name: str - :param pin: sets to pin if True, else unpins image - :type pin: bool - """ - project_name, folder_name = extract_project_folder(project) - Controller.get_default().update_image( - project_name=project_name, - image_name=image_name, - folder_name=folder_name, - is_pinned=int(pin), - ) - - -@Trackable -@validate_arguments -def set_images_annotation_statuses( - project: Union[NotEmptyStr, dict], - annotation_status: NotEmptyStr, - image_names: Optional[List[NotEmptyStr]] = None, -): - """Sets annotation statuses of images - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param image_names: image names. If None, all the images in the project will be used - :type image_names: list of str - :param annotation_status: annotation status to set, - should be one of NotStarted InProgress QualityCheck Returned Completed Skipped - :type annotation_status: str - """ - warning_msg = ( - "We're deprecating the set_images_annotation_statuses function. Please use set_annotation_statuses instead. " - "Learn more. \n" - "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.set_annotation_statuses" - ) - logger.warning(warning_msg) - warnings.warn(warning_msg, DeprecationWarning) - project_name, folder_name = extract_project_folder(project) - response = Controller.get_default().set_images_annotation_statuses( - project_name, folder_name, image_names, annotation_status - ) - if response.errors: - raise AppException(response.errors) - logger.info("Annotations status of images changed") - - -@Trackable -@validate_arguments -def delete_images( - project: Union[NotEmptyStr, dict], image_names: Optional[List[str]] = None -): - """Delete images in project. - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param image_names: to be deleted images' names. 
If None, all the images will be deleted - :type image_names: list of strs - """ - project_name, folder_name = extract_project_folder(project) - - if not isinstance(image_names, list) and image_names is not None: - raise AppException("Image_names should be a list of str or None.") - - response = Controller.get_default().delete_images( - project_name=project_name, folder_name=folder_name, image_names=image_names - ) - if response.errors: - raise AppException(response.errors) - - logger.info( - f"Images deleted in project {project_name}{'/' + folder_name if folder_name else ''}" - ) - - -@Trackable -@validate_arguments -def assign_images(project: Union[NotEmptyStr, dict], image_names: List[str], user: str): - """Assigns images to a user. The assignment role, QA or Annotator, will - be deduced from the user's role in the project. With SDK, the user can be - assigned to a role in the project with the share_project function. - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param image_names: list of image names to assign - :type image_names: list of str - :param user: user email - :type user: str - """ - project_name, folder_name = extract_project_folder(project) - project = Controller.get_default().get_project_metadata(project_name).data - - if project["project"].type in [ - constances.ProjectType.VIDEO.value, - constances.ProjectType.DOCUMENT.value, - ]: - raise AppException(LIMITED_FUNCTIONS[project["project"].type]) - - contributors = ( - Controller.get_default() - .get_project_metadata(project_name=project_name, include_contributors=True) - .data["project"] - .users - ) - contributor = None - for c in contributors: - if c["user_id"] == user: - contributor = user - - if not contributor: - logger.warning( - f"Skipping {user}. {user} is not a verified contributor for the {project_name}" - ) - return - - response = Controller.get_default().assign_images( - project_name, folder_name, image_names, user - ) - if not response.errors: - logger.info(f"Assign images to user {user}") - else: - raise AppException(response.errors) - - -@Trackable -@validate_arguments -def unassign_images(project: Union[NotEmptyStr, dict], image_names: List[NotEmptyStr]): - """Removes assignment of given images for all assignees.With SDK, - the user can be assigned to a role in the project with the share_project - function. - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param image_names: list of image unassign - :type image_names: list of str - """ - project_name, folder_name = extract_project_folder(project) - - response = Controller.get_default().un_assign_images( - project_name=project_name, folder_name=folder_name, image_names=image_names - ) - if response.errors: - raise AppException(response.errors) - - -@Trackable -@validate_arguments -def unassign_folder(project_name: NotEmptyStr, folder_name: NotEmptyStr): - """Removes assignment of given folder for all assignees. - With SDK, the user can be assigned to a role in the project - with the share_project function. 
- - :param project_name: project name - :type project_name: str - :param folder_name: folder name to remove assignees - :type folder_name: str - """ - response = Controller.get_default().un_assign_folder( - project_name=project_name, folder_name=folder_name - ) - if response.errors: - raise AppException(response.errors) - - -@Trackable -@validate_arguments -def assign_folder( - project_name: NotEmptyStr, folder_name: NotEmptyStr, users: List[NotEmptyStr] -): - """Assigns folder to users. With SDK, the user can be - assigned to a role in the project with the share_project function. - - :param project_name: project name or metadata of the project - :type project_name: str or dict - :param folder_name: folder name to assign - :type folder_name: str - :param users: list of user emails - :type users: list of str - """ + response = self.controller.set_project_settings( + project_name=project_name, + new_settings=[ + {"attribute": "ImageQuality", "value": image_quality_in_editor} + ], + ) + if response.errors: + raise AppException(response.errors) + return response.data - contributors = ( - Controller.get_default() - .get_project_metadata(project_name=project_name, include_contributors=True) - .data["project"] - .users - ) - verified_users = [i["user_id"] for i in contributors] - verified_users = set(users).intersection(set(verified_users)) - unverified_contributor = set(users) - verified_users - - for user in unverified_contributor: - logger.warning( - f"Skipping {user} from assignees. {user} is not a verified contributor for the {project_name}" - ) - - if not verified_users: - return - - response = Controller.get_default().assign_folder( - project_name=project_name, folder_name=folder_name, users=list(verified_users) - ) - - if response.errors: - raise AppException(response.errors) - - -@validate_arguments -def upload_images_from_folder_to_project( - project: Union[NotEmptyStr, dict], - folder_path: Union[NotEmptyStr, Path], - extensions: Optional[ - Union[List[NotEmptyStr], Tuple[NotEmptyStr]] - ] = constances.DEFAULT_IMAGE_EXTENSIONS, - annotation_status="NotStarted", - from_s3_bucket=None, - exclude_file_patterns: Optional[ - Iterable[NotEmptyStr] - ] = constances.DEFAULT_FILE_EXCLUDE_PATTERNS, - recursive_subfolders: Optional[StrictBool] = False, - image_quality_in_editor: Optional[str] = None, -): - """ - Uploads all images with given extensions from folder_path to the project. - Sets status of all the uploaded images to set_status if it is not None. - If an image with existing name already exists in the project it won't be uploaded, - and its path will be appended to the third member of return value of this - function. 
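The editor-quality setter shown in this hunk ultimately writes the "ImageQuality" project setting; a minimal sketch, assuming the same `sa` client and the documented "original"/"compressed" values:

# Switch the default editor quality for newly opened items
sa.set_project_default_image_quality_in_editor("Example Vector Project", "compressed")

# Set it back to lossless originals
sa.set_project_default_image_quality_in_editor("Example Vector Project", "original")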
+ def pin_image( + self, + project: Union[NotEmptyStr, dict], + image_name: str, + pin: Optional[StrictBool] = True, + ): + """Pins (or unpins) image - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str or dict + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param image_name: image name + :type image_name: str + :param pin: sets to pin if True, else unpins image + :type pin: bool + """ + project_name, folder_name = extract_project_folder(project) + self.controller.update_image( + project_name=project_name, + image_name=image_name, + folder_name=folder_name, + is_pinned=int(pin), + ) - :param folder_path: from which folder to upload the images - :type folder_path: Path-like (str or Path) + def delete_images( + self, project: Union[NotEmptyStr, dict], image_names: Optional[List[str]] = None + ): + """Delete Images in project. - :param extensions: tuple or list of filename extensions to include from folder - :type extensions: tuple or list of strs + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param image_names: to be deleted images' names. If None, all the images will be deleted + :type image_names: list of strs + """ - :param annotation_status: value to set the annotation statuses of the uploaded images - NotStarted InProgress QualityCheck Returned Completed Skipped - :type annotation_status: str + warning_msg = ( + "We're deprecating the delete_images function. Please use delete_items instead." + "Learn more. \n" + "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.delete_items" + ) + logger.warning(warning_msg) + warnings.warn(warning_msg, DeprecationWarning) + project_name, folder_name = extract_project_folder(project) - :param from_s3_bucket: AWS S3 bucket to use. If None then folder_path is in local filesystem - :type from_s3_bucket: str + if not isinstance(image_names, list) and image_names is not None: + raise AppException("image_names should be a list of str or None.") - :param exclude_file_patterns: filename patterns to exclude from uploading, - default value is to exclude SuperAnnotate export related ["___save.png", "___fuse.png"] - :type exclude_file_patterns: list or tuple of strs + response = self.controller.delete_images( + project_name=project_name, folder_name=folder_name, image_names=image_names + ) + if response.errors: + raise AppException(response.errors) - :param recursive_subfolders: enable recursive subfolder parsing - :type recursive_subfolders: bool + logger.info( + f"Images deleted in project {project_name}{'/' + folder_name if folder_name else ''}" + ) - :param image_quality_in_editor: image quality be seen in SuperAnnotate web annotation editor. - Can be either "compressed" or "original". If None then the default value in project settings will be used. - :type image_quality_in_editor: str + def delete_items(self, project: str, items: Optional[List[str]] = None): + """Delete items in a given project. - :return: uploaded, could-not-upload, existing-images file-paths - :rtype: tuple (3 members) of list of strs - """ + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param items: to be deleted items' names. 
If None, all the items will be deleted + :type items: list of str + """ + project_name, folder_name = extract_project_folder(project) - project_name, folder_name = extract_project_folder(project) - if recursive_subfolders: - logger.info( - "When using recursive subfolder parsing same name images in different subfolders will overwrite each other." - ) - if not isinstance(extensions, (list, tuple)): - print(extensions) - raise AppException( - "extensions should be a list or a tuple in upload_images_from_folder_to_project" - ) - elif len(extensions) < 1: - return [], [], [] - - if exclude_file_patterns: - exclude_file_patterns = list(exclude_file_patterns) + list( - constances.DEFAULT_FILE_EXCLUDE_PATTERNS - ) - exclude_file_patterns = list(set(exclude_file_patterns)) - - project_folder_name = project_name + (f"/{folder_name}" if folder_name else "") - - logger.info( - "Uploading all images with extensions %s from %s to project %s. Excluded file patterns are: %s.", - extensions, - folder_path, - project_folder_name, - exclude_file_patterns, - ) - - use_case = Controller.get_default().upload_images_from_folder_to_project( - project_name=project_name, - folder_name=folder_name, - folder_path=folder_path, - extensions=extensions, - annotation_status=annotation_status, - from_s3_bucket=from_s3_bucket, - exclude_file_patterns=exclude_file_patterns, - recursive_sub_folders=recursive_subfolders, - image_quality_in_editor=image_quality_in_editor, - ) - images_to_upload, duplicates = use_case.images_to_upload - if len(duplicates): - logger.warning( - "%s already existing images found that won't be uploaded.", len(duplicates) - ) - logger.info( - "Uploading %s images to project %s.", len(images_to_upload), project_folder_name - ) - if not images_to_upload: - return [], [], duplicates - if use_case.is_valid(): - with tqdm(total=len(images_to_upload), desc="Uploading images") as progress_bar: - for _ in use_case.execute(): - progress_bar.update(1) - return use_case.data - raise AppException(use_case.response.errors) - - -@Trackable -@validate_arguments -def get_project_image_count( - project: Union[NotEmptyStr, dict], with_all_subfolders: Optional[StrictBool] = False -): - """Returns number of images in the project. - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param with_all_subfolders: enables recursive folder counting - :type with_all_subfolders: bool - - :return: number of images in the project - :rtype: int - """ + response = self.controller.delete_items( + project_name=project_name, folder_name=folder_name, items=items + ) + if response.errors: + raise AppException(response.errors) - project_name, folder_name = extract_project_folder(project) - - response = Controller.get_default().get_project_image_count( - project_name=project_name, - folder_name=folder_name, - with_all_subfolders=with_all_subfolders, - ) - if response.errors: - raise AppException(response.errors) - return response.data - - -@Trackable -@validate_arguments -def download_image_annotations( - project: Union[NotEmptyStr, dict], - image_name: NotEmptyStr, - local_dir_path: Union[str, Path], -): - """Downloads annotations of the image (JSON and mask if pixel type project) - to local_dir_path. 
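pin_image, the deprecated delete_images and its replacement delete_items all appear in this hunk; a sketch with the assumed `sa` client and hypothetical item names, preferring delete_items as the deprecation warning suggests:

sa.pin_image("Example Vector Project/batch_1", "image_001.jpg")              # pin
sa.pin_image("Example Vector Project/batch_1", "image_001.jpg", pin=False)   # unpin

# Preferred over the deprecated delete_images
sa.delete_items("Example Vector Project/batch_1", ["image_002.jpg", "image_003.jpg"])
sa.delete_items("Example Vector Project/batch_1")   # items=None deletes everything in the folder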
- - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param image_name: image name - :type image_name: str - :param local_dir_path: local directory path to download to - :type local_dir_path: Path-like (str or Path) - - :return: paths of downloaded annotations - :rtype: tuple - """ - project_name, folder_name = extract_project_folder(project) - res = Controller.get_default().download_image_annotations( - project_name=project_name, - folder_name=folder_name, - image_name=image_name, - destination=local_dir_path, - ) - if res.errors: - raise AppException(res.errors) - return res.data - - -@Trackable -@validate_arguments -def get_exports(project: NotEmptyStr, return_metadata: Optional[StrictBool] = False): - """Get all prepared exports of the project. - - :param project: project name - :type project: str - :param return_metadata: return metadata of images instead of names - :type return_metadata: bool - - :return: names or metadata objects of the all prepared exports of the project - :rtype: list of strs or dicts - """ - response = Controller.get_default().get_exports( - project_name=project, return_metadata=return_metadata - ) - return response.data - - -@Trackable -@validate_arguments -def prepare_export( - project: Union[NotEmptyStr, dict], - folder_names: Optional[List[NotEmptyStr]] = None, - annotation_statuses: Optional[List[AnnotationStatuses]] = None, - include_fuse: Optional[StrictBool] = False, - only_pinned=False, -): - """Prepare annotations and classes.json for export. Original and fused images for images with - annotations can be included with include_fuse flag. - - :param project: project name - :type project: str - :param folder_names: names of folders to include in the export. If None, whole project will be exported - :type folder_names: list of str - :param annotation_statuses: images with which status to include, if None, - ["NotStarted", "InProgress", "QualityCheck", "Returned", "Completed", "Skipped"] will be chose - list elements should be one of NotStarted InProgress QualityCheck Returned Completed Skipped - :type annotation_statuses: list of strs - :param include_fuse: enables fuse images in the export - :type include_fuse: bool - :param only_pinned: enable only pinned output in export. This option disables all other types of output. 
- :type only_pinned: bool - - :return: metadata object of the prepared export - :rtype: dict - """ - project_name, folder_name = extract_project_folder(project) - if folder_names is None: - folders = [folder_name] if folder_name else [] - else: - folders = folder_names - if not annotation_statuses: - annotation_statuses = [ - constances.AnnotationStatus.NOT_STARTED.name, - constances.AnnotationStatus.IN_PROGRESS.name, - constances.AnnotationStatus.QUALITY_CHECK.name, - constances.AnnotationStatus.RETURNED.name, - constances.AnnotationStatus.COMPLETED.name, - constances.AnnotationStatus.SKIPPED.name, - ] - response = Controller.get_default().prepare_export( - project_name=project_name, - folder_names=folders, - include_fuse=include_fuse, - only_pinned=only_pinned, - annotation_statuses=annotation_statuses, - ) - if response.errors: - raise AppException(response.errors) - return response.data - - -@Trackable -@validate_arguments -def upload_videos_from_folder_to_project( - project: Union[NotEmptyStr, dict], - folder_path: Union[NotEmptyStr, Path], - extensions: Optional[ - Union[Tuple[NotEmptyStr], List[NotEmptyStr]] - ] = constances.DEFAULT_VIDEO_EXTENSIONS, - exclude_file_patterns: Optional[List[NotEmptyStr]] = (), - recursive_subfolders: Optional[StrictBool] = False, - target_fps: Optional[int] = None, - start_time: Optional[float] = 0.0, - end_time: Optional[float] = None, - annotation_status: Optional[AnnotationStatuses] = "NotStarted", - image_quality_in_editor: Optional[ImageQualityChoices] = None, -): - """Uploads image frames from all videos with given extensions from folder_path to the project. - Sets status of all the uploaded images to set_status if it is not None. - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param folder_path: from which folder to upload the videos - :type folder_path: Path-like (str or Path) - :param extensions: tuple or list of filename extensions to include from folder - :type extensions: tuple or list of strs - :param exclude_file_patterns: filename patterns to exclude from uploading - :type exclude_file_patterns: listlike of strs - :param recursive_subfolders: enable recursive subfolder parsing - :type recursive_subfolders: bool - :param target_fps: how many frames per second need to extract from the video (approximate). - If None, all frames will be uploaded - :type target_fps: float - :param start_time: Time (in seconds) from which to start extracting frames - :type start_time: float - :param end_time: Time (in seconds) up to which to extract frames. If None up to end - :type end_time: float - :param annotation_status: value to set the annotation statuses of the uploaded images - NotStarted InProgress QualityCheck Returned Completed Skipped - :type annotation_status: str - :param image_quality_in_editor: image quality be seen in SuperAnnotate web annotation editor. - Can be either "compressed" or "original". If None then the default value in project settings will be used. - :type image_quality_in_editor: str - - :return: uploaded and not-uploaded video frame images' filenames - :rtype: tuple of list of strs - """ + def assign_items( + self, project: Union[NotEmptyStr, dict], items: List[str], user: str + ): + """Assigns items to a user. The assignment role, QA or Annotator, will + be deduced from the user's role in the project. The type of the objects` image, video or text + will be deduced from the project type. 
With SDK, the user can be + assigned to a role in the project with the share_project function. + + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + + :param items: list of items to assign + :type items: list of str + + :param user: user email + :type user: str + """ - project_name, folder_name = extract_project_folder(project) + project_name, folder_name = extract_project_folder(project) + + response = self.controller.assign_items(project_name, folder_name, items, user) - video_paths = [] - for extension in extensions: - if not recursive_subfolders: - video_paths += list(Path(folder_path).glob(f"*.{extension.lower()}")) - if os.name != "nt": - video_paths += list(Path(folder_path).glob(f"*.{extension.upper()}")) + if response.errors: + raise AppException(response.errors) + + def unassign_items( + self, project: Union[NotEmptyStr, dict], items: List[NotEmptyStr] + ): + """Removes assignment of given items for all assignees. With SDK, + the user can be assigned to a role in the project with the share_project + function. + + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param items: list of items to unassign + :type items: list of str + """ + project_name, folder_name = extract_project_folder(project) + + response = self.controller.un_assign_items( + project_name=project_name, folder_name=folder_name, item_names=items + ) + if response.errors: + raise AppException(response.errors) + + def assign_images( + self, project: Union[NotEmptyStr, dict], image_names: List[str], user: str + ): + """Assigns images to a user. The assignment role, QA or Annotator, will + be deduced from the user's role in the project. With SDK, the user can be + assigned to a role in the project with the share_project function. + + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param image_names: list of image names to assign + :type image_names: list of str + :param user: user email + :type user: str + """ + + warning_msg = ( + "We're deprecating the assign_images function. Please use assign_items instead." + "Learn more. \n" + "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.assign_items" + ) + logger.warning(warning_msg) + warnings.warn(warning_msg, DeprecationWarning) + project_name, folder_name = extract_project_folder(project) + project = self.controller.get_project_metadata(project_name).data + + if project["project"].type in [ + constants.ProjectType.VIDEO.value, + constants.ProjectType.DOCUMENT.value, + ]: + raise AppException(LIMITED_FUNCTIONS[project["project"].type]) + + contributors = ( + self.controller.get_project_metadata( + project_name=project_name, include_contributors=True + ) + .data["project"] + .users + ) + contributor = None + for c in contributors: + if c["user_id"] == user: + contributor = user + + if not contributor: + logger.warning( + f"Skipping {user}. {user} is not a verified contributor for the {project_name}" + ) + return + + response = self.controller.assign_images( + project_name, folder_name, image_names, user + ) + if not response.errors: + logger.info(f"Assign images to user {user}") else: + raise AppException(response.errors) + + def unassign_images( + self, project: Union[NotEmptyStr, dict], image_names: List[NotEmptyStr] + ): + """Removes assignment of given images for all assignees. With SDK, + the user can be assigned to a role in the project with the share_project + function. 
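The new assign_items/unassign_items pair replaces the image-specific assignment helpers; a hedged sketch, assuming the same `sa` client, hypothetical item names and a contributor email already shared into the project:

sa.assign_items(
    "Example Vector Project/batch_1",
    ["image_001.jpg", "image_002.jpg"],
    "annotator@example.com",     # role (QA or Annotator) is deduced from the user's project role
)
sa.unassign_items("Example Vector Project/batch_1", ["image_002.jpg"])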
+ + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param image_names: list of images to unassign + :type image_names: list of str + """ + + warning_msg = ( + "We're deprecating the unassign_images function. Please use unassign_items instead." + "Learn more. \n" + "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.unassign_items" + ) + logger.warning(warning_msg) + warnings.warn(warning_msg, DeprecationWarning) + project_name, folder_name = extract_project_folder(project) + + response = self.controller.un_assign_images( + project_name=project_name, folder_name=folder_name, image_names=image_names + ) + if response.errors: + raise AppException(response.errors) + + def unassign_folder(self, project_name: NotEmptyStr, folder_name: NotEmptyStr): + """Removes assignment of given folder for all assignees. + With SDK, the user can be assigned to a role in the project + with the share_project function. + + :param project_name: project name + :type project_name: str + :param folder_name: folder name to remove assignees + :type folder_name: str + """ + response = self.controller.un_assign_folder( + project_name=project_name, folder_name=folder_name + ) + if response.errors: + raise AppException(response.errors) + + def assign_folder( + self, + project_name: NotEmptyStr, + folder_name: NotEmptyStr, + users: List[NotEmptyStr], + ): + """Assigns folder to users. With SDK, the user can be + assigned to a role in the project with the share_project function. + + :param project_name: project name or metadata of the project + :type project_name: str or dict + :param folder_name: folder name to assign + :type folder_name: str + :param users: list of user emails + :type users: list of str + """ + + contributors = ( + self.controller.get_project_metadata( + project_name=project_name, include_contributors=True + ) + .data["project"] + .users + ) + verified_users = [i["user_id"] for i in contributors] + verified_users = set(users).intersection(set(verified_users)) + unverified_contributor = set(users) - verified_users + + for user in unverified_contributor: logger.warning( - "When using recursive subfolder parsing same name videos " - "in different subfolders will overwrite each other." + f"Skipping {user} from assignees. {user} is not a verified contributor for the {project_name}" ) - video_paths += list(Path(folder_path).rglob(f"*.{extension.lower()}")) - if os.name != "nt": - video_paths += list(Path(folder_path).rglob(f"*.{extension.upper()}")) - - video_paths = [str(path) for path in video_paths] - response = Controller.get_default().upload_videos( - project_name=project_name, - folder_name=folder_name, - paths=video_paths, - target_fps=target_fps, - start_time=start_time, - exclude_file_patterns=exclude_file_patterns, - end_time=end_time, - annotation_status=annotation_status, - image_quality_in_editor=image_quality_in_editor, - ) - if response.errors: - raise AppException(response.errors) - return response.data - - -@Trackable -@validate_arguments -def upload_video_to_project( - project: Union[NotEmptyStr, dict], - video_path: Union[NotEmptyStr, Path], - target_fps: Optional[int] = None, - start_time: Optional[float] = 0.0, - end_time: Optional[float] = None, - annotation_status: Optional[AnnotationStatuses] = "NotStarted", - image_quality_in_editor: Optional[ImageQualityChoices] = None, -): - """Uploads image frames from video to platform. Uploaded images will have - names "_.jpg". 
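Folder-level assignment works analogously; a short sketch assuming the `sa` client and hypothetical contributor emails (addresses that are not verified contributors are skipped with a warning, as the assign_folder body shows):

sa.assign_folder(
    "Example Vector Project",
    "batch_1",
    ["qa@example.com", "annotator@example.com"],
)
sa.unassign_folder("Example Vector Project", "batch_1")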
- - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param video_path: video to upload - :type video_path: Path-like (str or Path) - :param target_fps: how many frames per second need to extract from the video (approximate). - If None, all frames will be uploaded - :type target_fps: float - :param start_time: Time (in seconds) from which to start extracting frames - :type start_time: float - :param end_time: Time (in seconds) up to which to extract frames. If None up to end - :type end_time: float - :param annotation_status: value to set the annotation statuses of the uploaded - video frames NotStarted InProgress QualityCheck Returned Completed Skipped - :type annotation_status: str - :param image_quality_in_editor: image quality be seen in SuperAnnotate web annotation editor. - Can be either "compressed" or "original". If None then the default value in project settings will be used. - :type image_quality_in_editor: str - - :return: filenames of uploaded images - :rtype: list of strs - """ - project_name, folder_name = extract_project_folder(project) - - response = Controller.get_default().upload_videos( - project_name=project_name, - folder_name=folder_name, - paths=[video_path], - target_fps=target_fps, - start_time=start_time, - end_time=end_time, - annotation_status=annotation_status, - image_quality_in_editor=image_quality_in_editor, - ) - if response.errors: - raise AppException(response.errors) - return response.data - - -@Trackable -@validate_arguments -def create_annotation_class( - project: Union[Project, NotEmptyStr], - name: NotEmptyStr, - color: NotEmptyStr, - attribute_groups: Optional[List[AttributeGroup]] = None, - class_type: ClassType = "object", -): - """Create annotation class in project - - :param project: project name - :type project: str - :param name: name for the class - :type name: str - :param color: RGB hex color value, e.g., "#FFFFAA" - :type color: str - :param attribute_groups: example: - [ { "name": "tall", "is_multiselect": 0, "attributes": [ { "name": "yes" }, { "name": "no" } ] }, - { "name": "age", "is_multiselect": 0, "attributes": [ { "name": "young" }, { "name": "old" } ] } ] - :type attribute_groups: list of dicts - :param class_type: class type - :type class_type: str - - :return: new class metadata - :rtype: dict - """ - if isinstance(project, Project): - project = project.dict() - attribute_groups = ( - list(map(lambda x: x.dict(), attribute_groups)) if attribute_groups else [] - ) - response = Controller.get_default().create_annotation_class( - project_name=project, - name=name, - color=color, - attribute_groups=attribute_groups, - class_type=class_type, - ) - if response.errors: - raise AppException(response.errors) - return BaseSerializer(response.data).serialize() - - -@Trackable -@validate_arguments -def delete_annotation_class( - project: NotEmptyStr, annotation_class: Union[dict, NotEmptyStr] -): - """Deletes annotation class from project - - :param project: project name - :type project: str - :param annotation_class: annotation class name or metadata - :type annotation_class: str or dict - """ - Controller.get_default().delete_annotation_class( - project_name=project, annotation_class_name=annotation_class - ) + if not verified_users: + return + response = self.controller.assign_folder( + project_name=project_name, + folder_name=folder_name, + users=list(verified_users), + ) -@Trackable -@validate_arguments -def download_annotation_classes_json(project: NotEmptyStr, folder: Union[str, Path]): - 
"""Downloads project classes.json to folder + if response.errors: + raise AppException(response.errors) - :param project: project name - :type project: str - :param folder: folder to download to - :type folder: Path-like (str or Path) + def upload_images_from_folder_to_project( + self, + project: Union[NotEmptyStr, dict], + folder_path: Union[NotEmptyStr, Path], + extensions: Optional[ + Union[List[NotEmptyStr], Tuple[NotEmptyStr]] + ] = constants.DEFAULT_IMAGE_EXTENSIONS, + annotation_status="NotStarted", + from_s3_bucket=None, + exclude_file_patterns: Optional[ + Iterable[NotEmptyStr] + ] = constants.DEFAULT_FILE_EXCLUDE_PATTERNS, + recursive_subfolders: Optional[StrictBool] = False, + image_quality_in_editor: Optional[str] = None, + ): + """Uploads all images with given extensions from folder_path to the project. + Sets status of all the uploaded images to set_status if it is not None. - :return: path of the download file - :rtype: str - """ - response = Controller.get_default().download_annotation_classes( - project_name=project, download_path=folder - ) - if response.errors: - raise AppException(response.errors) - return response.data - - -@Trackable -@validate_arguments -def create_annotation_classes_from_classes_json( - project: Union[NotEmptyStr, dict], - classes_json: Union[List[AnnotationClassEntity], str, Path], - from_s3_bucket=False, -): - """Creates annotation classes in project from a SuperAnnotate format - annotation classes.json. - - :param project: project name - :type project: str - :param classes_json: JSON itself or path to the JSON file - :type classes_json: list or Path-like (str or Path) - :param from_s3_bucket: AWS S3 bucket to use. If None then classes_json is in local filesystem - :type from_s3_bucket: str - - :return: list of created annotation class metadatas - :rtype: list of dicts - """ - if isinstance(classes_json, str) or isinstance(classes_json, Path): - if from_s3_bucket: - from_session = boto3.Session() - from_s3 = from_session.resource("s3") - file = io.BytesIO() - from_s3_object = from_s3.Object(from_s3_bucket, classes_json) - from_s3_object.download_fileobj(file) - file.seek(0) - data = file + If an image with existing name already exists in the project it won't be uploaded, + and its path will be appended to the third member of return value of this + function. + + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str or dict + + :param folder_path: from which folder to upload the images + :type folder_path: Path-like (str or Path) + + :param extensions: tuple or list of filename extensions to include from folder + :type extensions: tuple or list of strs + + :param annotation_status: value to set the annotation statuses of the uploaded images + NotStarted InProgress QualityCheck Returned Completed Skipped + :type annotation_status: str + + :param from_s3_bucket: AWS S3 bucket to use. If None then folder_path is in local filesystem + :type from_s3_bucket: str + + :param exclude_file_patterns: filename patterns to exclude from uploading, + default value is to exclude SuperAnnotate export related ["___save.png", "___fuse.png"] + :type exclude_file_patterns: list or tuple of strs + + :param recursive_subfolders: enable recursive subfolder parsing + :type recursive_subfolders: bool + + :param image_quality_in_editor: image quality be seen in SuperAnnotate web annotation editor. + Can be either "compressed" or "original". If None then the default value in project settings will be used. 
+ :type image_quality_in_editor: str + + :return: uploaded, could-not-upload, existing-images filepaths + :rtype: tuple (3 members) of list of strs + """ + + project_name, folder_name = extract_project_folder(project) + if recursive_subfolders: + logger.info( + "When using recursive subfolder parsing same name images in different subfolders will overwrite each other." + ) + if not isinstance(extensions, (list, tuple)): + print(extensions) + raise AppException( + "extensions should be a list or a tuple in upload_images_from_folder_to_project" + ) + elif len(extensions) < 1: + return [], [], [] + + if exclude_file_patterns: + exclude_file_patterns = list(exclude_file_patterns) + list( + constants.DEFAULT_FILE_EXCLUDE_PATTERNS + ) + exclude_file_patterns = list(set(exclude_file_patterns)) + + project_folder_name = project_name + (f"/{folder_name}" if folder_name else "") + + logger.info( + "Uploading all images with extensions %s from %s to project %s. Excluded file patterns are: %s.", + extensions, + folder_path, + project_folder_name, + exclude_file_patterns, + ) + + use_case = self.controller.upload_images_from_folder_to_project( + project_name=project_name, + folder_name=folder_name, + folder_path=folder_path, + extensions=extensions, + annotation_status=annotation_status, + from_s3_bucket=from_s3_bucket, + exclude_file_patterns=exclude_file_patterns, + recursive_sub_folders=recursive_subfolders, + image_quality_in_editor=image_quality_in_editor, + ) + images_to_upload, duplicates = use_case.images_to_upload + if len(duplicates): + logger.warning( + "%s already existing images found that won't be uploaded.", + len(duplicates), + ) + logger.info( + "Uploading %s images to project %s.", + len(images_to_upload), + project_folder_name, + ) + if not images_to_upload: + return [], [], duplicates + if use_case.is_valid(): + with tqdm( + total=len(images_to_upload), desc="Uploading images" + ) as progress_bar: + for _ in use_case.execute(): + progress_bar.update(1) + return use_case.data + raise AppException(use_case.response.errors) + + def get_project_image_count( + self, + project: Union[NotEmptyStr, dict], + with_all_subfolders: Optional[StrictBool] = False, + ): + """Returns number of images in the project. + + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param with_all_subfolders: enables recursive folder counting + :type with_all_subfolders: bool + + :return: number of images in the project + :rtype: int + """ + + project_name, folder_name = extract_project_folder(project) + + response = self.controller.get_project_image_count( + project_name=project_name, + folder_name=folder_name, + with_all_subfolders=with_all_subfolders, + ) + if response.errors: + raise AppException(response.errors) + return response.data + + def download_image_annotations( + self, + project: Union[NotEmptyStr, dict], + image_name: NotEmptyStr, + local_dir_path: Union[str, Path], + ): + """Downloads annotations of the image (JSON and mask if pixel type project) + to local_dir_path. 
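A hedged sketch of the upload and download helpers in this hunk; the local paths are hypothetical and the three-tuple return value follows the docstring above (uploaded, could-not-upload, already-existing file paths):

uploaded, failed, skipped = sa.upload_images_from_folder_to_project(
    "Example Vector Project/batch_1",
    "/data/images/batch_1",              # hypothetical local folder
    annotation_status="InProgress",
    recursive_subfolders=False,
)

count = sa.get_project_image_count("Example Vector Project", with_all_subfolders=True)

# Downloads the JSON annotation (and the mask for Pixel projects) for one image
paths = sa.download_image_annotations(
    "Example Vector Project/batch_1", "image_001.jpg", "./annotations"
)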
+
+ :param project: project name or folder path (e.g., "project1/folder1")
+ :type project: str
+ :param image_name: image name
+ :type image_name: str
+ :param local_dir_path: local directory path to download to
+ :type local_dir_path: Path-like (str or Path)
+
+ :return: paths of downloaded annotations
+ :rtype: tuple
+ """
+ project_name, folder_name = extract_project_folder(project)
+ res = self.controller.download_image_annotations(
+ project_name=project_name,
+ folder_name=folder_name,
+ image_name=image_name,
+ destination=local_dir_path,
+ )
+ if res.errors:
+ raise AppException(res.errors)
+ return res.data
+
+ def get_exports(
+ self, project: NotEmptyStr, return_metadata: Optional[StrictBool] = False
+ ):
+ """Get all prepared exports of the project.
+
+ :param project: project name
+ :type project: str
+ :param return_metadata: return metadata of exports instead of names
+ :type return_metadata: bool
+
+ :return: names or metadata objects of all the prepared exports of the project
+ :rtype: list of strs or dicts
+ """
+ response = self.controller.get_exports(
+ project_name=project, return_metadata=return_metadata
+ )
+ return response.data
+
+ def prepare_export(
+ self,
+ project: Union[NotEmptyStr, dict],
+ folder_names: Optional[List[NotEmptyStr]] = None,
+ annotation_statuses: Optional[List[AnnotationStatuses]] = None,
+ include_fuse: Optional[StrictBool] = False,
+ only_pinned=False,
+ ):
+ """Prepare annotations and classes.json for export. Original and fused images for images with
+ annotations can be included with include_fuse flag.
+
+ :param project: project name
+ :type project: str
+ :param folder_names: names of folders to include in the export. If None, the whole project will be exported
+ :type folder_names: list of str
+ :param annotation_statuses: images with which status to include, if None,
+ ["NotStarted", "InProgress", "QualityCheck", "Returned", "Completed", "Skipped"] will be chosen
+ list elements should be one of NotStarted InProgress QualityCheck Returned Completed Skipped
+ :type annotation_statuses: list of strs
+ :param include_fuse: enables fuse images in the export
+ :type include_fuse: bool
+ :param only_pinned: enable only pinned output in export. This option disables all other types of output.
+ :type only_pinned: bool
+
+ :return: metadata object of the prepared export
+ :rtype: dict
+ """
+ project_name, folder_name = extract_project_folder(project)
+ if folder_names is None:
+ folders = [folder_name] if folder_name else []
else:
- - :param project: project name - :type project: str - - :param export: export name - :type export: str, dict - - :param folder_path: where to download the export - :type folder_path: Path-like (str or Path) - - :param extract_zip_contents: if False then a zip file will be downloaded, - if True the zip file will be extracted at folder_path - :type extract_zip_contents: bool - - :param to_s3_bucket: AWS S3 bucket to use for download. If None then folder_path is in local filesystem. - :type to_s3_bucket: Bucket object - """ - project_name, folder_name = extract_project_folder(project) - export_name = export["name"] if isinstance(export, dict) else export - - response = Controller.get_default().download_export( - project_name=project_name, - export_name=export_name, - folder_path=folder_path, - extract_zip_contents=extract_zip_contents, - to_s3_bucket=to_s3_bucket, - ) - if response.errors: - raise AppException(response.errors) - - -@Trackable -@validate_arguments -def set_image_annotation_status( - project: Union[NotEmptyStr, dict], - image_name: NotEmptyStr, - annotation_status: NotEmptyStr, -): - """Sets the image annotation status - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param image_name: image name - :type image_name: str - :param annotation_status: annotation status to set, - should be one of NotStarted InProgress QualityCheck Returned Completed Skipped - :type annotation_status: str - - :return: metadata of the updated image - :rtype: dict - """ - warning_msg = ( - "We're deprecating the set_image_annotation_status function. Please use set_annotation_statuses instead. " - "Learn more. \n" - "https://superannotate.readthedocs.io/en/stable/superannotate.sdk.html#superannotate.set_annotation_statuses" - ) - logger.warning(warning_msg) - warnings.warn(warning_msg, DeprecationWarning) - project_name, folder_name = extract_project_folder(project) - response = Controller.get_default().set_images_annotation_statuses( - project_name, folder_name, [image_name], annotation_status - ) - if response.errors: - raise AppException(response.errors) - image = ( - Controller.get_default().get_item(project_name, folder_name, image_name).data - ) - return BaseSerializer(image).serialize() - - -@Trackable -@validate_arguments -def set_project_workflow(project: Union[NotEmptyStr, dict], new_workflow: List[dict]): - """Sets project's workflow. - - new_workflow example: [{ "step" : , "className" : , "tool" : , - "attribute":[{"attribute" : {"name" : , "attribute_group" : {"name": }}}, - ...] - },...] 
- - :param project: project name or metadata - :type project: str or dict - :param new_workflow: new workflow list of dicts - :type new_workflow: list of dicts - """ - project_name, _ = extract_project_folder(project) - response = Controller.get_default().set_project_workflow( - project_name=project_name, steps=new_workflow - ) - if response.errors: - raise AppException(response.errors) - - -@Trackable -@validate_arguments -def download_image( - project: Union[NotEmptyStr, dict], - image_name: NotEmptyStr, - local_dir_path: Optional[Union[str, Path]] = "./", - include_annotations: Optional[StrictBool] = False, - include_fuse: Optional[StrictBool] = False, - include_overlay: Optional[StrictBool] = False, - variant: Optional[str] = "original", -): - """Downloads the image (and annotation if not None) to local_dir_path - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param image_name: image name - :type image_name: str - :param local_dir_path: where to download the image - :type local_dir_path: Path-like (str or Path) - :param include_annotations: enables annotation download with the image - :type include_annotations: bool - :param include_fuse: enables fuse image download with the image - :type include_fuse: bool - :param include_overlay: enables overlay image download with the image - :type include_overlay: bool - :param variant: which resolution to download, can be 'original' or 'lores' - (low resolution used in web editor) - :type variant: str - - :return: paths of downloaded image and annotations if included - :rtype: tuple - """ - project_name, folder_name = extract_project_folder(project) - response = Controller.get_default().download_image( - project_name=project_name, - folder_name=folder_name, - image_name=image_name, - download_path=str(local_dir_path), - image_variant=variant, - include_annotations=include_annotations, - include_fuse=include_fuse, - include_overlay=include_overlay, - ) - if response.errors: - raise AppException(response.errors) - logger.info(f"Downloaded image {image_name} to {local_dir_path} ") - return response.data - - -@Trackable -@validate_arguments -def upload_annotations_from_folder_to_project( - project: Union[NotEmptyStr, dict], - folder_path: Union[str, Path], - from_s3_bucket=None, - recursive_subfolders: Optional[StrictBool] = False, -): - """Finds and uploads all JSON files in the folder_path as annotations to the project. - - The JSON files should follow specific naming convention. For Vector - projects they should be named "___objects.json" (e.g., if - image is cats.jpg the annotation filename should be cats.jpg___objects.json), for Pixel projects - JSON file should be named "___pixel.json" and also second mask - image file should be present with the name "___save.png". In both cases - image with should be already present on the platform. - - Existing annotations will be overwritten. - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str or dict - :param folder_path: from which folder to upload annotations - :type folder_path: str or dict - :param from_s3_bucket: AWS S3 bucket to use. 
If None then folder_path is in local filesystem - :type from_s3_bucket: str - :param recursive_subfolders: enable recursive subfolder parsing - :type recursive_subfolders: bool - - :return: paths to annotations uploaded, could-not-upload, missing-images - :rtype: tuple of list of strs - """ + folders = folder_names + if not annotation_statuses: + annotation_statuses = [ + constants.AnnotationStatus.NOT_STARTED.name, + constants.AnnotationStatus.IN_PROGRESS.name, + constants.AnnotationStatus.QUALITY_CHECK.name, + constants.AnnotationStatus.RETURNED.name, + constants.AnnotationStatus.COMPLETED.name, + constants.AnnotationStatus.SKIPPED.name, + ] + response = self.controller.prepare_export( + project_name=project_name, + folder_names=folders, + include_fuse=include_fuse, + only_pinned=only_pinned, + annotation_statuses=annotation_statuses, + ) + if response.errors: + raise AppException(response.errors) + return response.data + + def upload_videos_from_folder_to_project( + self, + project: Union[NotEmptyStr, dict], + folder_path: Union[NotEmptyStr, Path], + extensions: Optional[ + Union[Tuple[NotEmptyStr], List[NotEmptyStr]] + ] = constants.DEFAULT_VIDEO_EXTENSIONS, + exclude_file_patterns: Optional[List[NotEmptyStr]] = (), + recursive_subfolders: Optional[StrictBool] = False, + target_fps: Optional[int] = None, + start_time: Optional[float] = 0.0, + end_time: Optional[float] = None, + annotation_status: Optional[AnnotationStatuses] = "NotStarted", + image_quality_in_editor: Optional[ImageQualityChoices] = None, + ): + """Uploads image frames from all videos with given extensions from folder_path to the project. + Sets status of all the uploaded images to set_status if it is not None. + + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param folder_path: from which folder to upload the videos + :type folder_path: Path-like (str or Path) + :param extensions: tuple or list of filename extensions to include from folder + :type extensions: tuple or list of strs + :param exclude_file_patterns: filename patterns to exclude from uploading + :type exclude_file_patterns: listlike of strs + :param recursive_subfolders: enable recursive subfolder parsing + :type recursive_subfolders: bool + :param target_fps: how many frames per second need to extract from the video (approximate). + If None, all frames will be uploaded + :type target_fps: float + :param start_time: Time (in seconds) from which to start extracting frames + :type start_time: float + :param end_time: Time (in seconds) up to which to extract frames. If None up to end + :type end_time: float + :param annotation_status: value to set the annotation statuses of the uploaded images + NotStarted InProgress QualityCheck Returned Completed Skipped + :type annotation_status: str + :param image_quality_in_editor: image quality be seen in SuperAnnotate web annotation editor. + Can be either "compressed" or "original". If None then the default value in project settings will be used. 
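A usage sketch for the export flow, assuming an authenticated SAClient and a project with completed items; prepare_export above returns the export metadata, and the result can be passed to download_export, which appears further down in this diff. Names and paths are illustrative.

# Illustrative sketch: project name and local path are made up.
from superannotate import SAClient

sa = SAClient()

export = sa.prepare_export(
    project="Example Project 1",
    folder_names=["batch_1"],
    annotation_statuses=["Completed"],
    include_fuse=False,
)

# download_export accepts either the export name or the metadata dict
# returned by prepare_export.
sa.download_export(
    project="Example Project 1",
    export=export,
    folder_path="./export",
    extract_zip_contents=True,
)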
+ :type image_quality_in_editor: str + + :return: uploaded and not-uploaded video frame images' filenames + :rtype: tuple of list of strs + """ + + project_name, folder_name = extract_project_folder(project) + + video_paths = [] + for extension in extensions: + if not recursive_subfolders: + video_paths += list(Path(folder_path).glob(f"*.{extension.lower()}")) + if os.name != "nt": + video_paths += list( + Path(folder_path).glob(f"*.{extension.upper()}") + ) + else: + logger.warning( + "When using recursive subfolder parsing same name videos " + "in different subfolders will overwrite each other." + ) + video_paths += list(Path(folder_path).rglob(f"*.{extension.lower()}")) + if os.name != "nt": + video_paths += list( + Path(folder_path).rglob(f"*.{extension.upper()}") + ) + + video_paths = [str(path) for path in video_paths] + response = self.controller.upload_videos( + project_name=project_name, + folder_name=folder_name, + paths=video_paths, + target_fps=target_fps, + start_time=start_time, + exclude_file_patterns=exclude_file_patterns, + end_time=end_time, + annotation_status=annotation_status, + image_quality_in_editor=image_quality_in_editor, + ) + if response.errors: + raise AppException(response.errors) + return response.data + + def upload_video_to_project( + self, + project: Union[NotEmptyStr, dict], + video_path: Union[NotEmptyStr, Path], + target_fps: Optional[int] = None, + start_time: Optional[float] = 0.0, + end_time: Optional[float] = None, + annotation_status: Optional[AnnotationStatuses] = "NotStarted", + image_quality_in_editor: Optional[ImageQualityChoices] = None, + ): + """Uploads image frames from video to platform. Uploaded images will have + names "_.jpg". + + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param video_path: video to upload + :type video_path: Path-like (str or Path) + :param target_fps: how many frames per second need to extract from the video (approximate). + If None, all frames will be uploaded + :type target_fps: float + :param start_time: Time (in seconds) from which to start extracting frames + :type start_time: float + :param end_time: Time (in seconds) up to which to extract frames. If None up to end + :type end_time: float + :param annotation_status: value to set the annotation statuses of the uploaded + video frames NotStarted InProgress QualityCheck Returned Completed Skipped + :type annotation_status: str + :param image_quality_in_editor: image quality be seen in SuperAnnotate web annotation editor. + Can be either "compressed" or "original". If None then the default value in project settings will be used. 
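A brief sketch of the video-frame upload method above, assuming an authenticated SAClient; the project, folder and extension values are illustrative.

# Illustrative sketch: extracts roughly one frame per second from each .mp4
# found in ./clips and uploads the frames to a made-up project/folder.
from superannotate import SAClient

sa = SAClient()

result = sa.upload_videos_from_folder_to_project(
    project="Example Project 1/videos",
    folder_path="./clips",
    extensions=["mp4"],
    target_fps=1,
    annotation_status="NotStarted",
)
# Per the docstring above, the return value lists the uploaded and
# not-uploaded frame image names.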
+ :type image_quality_in_editor: str + + :return: filenames of uploaded images + :rtype: list of strs + """ + + project_name, folder_name = extract_project_folder(project) + + response = self.controller.upload_videos( + project_name=project_name, + folder_name=folder_name, + paths=[video_path], + target_fps=target_fps, + start_time=start_time, + end_time=end_time, + annotation_status=annotation_status, + image_quality_in_editor=image_quality_in_editor, + ) + if response.errors: + raise AppException(response.errors) + return response.data + + def create_annotation_class( + self, + project: Union[Project, NotEmptyStr], + name: NotEmptyStr, + color: NotEmptyStr, + attribute_groups: Optional[List[AttributeGroup]] = None, + class_type: ClassType = "object", + ): + """Create annotation class in project + + :param project: project name + :type project: str + :param name: name for the class + :type name: str + :param color: RGB hex color value, e.g., "#FFFFAA" + :type color: str + :param attribute_groups: example: + [ { "name": "tall", "is_multiselect": 0, "attributes": [ { "name": "yes" }, { "name": "no" } ] }, + { "name": "age", "is_multiselect": 0, "attributes": [ { "name": "young" }, { "name": "old" } ] } ] + :type attribute_groups: list of dicts + :param class_type: class type + :type class_type: str + + :return: new class metadata + :rtype: dict + """ + if isinstance(project, Project): + project = project.dict() + attribute_groups = ( + list(map(lambda x: x.dict(), attribute_groups)) if attribute_groups else [] + ) + response = self.controller.create_annotation_class( + project_name=project, + name=name, + color=color, + attribute_groups=attribute_groups, + class_type=class_type, + ) + if response.errors: + raise AppException(response.errors) + return BaseSerializer(response.data).serialize() + + def delete_annotation_class( + self, project: NotEmptyStr, annotation_class: Union[dict, NotEmptyStr] + ): + """Deletes annotation class from project + + :param project: project name + :type project: str + :param annotation_class: annotation class name or metadata + :type annotation_class: str or dict + """ + self.controller.delete_annotation_class( + project_name=project, annotation_class_name=annotation_class + ) + + def download_annotation_classes_json( + self, project: NotEmptyStr, folder: Union[str, Path] + ): + """Downloads project classes.json to folder + + :param project: project name + :type project: str + :param folder: folder to download to + :type folder: Path-like (str or Path) + + :return: path of the download file + :rtype: str + """ + response = self.controller.download_annotation_classes( + project_name=project, download_path=folder + ) + if response.errors: + raise AppException(response.errors) + return response.data + + def create_annotation_classes_from_classes_json( + self, + project: Union[NotEmptyStr, dict], + classes_json: Union[List[AnnotationClassEntity], str, Path], + from_s3_bucket=False, + ): + """Creates annotation classes in project from a SuperAnnotate format + annotation classes.json. - project_name, folder_name = extract_project_folder(project) - project_folder_name = project_name + (f"/{folder_name}" if folder_name else "") + :param project: project name + :type project: str + :param classes_json: JSON itself or path to the JSON file + :type classes_json: list or Path-like (str or Path) + :param from_s3_bucket: AWS S3 bucket to use. 
If None then classes_json is in local filesystem + :type from_s3_bucket: str - if recursive_subfolders: + :return: list of created annotation class metadatas + :rtype: list of dicts + """ + if isinstance(classes_json, str) or isinstance(classes_json, Path): + if from_s3_bucket: + from_session = boto3.Session() + from_s3 = from_session.resource("s3") + file = io.BytesIO() + from_s3_object = from_s3.Object(from_s3_bucket, classes_json) + from_s3_object.download_fileobj(file) + file.seek(0) + data = file + else: + data = open(classes_json) + classes_json = json.load(data) + try: + annotation_classes = parse_obj_as(List[AnnotationClassEntity], classes_json) + except ValidationError: + raise AppException("Couldn't validate annotation classes.") + logger.info(f"Creating annotation classes in project {project}.") + response = self.controller.create_annotation_classes( + project_name=project, + annotation_classes=annotation_classes, + ) + if response.errors: + raise AppException(response.errors) + return [BaseSerializer(i).serialize() for i in response.data] + + def download_export( + self, + project: Union[NotEmptyStr, dict], + export: Union[NotEmptyStr, dict], + folder_path: Union[str, Path], + extract_zip_contents: Optional[StrictBool] = True, + to_s3_bucket=None, + ): + """Download prepared export. + + WARNING: Starting from version 1.9.0 :ref:`download_export ` additionally + requires :py:obj:`project` as first argument. + + :param project: project name + :type project: str + :param export: export name + :type export: str, dict + :param folder_path: where to download the export + :type folder_path: Path-like (str or Path) + :param extract_zip_contents: if False then a zip file will be downloaded, + if True the zip file will be extracted at folder_path + :type extract_zip_contents: bool + :param to_s3_bucket: AWS S3 bucket to use for download. If None then folder_path is in local filesystem. + :type to_s3_bucket: Bucket object + """ + project_name, folder_name = extract_project_folder(project) + export_name = export["name"] if isinstance(export, dict) else export + + response = self.controller.download_export( + project_name=project_name, + export_name=export_name, + folder_path=folder_path, + extract_zip_contents=extract_zip_contents, + to_s3_bucket=to_s3_bucket, + ) + if response.errors: + raise AppException(response.errors) + logger.info(response.data) + + def set_project_workflow( + self, project: Union[NotEmptyStr, dict], new_workflow: List[dict] + ): + """Sets project's workflow. + + new_workflow example: [{ "step" : , "className" : , "tool" : , + "attribute":[{"attribute" : {"name" : , "attribute_group" : {"name": }}}, + ...] + },...] 
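A sketch of the two annotation-class helpers above, assuming an authenticated SAClient; the project name, class definition and classes.json path are illustrative.

# Illustrative sketch: create one class directly, then bulk-create classes
# from a SuperAnnotate classes.json file (names and paths are made up).
from superannotate import SAClient

sa = SAClient()

sa.create_annotation_class(
    project="Example Project 1",
    name="Vehicle",
    color="#FFAA00",
    attribute_groups=[
        {"name": "size", "is_multiselect": 0,
         "attributes": [{"name": "small"}, {"name": "large"}]}
    ],
    class_type="object",
)

# classes_json may be a path to a classes.json file or the parsed list itself.
created = sa.create_annotation_classes_from_classes_json(
    project="Example Project 1",
    classes_json="./classes/classes.json",
)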
+ + :param project: project name or metadata + :type project: str or dict + :param new_workflow: new workflow list of dicts + :type new_workflow: list of dicts + """ + project_name, _ = extract_project_folder(project) + response = self.controller.set_project_workflow( + project_name=project_name, steps=new_workflow + ) + if response.errors: + raise AppException(response.errors) + + def download_image( + self, + project: Union[NotEmptyStr, dict], + image_name: NotEmptyStr, + local_dir_path: Optional[Union[str, Path]] = "./", + include_annotations: Optional[StrictBool] = False, + include_fuse: Optional[StrictBool] = False, + include_overlay: Optional[StrictBool] = False, + variant: Optional[str] = "original", + ): + """Downloads the image (and annotation if not None) to local_dir_path + + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param image_name: image name + :type image_name: str + :param local_dir_path: where to download the image + :type local_dir_path: Path-like (str or Path) + :param include_annotations: enables annotation download with the image + :type include_annotations: bool + :param include_fuse: enables fuse image download with the image + :type include_fuse: bool + :param include_overlay: enables overlay image download with the image + :type include_overlay: bool + :param variant: which resolution to download, can be 'original' or 'lores' + (low resolution used in web editor) + :type variant: str + + :return: paths of downloaded image and annotations if included + :rtype: tuple + """ + project_name, folder_name = extract_project_folder(project) + response = self.controller.download_image( + project_name=project_name, + folder_name=folder_name, + image_name=image_name, + download_path=str(local_dir_path), + image_variant=variant, + include_annotations=include_annotations, + include_fuse=include_fuse, + include_overlay=include_overlay, + ) + if response.errors: + raise AppException(response.errors) + logger.info(f"Downloaded image {image_name} to {local_dir_path} ") + return response.data + + def upload_annotations_from_folder_to_project( + self, + project: Union[NotEmptyStr, dict], + folder_path: Union[str, Path], + from_s3_bucket=None, + recursive_subfolders: Optional[StrictBool] = False, + ): + """Finds and uploads all JSON files in the folder_path as annotations to the project. + + The JSON files should follow specific naming convention. For Vector + projects they should be named "___objects.json" (e.g., if + image is cats.jpg the annotation filename should be cats.jpg___objects.json), for Pixel projects + JSON file should be named "___pixel.json" and also second mask + image file should be present with the name "___save.png". In both cases + image with should be already present on the platform. + + Existing annotations will be overwritten. + + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str or dict + :param folder_path: from which folder to upload annotations + :type folder_path: str or dict + :param from_s3_bucket: AWS S3 bucket to use. 
If None then folder_path is in local filesystem + :type from_s3_bucket: str + :param recursive_subfolders: enable recursive subfolder parsing + :type recursive_subfolders: bool + + :return: paths to annotations uploaded, could-not-upload, missing-images + :rtype: tuple of list of strs + """ + + project_name, folder_name = extract_project_folder(project) + project_folder_name = project_name + (f"/{folder_name}" if folder_name else "") + + if recursive_subfolders: + logger.info( + "When using recursive subfolder parsing same name annotations in different " + "subfolders will overwrite each other.", + ) logger.info( - "When using recursive subfolder parsing same name annotations in different " - "subfolders will overwrite each other.", - ) - logger.info( - "The JSON files should follow a specific naming convention, matching file names already present " - "on the platform. Existing annotations will be overwritten" - ) - - annotation_paths = get_annotation_paths( - folder_path, from_s3_bucket, recursive_subfolders - ) - - logger.info( - f"Uploading {len(annotation_paths)} annotations from {folder_path} to the project {project_folder_name}." - ) - response = Controller.get_default().upload_annotations_from_folder( - project_name=project_name, - folder_name=folder_name, - annotation_paths=annotation_paths, # noqa: E203 - client_s3_bucket=from_s3_bucket, - folder_path=folder_path, - ) - if response.errors: - raise AppException(response.errors) - return response.data - - -@Trackable -@validate_arguments -def upload_preannotations_from_folder_to_project( - project: Union[NotEmptyStr, dict], - folder_path: Union[str, Path], - from_s3_bucket=None, - recursive_subfolders: Optional[StrictBool] = False, -): - """Finds and uploads all JSON files in the folder_path as pre-annotations to the project. - - The JSON files should follow specific naming convention. For Vector - projects they should be named "___objects.json" (e.g., if - image is cats.jpg the annotation filename should be cats.jpg___objects.json), for Pixel projects - JSON file should be named "___pixel.json" and also second mask - image file should be present with the name "___save.png". In both cases - image with should be already present on the platform. - - Existing pre-annotations will be overwritten. - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param folder_path: from which folder to upload the pre-annotations - :type folder_path: Path-like (str or Path) - :param from_s3_bucket: AWS S3 bucket to use. If None then folder_path is in local filesystem - :type from_s3_bucket: str - :param recursive_subfolders: enable recursive subfolder parsing - :type recursive_subfolders: bool - - :return: paths to pre-annotations uploaded and could-not-upload - :rtype: tuple of list of strs - """ - project_name, folder_name = extract_project_folder(project) - project_folder_name = project_name + (f"/{folder_name}" if folder_name else "") - project = Controller.get_default().get_project_metadata(project_name).data - if project["project"].type in [ - constances.ProjectType.VIDEO.value, - constances.ProjectType.DOCUMENT.value, - ]: - raise AppException(LIMITED_FUNCTIONS[project["project"].type]) - if recursive_subfolders: + "The JSON files should follow a specific naming convention, matching file names already present " + "on the platform. 
Existing annotations will be overwritten" + ) + + annotation_paths = get_annotation_paths( + folder_path, from_s3_bucket, recursive_subfolders + ) + logger.info( - "When using recursive subfolder parsing same name annotations in different " - "subfolders will overwrite each other.", - ) - logger.info( - "The JSON files should follow a specific naming convention, matching file names already present " - "on the platform. Existing annotations will be overwritten" - ) - annotation_paths = get_annotation_paths( - folder_path, from_s3_bucket, recursive_subfolders - ) - logger.info( - f"Uploading {len(annotation_paths)} annotations from {folder_path} to the project {project_folder_name}." - ) - response = Controller.get_default().upload_annotations_from_folder( - project_name=project_name, - folder_name=folder_name, - annotation_paths=annotation_paths, # noqa: E203 - client_s3_bucket=from_s3_bucket, - folder_path=folder_path, - is_pre_annotations=True, - ) - if response.errors: - raise AppException(response.errors) - return response.data - - -@Trackable -@validate_arguments -def upload_image_annotations( - project: Union[NotEmptyStr, dict], - image_name: str, - annotation_json: Union[str, Path, dict], - mask: Optional[Union[str, Path, bytes]] = None, - verbose: Optional[StrictBool] = True, -): - """ - Upload annotations from JSON (also mask for pixel annotations) to the image. + f"Uploading {len(annotation_paths)} annotations from {folder_path} to the project {project_folder_name}." + ) + response = self.controller.upload_annotations_from_folder( + project_name=project_name, + folder_name=folder_name, + annotation_paths=annotation_paths, # noqa: E203 + client_s3_bucket=from_s3_bucket, + folder_path=folder_path, + ) + if response.errors: + raise AppException(response.errors) + return response.data + + def upload_preannotations_from_folder_to_project( + self, + project: Union[NotEmptyStr, dict], + folder_path: Union[str, Path], + from_s3_bucket=None, + recursive_subfolders: Optional[StrictBool] = False, + ): + """Finds and uploads all JSON files in the folder_path as pre-annotations to the project. + + The JSON files should follow specific naming convention. For Vector + projects they should be named "___objects.json" (e.g., if + image is cats.jpg the annotation filename should be cats.jpg___objects.json), for Pixel projects + JSON file should be named "___pixel.json" and also second mask + image file should be present with the name "___save.png". In both cases + image with should be already present on the platform. + + Existing pre-annotations will be overwritten. + + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param folder_path: from which folder to upload the pre-annotations + :type folder_path: Path-like (str or Path) + :param from_s3_bucket: AWS S3 bucket to use. 
If None then folder_path is in local filesystem + :type from_s3_bucket: str + :param recursive_subfolders: enable recursive subfolder parsing + :type recursive_subfolders: bool + + :return: paths to pre-annotations uploaded and could-not-upload + :rtype: tuple of list of strs + """ + project_name, folder_name = extract_project_folder(project) + project_folder_name = project_name + (f"/{folder_name}" if folder_name else "") + project = self.controller.get_project_metadata(project_name).data + if project["project"].type in [ + constants.ProjectType.VIDEO.value, + constants.ProjectType.DOCUMENT.value, + ]: + raise AppException(LIMITED_FUNCTIONS[project["project"].type]) + if recursive_subfolders: + logger.info( + "When using recursive subfolder parsing same name annotations in different " + "subfolders will overwrite each other.", + ) + logger.info( + "The JSON files should follow a specific naming convention, matching file names already present " + "on the platform. Existing annotations will be overwritten" + ) + annotation_paths = get_annotation_paths( + folder_path, from_s3_bucket, recursive_subfolders + ) + logger.info( + f"Uploading {len(annotation_paths)} annotations from {folder_path} to the project {project_folder_name}." + ) + response = self.controller.upload_annotations_from_folder( + project_name=project_name, + folder_name=folder_name, + annotation_paths=annotation_paths, # noqa: E203 + client_s3_bucket=from_s3_bucket, + folder_path=folder_path, + is_pre_annotations=True, + ) + if response.errors: + raise AppException(response.errors) + return response.data + + def upload_image_annotations( + self, + project: Union[NotEmptyStr, dict], + image_name: str, + annotation_json: Union[str, Path, dict], + mask: Optional[Union[str, Path, bytes]] = None, + verbose: Optional[StrictBool] = True, + ): + """Upload annotations from JSON (also mask for pixel annotations) + to the image. 
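A minimal sketch for upload_annotations_from_folder_to_project above, assuming an authenticated SAClient and that the referenced images already exist on the platform; project and path names are illustrative.

# Illustrative sketch: upload SuperAnnotate-format JSON annotations that sit
# next to previously exported images (made-up project and path).
from superannotate import SAClient

sa = SAClient()

uploaded, failed, missing = sa.upload_annotations_from_folder_to_project(
    project="Example Project 1/batch_1",
    folder_path="./export/batch_1",
    recursive_subfolders=False,
)
print(f"{len(uploaded)} uploaded, {len(missing)} had no matching image")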
- :param project: project name or folder path (e.g., "project1/folder1") - :type project: str + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param image_name: image name + :type image_name: str + :param annotation_json: annotations in SuperAnnotate format JSON dict or path to JSON file + :type annotation_json: dict or Path-like (str or Path) + :param mask: BytesIO object or filepath to mask annotation for pixel projects in SuperAnnotate format + :type mask: BytesIO or Path-like (str or Path) + """ - :param image_name: image name - :type image_name: str + project_name, folder_name = extract_project_folder(project) - :param annotation_json: annotations in SuperAnnotate format JSON dict or path to JSON file - :type annotation_json: dict or Path-like (str or Path) + project = self.controller.get_project_metadata(project_name).data + if project["project"].type in [ + constants.ProjectType.VIDEO.value, + constants.ProjectType.DOCUMENT.value, + ]: + raise AppException(LIMITED_FUNCTIONS[project["project"].type]) + + if not mask: + if not isinstance(annotation_json, dict): + mask_path = str(annotation_json).replace("___pixel.json", "___save.png") + else: + mask_path = f"{image_name}___save.png" + if os.path.exists(mask_path): + mask = open(mask_path, "rb").read() + elif isinstance(mask, str) or isinstance(mask, Path): + if os.path.exists(mask): + mask = open(mask, "rb").read() - :param verbose: provide detailed log - :type verbose: bool + if not isinstance(annotation_json, dict): + if verbose: + logger.info("Uploading annotations from %s.", annotation_json) + annotation_json = json.load(open(annotation_json)) + response = self.controller.upload_image_annotations( + project_name=project_name, + folder_name=folder_name, + image_name=image_name, + annotations=annotation_json, + mask=mask, + verbose=verbose, + ) + if response.errors and not response.errors == constants.INVALID_JSON_MESSAGE: + raise AppException(response.errors) - :param mask: BytesIO object or filepath to mask annotation for pixel projects in SuperAnnotate format - :type mask: BytesIO or Path-like (str or Path) - """ + def download_model(self, model: MLModel, output_dir: Union[str, Path]): + """Downloads the neural network and related files + which are the .pth/pkl. 
.json, .yaml, classes_mapper.json + + :param model: the model that needs to be downloaded + :type model: dict + :param output_dir: the directory in which the files will be saved + :type output_dir: str + :return: the metadata of the model + :rtype: dict + """ + res = self.controller.download_ml_model( + model_data=model.dict(), download_path=output_dir + ) + if res.errors: + logger.error("\n".join([str(error) for error in res.errors])) + else: + return BaseSerializer(res.data).serialize() + + def benchmark( + self, + project: Union[NotEmptyStr, dict], + gt_folder: str, + folder_names: List[NotEmptyStr], + export_root: Optional[Union[str, Path]] = None, + image_list=None, + annot_type: Optional[AnnotationType] = "bbox", + show_plots=False, + ): + """Computes benchmark score for each instance of given images that are present both gt_project_name project and projects in folder_names list: + + :param project: project name or metadata of the project + :type project: str or dict + :param gt_folder: project folder name that contains the ground truth annotations + :type gt_folder: str + :param folder_names: list of folder names in the project for which the scores will be computed + :type folder_names: list of str + :param export_root: root export path of the projects + :type export_root: Path-like (str or Path) + :param image_list: List of image names from the projects list that must be used. If None, then all images from the projects list will be used. Default: None + :type image_list: list + :param annot_type: Type of annotation instances to consider. Available candidates are: ["bbox", "polygon", "point"] + :type annot_type: str + :param show_plots: If True, show plots based on results of consensus computation. Default: False + :type show_plots: bool + + :return: Pandas DateFrame with columns (creatorEmail, QA, imageName, instanceId, className, area, attribute, folderName, score) + :rtype: pandas DataFrame + """ + project_name = project + if isinstance(project, dict): + project_name = project["name"] + + project = self.controller.get_project_metadata(project_name).data + if project["project"].type in [ + constants.ProjectType.VIDEO.value, + constants.ProjectType.DOCUMENT.value, + ]: + raise AppException(LIMITED_FUNCTIONS[project["project"].type]) + + if not export_root: + with tempfile.TemporaryDirectory() as temp_dir: + response = self.controller.benchmark( + project_name=project_name, + ground_truth_folder_name=gt_folder, + folder_names=folder_names, + export_root=temp_dir, + image_list=image_list, + annot_type=annot_type, + show_plots=show_plots, + ) - project_name, folder_name = extract_project_folder(project) + else: + response = self.controller.benchmark( + project_name=project_name, + ground_truth_folder_name=gt_folder, + folder_names=folder_names, + export_root=export_root, + image_list=image_list, + annot_type=annot_type, + show_plots=show_plots, + ) + if response.errors: + raise AppException(response.errors) + return response.data + + def consensus( + self, + project: NotEmptyStr, + folder_names: List[NotEmptyStr], + export_root: Optional[Union[NotEmptyStr, Path]] = None, + image_list: Optional[List[NotEmptyStr]] = None, + annot_type: Optional[AnnotationType] = "bbox", + show_plots: Optional[StrictBool] = False, + ): + """Computes consensus score for each instance of given images that are present in at least 2 of the given projects: + + :param project: project name + :type project: str + :param folder_names: list of folder names in the project for which the scores will be computed + 
:type folder_names: list of str + :param export_root: root export path of the projects + :type export_root: Path-like (str or Path) + :param image_list: List of image names from the projects list that must be used. If None, then all images from the projects list will be used. Default: None + :type image_list: list + :param annot_type: Type of annotation instances to consider. Available candidates are: ["bbox", "polygon", "point"] + :type annot_type: str + :param show_plots: If True, show plots based on results of consensus computation. Default: False + :type show_plots: bool + + :return: Pandas DateFrame with columns (creatorEmail, QA, imageName, instanceId, className, area, attribute, folderName, score) + :rtype: pandas DataFrame + """ - project = Controller.get_default().get_project_metadata(project_name).data - if project["project"].type in [ - constances.ProjectType.VIDEO.value, - constances.ProjectType.DOCUMENT.value, - ]: - raise AppException(LIMITED_FUNCTIONS[project["project"].type]) + if export_root is None: + with tempfile.TemporaryDirectory() as temp_dir: + export_root = temp_dir + response = self.controller.consensus( + project_name=project, + folder_names=folder_names, + export_path=export_root, + image_list=image_list, + annot_type=annot_type, + show_plots=show_plots, + ) - if not mask: - if not isinstance(annotation_json, dict): - mask_path = str(annotation_json).replace("___pixel.json", "___save.png") else: - mask_path = f"{image_name}___save.png" - if os.path.exists(mask_path): - mask = open(mask_path, "rb").read() - elif isinstance(mask, str) or isinstance(mask, Path): - if os.path.exists(mask): - mask = open(mask, "rb").read() - - if not isinstance(annotation_json, dict): - if verbose: - logger.info("Uploading annotations from %s.", annotation_json) - annotation_json = json.load(open(annotation_json)) - response = Controller.get_default().upload_image_annotations( - project_name=project_name, - folder_name=folder_name, - image_name=image_name, - annotations=annotation_json, - mask=mask, - verbose=verbose, - ) - if response.errors and not response.errors == constances.INVALID_JSON_MESSAGE: - raise AppException(response.errors) - - -@Trackable -@validate_arguments -def download_model(model: MLModel, output_dir: Union[str, Path]): - """Downloads the neural network and related files - which are the .pth/pkl. 
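A usage sketch for the benchmark and consensus methods above, assuming an authenticated SAClient and a Vector or Pixel project; folder and project names are illustrative.

# Illustrative sketch: compare annotator folders against a ground-truth folder
# (benchmark) and against each other (consensus); both return pandas DataFrames
# with a per-instance "score" column, per the docstrings above.
from superannotate import SAClient

sa = SAClient()

benchmark_df = sa.benchmark(
    project="Example Project 1",
    gt_folder="ground_truth",
    folder_names=["annotator_1", "annotator_2"],
    annot_type="bbox",
)

consensus_df = sa.consensus(
    project="Example Project 1",
    folder_names=["annotator_1", "annotator_2"],
    annot_type="polygon",
)
print(benchmark_df["score"].mean(), consensus_df["score"].mean())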
.json, .yaml, classes_mapper.json - - :param model: the model that needs to be downloaded - :type model: dict - :param output_dir: the directiory in which the files will be saved - :type output_dir: str - :return: the metadata of the model - :rtype: dict - """ - res = Controller.get_default().download_ml_model( - model_data=model.dict(), download_path=output_dir - ) - if res.errors: - logger.error("\n".join([str(error) for error in res.errors])) - else: - return BaseSerializer(res.data).serialize() - - -@Trackable -@validate_arguments -def benchmark( - project: Union[NotEmptyStr, dict], - gt_folder: str, - folder_names: List[NotEmptyStr], - export_root: Optional[Union[str, Path]] = None, - image_list=None, - annot_type: Optional[AnnotationType] = "bbox", - show_plots=False, -): - """Computes benchmark score for each instance of given images that are present both gt_project_name project and projects in folder_names list: - - :param project: project name or metadata of the project - :type project: str or dict - :param gt_folder: project folder name that contains the ground truth annotations - :type gt_folder: str - :param folder_names: list of folder names in the project for which the scores will be computed - :type folder_names: list of str - :param export_root: root export path of the projects - :type export_root: Path-like (str or Path) - :param image_list: List of image names from the projects list that must be used. If None, then all images from the projects list will be used. Default: None - :type image_list: list - :param annot_type: Type of annotation instances to consider. Available candidates are: ["bbox", "polygon", "point"] - :type annot_type: str - :param show_plots: If True, show plots based on results of consensus computation. Default: False - :type show_plots: bool - - :return: Pandas DateFrame with columns (creatorEmail, QA, imageName, instanceId, className, area, attribute, folderName, score) - :rtype: pandas DataFrame - """ - project_name = project - if isinstance(project, dict): - project_name = project["name"] - - project = Controller.get_default().get_project_metadata(project_name).data - if project["project"].type in [ - constances.ProjectType.VIDEO.value, - constances.ProjectType.DOCUMENT.value, - ]: - raise AppException(LIMITED_FUNCTIONS[project["project"].type]) - - if not export_root: - with tempfile.TemporaryDirectory() as temp_dir: - response = Controller.get_default().benchmark( - project_name=project_name, - ground_truth_folder_name=gt_folder, + response = self.controller.consensus( + project_name=project, folder_names=folder_names, - export_root=temp_dir, + export_path=export_root, image_list=image_list, annot_type=annot_type, show_plots=show_plots, ) + if response.errors: + raise AppException(response.errors) + return response.data + + def run_prediction( + self, + project: Union[NotEmptyStr, dict], + images_list: List[NotEmptyStr], + model: Union[NotEmptyStr, dict], + ): + """This function runs smart prediction on given list of images from a given project using the neural network of your choice + + :param project: the project in which the target images are uploaded. 
+ :type project: str or dict
+ :param images_list: the list of image names on which smart prediction has to be run
+ :type images_list: list of str
+ :param model: the name of the model that should be used for running smart prediction
+ :type model: str or dict
+ :return: tuple of two lists, the images on which the prediction has succeeded and failed respectively
+ :rtype: tuple
+ """
+ project_name = None
+ folder_name = None
+ if isinstance(project, dict):
+ project_name = project["name"]
+ if isinstance(project, str):
+ project_name, folder_name = extract_project_folder(project)
+
+ model_name = model
+ if isinstance(model, dict):
+ model_name = model["name"]
+
+ response = self.controller.run_prediction(
+ project_name=project_name,
+ images_list=images_list,
+ model_name=model_name,
+ folder_name=folder_name,
+ )
+ if response.errors:
+ raise AppException(response.errors)
+ return response.data
+
+ def add_annotation_bbox_to_image(
+ self,
+ project: NotEmptyStr,
+ image_name: NotEmptyStr,
+ bbox: List[float],
+ annotation_class_name: NotEmptyStr,
+ annotation_class_attributes: Optional[List[dict]] = None,
+ error: Optional[StrictBool] = None,
+ ):
+ """Add a bounding box annotation to image annotations
+
+ annotation_class_attributes has the form
+ [ {"name" : "", "groupName" : ""}, ... ]
- else:
- response = Controller.get_default().benchmark(
+ :param project: project name or folder path (e.g., "project1/folder1")
+ :type project: str
+ :param image_name: image name
+ :type image_name: str
+ :param bbox: 4 element list of top-left x,y and bottom-right x, y coordinates
+ :type bbox: list of floats
+ :param annotation_class_name: annotation class name
+ :type annotation_class_name: str
+ :param annotation_class_attributes: list of annotation class attributes
+ :type annotation_class_attributes: list of 2 element dicts
+ :param error: if not None, marks annotation as error (True) or no-error (False)
+ :type error: bool
+ """
+ project_name, folder_name = extract_project_folder(project)
+ project = self.controller.get_project_metadata(project_name).data
+ if project["project"].type in [
+ constants.ProjectType.VIDEO.value,
+ constants.ProjectType.DOCUMENT.value,
+ ]:
+ raise AppException(LIMITED_FUNCTIONS[project["project"].type])
+ response = self.controller.get_annotations(
project_name=project_name,
- ground_truth_folder_name=gt_folder,
- folder_names=folder_names,
- export_root=export_root,
- image_list=image_list,
- annot_type=annot_type,
- show_plots=show_plots,
+ folder_name=folder_name,
+ item_names=[image_name],
+ logging=False,
)
if response.errors:
raise AppException(response.errors)
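A minimal sketch for run_prediction above, assuming an authenticated SAClient, images already on the platform and an available model; the project, image and model names are all made up.

# Illustrative sketch only; per the docstring above the call returns the
# images for which prediction succeeded and failed, respectively.
from superannotate import SAClient

sa = SAClient()

succeeded, failed = sa.run_prediction(
    project="Example Project 1/batch_1",
    images_list=["cats.jpg", "dogs.jpg"],
    model="example-segmentation-model",  # made-up model name
)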
Default: None - :type image_list: list - :param annot_type: Type of annotation instances to consider. Available candidates are: ["bbox", "polygon", "point"] - :type annot_type: str - :param show_plots: If True, show plots based on results of consensus computation. Default: False - :type show_plots: bool - - :return: Pandas DateFrame with columns (creatorEmail, QA, imageName, instanceId, className, area, attribute, folderName, score) - :rtype: pandas DataFrame - """ + if response.data: + annotations = response.data[0] + else: + annotations = {} + annotations = add_annotation_bbox_to_json( + annotations, + bbox, + annotation_class_name, + annotation_class_attributes, + error, + image_name, + ) - if export_root is None: - with tempfile.TemporaryDirectory() as temp_dir: - export_root = temp_dir - response = Controller.get_default().consensus( - project_name=project, - folder_names=folder_names, - export_path=export_root, - image_list=image_list, - annot_type=annot_type, - show_plots=show_plots, - ) + self.controller.upload_image_annotations( + project_name, folder_name, image_name, annotations + ) - else: - response = Controller.get_default().consensus( - project_name=project, - folder_names=folder_names, - export_path=export_root, - image_list=image_list, - annot_type=annot_type, - show_plots=show_plots, + def add_annotation_point_to_image( + self, + project: NotEmptyStr, + image_name: NotEmptyStr, + point: List[float], + annotation_class_name: NotEmptyStr, + annotation_class_attributes: Optional[List[dict]] = None, + error: Optional[StrictBool] = None, + ): + """Add a point annotation to image annotations + + annotation_class_attributes has the form [ {"name" : "", "groupName" : ""}, ... ] + + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param image_name: image name + :type image_name: str + :param point: [x,y] list of coordinates + :type point: list of floats + :param annotation_class_name: annotation class name + :type annotation_class_name: str + :param annotation_class_attributes: list of annotation class attributes + :type annotation_class_attributes: list of 2 element dicts + :param error: if not None, marks annotation as error (True) or no-error (False) + :type error: bool + """ + project_name, folder_name = extract_project_folder(project) + project = self.controller.get_project_metadata(project_name).data + if project["project"].type in [ + constants.ProjectType.VIDEO.value, + constants.ProjectType.DOCUMENT.value, + ]: + raise AppException(LIMITED_FUNCTIONS[project["project"].type]) + response = self.controller.get_annotations( + project_name=project_name, + folder_name=folder_name, + item_names=[image_name], + logging=False, ) if response.errors: raise AppException(response.errors) - return response.data - - -@Trackable -@validate_arguments -def run_prediction( - project: Union[NotEmptyStr, dict], - images_list: List[NotEmptyStr], - model: Union[NotEmptyStr, dict], -): - """This function runs smart prediction on given list of images from a given project using the neural network of your choice - - :param project: the project in which the target images are uploaded. 
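A sketch for add_annotation_bbox_to_image above, assuming an authenticated SAClient, a Vector or Pixel project and an existing annotation class; names and coordinates are illustrative. The point and comment variants follow the same pattern.

# Illustrative sketch: append one bounding box to an image's annotations.
from superannotate import SAClient

sa = SAClient()

sa.add_annotation_bbox_to_image(
    project="Example Project 1/batch_1",
    image_name="cats.jpg",
    bbox=[10.0, 10.0, 120.0, 90.0],  # top-left x, y and bottom-right x, y
    annotation_class_name="Vehicle",
)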
- :type project: str or dict - :param images_list: the list of image names on which smart prediction has to be run - :type images_list: list of str - :param model: the name of the model that should be used for running smart prediction - :type model: str or dict - :return: tupe of two lists, list of images on which the prediction has succeded and failed respectively - :rtype: tuple - """ - project_name = None - folder_name = None - if isinstance(project, dict): - project_name = project["name"] - if isinstance(project, str): + if response.data: + annotations = response.data[0] + else: + annotations = {} + annotations = add_annotation_point_to_json( + annotations, + point, + annotation_class_name, + image_name, + annotation_class_attributes, + error, + ) + self.controller.upload_image_annotations( + project_name, folder_name, image_name, annotations + ) + + def add_annotation_comment_to_image( + self, + project: NotEmptyStr, + image_name: NotEmptyStr, + comment_text: NotEmptyStr, + comment_coords: List[float], + comment_author: EmailStr, + resolved: Optional[StrictBool] = False, + ): + """Add a comment to SuperAnnotate format annotation JSON + + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param image_name: image name + :type image_name: str + :param comment_text: comment text + :type comment_text: str + :param comment_coords: [x, y] coords + :type comment_coords: list + :param comment_author: comment author email + :type comment_author: str + :param resolved: comment resolve status + :type resolved: bool + """ project_name, folder_name = extract_project_folder(project) + project = self.controller.get_project_metadata(project_name).data + if project["project"].type in [ + constants.ProjectType.VIDEO.value, + constants.ProjectType.DOCUMENT.value, + ]: + raise AppException(LIMITED_FUNCTIONS[project["project"].type]) + response = self.controller.get_annotations( + project_name=project_name, + folder_name=folder_name, + item_names=[image_name], + logging=False, + ) + if response.errors: + raise AppException(response.errors) + if response.data: + annotations = response.data[0] + else: + annotations = {} + annotations = add_annotation_comment_to_json( + annotations, + comment_text, + comment_coords, + comment_author, + resolved=resolved, + image_name=image_name, + ) + self.controller.upload_image_annotations( + project_name, folder_name, image_name, annotations + ) - model_name = model - if isinstance(model, dict): - model_name = model["name"] - - response = Controller.get_default().run_prediction( - project_name=project_name, - images_list=images_list, - model_name=model_name, - folder_name=folder_name, - ) - if response.errors: - raise AppException(response.errors) - return response.data - - -@Trackable -@validate_arguments -def add_annotation_bbox_to_image( - project: NotEmptyStr, - image_name: NotEmptyStr, - bbox: List[float], - annotation_class_name: NotEmptyStr, - annotation_class_attributes: Optional[List[dict]] = None, - error: Optional[StrictBool] = None, -): - """Add a bounding box annotation to image annotations - - annotation_class_attributes has the form - [ {"name" : "" }, "groupName" : ""} ], ... 
] - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param image_name: image name - :type image_name: str - :param bbox: 4 element list of top-left x,y and bottom-right x, y coordinates - :type bbox: list of floats - :param annotation_class_name: annotation class name - :type annotation_class_name: str - :param annotation_class_attributes: list of annotation class attributes - :type annotation_class_attributes: list of 2 element dicts - :param error: if not None, marks annotation as error (True) or no-error (False) - :type error: bool - """ - project_name, folder_name = extract_project_folder(project) - project = Controller.get_default().get_project_metadata(project_name).data - if project["project"].type in [ - constances.ProjectType.VIDEO.value, - constances.ProjectType.DOCUMENT.value, - ]: - raise AppException(LIMITED_FUNCTIONS[project["project"].type]) - response = Controller.get_default().get_annotations( - project_name=project_name, - folder_name=folder_name, - item_names=[image_name], - logging=False, - ) - if response.errors: - raise AppException(response.errors) - if response.data: - annotations = response.data[0] - else: - annotations = {} - annotations = add_annotation_bbox_to_json( - annotations, - bbox, - annotation_class_name, - annotation_class_attributes, - error, - image_name, - ) - - Controller.get_default().upload_image_annotations( - project_name, folder_name, image_name, annotations - ) - - -@Trackable -@validate_arguments -def add_annotation_point_to_image( - project: NotEmptyStr, - image_name: NotEmptyStr, - point: List[float], - annotation_class_name: NotEmptyStr, - annotation_class_attributes: Optional[List[dict]] = None, - error: Optional[StrictBool] = None, -): - """Add a point annotation to image annotations - - annotation_class_attributes has the form [ {"name" : "", "groupName" : ""}, ... 
] - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param image_name: image name - :type image_name: str - :param point: [x,y] list of coordinates - :type point: list of floats - :param annotation_class_name: annotation class name - :type annotation_class_name: str - :param annotation_class_attributes: list of annotation class attributes - :type annotation_class_attributes: list of 2 element dicts - :param error: if not None, marks annotation as error (True) or no-error (False) - :type error: bool - """ - project_name, folder_name = extract_project_folder(project) - project = Controller.get_default().get_project_metadata(project_name).data - if project["project"].type in [ - constances.ProjectType.VIDEO.value, - constances.ProjectType.DOCUMENT.value, - ]: - raise AppException(LIMITED_FUNCTIONS[project["project"].type]) - response = Controller.get_default().get_annotations( - project_name=project_name, - folder_name=folder_name, - item_names=[image_name], - logging=False, - ) - if response.errors: - raise AppException(response.errors) - if response.data: - annotations = response.data[0] - else: - annotations = {} - annotations = add_annotation_point_to_json( - annotations, - point, - annotation_class_name, - image_name, - annotation_class_attributes, - error, - ) - Controller.get_default().upload_image_annotations( - project_name, folder_name, image_name, annotations - ) - - -@Trackable -@validate_arguments -def add_annotation_comment_to_image( - project: NotEmptyStr, - image_name: NotEmptyStr, - comment_text: NotEmptyStr, - comment_coords: List[float], - comment_author: EmailStr, - resolved: Optional[StrictBool] = False, -): - """Add a comment to SuperAnnotate format annotation JSON - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param image_name: image name - :type image_name: str - :param comment_text: comment text - :type comment_text: str - :param comment_coords: [x, y] coords - :type comment_coords: list - :param comment_author: comment author email - :type comment_author: str - :param resolved: comment resolve status - :type resolved: bool - """ - project_name, folder_name = extract_project_folder(project) - project = Controller.get_default().get_project_metadata(project_name).data - if project["project"].type in [ - constances.ProjectType.VIDEO.value, - constances.ProjectType.DOCUMENT.value, - ]: - raise AppException(LIMITED_FUNCTIONS[project["project"].type]) - response = Controller.get_default().get_annotations( - project_name=project_name, - folder_name=folder_name, - item_names=[image_name], - logging=False, - ) - if response.errors: - raise AppException(response.errors) - if response.data: - annotations = response.data[0] - else: - annotations = {} - annotations = add_annotation_comment_to_json( - annotations, - comment_text, - comment_coords, - comment_author, - resolved=resolved, - image_name=image_name, - ) - Controller.get_default().upload_image_annotations( - project_name, folder_name, image_name, annotations - ) - - -@Trackable -@validate_arguments -def upload_image_to_project( - project: NotEmptyStr, - img, - image_name: Optional[NotEmptyStr] = None, - annotation_status: Optional[AnnotationStatuses] = "NotStarted", - from_s3_bucket=None, - image_quality_in_editor: Optional[NotEmptyStr] = None, -): - """Uploads image (io.BytesIO() or filepath to image) to project. - Sets status of the uploaded image to set_status if it is not None. 
- - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param img: image to upload - :type img: io.BytesIO() or Path-like (str or Path) - :param image_name: image name to set on platform. If None and img is filepath, - image name will be set to filename of the path - :type image_name: str - :param annotation_status: value to set the annotation statuses of the uploaded image NotStarted InProgress QualityCheck Returned Completed Skipped - :type annotation_status: str - :param from_s3_bucket: AWS S3 bucket to use. If None then folder_path is in local filesystem - :type from_s3_bucket: str - :param image_quality_in_editor: image quality be seen in SuperAnnotate web annotation editor. - Can be either "compressed" or "original". If None then the default value in project settings will be used. - :type image_quality_in_editor: str - """ - project_name, folder_name = extract_project_folder(project) - - response = Controller.get_default().upload_image_to_project( - project_name=project_name, - folder_name=folder_name, - image_name=image_name, - image=img, - annotation_status=annotation_status, - from_s3_bucket=from_s3_bucket, - image_quality_in_editor=image_quality_in_editor, - ) - if response.errors: - raise AppException(response.errors) - - -@Trackable -@validate_arguments -def search_models( - name: Optional[NotEmptyStr] = None, - type_: Optional[NotEmptyStr] = None, - project_id: Optional[int] = None, - task: Optional[NotEmptyStr] = None, - include_global: Optional[StrictBool] = True, -): - """Search for ML models. - - :param name: search string - :type name: str - - :param type_: ml model type string - :type type_: str - - :param project_id: project id - :type project_id: int - - :param task: training task - :type task: str - - :param include_global: include global ml models - :type include_global: bool - - :return: ml model metadata - :rtype: list of dicts - """ - res = Controller.get_default().search_models( - name=name, - model_type=type_, - project_id=project_id, - task=task, - include_global=include_global, - ) - return res.data - - -@Trackable -@validate_arguments -def upload_images_to_project( - project: NotEmptyStr, - img_paths: List[NotEmptyStr], - annotation_status: Optional[AnnotationStatuses] = "NotStarted", - from_s3_bucket=None, - image_quality_in_editor: Optional[ImageQualityChoices] = None, -): - """Uploads all images given in list of path objects in img_paths to the project. - Sets status of all the uploaded images to set_status if it is not None. - - If an image with existing name already exists in the project it won't be uploaded, - and its path will be appended to the third member of return value of this - function. - - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param img_paths: list of Path-like (str or Path) objects to upload - :type img_paths: list - :param annotation_status: value to set the annotation statuses of the uploaded images NotStarted InProgress QualityCheck Returned Completed Skipped - :type annotation_status: str - :param from_s3_bucket: AWS S3 bucket to use. If None then folder_path is in local filesystem - :type from_s3_bucket: str - :param image_quality_in_editor: image quality be seen in SuperAnnotate web annotation editor. - Can be either "compressed" or "original". If None then the default value in project settings will be used. 
- :type image_quality_in_editor: str - - :return: uploaded, could-not-upload, existing-images filepaths - :rtype: tuple (3 members) of list of strs - """ - project_name, folder_name = extract_project_folder(project) - - use_case = Controller.get_default().upload_images_to_project( - project_name=project_name, - folder_name=folder_name, - paths=img_paths, - annotation_status=annotation_status, - image_quality_in_editor=image_quality_in_editor, - from_s3_bucket=from_s3_bucket, - ) - - images_to_upload, duplicates = use_case.images_to_upload - if len(duplicates): - logger.warning( - "%s already existing images found that won't be uploaded.", len(duplicates) - ) - logger.info(f"Uploading {len(images_to_upload)} images to project {project}.") - uploaded, failed_images, duplications = [], [], duplicates - if not images_to_upload: - return uploaded, failed_images, duplications - if use_case.is_valid(): - with tqdm(total=len(images_to_upload), desc="Uploading images") as progress_bar: - for _ in use_case.execute(): - progress_bar.update(1) - uploaded, failed_images, duplications = use_case.data - if duplications: - logger.info(f"Duplicated images {', '.join(duplications)}") - return uploaded, failed_images, duplications - raise AppException(use_case.response.errors) - - -@Trackable -@validate_arguments -def aggregate_annotations_as_df( - project_root: Union[NotEmptyStr, Path], - project_type: ProjectTypes, - folder_names: Optional[List[Union[Path, NotEmptyStr]]] = None, -): - """Aggregate annotations as pandas dataframe from project root. - - :param project_root: the export path of the project - :type project_root: Path-like (str or Path) - - :param project_type: the project type, Vector/Pixel, Video or Document - :type project_type: str - - :param folder_names: Aggregate the specified folders from project_root. - If None aggregate all folders in the project_root - :type folder_names: list of Pathlike (str or Path) objects - - :return: DataFrame on annotations - :rtype: pandas DataFrame - """ - if project_type in ( - constances.ProjectType.VECTOR.name, - constances.ProjectType.PIXEL.name, + def upload_image_to_project( + self, + project: NotEmptyStr, + img, + image_name: Optional[NotEmptyStr] = None, + annotation_status: Optional[AnnotationStatuses] = "NotStarted", + from_s3_bucket=None, + image_quality_in_editor: Optional[NotEmptyStr] = None, ): - from superannotate.lib.app.analytics.common import ( - aggregate_image_annotations_as_df, + """Uploads image (io.BytesIO() or filepath to image) to project. + Sets status of the uploaded image to set_status if it is not None. + + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param img: image to upload + :type img: io.BytesIO() or Path-like (str or Path) + :param image_name: image name to set on platform. If None and img is filepath, + image name will be set to filename of the path + :type image_name: str + :param annotation_status: value to set the annotation statuses of the uploaded image NotStarted InProgress QualityCheck Returned Completed Skipped + :type annotation_status: str + :param from_s3_bucket: AWS S3 bucket to use. If None then folder_path is in local filesystem + :type from_s3_bucket: str + :param image_quality_in_editor: image quality be seen in SuperAnnotate web annotation editor. + Can be either "compressed" or "original". If None then the default value in project settings will be used. 
+ :type image_quality_in_editor: str + """ + project_name, folder_name = extract_project_folder(project) + + response = self.controller.upload_image_to_project( + project_name=project_name, + folder_name=folder_name, + image_name=image_name, + image=img, + annotation_status=annotation_status, + from_s3_bucket=from_s3_bucket, + image_quality_in_editor=image_quality_in_editor, ) + if response.errors: + raise AppException(response.errors) - return aggregate_image_annotations_as_df( - project_root=project_root, - include_classes_wo_annotations=False, - include_comments=True, - include_tags=True, - folder_names=folder_names, + def search_models( + self, + name: Optional[NotEmptyStr] = None, + type_: Optional[NotEmptyStr] = None, + project_id: Optional[int] = None, + task: Optional[NotEmptyStr] = None, + include_global: Optional[StrictBool] = True, + ): + """Search for ML models. + + :param name: search string + :type name: str + :param type_: ml model type string + :type type_: str + :param project_id: project id + :type project_id: int + :param task: training task + :type task: str + :param include_global: include global ml models + :type include_global: bool + + :return: ml model metadata + :rtype: list of dicts + """ + res = self.controller.search_models( + name=name, + model_type=type_, + project_id=project_id, + task=task, + include_global=include_global, ) - elif project_type in ( - constances.ProjectType.VIDEO.name, - constances.ProjectType.DOCUMENT.name, + return res.data + + def upload_images_to_project( + self, + project: NotEmptyStr, + img_paths: List[NotEmptyStr], + annotation_status: Optional[AnnotationStatuses] = "NotStarted", + from_s3_bucket=None, + image_quality_in_editor: Optional[ImageQualityChoices] = None, ): - from superannotate.lib.app.analytics.aggregators import DataAggregator + """Uploads all images given in list of path objects in img_paths to the project. + Sets status of all the uploaded images to set_status if it is not None. - return DataAggregator( - project_type=project_type, - project_root=project_root, - folder_names=folder_names, - ).aggregate_annotations_as_df() + If an image with existing name already exists in the project it won't be uploaded, + and its path will be appended to the third member of return value of this + function. + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param img_paths: list of Path-like (str or Path) objects to upload + :type img_paths: list + :param annotation_status: value to set the annotation statuses of the uploaded images NotStarted InProgress QualityCheck Returned Completed Skipped + :type annotation_status: str + :param from_s3_bucket: AWS S3 bucket to use. If None then folder_path is in local filesystem + :type from_s3_bucket: str + :param image_quality_in_editor: image quality be seen in SuperAnnotate web annotation editor. + Can be either "compressed" or "original". If None then the default value in project settings will be used. + :type image_quality_in_editor: str + + :return: uploaded, could-not-upload, existing-images filepaths + :rtype: tuple (3 members) of list of strs + """ + project_name, folder_name = extract_project_folder(project) -@Trackable -@validate_arguments -def delete_annotations( - project: NotEmptyStr, item_names: Optional[List[NotEmptyStr]] = None -): - """ - Delete image annotations from a given list of images. 
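Editor's note: as a usage sketch for the two methods converted to SAClient in the hunk above (upload_image_to_project and search_models). The client setup, project, folder, and file names below are placeholders, not part of this change; the model type string is likewise an assumption.

from superannotate import SAClient

sa = SAClient()  # assumes credentials are already configured

# Upload a single image (filepath or io.BytesIO) into a project folder.
sa.upload_image_to_project(
    "Medical Images/batch_1",            # "project" or "project/folder" path
    "./data/scan_001.jpg",
    annotation_status="InProgress",
    image_quality_in_editor="original",
)

# Search the team's ML models, including global ones (type string is a placeholder).
models = sa.search_models(type_="object_detection", include_global=True)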
+ use_case = self.controller.upload_images_to_project( + project_name=project_name, + folder_name=folder_name, + paths=img_paths, + annotation_status=annotation_status, + image_quality_in_editor=image_quality_in_editor, + from_s3_bucket=from_s3_bucket, + ) - :param project: project name or folder path (e.g., "project1/folder1") - :type project: str - :param item_names: item names. If None, all item annotations from a given project/folder will be deleted. - :type item_names: list of strs - """ + images_to_upload, duplicates = use_case.images_to_upload + if len(duplicates): + logger.warning( + "%s already existing images found that won't be uploaded.", + len(duplicates), + ) + logger.info(f"Uploading {len(images_to_upload)} images to project {project}.") + uploaded, failed_images, duplications = [], [], duplicates + if not images_to_upload: + return uploaded, failed_images, duplications + if use_case.is_valid(): + with tqdm( + total=len(images_to_upload), desc="Uploading images" + ) as progress_bar: + for _ in use_case.execute(): + progress_bar.update(1) + uploaded, failed_images, duplications = use_case.data + if duplications: + logger.info(f"Duplicated images {', '.join(duplications)}") + return uploaded, failed_images, duplications + raise AppException(use_case.response.errors) + + @staticmethod + def aggregate_annotations_as_df( + project_root: Union[NotEmptyStr, Path], + project_type: ProjectTypes, + folder_names: Optional[List[Union[Path, NotEmptyStr]]] = None, + ): + """Aggregate annotations as pandas dataframe from project root. + + :param project_root: the export path of the project + :type project_root: Path-like (str or Path) + + :param project_type: the project type, Vector/Pixel, Video or Document + :type project_type: str + + :param folder_names: Aggregate the specified folders from project_root. + If None aggregate all folders in the project_root + :type folder_names: list of Pathlike (str or Path) objects + + :return: DataFrame on annotations + :rtype: pandas DataFrame + """ + if project_type in ( + constants.ProjectType.VECTOR.name, + constants.ProjectType.PIXEL.name, + ): + from superannotate.lib.app.analytics.common import ( + aggregate_image_annotations_as_df, + ) - project_name, folder_name = extract_project_folder(project) + return aggregate_image_annotations_as_df( + project_root=project_root, + include_classes_wo_annotations=False, + include_comments=True, + include_tags=True, + folder_names=folder_names, + ) + elif project_type in ( + constants.ProjectType.VIDEO.name, + constants.ProjectType.DOCUMENT.name, + ): + from superannotate.lib.app.analytics.aggregators import DataAggregator + + return DataAggregator( + project_type=project_type, + project_root=project_root, + folder_names=folder_names, + ).aggregate_annotations_as_df() + + def delete_annotations( + self, project: NotEmptyStr, item_names: Optional[List[NotEmptyStr]] = None + ): + """ + Delete item annotations from a given list of items. + + :param project: project name or folder path (e.g., "project1/folder1") + :type project: str + :param item_names: image names. If None, all image annotations from a given project/folder will be deleted. 
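Editor's note: a minimal sketch of the bulk upload and the offline DataFrame aggregation shown above, assuming the same authenticated `sa` client; paths and names are placeholders.

# Bulk upload returns (uploaded, could-not-upload, already-existing) path lists.
uploaded, failed, skipped = sa.upload_images_to_project(
    "Medical Images/batch_1",
    ["./data/scan_002.png", "./data/scan_003.png"],
    annotation_status="QualityCheck",
)

# aggregate_annotations_as_df is a staticmethod and works on a local export directory.
df = sa.aggregate_annotations_as_df("./exports/medical_images", "Vector")
print(df.head())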
+ :type item_names: list of strs + """ - response = Controller.get_default().delete_annotations( - project_name=project_name, folder_name=folder_name, item_names=item_names - ) - if response.errors: - raise AppException(response.errors) + project_name, folder_name = extract_project_folder(project) + response = self.controller.delete_annotations( + project_name=project_name, folder_name=folder_name, item_names=item_names + ) + if response.errors: + raise AppException(response.errors) -@Trackable -@validate_arguments -def validate_annotations( - project_type: ProjectTypes, annotations_json: Union[NotEmptyStr, Path] -): - """Validates given annotation JSON. + def validate_annotations( + self, project_type: ProjectTypes, annotations_json: Union[NotEmptyStr, Path] + ): + """Validates given annotation JSON. :param project_type: The project type Vector, Pixel, Video or Document :type project_type: str @@ -2334,519 +2224,520 @@ def validate_annotations( :return: The success of the validation :rtype: bool """ - with open(annotations_json) as file: - annotation_data = json.loads(file.read()) - response = Controller.validate_annotations( - project_type, annotation_data, allow_extra=False + with open(annotations_json) as file: + annotation_data = json.loads(file.read()) + response = Controller.validate_annotations( + project_type, annotation_data, allow_extra=False + ) + if response.errors: + raise AppException(response.errors) + is_valid, _ = response.data + if is_valid: + return True + print(response.report) + return False + + def add_contributors_to_project( + self, + project: NotEmptyStr, + emails: conlist(EmailStr, min_items=1), + role: AnnotatorRole, + ) -> Tuple[List[str], List[str]]: + """Add contributors to project. + + :param project: project name + :type project: str + + :param emails: users email + :type emails: list + + :param role: user role to apply, one of Admin , Annotator , QA + :type role: str + + :return: lists of added, skipped contributors of the project + :rtype: tuple (2 members) of lists of strs + """ + response = self.controller.add_contributors_to_project( + project_name=project, emails=emails, role=role ) if response.errors: raise AppException(response.errors) - is_valid, _ = response.data - if is_valid: - return True - print(response.report) - return False + return response.data + + def invite_contributors_to_team( + self, emails: conlist(EmailStr, min_items=1), admin: StrictBool = False + ) -> Tuple[List[str], List[str]]: + """Invites contributors to the team. + + :param emails: list of contributor emails + :type emails: list + + :param admin: enables admin privileges for the contributor + :type admin: bool + :return: lists of invited, skipped contributors of the team + :rtype: tuple (2 members) of lists of strs + """ + response = self.controller.invite_contributors_to_team( + emails=emails, set_admin=admin + ) + if response.errors: + raise AppException(response.errors) + return response.data -@Trackable -@validate_arguments -def add_contributors_to_project( - project: NotEmptyStr, emails: conlist(EmailStr, min_items=1), role: AnnotatorRole -) -> Tuple[List[str], List[str]]: - """Add contributors to project. + def get_annotations( + self, project: NotEmptyStr, items: Optional[List[NotEmptyStr]] = None + ): + """Returns annotations for the given list of items. - :param project: project name - :type project: str + :param project: project name or folder path (e.g., “project1/folder1”). 
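Editor's note: a sketch for the contributor- and annotation-management methods converted in this hunk. Emails, project, and item names are placeholders; `role` must be one of Admin, Annotator, QA as documented above.

added, skipped = sa.add_contributors_to_project(
    "Medical Images", ["annotator@example.com"], role="Annotator"
)
invited, skipped = sa.invite_contributors_to_team(["new.user@example.com"], admin=False)

# Delete annotations for two specific items in a folder.
sa.delete_annotations("Medical Images/batch_1", ["scan_002.png", "scan_003.png"])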
+ :type project: str - :param emails: users email - :type emails: list + :param items: item names. If None, all items in the project will be exported + :type items: list of strs - :param role: user role to apply, one of Admin , Annotator , QA - :type role: str + :return: list of annotations + :rtype: list of strs + """ + project_name, folder_name = extract_project_folder(project) + response = self.controller.get_annotations(project_name, folder_name, items) + if response.errors: + raise AppException(response.errors) + return response.data - :return: lists of added, skipped contributors of the project - :rtype: tuple (2 members) of lists of strs - """ - response = Controller.get_default().add_contributors_to_project( - project_name=project, emails=emails, role=role - ) - if response.errors: - raise AppException(response.errors) - return response.data + def get_annotations_per_frame( + self, project: NotEmptyStr, video: NotEmptyStr, fps: int = 1 + ): + """Returns per frame annotations for the given video. -@Trackable -@validate_arguments -def invite_contributors_to_team( - emails: conlist(EmailStr, min_items=1), admin: StrictBool = False -) -> Tuple[List[str], List[str]]: - """Invites contributors to the team. + :param project: project name or folder path (e.g., “project1/folder1”). + :type project: str - :param emails: list of contributor emails - :type emails: list + :param video: video name + :type video: str - :param admin: enables admin privileges for the contributor - :type admin: bool + :param fps: how many frames per second need to be extracted from the video. + Will extract 1 frame per second by default. + :type fps: int - :return: lists of invited, skipped contributors of the team - :rtype: tuple (2 members) of lists of strs - """ - response = Controller.get_default().invite_contributors_to_team( - emails=emails, set_admin=admin - ) - if response.errors: - raise AppException(response.errors) - return response.data + :return: list of annotation objects + :rtype: list of dicts + """ + project_name, folder_name = extract_project_folder(project) + response = self.controller.get_annotations_per_frame( + project_name, folder_name, video_name=video, fps=fps + ) + if response.errors: + raise AppException(response.errors) + return response.data + def upload_priority_scores(self, project: NotEmptyStr, scores: List[PriorityScore]): + """Upload priority scores for the given list of items. -@Trackable -@validate_arguments -def get_annotations(project: NotEmptyStr, items: Optional[List[NotEmptyStr]] = None): - """Returns annotations for the given list of items. + :param project: project name or folder path (e.g., “project1/folder1”) + :type project: str - :param project: project name or folder path (e.g., “project1/folder1”). - :type project: str + :param scores: list of score objects + :type scores: list of dicts - :param items: item names. 
If None all items in the project will be exported - :type items: list of strs + :return: lists of uploaded, skipped items + :rtype: tuple (2 members) of lists of strs + """ + project_name, folder_name = extract_project_folder(project) + project_folder_name = project + response = self.controller.upload_priority_scores( + project_name, folder_name, scores, project_folder_name + ) + if response.errors: + raise AppException(response.errors) + return response.data - :return: list of annotations - :rtype: list of strs - """ - project_name, folder_name = extract_project_folder(project) - response = Controller.get_default().get_annotations( - project_name, folder_name, items - ) - if response.errors: - raise AppException(response.errors) - return response.data + def get_integrations(self): + """Get all integrations per team + :return: metadata objects of all integrations of the team. + :rtype: list of dicts + """ + response = self.controller.get_integrations() + if response.errors: + raise AppException(response.errors) + integrations = response.data + return BaseSerializer.serialize_iterable(integrations, ("name", "type", "root")) + + def attach_items_from_integrated_storage( + self, + project: NotEmptyStr, + integration: Union[NotEmptyStr, IntegrationEntity], + folder_path: Optional[NotEmptyStr] = None, + ): + """Link images from integrated external storage to SuperAnnotate. -@Trackable -@validate_arguments -def get_annotations_per_frame(project: NotEmptyStr, video: NotEmptyStr, fps: int = 1): - """Returns per frame annotations for the given video. + :param project: project name or folder path where items should be attached (e.g., “project1/folder1”). + :type project: str + :param integration: existing integration name or metadata dict to pull items from. + Mandatory keys in integration metadata’s dict is “name”. + :type integration: str or dict - :param project: project name or folder path (e.g., “project1/folder1”). - :type project: str + :param folder_path: Points to an exact folder/directory within given storage. + If None, items are fetched from the root directory. + :type folder_path: str + """ + project_name, folder_name = extract_project_folder(project) + if isinstance(integration, str): + integration = IntegrationEntity(name=integration) + response = self.controller.attach_integrations( + project_name, folder_name, integration, folder_path + ) + if response.errors: + raise AppException(response.errors) - :param video: video name - :type video: str + def query( + self, + project: NotEmptyStr, + query: Optional[NotEmptyStr] = None, + subset: Optional[NotEmptyStr] = None, + ): + """Return items that satisfy the given query. + Query syntax should be in SuperAnnotate query language(https://doc.superannotate.com/docs/query-search-1). - :param fps: how many frames per second needs to be extracted from the video. - Will extract 1 frame per second by default. - :type fps: int + :param project: project name or folder path (e.g., “project1/folder1”) + :type project: str - :return: list of annotation objects - :rtype: list of dicts - """ - project_name, folder_name = extract_project_folder(project) - response = Controller.get_default().get_annotations_per_frame( - project_name, folder_name, video_name=video, fps=fps - ) - if response.errors: - raise AppException(response.errors) - return response.data + :param query: SAQuL query string. + :type query: str + :param subset: subset name. Allows you to query items in a specific subset. 
+ To return all the items in the specified subset, set the value of query param to None. + :type subset: str -@Trackable -@validate_arguments -def upload_priority_scores(project: NotEmptyStr, scores: List[PriorityScore]): - """Upload priority scores for the given list of items. + :return: queried items’ metadata list + :rtype: list of dicts + """ + project_name, folder_name = extract_project_folder(project) + response = self.controller.query_entities( + project_name, folder_name, query, subset + ) + if response.errors: + raise AppException(response.errors) + return BaseSerializer.serialize_iterable(response.data) - :param project: project name or folder path (e.g., “project1/folder1”) - :type project: str + def get_item_metadata( + self, + project: NotEmptyStr, + item_name: NotEmptyStr, + ): + """Returns item metadata - :param scores: list of score objects - :type scores: list of dicts + :param project: project name or folder path (e.g., “project1/folder1”) + :type project: str - :return: lists of uploaded, skipped items - :rtype: tuple (2 members) of lists of strs - """ - project_name, folder_name = extract_project_folder(project) - project_folder_name = project - response = Controller.get_default().upload_priority_scores( - project_name, folder_name, scores, project_folder_name - ) - if response.errors: - raise AppException(response.errors) - return response.data - - -@Trackable -@validate_arguments -def get_integrations(): - """Get all integrations per team - - :return: metadata objects of all integrations of the team. - :rtype: list of dicts - """ - response = Controller.get_default().get_integrations() - if response.errors: - raise AppException(response.errors) - integrations = response.data - return BaseSerializer.serialize_iterable(integrations, ("name", "type", "root")) - - -@Trackable -@validate_arguments -def attach_items_from_integrated_storage( - project: NotEmptyStr, - integration: Union[NotEmptyStr, IntegrationEntity], - folder_path: Optional[NotEmptyStr] = None, -): - """Link images from integrated external storage to SuperAnnotate. - - :param project: project name or folder path where items should be attached (e.g., “project1/folder1”). - :type project: str - - :param integration: existing integration name or metadata dict to pull items from. - Mandatory keys in integration metadata’s dict is “name”. - :type integration: str or dict - - :param folder_path: Points to an exact folder/directory within given storage. - If None, items are fetched from the root directory. - :type folder_path: str - """ - project_name, folder_name = extract_project_folder(project) - if isinstance(integration, str): - integration = IntegrationEntity(name=integration) - response = Controller.get_default().attach_integrations( - project_name, folder_name, integration, folder_path - ) - if response.errors: - raise AppException(response.errors) - - -@Trackable -@validate_arguments -def query(project: NotEmptyStr, query: Optional[NotEmptyStr]): - """Return items that satisfy the given query. - Query syntax should be in SuperAnnotate query language(https://doc.superannotate.com/docs/query-search-1). - - :param project: project name or folder path (e.g., “project1/folder1”) - :type project: str - - :param query: SAQuL query string. 
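Editor's note: the storage-integration and annotation-retrieval methods above could be exercised as follows. The integration name, folder path, item names, and the SAQuL string are illustrative placeholders only; consult the linked query-search documentation for the exact query syntax.

# List the team's integrations, then attach items from one of them.
integrations = sa.get_integrations()
sa.attach_items_from_integrated_storage(
    "Medical Images/batch_1", integration="my-s3-integration", folder_path="scans/2022"
)

# Fetch annotations for selected items and run a SAQuL query (query string is a placeholder).
annotations = sa.get_annotations("Medical Images/batch_1", ["scan_002.png"])
queried = sa.query("Medical Images", query="metadata(status = Completed)")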
- :type query: str - - :return: queried items’ metadata list - :rtype: list of dicts - """ - project_name, folder_name = extract_project_folder(project) - response = Controller.get_default().query_entities(project_name, folder_name, query) - if response.errors: - raise AppException(response.errors) - return BaseSerializer.serialize_iterable(response.data) + :type item_name: str + :return: metadata of item + :rtype: dict + """ + project_name, folder_name = extract_project_folder(project) + response = self.controller.get_item(project_name, folder_name, item_name) + if response.errors: + raise AppException(response.errors) + return BaseSerializer(response.data).serialize() + + def search_items( + self, + project: NotEmptyStr, + name_contains: NotEmptyStr = None, + annotation_status: Optional[AnnotationStatuses] = None, + annotator_email: Optional[NotEmptyStr] = None, + qa_email: Optional[NotEmptyStr] = None, + recursive: bool = False, + ): + """Search items by filtering criteria. -@Trackable -@validate_arguments -def get_item_metadata( - project: NotEmptyStr, item_name: NotEmptyStr, -): - """Returns item metadata - :param project: project name or folder path (e.g., “project1/folder1”) - :type project: str + :param project: project name or folder path (e.g., “project1/folder1”). + If recursive=True, then only the project name is required. + :type project: str - :param item_name: item name - :type item_name: str + :param name_contains: Returns those items, where the given string is found anywhere within an item’s name. + If None, all items are returned, in accordance with the recursive parameter. + :type name_contains: str - :return: metadata of item - :rtype: dict - """ - project_name, folder_name = extract_project_folder(project) - response = Controller.get_default().get_item(project_name, folder_name, item_name) - if response.errors: - raise AppException(response.errors) - return BaseSerializer(response.data).serialize() - - -@Trackable -@validate_arguments -def search_items( - project: NotEmptyStr, - name_contains: NotEmptyStr = None, - annotation_status: Optional[AnnotationStatuses] = None, - annotator_email: Optional[NotEmptyStr] = None, - qa_email: Optional[NotEmptyStr] = None, - recursive: bool = False, -): - """Search items by filtering criteria. - - - :param project: project name or folder path (e.g., “project1/folder1”). - :type project: str - - :param name_contains: Returns those items, where the given string is found anywhere within an item’s name. - If None, all items returned, in accordance with the recursive=False parameter. - :type name_contains: str - - :param annotation_status: if not None, filters items by annotation status. + :param annotation_status: if not None, filters items by annotation status. Values are: ♦ “NotStarted” \n ♦ “InProgress” \n ♦ “QualityCheck” \n ♦ “Returned” \n ♦ “Completed” \n - ♦ “Skipped” \n - :type annotation_status: str + ♦ “Skipped” \n + :type annotation_status: str - :param annotator_email: returns those items’ names that are assigned to the specified annotator. - If None, all items are returned. Strict equal. - :type annotator_email: str + :param annotator_email: returns those items’ names that are assigned to the specified annotator. + If None, all items are returned. Strict equal. + :type annotator_email: str - :param qa_email: returns those items’ names that are assigned to the specified QA. - If None, all items are returned. Strict equal. 
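Editor's note: a usage sketch for the new search_items method, filtering a placeholder project by name fragment and status.

items = sa.search_items(
    "Medical Images",
    name_contains="scan_",
    annotation_status="Completed",
    recursive=True,   # search the project root and all of its folders
)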
- :type qa_email: str + :param qa_email: returns those items’ names that are assigned to the specified QA. + If None, all items are returned. Strict equal. + :type qa_email: str - :param recursive: search in the project’s root and all of its folders. - If False search only in the project’s root or given directory. - :type recursive: bool + :param recursive: search in the project’s root and all of its folders. + If False search only in the project’s root or given directory. + :type recursive: bool - :return: items' metadata - :rtype: list of dicts - """ - project_name, folder_name = extract_project_folder(project) - response = Controller.get_default().list_items( - project_name, - folder_name, - name_contains=name_contains, - annotation_status=annotation_status, - annotator_email=annotator_email, - qa_email=qa_email, - recursive=recursive, - ) - if response.errors: - raise AppException(response.errors) - return BaseSerializer.serialize_iterable(response.data) - - -@Trackable -@validate_arguments -def attach_items( - project: Union[NotEmptyStr, dict], - attachments: AttachmentArg, - annotation_status: AnnotationStatuses = "NotStarted", -): - """Link items from external storage to SuperAnnotate using URLs. - - :param project: project name or folder path (e.g., “project1/folder1”) - :type project: str - - :param attachments: path to CSV file or list of dicts containing attachments URLs. - :type attachments: path-like (str or Path) or list of dicts - - :param annotation_status: value to set the annotation statuses of the linked items - “NotStarted” - “InProgress” - “QualityCheck” - “Returned” - “Completed” - “Skipped” - :type annotation_status: str - """ - attachments = attachments.data - project_name, folder_name = extract_project_folder(project) - if attachments and isinstance(attachments[0], AttachmentDict): - unique_attachments = set(attachments) - duplicate_attachments = [ - item - for item, count in collections.Counter(attachments).items() - if count > 1 - ] - else: - unique_attachments, duplicate_attachments = get_name_url_duplicated_from_csv( - attachments - ) - if duplicate_attachments: - logger.info("Dropping duplicates.") - unique_attachments = parse_obj_as(List[AttachmentEntity], unique_attachments) - uploaded, fails, duplicated = [], [], [] - if unique_attachments: - logger.info( - f"Attaching {len(unique_attachments)} file(s) to project {project}." + :return: items' metadata + :rtype: list of dicts + """ + project_name, folder_name = extract_project_folder(project) + response = self.controller.list_items( + project_name, + folder_name, + name_contains=name_contains, + annotation_status=annotation_status, + annotator_email=annotator_email, + qa_email=qa_email, + recursive=recursive, ) - response = Controller.get_default().attach_items( + if response.errors: + raise AppException(response.errors) + return BaseSerializer.serialize_iterable(response.data) + + def attach_items( + self, + project: Union[NotEmptyStr, dict], + attachments: AttachmentArg, + annotation_status: Optional[AnnotationStatuses] = "NotStarted", + ): + """Link items from external storage to SuperAnnotate using URLs. + + :param project: project name or folder path (e.g., “project1/folder1”) + :type project: str + + :param attachments: path to CSV file or list of dicts containing attachments URLs. 
+ :type attachments: path-like (str or Path) or list of dicts + + :param annotation_status: value to set the annotation statuses of the linked items + “NotStarted” + “InProgress” + “QualityCheck” + “Returned” + “Completed” + “Skipped” + :type annotation_status: str + """ + attachments = attachments.data + project_name, folder_name = extract_project_folder(project) + if attachments and isinstance(attachments[0], AttachmentDict): + unique_attachments = set(attachments) + duplicate_attachments = [ + item + for item, count in collections.Counter(attachments).items() + if count > 1 + ] + else: + ( + unique_attachments, + duplicate_attachments, + ) = get_name_url_duplicated_from_csv(attachments) + if duplicate_attachments: + logger.info("Dropping duplicates.") + unique_attachments = parse_obj_as(List[AttachmentEntity], unique_attachments) + uploaded, fails, duplicated = [], [], [] + if unique_attachments: + logger.info( + f"Attaching {len(unique_attachments)} file(s) to project {project}." + ) + response = self.controller.attach_items( + project_name=project_name, + folder_name=folder_name, + attachments=unique_attachments, + annotation_status=annotation_status, + ) + if response.errors: + raise AppException(response.errors) + uploaded, duplicated = response.data + uploaded = [i["name"] for i in uploaded] + fails = [ + attachment.name + for attachment in unique_attachments + if attachment.name not in uploaded and attachment.name not in duplicated + ] + return uploaded, fails, duplicated + + def copy_items( + self, + source: Union[NotEmptyStr, dict], + destination: Union[NotEmptyStr, dict], + items: Optional[List[NotEmptyStr]] = None, + include_annotations: Optional[StrictBool] = True, + ): + """Copy images in bulk between folders in a project + + :param source: project name or folder path to select items from (e.g., “project1/folder1”). + :type source: str + + :param destination: project name (root) or folder path to place copied items. + :type destination: str + + :param items: names of items to copy. If None, all items from the source directory will be copied. 
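Editor's note: attach_items links externally hosted files by URL. The sketch below uses placeholder names and URLs and assumes the list-of-dicts form of `attachments` with "name" and "url" keys; a CSV path works as well, per the docstring above.

uploaded, failed, duplicated = sa.attach_items(
    "Medical Images/batch_1",
    [{"name": "scan_101.png", "url": "https://example.com/scans/scan_101.png"}],
    annotation_status="NotStarted",
)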
+ :type items: list of str + + :param include_annotations: enables annotations copy + :type include_annotations: bool + + :return: list of skipped item names + :rtype: list of strs + """ + + project_name, source_folder = extract_project_folder(source) + + to_project_name, destination_folder = extract_project_folder(destination) + if project_name != to_project_name: + raise AppException("Source and destination projects should be the same") + + response = self.controller.copy_items( project_name=project_name, - folder_name=folder_name, - attachments=unique_attachments, - annotation_status=annotation_status, + from_folder=source_folder, + to_folder=destination_folder, + items=items, + include_annotations=include_annotations, ) if response.errors: raise AppException(response.errors) - uploaded, duplicated = response.data - uploaded = [i["name"] for i in uploaded] - fails = [ - attachment.name - for attachment in unique_attachments - if attachment.name not in uploaded and attachment.name not in duplicated - ] - return uploaded, fails, duplicated + return response.data -@Trackable -@validate_arguments -def copy_items( - source: Union[NotEmptyStr, dict], - destination: Union[NotEmptyStr, dict], - items: Optional[List[NotEmptyStr]] = None, - include_annotations: Optional[StrictBool] = True, -): - """Copy images in bulk between folders in a project + def move_items( + self, + source: Union[NotEmptyStr, dict], + destination: Union[NotEmptyStr, dict], + items: Optional[List[NotEmptyStr]] = None, + ): + """Move images in bulk between folders in a project - :param source: project name or folder path to select items from (e.g., “project1/folder1”). - :type source: str + :param source: project name or folder path to pick items from (e.g., “project1/folder1”). + :type source: str - :param destination: project name (root) or folder path to place copied items. - :type destination: str + :param destination: project name (root) or folder path to move items to. + :type destination: str - :param items: names of items to copy. If None, all items from the source directory will be copied. - :type items: list of str + :param items: names of items to move. If None, all items from the source directory will be moved. + :type items: list of str - :param include_annotations: enables annotations copy - :type include_annotations: bool + :return: list of skipped item names + :rtype: list of strs + """ - :return: list of skipped item names - :rtype: list of strs - """ + project_name, source_folder = extract_project_folder(source) + to_project_name, destination_folder = extract_project_folder(destination) + if project_name != to_project_name: + raise AppException("Source and destination projects should be the same") + response = self.controller.move_items( + project_name=project_name, + from_folder=source_folder, + to_folder=destination_folder, + items=items, + ) + if response.errors: + raise AppException(response.errors) + return response.data + + def set_annotation_statuses( + self, + project: Union[NotEmptyStr, dict], + annotation_status: AnnotationStatuses, + items: Optional[List[NotEmptyStr]] = None, + ): + """Sets annotation statuses of items + + :param project: project name or folder path (e.g., “project1/folder1”). + :type project: str - project_name, source_folder = extract_project_folder(source) + :param annotation_status: annotation status to set, should be one of. 
+ “NotStarted” + “InProgress” + “QualityCheck” + “Returned” + “Completed” + “Skipped” + :type annotation_status: str + + :param items: item names to set the mentioned status for. If None, all the items in the project will be used. + :type items: list of strs + """ - to_project_name, destination_folder = extract_project_folder(destination) - if project_name != to_project_name: - raise AppException("Source and destination projects should be the same") + project_name, folder_name = extract_project_folder(project) + response = self.controller.set_annotation_statuses( + project_name=project_name, + folder_name=folder_name, + annotation_status=annotation_status, + item_names=items, + ) + if response.errors: + raise AppException(response.errors) + else: + logger.info("Annotation statuses of items changed") + return response.data + + def download_annotations( + self, + project: Union[NotEmptyStr, dict], + path: Union[str, Path] = None, + items: Optional[List[NotEmptyStr]] = None, + recursive: bool = False, + callback: Callable = None, + ): + """Downloads annotation JSON files of the selected items to the local directory. - response = Controller.get_default().copy_items( - project_name=project_name, - from_folder=source_folder, - to_folder=destination_folder, - items=items, - include_annotations=include_annotations, - ) - if response.errors: - raise AppException(response.errors) + :param project: project name or folder path (e.g., “project1/folder1”). + :type project: str - return response.data + :param path: local directory path where the annotations will be downloaded. If none, the current directory is used. + :type path: Path-like (str or Path) + :param items: project name (root) or folder path to move items to. + :type items: list of str -@Trackable -@validate_arguments -def move_items( - source: Union[NotEmptyStr, dict], - destination: Union[NotEmptyStr, dict], - items: Optional[List[NotEmptyStr]] = None, -): - """Move images in bulk between folders in a project + :param recursive: download annotations from the project’s root and all of its folders with the preserved structure. + If False download only from the project’s root or given directory. + :type recursive: bool - :param source: project name or folder path to pick items from (e.g., “project1/folder1”). - :type source: str + :param callback: a function that allows you to modify each annotation’s dict before downloading. + The function receives each annotation as an argument and the returned value will be applied to the download. + :type callback: callable - :param destination: project name (root) or folder path to move items to. - :type destination: str + :return: local path of the downloaded annotations folder. + :rtype: str + """ + project_name, folder_name = extract_project_folder(project) + response = self.controller.download_annotations( + project_name=project_name, + folder_name=folder_name, + destination=path, + recursive=recursive, + item_names=items, + callback=callback, + ) + if response.errors: + raise AppException(response.errors) + return response.data - :param items: names of items to move. If None, all items from the source directory will be moved. 
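Editor's note: the bulk item operations converted above (copy, move, status change) combine like this; folder names are placeholders, and source and destination must belong to the same project.

skipped = sa.copy_items(
    "Medical Images/batch_1", "Medical Images/review", include_annotations=True
)
sa.move_items("Medical Images/batch_1", "Medical Images/archive")
sa.set_annotation_statuses("Medical Images/review", "QualityCheck", items=["scan_101.png"])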
- :type items: list of str + def get_subsets(self, project: Union[NotEmptyStr, dict]): + """Get Subsets - :return: list of skipped item names - :rtype: list of strs - """ + :param project: project name (e.g., “project1”) + :type project: str - project_name, source_folder = extract_project_folder(source) - to_project_name, destination_folder = extract_project_folder(destination) - if project_name != to_project_name: - raise AppException("Source and destination projects should be the same") - response = Controller.get_default().move_items( - project_name=project_name, - from_folder=source_folder, - to_folder=destination_folder, - items=items, - ) - if response.errors: - raise AppException(response.errors) - return response.data - - -@Trackable -@validate_arguments -def set_annotation_statuses( - project: Union[NotEmptyStr, dict], - annotation_status: AnnotationStatuses, - items: Optional[List[NotEmptyStr]] = None, -): - """Sets annotation statuses of items - - :param project: project name or folder path (e.g., “project1/folder1”). - :type project: str - - :param annotation_status: annotation status to set, should be one of. - “NotStarted” - “InProgress” - “QualityCheck” - “Returned” - “Completed” - “Skipped” - :type annotation_status: str - - :param items: item names to set the mentioned status for. If None, all the items in the project will be used. - :type items: str - """ + :return: subsets’ metadata + :rtype: list of dicts + """ + project_name, _ = extract_project_folder(project) - project_name, folder_name = extract_project_folder(project) - response = Controller.get_default().set_annotation_statuses( - project_name=project_name, - folder_name=folder_name, - annotation_status=annotation_status, - item_names=items, - ) - if response.errors: - raise AppException(response.errors) - return response.data - - -@Trackable -@validate_arguments -def download_annotations( - project: Union[NotEmptyStr, dict], - path: Union[str, Path] = None, - items: Optional[List[NotEmptyStr]] = None, - recursive: bool = False, - callback: Callable = None, -): - """Downloads annotation JSON files of the selected items to the local directory. - - :param project: project name or folder path (e.g., “project1/folder1”). - :type project: str - - :param path: local directory path where the annotations will be downloaded. If none, the current directory is used. - :type path: Path-like (str or Path) - - :param items: project name (root) or folder path to move items to. - :type items: list of str - - :param recursive: download annotations from the project’s root and all of its folders with the preserved structure. - If False download only from the project’s root or given directory. - :type recursive: bool - - :param callback: a function that allows you to modify each annotation’s dict before downloading. - The function receives each annotation as an argument and the returned value will be applied to the download. - :type callback: callable - - :return: local path of the downloaded annotations folder. 
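Editor's note: finally, a sketch for downloading annotations and listing subsets with the methods introduced in this hunk; the local path and project name are placeholders, and `callback` is omitted.

local_path = sa.download_annotations(
    "Medical Images/batch_1", path="./downloads", recursive=False
)
subsets = sa.get_subsets("Medical Images")   # e.g. [{"name": "golden-set"}]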
- :rtype: str - """ - project_name, folder_name = extract_project_folder(project) - response = Controller.get_default().download_annotations( - project_name=project_name, - folder_name=folder_name, - destination=path, - recursive=recursive, - item_names=items, - callback=callback, - ) - if response.errors: - raise AppException(response.errors) - return response.data + response = self.controller.list_subsets(project_name) + if response.errors: + raise AppException(response.errors) + return BaseSerializer.serialize_iterable(response.data, ["name"]) diff --git a/src/superannotate/lib/app/interface/types.py b/src/superannotate/lib/app/interface/types.py index cf0903d6b..780c88a83 100644 --- a/src/superannotate/lib/app/interface/types.py +++ b/src/superannotate/lib/app/interface/types.py @@ -14,6 +14,7 @@ from pydantic import BaseModel from pydantic import conlist from pydantic import constr +from pydantic import errors from pydantic import Extra from pydantic import Field from pydantic import parse_obj_as @@ -21,11 +22,23 @@ from pydantic import StrictStr from pydantic import validate_arguments as pydantic_validate_arguments from pydantic import ValidationError +from pydantic.errors import PydanticTypeError from pydantic.errors import StrRegexError NotEmptyStr = constr(strict=True, min_length=1) +class EnumMemberError(PydanticTypeError): + code = "enum" + + def __str__(self) -> str: + permitted = ", ".join(str(v.name) for v in self.enum_values) # type: ignore + return f"Available values are: {permitted}" + + +errors.EnumMemberError = EnumMemberError + + class EmailStr(StrictStr): @classmethod def validate(cls, value: Union[str]) -> Union[str]: @@ -170,9 +183,9 @@ def validate(cls, value: Union[str]) -> Union[str]: def validate_arguments(func): @wraps(func) - def wrapped(*args, **kwargs): + def wrapped(self, *args, **kwargs): try: - return pydantic_validate_arguments(func)(*args, **kwargs) + return pydantic_validate_arguments(func)(self, *args, **kwargs) except ValidationError as e: raise AppException(wrap_error(e)) diff --git a/src/superannotate/lib/app/mixp/decorators.py b/src/superannotate/lib/app/mixp/decorators.py deleted file mode 100644 index 8a0f86a64..000000000 --- a/src/superannotate/lib/app/mixp/decorators.py +++ /dev/null @@ -1,135 +0,0 @@ -import functools -import sys -from inspect import signature - -from lib import get_default_controller -from mixpanel import Mixpanel -from superannotate.logger import get_default_logger -from version import __version__ - -from .utils import parsers - -logger = get_default_logger() - - -def get_mp_instance() -> Mixpanel: - if "api.annotate.online" in get_default_controller()._backend_url: - return Mixpanel("ca95ed96f80e8ec3be791e2d3097cf51") - return Mixpanel("e741d4863e7e05b1a45833d01865ef0d") - - -def get_default(team_name, user_id, project_name=None): - return { - "SDK": True, - "Paid": True, - "Team": team_name, - "Team Owner": user_id, - "Project Name": project_name, - "Project Role": "Admin", - "Version": __version__, - } - - -class Trackable: - TEAM_DATA = None - INITIAL_EVENT = {"event_name": "SDK init", "properties": {}} - INITIAL_LOGGED = False - - def __init__(self, function, initial=False): - self.function = function - self._success = False - self._initial = initial - if initial: - self.track() - functools.update_wrapper(self, function) - - @staticmethod - def extract_arguments(function, *args, **kwargs) -> dict: - bound_arguments = signature(function).bind(*args, **kwargs) - bound_arguments.apply_defaults() - return 
dict(bound_arguments.arguments) - - @property - def team(self): - return get_default_controller().get_team() - - @staticmethod - def default_parser(function_name: str, kwargs: dict): - properties = {} - for key, value in kwargs: - if isinstance(value, (str, int, float, bool, str)): - properties[key] = value - elif isinstance(value, (list, set, tuple)): - properties[key] = len(value) - elif isinstance(value, dict): - properties[key] = value.keys() - elif hasattr(value, "__len__"): - properties[key] = len(value) - else: - properties[key] = str(value) - return {"event_name": function_name, "properties": properties} - - def track(self, *args, **kwargs): - try: - function_name = self.function.__name__ if self.function else "" - if self._initial: - data = self.INITIAL_EVENT - Trackable.INITIAL_LOGGED = True - self._success = True - else: - data = {} - arguments = self.extract_arguments(self.function, *args, **kwargs) - if hasattr(parsers, function_name): - try: - data = getattr(parsers, function_name)(**arguments) - except Exception: - pass - else: - data = self.default_parser(function_name, arguments) - event_name = data.get("event_name",) - properties = data.get("properties", {}) - team_data = self.team.data - user_id = team_data.creator_id - team_name = team_data.name - properties["Success"] = self._success - default = get_default( - team_name=team_name, - user_id=user_id, - project_name=properties.get("project_name", None), - ) - properties.pop("project_name", None) - properties = {**default, **properties} - - if "pytest" not in sys.modules: - get_mp_instance().track(user_id, event_name, properties) - except Exception: - pass - - def __call__(self, *args, **kwargs): - try: - controller = get_default_controller() - if controller: - self.__class__.TEAM_DATA = controller.get_team() - result = self.function(*args, **kwargs) - self._success = True - else: - raise Exception( - "SuperAnnotate config file not found." - " Please provide correct config file location to sa.init() or use " - "CLI's superannotate init to generate default location config file." 
- ) - except Exception as e: - self._success = False - logger.debug(str(e), exc_info=True) - raise e - else: - return result - finally: - try: - self.track(*args, **kwargs) - except Exception: - pass - - -if __name__ == "lib.app.mixp.decorators" and not Trackable.INITIAL_LOGGED: - Trackable(None, initial=True) diff --git a/src/superannotate/lib/app/mixp/utils/__init__.py b/src/superannotate/lib/app/mixp/utils/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/superannotate/lib/app/mixp/utils/parsers.py b/src/superannotate/lib/app/mixp/utils/parsers.py deleted file mode 100644 index 105b8d3b6..000000000 --- a/src/superannotate/lib/app/mixp/utils/parsers.py +++ /dev/null @@ -1,988 +0,0 @@ -import os -from pathlib import Path - -import lib.core as constances -from lib.app.helpers import extract_project_folder -from lib.core.entities import IntegrationEntity -from lib.core.enums import ProjectType -from lib.infrastructure.controller import Controller - - -def get_project_name(project): - project_name = "" - if isinstance(project, dict): - project_name = project["name"] - if isinstance(project, str): - if "/" in project: - project_name = project.split("/")[0] - else: - project_name = project - return project_name - - -def get_team_metadata(**kwargs): - return {"event_name": "get_team_metadata", "properties": {}} - - -def invite_contributors_to_team(**kwargs): - admin = kwargs.get("admin") - if not admin: - admin_value = False - else: - admin_value = admin - return { - "event_name": "invite_contributors_to_team", - "properties": {"Admin": admin_value}, - } - - -def search_team_contributors(**kwargs): - return { - "event_name": "search_team_contributors", - "properties": { - "Email": bool(kwargs.get("email")), - "Name": bool(kwargs.get("first_name")), - "Surname": bool(kwargs.get("last_name")), - }, - } - - -def search_projects(**kwargs): - project = kwargs.get("name") - status = kwargs.get("status") - return { - "event_name": "search_projects", - "properties": { - "Metadata": bool(kwargs.get("return_metadata")), - "project_name": get_project_name(project[0]) if project else None, - "status": status, - }, - } - - -def create_project(**kwargs): - project = kwargs["project_name"] - project_type = kwargs["project_type"] - return { - "event_name": "create_project", - "properties": { - "Project Type": project_type, - "project_name": get_project_name(project), - }, - } - - -def create_project_from_metadata(**kwargs): - project = kwargs.get("project_metadata") - - return { - "event_name": "create_project_from_metadata", - "properties": {"project_name": get_project_name(project)}, - } - - -def clone_project(**kwargs): - project = kwargs.get("project_name") - - project_metadata = ( - Controller.get_default().get_project_metadata(project).data["project"] - ) - project_type = ProjectType.get_name(project_metadata.type) - - return { - "event_name": "clone_project", - "properties": { - "External": bool( - project_metadata.upload_state == constances.UploadState.EXTERNAL.value - ), - "Project Type": project_type, - "Copy Classes": bool(kwargs.get("copy_annotation_classes")), - "Copy Settings": bool(kwargs.get("copy_settings")), - "Copy Workflow": bool(kwargs.get("copy_workflow")), - "Copy Contributors": bool(kwargs.get("copy_contributors")), - "project_name": get_project_name(project), - }, - } - - -def upload_images_to_project(**kwargs): - project = kwargs["project"] - - img_paths = kwargs.get("img_paths") - return { - "event_name": "upload_images_to_project", - "properties": { - 
"Image Count": len(img_paths) if img_paths else None, - "Annotation Status": bool(kwargs.get("annotation_status")), - "From S3": bool(kwargs.get("from_s3")), - "project_name": get_project_name(project), - }, - } - - -def upload_image_to_project(**kwargs): - project = kwargs["project"] - - return { - "event_name": "upload_image_to_project", - "properties": { - "Image Name": bool(kwargs.get("image_name")), - "Annotation Status": bool(kwargs.get("annotation_status")), - "project_name": get_project_name(project), - }, - } - - -def upload_video_to_project(**kwargs): - project = kwargs["project"] - - return { - "event_name": "upload_video_to_project", - "properties": { - "project_name": get_project_name(project), - "FPS": bool(kwargs.get("target_fps")), - "Start": bool(kwargs.get("start_time")), - "End": bool(kwargs.get("end_time")), - }, - } - - -def set_images_annotation_statuses(**kwargs): - project = kwargs["project"] - annotation_status = kwargs.get("annotation_status") - image_names = kwargs["image_names"] - return { - "event_name": "set_images_annotation_statuses", - "properties": { - "project_name": get_project_name(project), - "Image Count": len(image_names) if image_names else None, - "Annotation Status": annotation_status, - }, - } - - -def download_image_annotations(**kwargs): - project = kwargs["project"] - - return { - "event_name": "download_image_annotations", - "properties": {"project_name": get_project_name(project)}, - } - - -def add_annotation_comment_to_image(**kwargs): - project = kwargs["project"] - - return { - "event_name": "add_annotation_comment_to_image", - "properties": {"project_name": get_project_name(project)}, - } - - -def delete_annotation_class(**kwargs): - project = kwargs["project"] - - return { - "event_name": "delete_annotation_class", - "properties": {"project_name": get_project_name(project)}, - } - - -def download_annotation_classes_json(**kwargs): - project = kwargs["project"] - - return { - "event_name": "download_annotation_classes_json", - "properties": {"project_name": get_project_name(project)}, - } - - -def search_annotation_classes(**kwargs): - project = kwargs["project"] - name_contains = kwargs.get("name_contains") - - return { - "event_name": "search_annotation_classes", - "properties": { - "project_name": get_project_name(project), - "name_contains": bool(name_contains), - }, - } - - -def get_project_image_count(**kwargs): - project = kwargs["project"] - - return { - "event_name": "get_project_image_count", - "properties": {"project_name": get_project_name(project)}, - } - - -def get_project_settings(**kwargs): - project = kwargs["project"] - - return { - "event_name": "get_project_settings", - "properties": {"project_name": get_project_name(project)}, - } - - -def get_project_metadata(**kwargs): - project = kwargs["project"] - - return { - "event_name": "get_project_metadata", - "properties": {"project_name": get_project_name(project)}, - } - - -def delete_project(**kwargs): - project = kwargs["project"] - - return { - "event_name": "delete_project", - "properties": {"project_name": get_project_name(project)}, - } - - -def rename_project(**kwargs): - project = kwargs["project"] - return { - "event_name": "rename_project", - "properties": {"project_name": get_project_name(project)}, - } - - -def get_project_workflow(**kwargs): - project = kwargs["project"] - - return { - "event_name": "get_project_workflow", - "properties": {"project_name": get_project_name(project)}, - } - - -def set_project_workflow(**kwargs): - project = kwargs["project"] 
- return { - "event_name": "set_project_workflow", - "properties": {"project_name": get_project_name(project)}, - } - - -def create_folder(**kwargs): - project = kwargs["project"] - return { - "event_name": "create_folder", - "properties": {"project_name": get_project_name(project)}, - } - - -def get_folder_metadata(**kwargs): - project = kwargs["project"] - return { - "event_name": "get_folder_metadata", - "properties": {"project_name": get_project_name(project)}, - } - - -def download_model(**kwargs): - model = kwargs["model"] - return { - "event_name": "download_model", - "properties": {"model": model}, - } - - -def convert_project_type(**kwargs): - return { - "event_name": "convert_project_type", - "properties": {}, - } - - -def convert_json_version(**kwargs): - return { - "event_name": "convert_json_version", - "properties": {}, - } - - -def upload_image_annotations(**kwargs): - project = kwargs["project"] - return { - "event_name": "upload_image_annotations", - "properties": { - "project_name": get_project_name(project), - "Pixel": bool("mask" in kwargs), - }, - } - - -def download_image(**kwargs): - project = kwargs["project"] - return { - "event_name": "download_image", - "properties": { - "project_name": get_project_name(project), - "Download Annotations": bool("include_annotations" in kwargs), - "Download Fuse": bool("include_fuse" in kwargs), - "Download Overlay": bool("include_overlay" in kwargs), - }, - } - - -def copy_image(**kwargs): - project = kwargs["source_project"] - return { - "event_name": "copy_image", - "properties": { - "project_name": get_project_name(project), - "Copy Annotations": bool("include_annotations" in kwargs), - "Copy Annotation Status": bool("copy_annotation_status" in kwargs), - "Copy Pin": bool("copy_pin" in kwargs), - }, - } - - -def run_prediction(**kwargs): - project = kwargs["project"] - project_name = get_project_name(project) - res = Controller.get_default().get_project_metadata(project_name) - project_metadata = res.data["project"] - project_type = ProjectType.get_name(project_metadata.typy) - image_list = kwargs["images_list"] - return { - "event_name": "run_prediction", - "properties": { - "Project Type": project_type, - "Image Count": len(image_list) if image_list else None, - }, - } - - -def upload_videos_from_folder_to_project(**kwargs): - folder_path = kwargs["folder_path"] - glob_iterator = Path(folder_path).glob("*") - return { - "event_name": "upload_videos_from_folder_to_project", - "properties": {"Video Count": sum(1 for _ in glob_iterator)}, - } - - -def export_annotation(**kwargs): - dataset_format = kwargs["dataset_format"] - project_type = kwargs["project_type"] - if not project_type: - project_type = "Vector" - - task = kwargs.get("task") - if not task: - task = "object_detection" - return { - "event_name": "export_annotation", - "properties": { - "Format": dataset_format, - "Project Type": project_type, - "Task": task, - }, - } - - -def import_annotation(**kwargs): - dataset_format = kwargs["dataset_format"] - project_type = kwargs["project_type"] - if not project_type: - project_type = "Vector" - task = kwargs.get("task") - if not task: - task = "object_detection" - return { - "event_name": "import_annotation", - "properties": { - "Format": dataset_format, - "Project Type": project_type, - "Task": task, - }, - } - - -def consensus(**kwargs): - folder_names = kwargs["folder_names"] - image_list = kwargs["image_list"] - annot_type = kwargs.get("annot_type") - if not annot_type: - annot_type = "bbox" - show_plots = 
kwargs.get("show_plots") - if not show_plots: - show_plots = False - return { - "event_name": "consensus", - "properties": { - "Folder Count": len(folder_names), - "Image Count": len(image_list) if image_list else None, - "Annotation Type": annot_type, - "Plot": show_plots, - }, - } - - -def benchmark(**kwargs): - folder_names = kwargs.get("folder_names") - image_list = kwargs.get("image_list") - annot_type = kwargs.get("annot_type") - if not annot_type: - annot_type = "bbox" - show_plots = kwargs.get("show_plots") - if not show_plots: - show_plots = False - - return { - "event_name": "benchmark", - "properties": { - "Folder Count": len(folder_names) if folder_names else None, - "Image Count": len(image_list) if image_list else None, - "Annotation Type": annot_type, - "Plot": show_plots, - }, - } - - -def upload_annotations_from_folder_to_project(**kwargs): - project = kwargs["project"] - project_name = get_project_name(project) - res = Controller.get_default().get_project_metadata(project_name) - project_metadata = res.data["project"] - project_type = ProjectType.get_name(project_metadata.type) - - folder_path = kwargs["folder_path"] - glob_iterator = Path(folder_path).glob("*.json") - return { - "event_name": "upload_annotations_from_folder_to_project", - "properties": { - "Annotation Count": sum(1 for _ in glob_iterator), - "Project Type": project_type, - "From S3": bool("from_s3_bucket" in kwargs), - }, - } - - -def upload_preannotations_from_folder_to_project(**kwargs): - project = kwargs["project"] - - project_name = get_project_name(project) - res = Controller.get_default().get_project_metadata(project_name) - project_metadata = res.data["project"] - project_type = ProjectType.get_name(project_metadata.type) - folder_path = kwargs["folder_path"] - glob_iterator = Path(folder_path).glob("*.json") - return { - "event_name": "upload_preannotations_from_folder_to_project", - "properties": { - "Annotation Count": sum(1 for _ in glob_iterator), - "Project Type": project_type, - "From S3": bool("from_s3_bucket" in kwargs), - }, - } - - -def upload_images_from_folder_to_project(**kwargs): - folder_path = kwargs["folder_path"] - recursive_subfolders = kwargs["recursive_subfolders"] - extensions = kwargs["extensions"] - if not extensions: - extensions = constances.DEFAULT_IMAGE_EXTENSIONS - exclude_file_patterns = kwargs["exclude_file_patterns"] - if not exclude_file_patterns: - exclude_file_patterns = constances.DEFAULT_FILE_EXCLUDE_PATTERNS - - paths = [] - for extension in extensions: - if not recursive_subfolders: - paths += list(Path(folder_path).glob(f"*.{extension.lower()}")) - if os.name != "nt": - paths += list(Path(folder_path).glob(f"*.{extension.upper()}")) - else: - paths += list(Path(folder_path).rglob(f"*.{extension.lower()}")) - if os.name != "nt": - paths += list(Path(folder_path).rglob(f"*.{extension.upper()}")) - - filtered_paths = [] - for path in paths: - not_in_exclude_list = [x not in Path(path).name for x in exclude_file_patterns] - if all(not_in_exclude_list): - filtered_paths.append(path) - - return { - "event_name": "upload_images_from_folder_to_project", - "properties": { - "Image Count": len(filtered_paths), - "Custom Extentions": bool(kwargs["extensions"]), - "Annotation Status": bool(kwargs.get("annotation_status")), - "From S3": bool(kwargs.get("from_s3_bucket")), - "Custom Exclude Patters": bool(kwargs["exclude_file_patterns"]), - }, - } - - -def prepare_export(**kwargs): - project = kwargs["project"] - return { - "event_name": "prepare_export", - 
"properties": { - "project_name": get_project_name(project), - "Folder Count": bool(kwargs.get("folder_names")), - "Annotation Statuses": bool(kwargs.get("annotation_statuses")), - "Include Fuse": bool(kwargs.get("include_fuse")), - "Only Pinned": bool(kwargs.get("only_pinned")), - }, - } - - -def download_export(**kwargs): - project = kwargs["project"] - - return { - "event_name": "download_export", - "properties": { - "project_name": get_project_name(project), - "to_s3_bucket": bool(kwargs.get("to_s3_bucket")), - }, - } - - -def assign_images(**kwargs): - project = kwargs["project"] - project_name, folder_name = extract_project_folder(project) - image_names = kwargs.get("image_names") - user = kwargs.get("user") - - contributors = ( - Controller.get_default() - .get_project_metadata(project_name=project_name, include_contributors=True) - .data["contributors"] - ) - contributor = None - for c in contributors: - if c["user_id"] == user: - contributor = c - user_role = "ADMIN" - if contributor["user_role"] == 3: - user_role = "ANNOTATOR" - if contributor["user_role"] == 4: - user_role = "QA" - is_root = True - if folder_name: - is_root = False - - return { - "event_name": "assign_images", - "properties": { - "project_name": project_name, - "Assign Folder": is_root, - "Image Count": len(image_names) if image_names else None, - "User Role": user_role, - }, - } - - -def pin_image(**kwargs): - project = kwargs["project"] - - return { - "event_name": "pin_image", - "properties": { - "project_name": get_project_name(project), - "Pin": bool("pin" in kwargs), - }, - } - - -def set_image_annotation_status(**kwargs): - project = kwargs["project"] - - return { - "event_name": "set_image_annotation_status", - "properties": { - "project_name": get_project_name(project), - "Annotation Status": bool("annotation_status" in kwargs), - }, - } - - -def add_annotation_bbox_to_image(**kwargs): - project = kwargs["project"] - - return { - "event_name": "add_annotation_bbox_to_image", - "properties": { - "project_name": get_project_name(project), - "Attributes": bool("annotation_class_attributes" in kwargs), - "Error": bool("error" in kwargs), - }, - } - - -def add_annotation_point_to_image(**kwargs): - project = kwargs["project"] - - return { - "event_name": "add_annotation_point_to_image", - "properties": { - "project_name": get_project_name(project), - "Attributes": bool("annotation_class_attributes" in kwargs), - "Error": bool("error" in kwargs), - }, - } - - -def create_annotation_class(**kwargs): - project = kwargs["project"] - class_type = kwargs.get("class_type") - - return { - "event_name": "create_annotation_class", - "properties": { - "project_name": get_project_name(project), - "Attributes": bool("attribute_groups" in kwargs), - "class_type": class_type if class_type else "object", - }, - } - - -def create_annotation_classes_from_classes_json(**kwargs): - project = kwargs["project"] - return { - "event_name": "create_annotation_classes_from_classes_json", - "properties": { - "project_name": get_project_name(project), - "From S3": bool("from_s3_bucket" in kwargs), - }, - } - - -def class_distribution(**kwargs): - return { - "event_name": "class_distribution", - "properties": {"Plot": bool("visualize" in kwargs)}, - } - - -def set_project_default_image_quality_in_editor(**kwargs): - project = kwargs["project"] - - image_quality_in_editor = kwargs.get("image_quality_in_editor") - return { - "event_name": "set_project_default_image_quality_in_editor", - "properties": { - "project_name": 
get_project_name(project), - "Image Quality": image_quality_in_editor, - }, - } - - -def get_exports(**kwargs): - project = kwargs["project"] - - return { - "event_name": "get_exports", - "properties": { - "project_name": get_project_name(project), - "Metadata": bool("return_metadata" in kwargs), - }, - } - - -def search_folders(**kwargs): - project = kwargs["project"] - - return { - "event_name": "search_folders", - "properties": { - "project_name": get_project_name(project), - "Metadata": bool("return_metadata" in kwargs), - }, - } - - -def aggregate_annotations_as_df(**kwargs): - folder_names = kwargs.get("folder_names") - if not folder_names: - folder_names = [] - - project_type = kwargs["project_type"] - - return { - "event_name": "aggregate_annotations_as_df", - "properties": {"Folder Count": len(folder_names), "Project Type": project_type}, - } - - -def delete_folders(**kwargs): - project = kwargs["project"] - folder_names = kwargs.get("folder_names") - if not folder_names: - folder_names = [] - return { - "event_name": "delete_folders", - "properties": { - "project_name": get_project_name(project), - "Folder Count": len(folder_names), - }, - } - - -def delete_images(**kwargs): - project = kwargs["project"] - project_name, folder_name = extract_project_folder(project) - - image_names = kwargs.get("image_names", False) - if not image_names: - res = Controller.get_default().search_images(project_name, folder_name) - image_names = res.data - return { - "event_name": "delete_images", - "properties": {"project_name": project_name, "Image Count": len(image_names)}, - } - - -def unassign_folder(**kwargs): - return {"event_name": "unassign_folder", "properties": {}} - - -def assign_folder(**kwargs): - users = kwargs.get("users") - return {"event_name": "assign_folder", "properties": {"User Count": len(users)}} - - -def unassign_images(**kwargs): - image_names = kwargs.get("image_names") - - project = kwargs["project"] - - _, folder_name = extract_project_folder(project) - is_root = True - if folder_name: - is_root = False - - return { - "event_name": "unassign_images", - "properties": {"Assign Folder": is_root, "Image Count": len(image_names)}, - } - - -def delete_annotations(**kwargs): - return {"event_name": "delete_annotations", "properties": {}} - - -def validate_annotations(**kwargs): - project_type = kwargs["project_type"] - return { - "event_name": "validate_annotations", - "properties": {"Project Type": project_type}, - } - - -def add_contributors_to_project(**kwargs): - user_role = kwargs.get("role") - - return { - "event_name": "add_contributors_to_project", - "properties": {"User Role": user_role}, - } - - -def get_annotations(**kwargs): - project = kwargs["project"] - items = kwargs["items"] - - return { - "event_name": "get_annotations", - "properties": { - "Project": project, - "items_count": len(items) if items else None, - }, - } - - -def get_annotations_per_frame(**kwargs): - project = kwargs["project"] - fps = kwargs["fps"] - if not fps: - fps = 1 - return { - "event_name": "get_annotations_per_frame", - "properties": {"Project": project, "fps": fps}, - } - - -def upload_priority_scores(**kwargs): - scores = kwargs["scores"] - return { - "event_name": "upload_priority_scores", - "properties": {"Score Count": len(scores) if scores else None}, - } - - -def get_integrations(**kwargs): - return { - "event_name": "get_integrations", - "properties": {}, - } - - -def attach_items_from_integrated_storage(**kwargs): - project = kwargs.get("project") - project_name, _ = 
extract_project_folder(project) - integration = kwargs.get("integration") - folder_path = kwargs.get("folder_path") - - if isinstance(integration, str): - integration = IntegrationEntity(name=integration) - project = ( - Controller.get_default().get_project_metadata(project_name).data["project"] - ) - return { - "event_name": "attach_items_from_integrated_storage", - "properties": { - "project_type": ProjectType.get_name(project.type), - "integration_name": integration.name, - "folder_path": bool(folder_path), - }, - } - - -def query(**kwargs): - project = kwargs["project"] - query_str = kwargs["query"] - project_name, folder_name = extract_project_folder(project) - project = ( - Controller.get_default().get_project_metadata(project_name).data["project"] - ) - return { - "event_name": "query", - "properties": { - "project_type": ProjectType.get_name(project.type), - "query": query_str, - }, - } - - -def get_item_metadata(**kwargs): - project = kwargs["project"] - project_name, _ = extract_project_folder(project) - project = ( - Controller.get_default().get_project_metadata(project_name).data["project"] - ) - return { - "event_name": "get_item_metadata", - "properties": {"project_type": ProjectType.get_name(project.type)}, - } - - -def search_items(**kwargs): - project = kwargs["project"] - name_contains = kwargs["name_contains"] - annotation_status = kwargs["annotation_status"] - annotator_email = kwargs["annotator_email"] - qa_email = kwargs["qa_email"] - recursive = kwargs["recursive"] - project_name, folder_name = extract_project_folder(project) - project = ( - Controller.get_default().get_project_metadata(project_name).data["project"] - ) - return { - "event_name": "search_items", - "properties": { - "project_type": ProjectType.get_name(project.type), - "query": query, - "name_contains": len(name_contains) if name_contains else False, - "annotation_status": annotation_status if annotation_status else False, - "annotator_email": bool(annotator_email), - "qa_email": bool(qa_email), - "recursive": bool(recursive), - }, - } - - -def move_items(**kwargs): - project = kwargs["source"] - project_name, _ = extract_project_folder(project) - project = ( - Controller.get_default().get_project_metadata(project_name).data["project"] - ) - items = kwargs["items"] - return { - "event_name": "move_items", - "properties": { - "project_type": ProjectType.get_name(project.type), - "items_count": len(items) if items else None, - }, - } - - -def copy_items(**kwargs): - project = kwargs["source"] - project_name, _ = extract_project_folder(project) - project = ( - Controller.get_default().get_project_metadata(project_name).data["project"] - ) - items = kwargs["items"] - return { - "event_name": "copy_items", - "properties": { - "project_type": ProjectType.get_name(project.type), - "items_count": len(items) if items else None, - "include_annotations": kwargs["include_annotations"], - }, - } - - -def attach_items(**kwargs): - project = kwargs["project"] - project_name, _ = extract_project_folder(project) - project = ( - Controller.get_default().get_project_metadata(project_name).data["project"] - ) - attachments = kwargs["attachments"] - return { - "event_name": "attach_items", - "properties": { - "project_type": ProjectType.get_name(project.type), - "attachments": "csv" if isinstance(attachments, (str, Path)) else "dict", - "annotation_status": kwargs["annotation_status"], - }, - } - - -def set_annotation_statuses(**kwargs): - project = kwargs["project"] - project_name, folder_name = 
extract_project_folder(project)
-    return {
-        "event_name": "set_annotation_statuses",
-        "properties": {
-            "item_count": len(kwargs.get("items", [])),
-            "annotation_status": kwargs["annotation_status"],
-            "root": folder_name == "root",
-        },
-    }
-
-
-def download_annotations(**kwargs):
-    project = kwargs["project"]
-    project_name, folder_name = extract_project_folder(project)
-    project = (
-        Controller.get_default().get_project_metadata(project_name).data["project"]
-    )
-    return {
-        "event_name": "download_annotations",
-        "properties": {
-            "project_name": project_name,
-            "project_type": ProjectType.get_name(project.type),
-            "root": bool(folder_name),
-            "recursive": kwargs["recursive"],
-            "path": bool(kwargs["path"]),
-            "callback": bool(kwargs["callback"]),
-        },
-    }
diff --git a/src/superannotate/lib/core/__init__.py b/src/superannotate/lib/core/__init__.py
index 1b1fed04c..3956efc86 100644
--- a/src/superannotate/lib/core/__init__.py
+++ b/src/superannotate/lib/core/__init__.py
@@ -1,5 +1,6 @@
-from pathlib import Path
+from os.path import expanduser
+from superannotate.lib.core.config import Config

 from superannotate.lib.core.enums import AnnotationStatus
 from superannotate.lib.core.enums import ImageQuality
 from superannotate.lib.core.enums import ProjectState
@@ -12,8 +13,12 @@
 from superannotate.lib.core.enums import UserRole


-CONFIG_FILE_LOCATION = str(Path.home() / ".superannotate" / "config.json")
-LOG_FILE_LOCATION = str(Path.home() / ".superannotate" / "sa.log")
+CONFIG = Config()
+
+
+CONFIG_PATH = "~/.superannotate/config.json"
+CONFIG_FILE_LOCATION = expanduser(CONFIG_PATH)
+LOG_FILE_LOCATION = expanduser("~/.superannotate/sa.log")
 BACKEND_URL = "https://api.annotate.online"

 DEFAULT_IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "tif", "tiff", "webp", "bmp"]
@@ -141,6 +146,7 @@
     ImageQuality,
     AnnotationStatus,
     CONFIG_FILE_LOCATION,
+    CONFIG_PATH,
     BACKEND_URL,
     DEFAULT_IMAGE_EXTENSIONS,
     DEFAULT_FILE_EXCLUDE_PATTERNS,
diff --git a/src/superannotate/lib/core/config.py b/src/superannotate/lib/core/config.py
new file mode 100644
index 000000000..e5d955e12
--- /dev/null
+++ b/src/superannotate/lib/core/config.py
@@ -0,0 +1,70 @@
+import threading
+from typing import Dict
+
+from dataclasses import dataclass
+from dataclasses import field
+
+
+class Session:
+    def __init__(self):
+        self.pk = threading.get_ident()
+        self._data_dict = {}
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type, value, traceback):
+        if type is not None:
+            return False
+
+    def __del__(self):
+        Config().delete_current_session()
+
+    @property
+    def data(self):
+        return self._data_dict
+
+    @staticmethod
+    def get_current_session():
+        return Config().get_current_session()
+
+    def __setitem__(self, key, item):
+        self._data_dict[key] = item
+
+    def __getitem__(self, key):
+        return self._data_dict[key]
+
+    def __repr__(self):
+        return repr(self._data_dict)
+
+    def clear(self):
+        return self._data_dict.clear()
+
+
+class Singleton(type):
+    _instances = {}
+    _lock = threading.Lock()
+
+    def __call__(cls, *args, **kwargs):
+        if cls not in cls._instances:
+            with cls._lock:
+                if cls not in cls._instances:
+                    cls._instances[cls] = super().__call__(*args, **kwargs)
+        return cls._instances[cls]
+
+
+@dataclass()
+class Config(metaclass=Singleton):
+    SESSIONS: Dict[int, Session] = field(default_factory=dict)
+
+    def get_current_session(self):
+        session = self.SESSIONS.get(threading.get_ident())
+        if not session:
+            session = Session()
+            self.SESSIONS.update({session.pk: session})
+        return session
+
+    def delete_current_session(self):
+        ident = threading.get_ident()
+        if ident in self.SESSIONS:
+            del self.SESSIONS[ident]
diff --git a/src/superannotate/lib/core/data_handlers.py b/src/superannotate/lib/core/data_handlers.py
index c0a1c442c..1eb77e684 100644
--- a/src/superannotate/lib/core/data_handlers.py
+++ b/src/superannotate/lib/core/data_handlers.py
@@ -266,7 +266,7 @@ def safe_time(timestamp):
             return "0" if str(timestamp) == "0.0" else timestamp

         def convert_timestamp(timestamp):
-            return timestamp / 10 ** 6 if timestamp else "0"
+            return timestamp / 10**6 if timestamp else "0"

         editor_data = {
             "instances": [],
diff --git a/src/superannotate/lib/core/entities/__init__.py b/src/superannotate/lib/core/entities/__init__.py
index d05a2bec7..41605abf7 100644
--- a/src/superannotate/lib/core/entities/__init__.py
+++ b/src/superannotate/lib/core/entities/__init__.py
@@ -2,6 +2,7 @@
 from lib.core.entities.base import BaseEntity as TmpBaseEntity
 from lib.core.entities.base import ProjectEntity
 from lib.core.entities.base import SettingEntity
+from lib.core.entities.base import SubSetEntity
 from lib.core.entities.integrations import IntegrationEntity
 from lib.core.entities.items import DocumentEntity
 from lib.core.entities.items import Entity
@@ -31,6 +32,7 @@
 __all__ = [
     # base
     "SettingEntity",
+    "SubSetEntity",
     # items
     "TmpImageEntity",
     "BaseEntity",
diff --git a/src/superannotate/lib/core/entities/base.py b/src/superannotate/lib/core/entities/base.py
index 52bdc4c51..f3b6875f3 100644
--- a/src/superannotate/lib/core/entities/base.py
+++ b/src/superannotate/lib/core/entities/base.py
@@ -27,6 +27,14 @@ def validate(cls, v: datetime):
         return v.isoformat()


+class SubSetEntity(BaseModel):
+    id: Optional[int]
+    name: str
+
+    class Config:
+        extra = Extra.ignore
+
+
 class TimedBaseModel(BaseModel):
     createdAt: StringDate = Field(None, alias="createdAt")
     updatedAt: StringDate = Field(None, alias="updatedAt")
diff --git a/src/superannotate/lib/core/entities/project_entities.py b/src/superannotate/lib/core/entities/project_entities.py
index 4e50080f3..244bca0c9 100644
--- a/src/superannotate/lib/core/entities/project_entities.py
+++ b/src/superannotate/lib/core/entities/project_entities.py
@@ -40,7 +40,10 @@ def to_dict(self):

 class BaseTimedEntity(BaseEntity):
     def __init__(
-        self, uuid: Any = None, createdAt: str = None, updatedAt: str = None,
+        self,
+        uuid: Any = None,
+        createdAt: str = None,
+        updatedAt: str = None,
     ):
         super().__init__(uuid)
         self.createdAt = createdAt
@@ -222,7 +225,10 @@ def to_dict(self):

 class ImageInfoEntity(BaseEntity):
     def __init__(
-        self, uuid=None, width: float = None, height: float = None,
+        self,
+        uuid=None,
+        width: float = None,
+        height: float = None,
     ):
         super().__init__(uuid),
         self.width = width
diff --git a/src/superannotate/lib/core/enums.py b/src/superannotate/lib/core/enums.py
index 62a5bcc58..edefb6027 100644
--- a/src/superannotate/lib/core/enums.py
+++ b/src/superannotate/lib/core/enums.py
@@ -8,8 +8,13 @@ def __new__(cls, title, value):
         obj._value_ = value
         obj.__doc__ = title
         obj._type = "titled_enum"
+        cls._value2member_map_[title] = obj
         return obj

+    @classmethod
+    def choices(cls):
+        return tuple(cls._value2member_map_.keys())
+
     @DynamicClassAttribute
     def name(self) -> str:
         return self.__doc__
@@ -41,6 +46,9 @@ def titles(cls):
     def equals(self, other: Enum):
         return self.__doc__.lower() == other.__doc__.lower()

+    def __eq__(self, other):
+        return super().__eq__(other)
+

 class AnnotationTypes(str, Enum):
     BBOX = "bbox"
diff --git
a/src/superannotate/lib/core/plugin.py b/src/superannotate/lib/core/plugin.py index 06f77115e..9ef6fec30 100644 --- a/src/superannotate/lib/core/plugin.py +++ b/src/superannotate/lib/core/plugin.py @@ -254,7 +254,10 @@ def frames_generator( @staticmethod def get_extractable_frames( - video_path: str, start_time, end_time, target_fps: float, + video_path: str, + start_time, + end_time, + target_fps: float, ): total = VideoPlugin.get_frames_count(video_path) total_with_fps = sum( diff --git a/src/superannotate/lib/core/reporter.py b/src/superannotate/lib/core/reporter.py index 365d1aec8..9e4ad2d69 100644 --- a/src/superannotate/lib/core/reporter.py +++ b/src/superannotate/lib/core/reporter.py @@ -6,6 +6,7 @@ from typing import Union import tqdm +from lib.core import CONFIG from superannotate.logger import get_default_logger @@ -49,6 +50,7 @@ def __init__( self.debug_messages = [] self.custom_messages = defaultdict(set) self.progress_bar = None + self.session = CONFIG.get_current_session() self._spinner = None def start_spinner(self): @@ -127,6 +129,10 @@ def messages(self): for key, values in self.custom_messages.items(): yield f"{key} [{', '.join(values)}]" + def track(self, key, value): + if self.session: + self.session[key] = value + class Progress: def __init__(self, iterations: Union[int, range], description: str = "Processing"): diff --git a/src/superannotate/lib/core/repositories.py b/src/superannotate/lib/core/repositories.py index a94258a5c..f60c7b44e 100644 --- a/src/superannotate/lib/core/repositories.py +++ b/src/superannotate/lib/core/repositories.py @@ -67,7 +67,11 @@ def __init__(self, service: SuperannotateServiceProvider, project: ProjectEntity class BaseS3Repository(BaseManageableRepository): def __init__( - self, access_key: str, secret_key: str, session_token: str, bucket: str, + self, + access_key: str, + secret_key: str, + session_token: str, + bucket: str, ): self._session = boto3.Session( aws_access_key_id=access_key, diff --git a/src/superannotate/lib/core/response.py b/src/superannotate/lib/core/response.py index ddc5e413e..107bd28c3 100644 --- a/src/superannotate/lib/core/response.py +++ b/src/superannotate/lib/core/response.py @@ -8,6 +8,9 @@ def __init__(self, status: str = None, data: Union[dict, list] = None): self._report = [] self._errors = [] + def __str__(self): + return f"Response object with status:{self.status}, data : {self.data}, errors: {self.errors} " + @property def data(self): return self._data @@ -30,7 +33,7 @@ def report_messages(self): @property def status(self): - return self.data + return self._status @status.setter def status(self, value): diff --git a/src/superannotate/lib/core/service_types.py b/src/superannotate/lib/core/service_types.py index f793234f3..c661c6101 100644 --- a/src/superannotate/lib/core/service_types.py +++ b/src/superannotate/lib/core/service_types.py @@ -6,6 +6,7 @@ from pydantic import BaseModel from pydantic import Extra +from pydantic import parse_obj_as class Limit(BaseModel): @@ -79,11 +80,13 @@ def __init__(self, response, content_type=None): "reason": response.reason, "content": response.content, } - if response.ok: - if content_type: - data["data"] = content_type(**response.json()) + try: + if content_type and content_type is not self.__class__: + data["data"] = parse_obj_as(content_type, response.json()) else: data["data"] = response.json() + except Exception as e: + data["data"] = {} super().__init__(**data) @property @@ -92,4 +95,8 @@ def ok(self): @property def error(self): - return getattr(self.data, 
"error", "Unknown error.") + default_message = self.reason if self.reason else "Unknown Error" + if isinstance(self.data, dict): + return self.data.get("error", default_message) + else: + return getattr(self.data, "error", default_message) diff --git a/src/superannotate/lib/core/serviceproviders.py b/src/superannotate/lib/core/serviceproviders.py index ea120e4ea..6b5ead326 100644 --- a/src/superannotate/lib/core/serviceproviders.py +++ b/src/superannotate/lib/core/serviceproviders.py @@ -106,7 +106,11 @@ def get_download_token( raise NotImplementedError def get_upload_token( - self, project_id: int, team_id: int, folder_id: int, image_id: int, + self, + project_id: int, + team_id: int, + folder_id: int, + image_id: int, ) -> dict: raise NotImplementedError @@ -161,6 +165,9 @@ def set_images_statuses_bulk( def delete_images(self, project_id: int, team_id: int, image_ids: List[int]): raise NotImplementedError + def delete_items(self, project_id: int, team_id: int, item_ids: List[int]): + raise NotImplementedError + def assign_images( self, team_id: int, @@ -171,13 +178,26 @@ def assign_images( ): raise NotImplementedError + def assign_items( + self, + team_id: int, + project_id: int, + folder_name: str, + user: str, + item_names: list, + ) -> ServiceResponse: + raise NotImplementedError + def get_bulk_images( self, project_id: int, team_id: int, folder_id: int, images: List[str] ) -> List[dict]: raise NotImplementedError def un_assign_folder( - self, team_id: int, project_id: int, folder_name: str, + self, + team_id: int, + project_id: int, + folder_name: str, ): raise NotImplementedError @@ -187,12 +207,28 @@ def assign_folder( raise NotImplementedError def un_assign_images( - self, team_id: int, project_id: int, folder_name: str, image_names: list, + self, + team_id: int, + project_id: int, + folder_name: str, + image_names: list, + ): + raise NotImplementedError + + def un_assign_items( + self, + team_id: int, + project_id: int, + folder_name: str, + item_names: list, ): raise NotImplementedError def un_share_project( - self, team_id: int, project_id: int, user_id: str, + self, + team_id: int, + project_id: int, + user_id: str, ): raise NotImplementedError @@ -304,7 +340,10 @@ async def download_annotations( postfix: str, items: List[str] = None, callback: Callable = None, - ) -> List[dict]: + ) -> int: + """ + Returns the number of items downloaded + """ raise NotImplementedError def upload_priority_scores( @@ -326,9 +365,17 @@ def attach_integrations( raise NotImplementedError def saqul_query( - self, team_id: int, project_id: int, query: str, folder_id: int + self, + team_id: int, + project_id: int, + folder_id: int, + query: str = None, + subset_id: int = None, ) -> ServiceResponse: raise NotImplementedError def validate_saqul_query(self, team_id: int, project_id: int, query: str) -> dict: raise NotImplementedError + + def list_sub_sets(self, team_id: int, project_id: int) -> ServiceResponse: + raise NotImplementedError diff --git a/src/superannotate/lib/core/usecases/annotations.py b/src/superannotate/lib/core/usecases/annotations.py index 8b1666bba..2a12bdb56 100644 --- a/src/superannotate/lib/core/usecases/annotations.py +++ b/src/superannotate/lib/core/usecases/annotations.py @@ -33,7 +33,7 @@ from lib.core.service_types import UploadAnnotationAuthData from lib.core.serviceproviders import SuperannotateServiceProvider from lib.core.types import PriorityScore -from lib.core.usecases.base import BaseReportableUseCae +from lib.core.usecases.base import BaseReportableUseCase from 
lib.core.usecases.images import GetBulkImages from lib.core.usecases.images import ValidateAnnotationUseCase from lib.core.video_convertor import VideoFrameGenerator @@ -46,7 +46,7 @@ asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) -class UploadAnnotationsUseCase(BaseReportableUseCae): +class UploadAnnotationsUseCase(BaseReportableUseCase): MAX_WORKERS = 10 CHUNK_SIZE = 100 AUTH_DATA_CHUNK_SIZE = 500 @@ -312,7 +312,7 @@ def execute(self): return self._response -class UploadAnnotationUseCase(BaseReportableUseCae): +class UploadAnnotationUseCase(BaseReportableUseCase): def __init__( self, project: ProjectEntity, @@ -444,7 +444,10 @@ def prepare_annotations( handlers_chain.attach(LastActionHandler(team.creator_id)) return handlers_chain.handle(annotations) - def clean_json(self, json_data: dict,) -> Tuple[bool, dict]: + def clean_json( + self, + json_data: dict, + ) -> Tuple[bool, dict]: use_case = ValidateAnnotationUseCase( constances.ProjectType.get_name(self._project.type), annotation=json_data, @@ -500,7 +503,7 @@ def execute(self): return self._response -class GetAnnotations(BaseReportableUseCae): +class GetAnnotations(BaseReportableUseCase): def __init__( self, reporter: Reporter, @@ -584,7 +587,7 @@ def execute(self): return self._response -class GetVideoAnnotationsPerFrame(BaseReportableUseCae): +class GetVideoAnnotationsPerFrame(BaseReportableUseCase): def __init__( self, reporter: Reporter, @@ -645,7 +648,7 @@ def execute(self): return self._response -class UploadPriorityScoresUseCase(BaseReportableUseCae): +class UploadPriorityScoresUseCase(BaseReportableUseCase): CHUNK_SIZE = 100 def __init__( @@ -733,7 +736,7 @@ def execute(self): return self._response -class DownloadAnnotations(BaseReportableUseCae): +class DownloadAnnotations(BaseReportableUseCase): def __init__( self, reporter: Reporter, @@ -793,8 +796,9 @@ def get_postfix(self): def download_annotation_classes(self, path: str): classes = self._classes.get_all() - os.mkdir(f"{path}/classes") - with open(f"{path}/classes/classes.json", "w+") as file: + classes_path = Path(path) / "classes" + classes_path.mkdir(parents=True, exist_ok=True) + with open(classes_path / "classes.json", "w+") as file: json.dump([i.dict() for i in classes], file, indent=4) @staticmethod @@ -805,18 +809,20 @@ def get_items_count(path: str): def coroutine_wrapper(coroutine): loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) - loop.run_until_complete(coroutine) + count = loop.run_until_complete(coroutine) loop.close() + return count def execute(self): if self.is_valid(): - export_prefix = f"{self._project.name}{f'/{self._folder.name}' if not self._folder.is_root else ''}" export_path = str( self.destination - / Path(f"{export_prefix} {datetime.now().strftime('%B %d %Y %H_%M')}") + / Path( + f"{self._project.name} {datetime.now().strftime('%B %d %Y %H_%M')}" + ) ) self.reporter.log_info( - f"Downloading the annotations of the requested items to {export_path} \nThis might take a while…" + f"Downloading the annotations of the requested items to {export_path}\nThis might take a while…" ) self.reporter.start_spinner() folders = [] @@ -837,7 +843,7 @@ def execute(self): if not folders: loop = asyncio.new_event_loop() - loop.run_until_complete( + count = loop.run_until_complete( self._backend_client.download_annotations( team_id=self._project.team_id, project_id=self._project.id, @@ -865,12 +871,12 @@ def execute(self): callback=self._callback, ) ) - _ = [_ for _ in executor.map(self.coroutine_wrapper, coroutines)] + count = 
sum( + [i for i in executor.map(self.coroutine_wrapper, coroutines)] + ) self.reporter.stop_spinner() - self.reporter.log_info( - f"SA-PYTHON-SDK - INFO - Downloaded annotations for {self.get_items_count(export_path)} items." - ) + self.reporter.log_info(f"Downloaded annotations for {count} items.") self.download_annotation_classes(export_path) self._response.data = os.path.abspath(export_path) return self._response diff --git a/src/superannotate/lib/core/usecases/base.py b/src/superannotate/lib/core/usecases/base.py index 3f3e3750f..97ed22ee8 100644 --- a/src/superannotate/lib/core/usecases/base.py +++ b/src/superannotate/lib/core/usecases/base.py @@ -58,13 +58,13 @@ def execute(self) -> Iterable: raise NotImplementedError -class BaseReportableUseCae(BaseUseCase, metaclass=ABCMeta): +class BaseReportableUseCase(BaseUseCase, metaclass=ABCMeta): def __init__(self, reporter: Reporter): super().__init__() self.reporter = reporter -class BaseUserBasedUseCase(BaseReportableUseCae, metaclass=ABCMeta): +class BaseUserBasedUseCase(BaseReportableUseCase, metaclass=ABCMeta): """ class contain validation of unique emails """ diff --git a/src/superannotate/lib/core/usecases/folders.py b/src/superannotate/lib/core/usecases/folders.py index a9733652f..b98c79e88 100644 --- a/src/superannotate/lib/core/usecases/folders.py +++ b/src/superannotate/lib/core/usecases/folders.py @@ -142,7 +142,9 @@ def execute(self): class UpdateFolderUseCase(BaseUseCase): def __init__( - self, folders: BaseManageableRepository, folder: FolderEntity, + self, + folders: BaseManageableRepository, + folder: FolderEntity, ): super().__init__() self._folders = folders diff --git a/src/superannotate/lib/core/usecases/images.py b/src/superannotate/lib/core/usecases/images.py index 31b4eeba0..0bf4461fe 100644 --- a/src/superannotate/lib/core/usecases/images.py +++ b/src/superannotate/lib/core/usecases/images.py @@ -42,7 +42,7 @@ from lib.core.response import Response from lib.core.serviceproviders import SuperannotateServiceProvider from lib.core.usecases.base import BaseInteractiveUseCase -from lib.core.usecases.base import BaseReportableUseCae +from lib.core.usecases.base import BaseReportableUseCase from lib.core.usecases.base import BaseUseCase from lib.core.usecases.projects import GetAnnotationClassesUseCase from PIL import UnidentifiedImageError @@ -581,7 +581,11 @@ def execute(self): image = ImagePlugin(io.BytesIO(file.read())) images = [ - Image("fuse", f"{self._image_path}___fuse.png", image.get_empty(),) + Image( + "fuse", + f"{self._image_path}___fuse.png", + image.get_empty(), + ) ] if self._generate_overlay: images.append( @@ -711,7 +715,9 @@ def execute(self): class GetS3ImageUseCase(BaseUseCase): def __init__( - self, s3_bucket, image_path: str, + self, + s3_bucket, + image_path: str, ): super().__init__() self._s3_bucket = s3_bucket @@ -1536,7 +1542,8 @@ def execute(self) -> Response: image_bytes = ( GetImageBytesUseCase( - image=image, backend_service_provider=self._backend_service, + image=image, + backend_service_provider=self._backend_service, ) .execute() .data @@ -1876,7 +1883,7 @@ def execute(self): return self._response -class GetImageAnnotationsUseCase(BaseReportableUseCae): +class GetImageAnnotationsUseCase(BaseReportableUseCase): def __init__( self, reporter: Reporter, @@ -2056,61 +2063,6 @@ def execute(self): return self._response -class SetImageAnnotationStatuses(BaseUseCase): - CHUNK_SIZE = 500 - - def __init__( - self, - service: SuperannotateServiceProvider, - projects: BaseReadOnlyRepository, - 
image_names: list, - team_id: int, - project_id: int, - folder_id: int, - images_repo: BaseManageableRepository, - annotation_status: int, - ): - super().__init__() - self._service = service - self._projects = projects - self._image_names = image_names - self._team_id = team_id - self._project_id = project_id - self._folder_id = folder_id - self._annotation_status = annotation_status - self._images_repo = images_repo - - def validate_project_type(self): - project = self._projects.get_one(uuid=self._project_id, team_id=self._team_id) - if project.type in constances.LIMITED_FUNCTIONS: - raise AppValidationException(constances.LIMITED_FUNCTIONS[project.type]) - - def execute(self): - if self.is_valid(): - if self._image_names is None: - condition = ( - Condition("team_id", self._team_id, EQ) - & Condition("project_id", self._project_id, EQ) - & Condition("folder_id", self._folder_id, EQ) - ) - self._image_names = [ - image.name for image in self._images_repo.get_all(condition) - ] - for i in range(0, len(self._image_names), self.CHUNK_SIZE): - status_changed = self._service.set_images_statuses_bulk( - image_names=self._image_names[ - i : i + self.CHUNK_SIZE # noqa: E203 - ], - team_id=self._team_id, - project_id=self._project_id, - folder_id=self._folder_id, - annotation_status=self._annotation_status, - ) - if not status_changed: - self._response.errors = AppException("Failed to change status.") - return self._response - - class CreateAnnotationClassUseCase(BaseUseCase): def __init__( self, @@ -2438,7 +2390,7 @@ def execute(self) -> Response: return self._response -class UploadVideosAsImages(BaseReportableUseCae): +class UploadVideosAsImages(BaseReportableUseCase): def __init__( self, reporter: Reporter, diff --git a/src/superannotate/lib/core/usecases/integrations.py b/src/superannotate/lib/core/usecases/integrations.py index c61379de0..4f0c42d1e 100644 --- a/src/superannotate/lib/core/usecases/integrations.py +++ b/src/superannotate/lib/core/usecases/integrations.py @@ -9,10 +9,10 @@ from lib.core.repositories import BaseReadOnlyRepository from lib.core.response import Response from lib.core.serviceproviders import SuperannotateServiceProvider -from lib.core.usecases import BaseReportableUseCae +from lib.core.usecases import BaseReportableUseCase -class GetIntegrations(BaseReportableUseCae): +class GetIntegrations(BaseReportableUseCase): def __init__( self, reporter: Reporter, @@ -32,7 +32,7 @@ def execute(self) -> Response: return self._response -class AttachIntegrations(BaseReportableUseCae): +class AttachIntegrations(BaseReportableUseCase): def __init__( self, reporter: Reporter, diff --git a/src/superannotate/lib/core/usecases/items.py b/src/superannotate/lib/core/usecases/items.py index 0bb2fa294..96a10f019 100644 --- a/src/superannotate/lib/core/usecases/items.py +++ b/src/superannotate/lib/core/usecases/items.py @@ -1,14 +1,17 @@ import copy from typing import List +from typing import Optional -import superannotate.lib.core as constances +import superannotate.lib.core as constants from lib.core.conditions import Condition from lib.core.conditions import CONDITION_EQ as EQ from lib.core.entities import AttachmentEntity from lib.core.entities import DocumentEntity from lib.core.entities import Entity from lib.core.entities import FolderEntity +from lib.core.entities import ImageEntity from lib.core.entities import ProjectEntity +from lib.core.entities import SubSetEntity from lib.core.entities import TmpImageEntity from lib.core.entities import VideoEntity from lib.core.exceptions 
import AppException @@ -18,10 +21,49 @@ from lib.core.repositories import BaseReadOnlyRepository from lib.core.response import Response from lib.core.serviceproviders import SuperannotateServiceProvider -from lib.core.usecases.base import BaseReportableUseCae +from lib.core.usecases.base import BaseReportableUseCase +from lib.core.usecases.base import BaseUseCase +from superannotate.logger import get_default_logger +logger = get_default_logger() -class GetItem(BaseReportableUseCae): + +class GetBulkItems(BaseUseCase): + def __init__( + self, + service: SuperannotateServiceProvider, + project_id: int, + team_id: int, + folder_id: int, + items: List[str], + ): + super().__init__() + self._service = service + self._project_id = project_id + self._team_id = team_id + self._folder_id = folder_id + self._items = items + self._chunk_size = 500 + + def execute(self): + res = [] + for i in range(0, len(self._items), self._chunk_size): + response = self._service.get_bulk_items( + project_id=self._project_id, + team_id=self._team_id, + folder_id=self._folder_id, + items=self._items[i : i + self._chunk_size], # noqa: E203 + ) + + if not response.ok: + raise AppException(response.error) + # TODO stop using Image Entity when it gets deprecated and from_dict gets implemented for items + res += [ImageEntity.from_dict(**item) for item in response.data] + self._response.data = res + return self._response + + +class GetItem(BaseReportableUseCase): def __init__( self, reporter: Reporter, @@ -38,22 +80,22 @@ def __init__( @staticmethod def serialize_entity(entity: Entity, project: ProjectEntity): - if project.upload_state != constances.UploadState.EXTERNAL.value: + if project.upload_state != constants.UploadState.EXTERNAL.value: entity.url = None if project.type in ( - constances.ProjectType.VECTOR.value, - constances.ProjectType.PIXEL.value, + constants.ProjectType.VECTOR.value, + constants.ProjectType.PIXEL.value, ): tmp_entity = entity - if project.type == constances.ProjectType.VECTOR.value: + if project.type == constants.ProjectType.VECTOR.value: entity.segmentation_status = None - if project.upload_state == constances.UploadState.EXTERNAL.value: + if project.upload_state == constants.UploadState.EXTERNAL.value: tmp_entity.prediction_status = None tmp_entity.segmentation_status = None return TmpImageEntity(**tmp_entity.dict(by_alias=True)) - elif project.type == constances.ProjectType.VIDEO.value: + elif project.type == constants.ProjectType.VIDEO.value: return VideoEntity(**entity.dict(by_alias=True)) - elif project.type == constances.ProjectType.DOCUMENT.value: + elif project.type == constants.ProjectType.DOCUMENT.value: return DocumentEntity(**entity.dict(by_alias=True)) return entity @@ -74,7 +116,7 @@ def execute(self) -> Response: return self._response -class QueryEntities(BaseReportableUseCae): +class QueryEntitiesUseCase(BaseReportableUseCase): def __init__( self, reporter: Reporter, @@ -82,33 +124,71 @@ def __init__( folder: FolderEntity, backend_service_provider: SuperannotateServiceProvider, query: str, + subset: str, ): super().__init__(reporter) self._project = project self._folder = folder self._backend_client = backend_service_provider self._query = query + self._subset = subset - def validate_query(self): - response = self._backend_client.validate_saqul_query( - self._project.team_id, self._project.id, self._query - ) - if response.get("error"): - raise AppException(response["error"]) - if response["isValidQuery"]: - self._query = response["parsedQuery"] + def validate_arguments(self): + 
if self._query: + response = self._backend_client.validate_saqul_query( + self._project.team_id, self._project.id, self._query + ) + if response.get("error"): + raise AppException(response["error"]) + if response["isValidQuery"]: + self._query = response["parsedQuery"] + else: + raise AppException("Incorrect query.") else: - raise AppException("Incorrect query.") - if self._project.sync_status != constances.ProjectState.SYNCED.value: - raise AppException("Data is not synced.") + response = self._backend_client.validate_saqul_query( + self._project.team_id, self._project.id, "-" + ) + if response.get("error"): + raise AppException(response["error"]) + + if not any([self._query, self._subset]): + raise AppException( + "The query and subset params cannot have the value None at the same time." + ) + if all([self._query, self._subset]) and not self._folder.is_root: + raise AppException( + "The folder name should be specified in the query string." + ) def execute(self) -> Response: if self.is_valid(): + query_kwargs = {} + if self._subset: + subset: Optional[SubSetEntity] = None + response = self._backend_client.list_sub_sets( + team_id=self._project.team_id, project_id=self._project.id + ) + if response.ok: + subset = next( + (_sub for _sub in response.data if _sub.name == self._subset), + None, + ) + if not subset: + self._response.errors = AppException( + "Subset not found. Use the superannotate." + "get_subsets() function to get a list of the available subsets." + ) + return self._response + query_kwargs["subset_id"] = subset.id + if self._query: + query_kwargs["query"] = self._query + query_kwargs["folder_id"] = ( + None if self._folder.name == "root" else self._folder.uuid + ) service_response = self._backend_client.saqul_query( self._project.team_id, self._project.id, - self._query, - folder_id=None if self._folder.name == "root" else self._folder.uuid, + **query_kwargs, ) if service_response.ok: data = [] @@ -125,7 +205,7 @@ def execute(self) -> Response: return self._response -class ListItems(BaseReportableUseCae): +class ListItems(BaseReportableUseCase): def __init__( self, reporter: Reporter, @@ -187,7 +267,86 @@ def execute(self) -> Response: return self._response -class AttachItems(BaseReportableUseCae): +class AssignItemsUseCase(BaseUseCase): + CHUNK_SIZE = 500 + + def __init__( + self, + service: SuperannotateServiceProvider, + project: ProjectEntity, + folder: FolderEntity, + item_names: list, + user: str, + ): + super().__init__() + self._project = project + self._folder = folder + self._item_names = item_names + self._user = user + self._service = service + + def validate_item_names( + self, + ): + self._item_names = list(set(self._item_names)) + + def execute(self): + cnt_assigned = 0 + total_count = len(self._item_names) + if self.is_valid(): + for i in range(0, len(self._item_names), self.CHUNK_SIZE): + response = self._service.assign_items( + team_id=self._project.team_id, + project_id=self._project.id, + folder_name=self._folder.name, + user=self._user, + item_names=self._item_names[i : i + self.CHUNK_SIZE], # noqa: E203 + ) + if not response.ok and response.error: # User not found + self._response.errors += response.error + return self._response + + cnt_assigned += response.data["successCount"] + logger.info( + f"Assigned {cnt_assigned}/{total_count} items to user {self._user}" + ) + return self._response + + +class UnAssignItemsUseCase(BaseUseCase): + CHUNK_SIZE = 500 + + def __init__( + self, + service: SuperannotateServiceProvider, + project_entity: ProjectEntity, + 
folder: FolderEntity, + item_names: list, + ): + super().__init__() + self._project_entity = project_entity + self._folder = folder + self._item_names = item_names + self._service = service + + def execute(self): + # todo handling to backend side + for i in range(0, len(self._item_names), self.CHUNK_SIZE): + is_un_assigned = self._service.un_assign_items( + team_id=self._project_entity.team_id, + project_id=self._project_entity.id, + folder_name=self._folder.name, + item_names=self._item_names[i : i + self.CHUNK_SIZE], # noqa: E203 + ) + if not is_un_assigned: + self._response.errors = AppException( + f"Cant un assign {', '.join(self._item_names[i: i + self.CHUNK_SIZE])}" + ) + + return self._response + + +class AttachItems(BaseReportableUseCase): CHUNK_SIZE = 500 def __init__( @@ -198,13 +357,13 @@ def __init__( attachments: List[AttachmentEntity], annotation_status: str, backend_service_provider: SuperannotateServiceProvider, - upload_state_code: int = constances.UploadState.EXTERNAL.value, + upload_state_code: int = constants.UploadState.EXTERNAL.value, ): super().__init__(reporter) self._project = project self._folder = folder self._attachments = attachments - self._annotation_status_code = constances.AnnotationStatus.get_value( + self._annotation_status_code = constants.AnnotationStatus.get_value( annotation_status ) self._upload_state_code = upload_state_code @@ -227,18 +386,18 @@ def validate_limitations(self): if not response.ok: raise AppValidationException(response.error) if attachments_count > response.data.folder_limit.remaining_image_count: - raise AppValidationException(constances.ATTACH_FOLDER_LIMIT_ERROR_MESSAGE) + raise AppValidationException(constants.ATTACH_FOLDER_LIMIT_ERROR_MESSAGE) elif attachments_count > response.data.project_limit.remaining_image_count: - raise AppValidationException(constances.ATTACH_PROJECT_LIMIT_ERROR_MESSAGE) + raise AppValidationException(constants.ATTACH_PROJECT_LIMIT_ERROR_MESSAGE) elif ( response.data.user_limit and attachments_count > response.data.user_limit.remaining_image_count ): - raise AppValidationException(constances.ATTACH_USER_LIMIT_ERROR_MESSAGE) + raise AppValidationException(constants.ATTACH_USER_LIMIT_ERROR_MESSAGE) def validate_upload_state(self): - if self._project.upload_state == constances.UploadState.BASIC.value: - raise AppValidationException(constances.ATTACHING_UPLOAD_STATE_ERROR) + if self._project.upload_state == constants.UploadState.BASIC.value: + raise AppValidationException(constants.ATTACHING_UPLOAD_STATE_ERROR) @staticmethod def generate_meta(): @@ -288,13 +447,13 @@ def execute(self) -> Response: return self._response -class CopyItems(BaseReportableUseCae): +class CopyItems(BaseReportableUseCase): """ Copy items in bulk between folders in a project. Return skipped item names. 
""" - CHUNK_SIZE = 1000 + CHUNK_SIZE = 500 def __init__( self, @@ -325,9 +484,9 @@ def _validate_limitations(self, items_count): if not response.ok: raise AppValidationException(response.error) if items_count > response.data.folder_limit.remaining_image_count: - raise AppValidationException(constances.COPY_FOLDER_LIMIT_ERROR_MESSAGE) + raise AppValidationException(constants.COPY_FOLDER_LIMIT_ERROR_MESSAGE) if items_count > response.data.project_limit.remaining_image_count: - raise AppValidationException(constances.COPY_PROJECT_LIMIT_ERROR_MESSAGE) + raise AppValidationException(constants.COPY_PROJECT_LIMIT_ERROR_MESSAGE) def validate_item_names(self): if self._item_names: @@ -345,12 +504,18 @@ def execute(self): ) items = [item.name for item in self._items.get_all(condition)] - existing_items = self._backend_service.get_bulk_images( - project_id=self._project.id, - team_id=self._project.team_id, - folder_id=self._to_folder.uuid, - images=items, - ) + existing_items = [] + for i in range(0, len(items), self.CHUNK_SIZE): + cand_items = self._backend_service.get_bulk_images( + project_id=self._project.id, + team_id=self._project.team_id, + folder_id=self._to_folder.uuid, + images=items[i : i + self.CHUNK_SIZE], + ) + if isinstance(cand_items, dict): + continue + existing_items += cand_items + duplications = [item["name"] for item in existing_items] items_to_copy = list(set(items) - set(duplications)) skipped_items = duplications @@ -362,13 +527,15 @@ def execute(self): if items_to_copy: for i in range(0, len(items_to_copy), self.CHUNK_SIZE): chunk_to_copy = items_to_copy[i : i + self.CHUNK_SIZE] # noqa: E203 - poll_id = self._backend_service.copy_items_between_folders_transaction( - team_id=self._project.team_id, - project_id=self._project.id, - from_folder_id=self._from_folder.uuid, - to_folder_id=self._to_folder.uuid, - items=chunk_to_copy, - include_annotations=self._include_annotations, + poll_id = ( + self._backend_service.copy_items_between_folders_transaction( + team_id=self._project.team_id, + project_id=self._project.id, + from_folder_id=self._from_folder.uuid, + to_folder_id=self._to_folder.uuid, + items=chunk_to_copy, + include_annotations=self._include_annotations, + ) ) if not poll_id: skipped_items.extend(chunk_to_copy) @@ -383,12 +550,19 @@ def execute(self): except BackendError as e: self._response.errors = AppException(e) return self._response - existing_items = self._backend_service.get_bulk_images( - project_id=self._project.id, - team_id=self._project.team_id, - folder_id=self._to_folder.uuid, - images=items, - ) + + existing_items = [] + for i in range(0, len(items), self.CHUNK_SIZE): + cand_items = self._backend_service.get_bulk_images( + project_id=self._project.id, + team_id=self._project.team_id, + folder_id=self._to_folder.uuid, + images=items[i : i + self.CHUNK_SIZE], + ) + if isinstance(cand_items, dict): + continue + existing_items += cand_items + existing_item_names_set = {item["name"] for item in existing_items} items_to_copy_names_set = set(items_to_copy) copied_items = existing_item_names_set.intersection( @@ -404,7 +578,7 @@ def execute(self): return self._response -class MoveItems(BaseReportableUseCae): +class MoveItems(BaseReportableUseCase): CHUNK_SIZE = 1000 def __init__( @@ -438,9 +612,9 @@ def _validate_limitations(self, items_count): if not response.ok: raise AppValidationException(response.error) if items_count > response.data.folder_limit.remaining_image_count: - raise AppValidationException(constances.MOVE_FOLDER_LIMIT_ERROR_MESSAGE) + raise 
AppValidationException(constants.MOVE_FOLDER_LIMIT_ERROR_MESSAGE) if items_count > response.data.project_limit.remaining_image_count: - raise AppValidationException(constances.MOVE_PROJECT_LIMIT_ERROR_MESSAGE) + raise AppValidationException(constants.MOVE_PROJECT_LIMIT_ERROR_MESSAGE) def execute(self): if self.is_valid(): @@ -479,7 +653,7 @@ def execute(self): return self._response -class SetAnnotationStatues(BaseReportableUseCae): +class SetAnnotationStatues(BaseReportableUseCase): CHUNK_SIZE = 500 ERROR_MESSAGE = "Failed to change status" @@ -498,7 +672,7 @@ def __init__( self._folder = folder self._item_names = item_names self._items = items - self._annotation_status_code = constances.AnnotationStatus.get_value( + self._annotation_status_code = constants.AnnotationStatus.get_value( annotation_status ) self._backend_service = backend_service_provider @@ -512,12 +686,21 @@ def validate_items(self): ) self._item_names = [item.name for item in self._items.get_all(condition)] return - existing_items = self._backend_service.get_bulk_images( - project_id=self._project.id, - team_id=self._project.team_id, - folder_id=self._folder.uuid, - images=self._item_names, - ) + existing_items = [] + for i in range(0, len(self._item_names), self.CHUNK_SIZE): + + search_names = self._item_names[i : i + self.CHUNK_SIZE] + cand_items = self._backend_service.get_bulk_images( + project_id=self._project.id, + team_id=self._project.team_id, + folder_id=self._folder.uuid, + images=search_names, + ) + + if isinstance(cand_items, dict): + continue + existing_items += cand_items + if not existing_items: raise AppValidationException(self.ERROR_MESSAGE) if existing_items: @@ -541,3 +724,58 @@ def execute(self): self._response.errors = AppException(self.ERROR_MESSAGE) break return self._response + + +class DeleteItemsUseCase(BaseUseCase): + CHUNK_SIZE = 1000 + + def __init__( + self, + project: ProjectEntity, + folder: FolderEntity, + backend_service_provider: SuperannotateServiceProvider, + items: BaseReadOnlyRepository, + item_names: List[str] = None, + ): + super().__init__() + self._project = project + self._folder = folder + self._items = items + self._backend_service = backend_service_provider + self._item_names = item_names + + def execute(self): + if self.is_valid(): + if self._item_names: + item_ids = [ + item.uuid + for item in GetBulkItems( + service=self._backend_service, + project_id=self._project.id, + team_id=self._project.team_id, + folder_id=self._folder.uuid, + items=self._item_names, + ) + .execute() + .data + ] + else: + condition = ( + Condition("team_id", self._project.team_id, EQ) + & Condition("project_id", self._project.id, EQ) + & Condition("folder_id", self._folder.uuid, EQ) + ) + item_ids = [item.id for item in self._items.get_all(condition)] + + for i in range(0, len(item_ids), self.CHUNK_SIZE): + response = self._backend_service.delete_items( + project_id=self._project.id, + team_id=self._project.team_id, + item_ids=item_ids[i : i + self.CHUNK_SIZE], # noqa: E203 + ) + + logger.info( + f"Items deleted in project {self._project.name}{'/' + self._folder.name if not self._folder.is_root else ''}" + ) + + return self._response diff --git a/src/superannotate/lib/core/usecases/models.py b/src/superannotate/lib/core/usecases/models.py index 7a290890a..3d6ec6ff9 100644 --- a/src/superannotate/lib/core/usecases/models.py +++ b/src/superannotate/lib/core/usecases/models.py @@ -8,7 +8,6 @@ import boto3 import lib.core as constances -from lib.core.enums import ProjectType import pandas as pd import 
requests from botocore.exceptions import ClientError @@ -21,12 +20,13 @@ from lib.core.entities import MLModelEntity from lib.core.entities import ProjectEntity from lib.core.enums import ExportStatus +from lib.core.enums import ProjectType from lib.core.exceptions import AppException from lib.core.exceptions import AppValidationException from lib.core.reporter import Reporter from lib.core.repositories import BaseManageableRepository from lib.core.serviceproviders import SuperannotateServiceProvider -from lib.core.usecases.base import BaseReportableUseCae +from lib.core.usecases.base import BaseReportableUseCase from lib.core.usecases.base import BaseUseCase from lib.core.usecases.images import GetBulkImages from superannotate.logger import get_default_logger @@ -179,7 +179,7 @@ def execute(self): return self._response -class DownloadExportUseCase(BaseReportableUseCae): +class DownloadExportUseCase(BaseReportableUseCase): def __init__( self, service: SuperannotateServiceProvider, @@ -635,7 +635,9 @@ def execute(self): class SearchMLModels(BaseUseCase): def __init__( - self, ml_models_repo: BaseManageableRepository, condition: Condition, + self, + ml_models_repo: BaseManageableRepository, + condition: Condition, ): super().__init__() self._ml_models = ml_models_repo diff --git a/src/superannotate/lib/core/usecases/projects.py b/src/superannotate/lib/core/usecases/projects.py index fb97e87de..5362925b7 100644 --- a/src/superannotate/lib/core/usecases/projects.py +++ b/src/superannotate/lib/core/usecases/projects.py @@ -19,8 +19,9 @@ from lib.core.reporter import Reporter from lib.core.repositories import BaseManageableRepository from lib.core.repositories import BaseReadOnlyRepository +from lib.core.response import Response from lib.core.serviceproviders import SuperannotateServiceProvider -from lib.core.usecases.base import BaseReportableUseCae +from lib.core.usecases.base import BaseReportableUseCase from lib.core.usecases.base import BaseUseCase from lib.core.usecases.base import BaseUserBasedUseCase from requests.exceptions import RequestException @@ -31,7 +32,10 @@ class GetProjectsUseCase(BaseUseCase): def __init__( - self, condition: Condition, team_id: int, projects: BaseManageableRepository, + self, + condition: Condition, + team_id: int, + projects: BaseManageableRepository, ): super().__init__() self._condition = condition @@ -47,7 +51,10 @@ def execute(self): class GetProjectByNameUseCase(BaseUseCase): def __init__( - self, name: str, team_id: int, projects: BaseManageableRepository, + self, + name: str, + team_id: int, + projects: BaseManageableRepository, ): super().__init__() self._name = name @@ -300,7 +307,10 @@ def execute(self): class DeleteProjectUseCase(BaseUseCase): def __init__( - self, project_name: str, team_id: int, projects: BaseManageableRepository, + self, + project_name: str, + team_id: int, + projects: BaseManageableRepository, ): super().__init__() @@ -310,7 +320,9 @@ def __init__( def execute(self): use_case = GetProjectByNameUseCase( - name=self._project_name, team_id=self._team_id, projects=self._projects, + name=self._project_name, + team_id=self._team_id, + projects=self._projects, ) project_response = use_case.execute() if project_response.data: @@ -373,7 +385,7 @@ def execute(self): return self._response -class CloneProjectUseCase(BaseReportableUseCae): +class CloneProjectUseCase(BaseReportableUseCase): def __init__( self, reporter: Reporter, @@ -696,7 +708,9 @@ def execute(self): class GetAnnotationClassesUseCase(BaseUseCase): def __init__( - self, 
classes: BaseManageableRepository, condition: Condition = None, + self, + classes: BaseManageableRepository, + condition: Condition = None, ): super().__init__() self._classes = classes @@ -1066,3 +1080,35 @@ def execute(self): ) self._response.data = invited, list(to_skip) return self._response + + +class ListSubsetsUseCase(BaseReportableUseCase): + def __init__( + self, + reporter: Reporter, + project: ProjectEntity, + backend_client: SuperannotateServiceProvider, + ): + super().__init__(reporter) + self._project = project + self._backend_client = backend_client + + def validate_arguments(self): + response = self._backend_client.validate_saqul_query( + self._project.team_id, self._project.id, "_" + ) + error = response.get("error") + if error: + raise AppException(response["error"]) + + def execute(self) -> Response: + if self.is_valid(): + sub_sets_response = self._backend_client.list_sub_sets( + team_id=self._project.team_id, project_id=self._project.id + ) + if sub_sets_response.ok: + self._response.data = sub_sets_response.data + else: + self._response.data = [] + + return self._response diff --git a/src/superannotate/lib/core/video_convertor.py b/src/superannotate/lib/core/video_convertor.py index 4203b7266..420f0fc18 100644 --- a/src/superannotate/lib/core/video_convertor.py +++ b/src/superannotate/lib/core/video_convertor.py @@ -15,6 +15,7 @@ class Annotation(BaseModel): instanceId: int type: str className: Optional[str] + classId: Optional[int] x: Optional[Any] y: Optional[Any] points: Optional[Dict] @@ -57,6 +58,7 @@ def get_frame(self, frame_no: int): def _interpolate( self, class_name: str, + class_id: int, from_frame: int, to_frame: int, data: dict, @@ -87,6 +89,7 @@ def _interpolate( instanceId=instance_id, type=annotation_type, className=class_name, + classId=class_id, attributes=data["attributes"], keyframe=False, **tmp_data @@ -105,23 +108,21 @@ def pairwise(data: list): return zip(a, b) def get_median(self, annotations: List[dict]) -> dict: - if len(annotations) == 1: + if len(annotations) >= 1: return annotations[0] - first_annotations = annotations[:1][0] - median = ( - first_annotations["timestamp"] // self.ratio - ) * self.ratio + self.ratio / 2 - median_annotation = first_annotations - distance = abs(median - first_annotations["timestamp"]) - for annotation in annotations[1:]: - annotation_distance = abs(median - annotation["timestamp"]) - if annotation_distance < distance: - distance = annotation_distance - median_annotation = annotation - return median_annotation - - def calculate_sped(self, from_frame, to_frame): - pass + # Let's just leave the code for reference. 
+ # first_annotations = annotations[:1][0] + # median = ( + # first_annotations["timestamp"] // self.ratio + # ) * self.ratio + self.ratio / 2 + # median_annotation = first_annotations + # distance = abs(median - first_annotations["timestamp"]) + # for annotation in annotations[1:]: + # annotation_distance = abs(median - annotation["timestamp"]) + # if annotation_distance < distance: + # distance = annotation_distance + # median_annotation = annotation + # return median_annotation @staticmethod def merge_first_frame(frames_mapping): @@ -141,6 +142,7 @@ def _interpolate_frames( to_frame_no, annotation_type, class_name, + class_id, instance_id, ): steps = None @@ -171,6 +173,7 @@ def _interpolate_frames( ] return self._interpolate( class_name=class_name, + class_id=class_id, from_frame=from_frame_no, to_frame=to_frame_no, data=from_frame, @@ -184,6 +187,7 @@ def _process(self): instance_id = next(self.id_generator) annotation_type = instance["meta"]["type"] class_name = instance["meta"].get("className") + class_id = instance["meta"].get("classId", -1) for parameter in instance["parameters"]: frames_mapping = defaultdict(list) interpolated_frames = {} @@ -207,6 +211,7 @@ def _process(self): to_frame=to_frame, to_frame_no=to_frame_no, class_name=class_name, + class_id=class_id, annotation_type=annotation_type, instance_id=instance_id, ) @@ -222,6 +227,7 @@ def _process(self): instanceId=instance_id, type=annotation_type, className=class_name, + classId=class_id, x=frame.get("x"), y=frame.get("y"), points=frame.get("points"), @@ -234,6 +240,7 @@ def _process(self): instanceId=instance_id, type=annotation_type, className=class_name, + classId=class_id, x=median.get("x"), y=median.get("y"), points=median.get("points"), diff --git a/src/superannotate/lib/infrastructure/controller.py b/src/superannotate/lib/infrastructure/controller.py index 1329d2a71..de18dbd0d 100644 --- a/src/superannotate/lib/infrastructure/controller.py +++ b/src/superannotate/lib/infrastructure/controller.py @@ -2,7 +2,6 @@ import io import os from abc import ABCMeta -from os.path import expanduser from pathlib import Path from typing import Callable from typing import Iterable @@ -28,7 +27,6 @@ from lib.core.response import Response from lib.infrastructure.helpers import timed_lru_cache from lib.infrastructure.repositories import AnnotationClassRepository -from lib.infrastructure.repositories import ConfigRepository from lib.infrastructure.repositories import FolderRepository from lib.infrastructure.repositories import ImageRepository from lib.infrastructure.repositories import IntegrationRepository @@ -53,14 +51,21 @@ def build_condition(**kwargs) -> Condition: class BaseController(metaclass=ABCMeta): - def __init__(self, config_path: str = None, token: str = None): - self._team_data = None - self._token = None - self._backend_url = None - self._ssl_verify = False - self._config_path = None - self._backend_client = None + SESSIONS = {} + + def __init__(self, token: str, host: str, ssl_verify: bool, version: str): + self._version = version self._logger = get_default_logger() + self._testing = os.getenv("SA_TESTING", "False").lower() in ("true", "1", "t") + self._token = token + self._backend_client = SuperannotateBackendService( + api_url=host, + auth_token=token, + logger=self._logger, + verify_ssl=ssl_verify, + testing=self._testing, + ) + self._team_data = None self._s3_upload_auth_data = None self._projects = None self._folders = None @@ -72,75 +77,17 @@ def __init__(self, config_path: str = None, token: str = None): 
self._user_id = None self._team_name = None self._reporter = None - self._testing = os.getenv("SA_TESTING", "False").lower() in ("true", "1", "t") - self._ssl_verify = not self._testing - self._backend_url = os.environ.get("SA_URL", constances.BACKEND_URL) - - if token: - self._token = self._validate_token(token) - elif config_path: - config_path = expanduser(config_path) - self.retrieve_configs(Path(config_path), raise_exception=True) - else: - env_token = os.environ.get("SA_TOKEN") - if env_token: - self._token = self._validate_token(os.environ.get("SA_TOKEN")) - else: - config_path = expanduser(constances.CONFIG_FILE_LOCATION) - self.retrieve_configs(Path(config_path), raise_exception=False) - self.initialize_backend_client() - - def retrieve_configs(self, path: Path, raise_exception=True): - - token, backend_url, ssl_verify = None, None, None - if not Path(path).is_file() or not os.access(path, os.R_OK): - if raise_exception: - raise AppException( - f"SuperAnnotate config file {str(path)} not found." - f" Please provide correct config file location to sa.init() or use " - f"CLI's superannotate init to generate default location config file." - ) - try: - config_repo = ConfigRepository(str(path)) - token, backend_url, ssl_verify = ( - self._validate_token(config_repo.get_one("token").value), - config_repo.get_one("main_endpoint").value, - config_repo.get_one("ssl_verify").value, - ) - except Exception: - if raise_exception: - raise AppException( - f"Incorrect config file: token is not present in the config file {path}" - ) - self._token = token - self._backend_url = backend_url or self._backend_url - self._ssl_verify = ssl_verify if ssl_verify is not None else True @staticmethod - def _validate_token(token: str): + def validate_token(token: str): try: int(token.split("=")[-1]) except ValueError: raise AppException("Invalid token.") return token - def initialize_backend_client(self): - if not self._token: - raise AppException("Team token not provided") - self._backend_client = SuperannotateBackendService( - api_url=self._backend_url, - auth_token=self._token, - logger=self._logger, - verify_ssl=self._ssl_verify, - testing=self._testing, - ) - self._backend_client.get_session.cache_clear() - return self._backend_client - @property def backend_client(self): - if not self._backend_client: - self.initialize_backend_client() return self._backend_client @property @@ -155,12 +102,6 @@ def team_name(self): _, self._team_name = self.get_team() return self._team_name - def set_token(self, token: str, backend_url: str = constances.BACKEND_URL): - self._token = self._validate_token(token) - if backend_url: - self._backend_url = backend_url - self.initialize_backend_client() - @property def projects(self): if not self._projects: @@ -189,7 +130,7 @@ def teams(self): @property def team_data(self): if not self._team_data: - self._team_data = self.get_team() + self._team_data = self.get_team().data return self._team_data @property @@ -212,14 +153,17 @@ def get_integrations_repo(self, team_id: int): @property def team_id(self) -> int: if not self._token: - raise AppException( - f"Invalid credentials provided in the {self._config_path}." 
- ) + raise AppException("Invalid credentials provided.") return int(self._token.split("=")[-1]) - @property - def default_reporter(self): - return Reporter() + @staticmethod + def get_default_reporter( + log_info: bool = True, + log_warning: bool = True, + disable_progress_bar: bool = False, + log_debug: bool = True, + ) -> Reporter: + return Reporter(log_info, log_warning, disable_progress_bar, log_debug) @timed_lru_cache(seconds=3600) def get_auth_data(self, project_id: int, team_id: int, folder_id: int): @@ -251,19 +195,6 @@ def annotation_validators(self) -> AnnotationValidators: class Controller(BaseController): DEFAULT = None - def __init__(self, config_path: str = None, token: str = None): - super().__init__(config_path, token) - self._team = None - - @classmethod - def get_default(cls): - if not cls.DEFAULT: - try: - cls.DEFAULT = Controller() - except Exception: - pass - return cls.DEFAULT - @classmethod def set_default(cls, obj): cls.DEFAULT = obj @@ -320,7 +251,9 @@ def search_project( condition &= build_condition(**kwargs) use_case = usecases.GetProjectsUseCase( - condition=condition, projects=self.projects, team_id=self.team_id, + condition=condition, + projects=self.projects, + team_id=self.team_id, ) return use_case.execute() @@ -365,7 +298,9 @@ def create_project( def delete_project(self, name: str): use_case = usecases.DeleteProjectUseCase( - project_name=name, team_id=self.team_id, projects=self.projects, + project_name=name, + team_id=self.team_id, + projects=self.projects, ) return use_case.execute() @@ -480,11 +415,15 @@ def clone_project( project = self._get_project(from_name) project_to_create = copy.copy(project) + reporter = self.get_default_reporter() + reporter.track( + "external", project.upload_state == constances.UploadState.EXTERNAL.value + ) project_to_create.name = name if project_description is not None: project_to_create.description = project_description use_case = usecases.CloneProjectUseCase( - reporter=self.default_reporter, + reporter=reporter, project=project, project_to_create=project_to_create, projects=self.projects, @@ -499,13 +438,35 @@ def clone_project( ) return use_case.execute() + def interactive_attach_urls( + self, + project_name: str, + files: List[ImageEntity], + folder_name: str = None, + annotation_status: str = None, + upload_state_code: int = None, + ): + project = self._get_project(project_name) + folder = self._get_folder(project, folder_name) + + return usecases.InteractiveAttachFileUrlsUseCase( + project=project, + folder=folder, + attachments=files, + backend_service_provider=self._backend_client, + annotation_status=annotation_status, + upload_state_code=upload_state_code, + ) + def create_folder(self, project: str, folder_name: str): project = self._get_project(project) folder = FolderEntity( name=folder_name, project_id=project.id, team_id=project.team_id ) use_case = usecases.CreateFolderUseCase( - project=project, folder=folder, folders=self.folders, + project=project, + folder=folder, + folders=self.folders, ) return use_case.execute() @@ -596,7 +557,10 @@ def search_images( return use_case.execute() def _get_image( - self, project: ProjectEntity, image_name: str, folder: FolderEntity = None, + self, + project: ProjectEntity, + image_name: str, + folder: FolderEntity = None, ) -> ImageEntity: response = usecases.GetImageUseCase( service=self._backend_client, @@ -621,7 +585,10 @@ def update_folder(self, project_name: str, folder_name: str, folder_data: dict): folder = self._get_folder(project, folder_name) for field, value 
in folder_data.items(): setattr(folder, field, value) - use_case = usecases.UpdateFolderUseCase(folders=self.folders, folder=folder,) + use_case = usecases.UpdateFolderUseCase( + folders=self.folders, + folder=folder, + ) return use_case.execute() def copy_image( @@ -705,6 +672,48 @@ def update_image( use_case = usecases.UpdateImageUseCase(image=image, images=self.images) return use_case.execute() + def bulk_copy_images( + self, + project_name: str, + from_folder_name: str, + to_folder_name: str, + image_names: List[str], + include_annotations: bool, + include_pin: bool, + ): + project = self._get_project(project_name) + from_folder = self._get_folder(project, from_folder_name) + to_folder = self._get_folder(project, to_folder_name) + use_case = usecases.ImagesBulkCopyUseCase( + project=project, + from_folder=from_folder, + to_folder=to_folder, + image_names=image_names, + backend_service_provider=self._backend_client, + include_annotations=include_annotations, + include_pin=include_pin, + ) + return use_case.execute() + + def bulk_move_images( + self, + project_name: str, + from_folder_name: str, + to_folder_name: str, + image_names: List[str], + ): + project = self._get_project(project_name) + from_folder = self._get_folder(project, from_folder_name) + to_folder = self._get_folder(project, to_folder_name) + use_case = usecases.ImagesBulkMoveUseCase( + project=project, + from_folder=from_folder, + to_folder=to_folder, + image_names=image_names, + backend_service_provider=self._backend_client, + ) + return use_case.execute() + def get_project_metadata( self, project_name: str, @@ -786,43 +795,69 @@ def set_project_settings(self, project_name: str, new_settings: List[dict]): ) return use_case.execute() - def set_images_annotation_statuses( + def delete_images( self, project_name: str, folder_name: str, - image_names: list, - annotation_status: str, + image_names: List[str] = None, ): - project_entity = self._get_project(project_name) - folder_entity = self._get_folder(project_entity, folder_name) - images_repo = ImageRepository(service=self._backend_client) - use_case = usecases.SetImageAnnotationStatuses( - service=self._backend_client, - projects=self.projects, + project = self._get_project(project_name) + folder = self._get_folder(project, folder_name) + + use_case = usecases.DeleteImagesUseCase( + project=project, + folder=folder, + images=self.images, image_names=image_names, - team_id=project_entity.team_id, - project_id=project_entity.id, - folder_id=folder_entity.uuid, - images_repo=images_repo, - annotation_status=constances.AnnotationStatus.get_value(annotation_status), + backend_service_provider=self._backend_client, ) return use_case.execute() - def delete_images( - self, project_name: str, folder_name: str, image_names: List[str] = None, + def delete_items( + self, + project_name: str, + folder_name: str, + items: List[str] = None, ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) - use_case = usecases.DeleteImagesUseCase( + use_case = usecases.DeleteItemsUseCase( project=project, folder=folder, - images=self.images, - image_names=image_names, + items=self.items, + item_names=items, backend_service_provider=self._backend_client, ) return use_case.execute() + def assign_items( + self, project_name: str, folder_name: str, item_names: list, user: str + ): + project_entity = self.get_project_metadata( + project_name, include_contributors=True + ).data["project"] + folder = self._get_folder(project_entity, folder_name) + use_case = 
usecases.AssignItemsUseCase( + project=project_entity, + service=self._backend_client, + folder=folder, + item_names=item_names, + user=user, + ) + return use_case.execute() + + def un_assign_items(self, project_name, folder_name, item_names): + project = self._get_project(project_name) + folder = self._get_folder(project, folder_name) + use_case = usecases.UnAssignItemsUseCase( + project_entity=project, + service=self._backend_client, + folder=folder, + item_names=item_names, + ) + return use_case.execute() + def assign_images( self, project_name: str, folder_name: str, image_names: list, user: str ): @@ -852,7 +887,9 @@ def un_assign_folder(self, project_name: str, folder_name: str): project_entity = self._get_project(project_name) folder = self._get_folder(project_entity, folder_name) use_case = usecases.UnAssignFolderUseCase( - service=self._backend_client, project_entity=project_entity, folder=folder, + service=self._backend_client, + project_entity=project_entity, + folder=folder, ) return use_case.execute() @@ -947,7 +984,8 @@ def delete_annotation_class(self, project_name: str, annotation_class_name: str) use_case = usecases.DeleteAnnotationClassUseCase( annotation_class_name=annotation_class_name, annotation_classes_repo=AnnotationClassRepository( - service=self._backend_client, project=project, + service=self._backend_client, + project=project, ), project_name=project_name, ) @@ -958,7 +996,8 @@ def get_annotation_class(self, project_name: str, annotation_class_name: str): use_case = usecases.GetAnnotationClassUseCase( annotation_class_name=annotation_class_name, annotation_classes_repo=AnnotationClassRepository( - service=self._backend_client, project=project, + service=self._backend_client, + project=project, ), ) return use_case.execute() @@ -967,7 +1006,8 @@ def download_annotation_classes(self, project_name: str, download_path: str): project = self._get_project(project_name) use_case = usecases.DownloadAnnotationClassesUseCase( annotation_classes_repo=AnnotationClassRepository( - service=self._backend_client, project=project, + service=self._backend_client, + project=project, ), download_path=download_path, project_name=project_name, @@ -980,7 +1020,8 @@ def create_annotation_classes(self, project_name: str, annotation_classes: list) use_case = usecases.CreateAnnotationClassesUseCase( service=self._backend_client, annotation_classes_repo=AnnotationClassRepository( - service=self._backend_client, project=project, + service=self._backend_client, + project=project, ), annotation_classes=annotation_classes, project=project, @@ -1050,7 +1091,7 @@ def upload_annotations_from_folder( project=project, folder=folder, images=self.images, - team=self.team_data.data, + team=self.team_data, annotation_paths=annotation_paths, backend_service_provider=self._backend_client, annotation_classes=AnnotationClassRepository( @@ -1062,7 +1103,7 @@ def upload_annotations_from_folder( "data", [] ), validators=self.annotation_validators, - reporter=Reporter(log_info=False, log_warning=False), + reporter=self.get_default_reporter(log_info=False, log_warning=False), folder_path=folder_path, ) return use_case.execute() @@ -1086,7 +1127,7 @@ def upload_image_annotations( project=project, folder=folder, images=self.images, - team=self.team_data.data, + team=self.team_data, annotation_classes=AnnotationClassRepository( service=self._backend_client, project=project ).get_all(), @@ -1098,7 +1139,7 @@ def upload_image_annotations( backend_service_provider=self._backend_client, mask=mask, verbose=verbose, - 
reporter=self.default_reporter, + reporter=self.get_default_reporter(), validators=self.annotation_validators, ) return use_case.execute() @@ -1131,7 +1172,7 @@ def download_export( folder_path=folder_path, extract_zip_contents=extract_zip_contents, to_s3_bucket=to_s3_bucket, - reporter=self.default_reporter, + reporter=self.get_default_reporter(), ) return use_case.execute() @@ -1164,7 +1205,6 @@ def benchmark( show_plots: bool, ): project = self._get_project(project_name) - export_response = self.prepare_export( project.name, folder_names=folder_names, @@ -1181,7 +1221,7 @@ def benchmark( folder_path=export_root, extract_zip_contents=True, to_s3_bucket=False, - reporter=self.default_reporter, + reporter=self.get_default_reporter(), ).execute() if response.errors: raise AppException(response.errors) @@ -1254,7 +1294,10 @@ def run_prediction( return use_case.execute() def list_images( - self, project_name: str, annotation_status: str = None, name_prefix: str = None, + self, + project_name: str, + annotation_status: str = None, + name_prefix: str = None, ): project = self._get_project(project_name) @@ -1328,7 +1371,7 @@ def add_contributors_to_project(self, project_name: str, emails: list, role: str project_name=project_name, include_contributors=True ) use_case = usecases.AddContributorsToProject( - reporter=self.default_reporter, + reporter=self.get_default_reporter(), team=team.data, project=project.data["project"], emails=emails, @@ -1340,7 +1383,7 @@ def add_contributors_to_project(self, project_name: str, emails: list, role: str def invite_contributors_to_team(self, emails: list, set_admin: bool): team = self.get_team() use_case = usecases.InviteContributorsToTeam( - reporter=self.default_reporter, + reporter=self.get_default_reporter(), team=team.data, emails=emails, set_admin=set_admin, @@ -1365,7 +1408,7 @@ def upload_videos( folder = self._get_folder(project, folder_name) use_case = usecases.UploadVideosAsImages( - reporter=self.default_reporter, + reporter=self.get_default_reporter(), service=self.backend_client, project=project, folder=folder, @@ -1390,7 +1433,7 @@ def get_annotations( project = self._get_project(project_name) folder = self._get_folder(project, folder_name) use_case = usecases.GetAnnotations( - reporter=Reporter(log_info=logging, log_debug=logging), + reporter=self.get_default_reporter(log_info=logging, log_debug=logging), project=project, folder=folder, images=self.images, @@ -1406,7 +1449,7 @@ def get_annotations_per_frame( folder = self._get_folder(project, folder_name) use_case = usecases.GetVideoAnnotationsPerFrame( - reporter=self.default_reporter, + reporter=self.get_default_reporter(), project=project, folder=folder, images=self.images, @@ -1422,7 +1465,7 @@ def upload_priority_scores( project = self._get_project(project_name) folder = self._get_folder(project, folder_name) use_case = usecases.UploadPriorityScoresUseCase( - reporter=self.default_reporter, + reporter=self.get_default_reporter(), project=project, folder=folder, scores=scores, @@ -1432,10 +1475,10 @@ def upload_priority_scores( return use_case.execute() def get_integrations(self): - team = self.team_data.data + team = self.team_data use_cae = usecases.GetIntegrations( - reporter=self.default_reporter, - team=self.team_data.data, + reporter=self.get_default_reporter(), + team=self.team_data, integrations=self.get_integrations_repo(team_id=team.uuid), ) return use_cae.execute() @@ -1447,12 +1490,12 @@ def attach_integrations( integration: IntegrationEntity, folder_path: str, ): - team = 
self.team_data.data + team = self.team_data project = self._get_project(project_name) folder = self._get_folder(project, folder_name) use_case = usecases.AttachIntegrations( - reporter=self.default_reporter, - team=self.team_data.data, + reporter=self.get_default_reporter(), + team=self.team_data, backend_service=self.backend_client, project=project, folder=folder, @@ -1462,15 +1505,18 @@ def attach_integrations( ) return use_case.execute() - def query_entities(self, project_name: str, folder_name: str, query: str = None): + def query_entities( + self, project_name: str, folder_name: str, query: str = None, subset: str = None + ): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) - use_case = usecases.QueryEntities( - reporter=self.default_reporter, + use_case = usecases.QueryEntitiesUseCase( + reporter=self.get_default_reporter(), project=project, folder=folder, query=query, + subset=subset, backend_service_provider=self.backend_client, ) return use_case.execute() @@ -1479,7 +1525,7 @@ def get_item(self, project_name: str, folder_name: str, item_name: str): project = self._get_project(project_name) folder = self._get_folder(project, folder_name) use_case = usecases.GetItem( - reporter=self.default_reporter, + reporter=self.get_default_reporter(), project=project, folder=folder, item_name=item_name, @@ -1516,7 +1562,7 @@ def list_items( search_condition &= build_condition(**kwargs) use_case = usecases.ListItems( - reporter=self.default_reporter, + reporter=self.get_default_reporter(), project=project, folder=folder, recursive=recursive, @@ -1538,7 +1584,7 @@ def attach_items( folder = self._get_folder(project, folder_name) use_case = usecases.AttachItems( - reporter=self.default_reporter, + reporter=self.get_default_reporter(), project=project, folder=folder, attachments=attachments, @@ -1560,7 +1606,7 @@ def copy_items( to_folder = self._get_folder(project, to_folder) use_case = usecases.CopyItems( - self.default_reporter, + self.get_default_reporter(), project=project, from_folder=from_folder, to_folder=to_folder, @@ -1583,7 +1629,7 @@ def move_items( to_folder = self._get_folder(project, to_folder) use_case = usecases.MoveItems( - self.default_reporter, + self.get_default_reporter(), project=project, from_folder=from_folder, to_folder=to_folder, @@ -1604,7 +1650,7 @@ def set_annotation_statuses( folder = self._get_folder(project, folder_name) use_case = usecases.SetAnnotationStatues( - self.default_reporter, + self.get_default_reporter(), project=project, folder=folder, annotation_status=annotation_status, @@ -1627,7 +1673,7 @@ def download_annotations( folder = self._get_folder(project, folder_name) use_case = usecases.DownloadAnnotations( - reporter=self.default_reporter, + reporter=self.get_default_reporter(), project=project, folder=folder, destination=destination, @@ -1642,3 +1688,12 @@ def download_annotations( callback=callback, ) return use_case.execute() + + def list_subsets(self, project_name: str): + project = self._get_project(project_name) + use_case = usecases.ListSubsetsUseCase( + reporter=self.get_default_reporter(), + project=project, + backend_client=self.backend_client, + ) + return use_case.execute() diff --git a/src/superannotate/lib/infrastructure/services.py b/src/superannotate/lib/infrastructure/services.py index 9d27ae698..fc83240e2 100644 --- a/src/superannotate/lib/infrastructure/services.py +++ b/src/superannotate/lib/infrastructure/services.py @@ -14,6 +14,7 @@ import lib.core as constance import 
requests.packages.urllib3 +from lib.core import entities from lib.core.exceptions import AppException from lib.core.exceptions import BackendError from lib.core.reporter import Reporter @@ -134,10 +135,10 @@ def _request( if response.status_code == 404 and retried < 3: return self._request( url, - method="get", - data=None, - headers=None, - params=None, + method=method, + data=data, + headers=headers, + params=params, retried=retried + 1, content_type=content_type, ) @@ -187,7 +188,8 @@ class SuperannotateBackendService(BaseBackendService): Manage projects, images and team in the Superannotate """ - DEFAULT_CHUNK_SIZE = 1000 + DEFAULT_CHUNK_SIZE = 5000 + SAQUL_CHUNK_SIZE = 50 URL_USERS = "users" URL_LIST_PROJECTS = "projects" @@ -201,7 +203,9 @@ class SuperannotateBackendService(BaseBackendService): URL_GET_IMAGES = "images" URL_GET_ITEMS = "items" URL_BULK_GET_IMAGES = "images/getBulk" + URL_BULK_GET_ITEMS = "images/getBulk" URL_DELETE_FOLDERS = "image/delete/images" + URL_DELETE_ITEMS = "image/delete/images" URL_CREATE_IMAGE = "image/ext-create" URL_PROJECT_SETTINGS = "project/{}/settings" URL_PROJECT_WORKFLOW = "project/{}/workflow" @@ -215,6 +219,7 @@ class SuperannotateBackendService(BaseBackendService): URL_MOVE_IMAGES_FROM_FOLDER = "image/move" URL_GET_COPY_PROGRESS = "images/copy-image-progress" URL_ASSIGN_IMAGES = "images/editAssignment/" + URL_ASSIGN_ITEMS = "images/editAssignment/" URL_ASSIGN_FOLDER = "folder/editAssignment" URL_GET_EXPORTS = "exports" URL_GET_CLASS = "class/{}" @@ -239,6 +244,7 @@ class SuperannotateBackendService(BaseBackendService): URL_ATTACH_INTEGRATIONS = "image/integration/create" URL_SAQUL_QUERY = "/images/search/advanced" URL_VALIDATE_SAQUL_QUERY = "/images/parse/query/advanced" + URL_LIST_SUBSETS = "/project/{project_id}/subset" def upload_priority_scores( self, team_id: int, project_id: int, folder_id: int, priorities: list @@ -299,7 +305,11 @@ def get_download_token( return response.json() def get_upload_token( - self, project_id: int, team_id: int, folder_id: int, image_id: int, + self, + project_id: int, + team_id: int, + folder_id: int, + image_id: int, ): download_token_url = urljoin( self.api_url, @@ -688,6 +698,7 @@ def set_images_statuses_bulk( def get_bulk_images( self, project_id: int, team_id: int, folder_id: int, images: List[str] ) -> List[dict]: + bulk_get_images_url = urljoin(self.api_url, self.URL_BULK_GET_IMAGES) res = self._request( bulk_get_images_url, @@ -701,6 +712,23 @@ def get_bulk_images( ) return res.json() + def get_bulk_items( + self, project_id: int, team_id: int, folder_id: int, items: List[str] + ) -> List[dict]: + + bulk_get_items_url = urljoin(self.api_url, self.URL_BULK_GET_ITEMS) + res = self._request( + bulk_get_items_url, + "post", + data={ + "project_id": project_id, + "team_id": team_id, + "folder_id": folder_id, + "names": items, + }, + ) + return ServiceResponse(res, ServiceResponse) + def delete_images(self, project_id: int, team_id: int, image_ids: List[int]): delete_images_url = urljoin(self.api_url, self.URL_DELETE_FOLDERS) res = self._request( @@ -711,6 +739,18 @@ def delete_images(self, project_id: int, team_id: int, image_ids: List[int]): ) return res.json() + def delete_items(self, project_id: int, team_id: int, item_ids: List[int]): + delete_items_url = urljoin(self.api_url, self.URL_DELETE_ITEMS) + + res = self._request( + delete_items_url, + "put", + params={"team_id": team_id, "project_id": project_id}, + data={"image_ids": item_ids}, + ) + + return ServiceResponse(res, ServiceResponse) + def 
assign_images( self, team_id: int, @@ -733,7 +773,11 @@ def assign_images( return res.ok def un_assign_images( - self, team_id: int, project_id: int, folder_name: str, image_names: List[str], + self, + team_id: int, + project_id: int, + folder_name: str, + image_names: List[str], ): un_assign_images_url = urljoin(self.api_url, self.URL_ASSIGN_IMAGES) res = self._request( @@ -748,8 +792,52 @@ def un_assign_images( ) return res.ok + def assign_items( + self, + team_id: int, + project_id: int, + folder_name: str, + user: str, + item_names: list, + ) -> ServiceResponse: + assign_items_url = urljoin(self.api_url, self.URL_ASSIGN_ITEMS) + return self._request( + assign_items_url, + "put", + params={"team_id": team_id, "project_id": project_id}, + data={ + "image_names": item_names, + "assign_user_id": user, + "folder_name": folder_name, + }, + content_type=ServiceResponse, + ) + + def un_assign_items( + self, + team_id: int, + project_id: int, + folder_name: str, + item_names: List[str], + ): + un_assign_items_url = urljoin(self.api_url, self.URL_ASSIGN_ITEMS) + res = self._request( + un_assign_items_url, + "put", + params={"team_id": team_id, "project_id": project_id}, + data={ + "image_names": item_names, + "remove_user_ids": ["all"], + "folder_name": folder_name, + }, + ) + return res.ok + def un_assign_folder( - self, team_id: int, project_id: int, folder_name: str, + self, + team_id: int, + project_id: int, + folder_name: str, ): un_assign_folder_url = urljoin(self.api_url, self.URL_ASSIGN_FOLDER) res = self._request( @@ -1033,7 +1121,7 @@ async def download_annotations( postfix: str, items: List[str] = None, callback: Callable = None, - ) -> List[dict]: + ) -> int: import aiohttp async with aiohttp.ClientSession( @@ -1099,9 +1187,14 @@ def attach_integrations( return response.ok def saqul_query( - self, team_id: int, project_id: int, query: str, folder_id: int + self, + team_id: int, + project_id: int, + folder_id: int, + query: str = None, + subset_id: int = None, ) -> ServiceResponse: - CHUNK_SIZE = 50 + query_url = urljoin(self.api_url, self.URL_SAQUL_QUERY) params = { "team_id": team_id, @@ -1110,18 +1203,22 @@ def saqul_query( } if folder_id: params["folder_id"] = folder_id - data = {"query": query, "image_index": 0} + if subset_id: + params["subset_id"] = subset_id + data = {"image_index": 0} + if query: + data["query"] = query items = [] for _ in range(self.MAX_ITEMS_COUNT): response = self._request(query_url, "post", params=params, data=data) if response.ok: response_items = response.json() items.extend(response_items) - if len(response_items) < CHUNK_SIZE: + if len(response_items) < self.SAQUL_CHUNK_SIZE: service_response = ServiceResponse(response) service_response.data = items return service_response - data["image_index"] += CHUNK_SIZE + data["image_index"] += self.SAQUL_CHUNK_SIZE return ServiceResponse(response) def validate_saqul_query(self, team_id: int, project_id: int, query: str) -> dict: @@ -1136,3 +1233,11 @@ def validate_saqul_query(self, team_id: int, project_id: int, query: str) -> dic return self._request( validate_query_url, "post", params=params, data=data ).json() + + def list_sub_sets(self, team_id: int, project_id: int) -> ServiceResponse: + return self._request( + urljoin(self.api_url, self.URL_LIST_SUBSETS.format(project_id=project_id)), + "get", + params=dict(team_id=team_id), + content_type=List[entities.SubSetEntity], + ) diff --git a/src/superannotate/lib/infrastructure/stream_data_handler.py b/src/superannotate/lib/infrastructure/stream_data_handler.py 
index 3be1cf8d7..717adb257 100644 --- a/src/superannotate/lib/infrastructure/stream_data_handler.py +++ b/src/superannotate/lib/infrastructure/stream_data_handler.py @@ -19,7 +19,7 @@ def __init__( self._headers = headers self._annotations = [] self._reporter = reporter - self._callback = callback + self._callback: Callable = callback self._map_function = map_function async def fetch( @@ -61,7 +61,6 @@ async def get_data( verify_ssl: bool = False, ): async with aiohttp.ClientSession( - raise_for_status=True, headers=self._headers, connector=aiohttp.TCPConnector(ssl=verify_ssl), ) as session: @@ -88,9 +87,10 @@ async def get_data( return self._annotations @staticmethod - def _store_annotation(path, postfix, annotation: dict): + def _store_annotation(path, postfix, annotation: dict, callback: Callable = None): os.makedirs(path, exist_ok=True) with open(f"{path}/{annotation['metadata']['name']}{postfix}", "w") as file: + annotation = callback(annotation) if callback else annotation json.dump(annotation, file) def _process_data(self, data): @@ -107,8 +107,12 @@ async def download_data( session, method: str = "post", params=None, - chunk_size: int = 100, - ): + chunk_size: int = 5000, + ) -> int: + """ + Returns the number of items downloaded + """ + items_downloaded: int = 0 if chunk_size and data: for i in range(0, len(data), chunk_size): data_to_process = data[i : i + chunk_size] @@ -122,14 +126,16 @@ async def download_data( self._store_annotation( download_path, postfix, - self._callback(annotation) if self._callback else annotation, + annotation, + self._callback, ) + items_downloaded += 1 else: async for annotation in self.fetch( method, session, url, self._process_data(data), params=params ): self._store_annotation( - download_path, - postfix, - self._callback(annotation) if self._callback else annotation, + download_path, postfix, annotation, self._callback ) + items_downloaded += 1 + return items_downloaded diff --git a/src/superannotate/logger.py b/src/superannotate/logger.py index 3345c636f..3af9ce1e0 100644 --- a/src/superannotate/logger.py +++ b/src/superannotate/logger.py @@ -4,7 +4,7 @@ from logging.handlers import RotatingFileHandler from os.path import expanduser -from superannotate import constances +import superannotate.lib.core as constances default_logger = None diff --git a/src/superannotate/version.py b/src/superannotate/version.py index d320d7768..59f031f54 100644 --- a/src/superannotate/version.py +++ b/src/superannotate/version.py @@ -1 +1 @@ -__version__ = "4.3.4" +__version__ = "4.3.5dev17" diff --git a/tests/convertors/test_consensus.py b/tests/convertors/test_consensus.py index 4418bcbf4..52244e062 100644 --- a/tests/convertors/test_consensus.py +++ b/tests/convertors/test_consensus.py @@ -3,7 +3,8 @@ import time from os.path import dirname -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/convertors/test_conversion.py b/tests/convertors/test_conversion.py index d9766885e..390420945 100644 --- a/tests/convertors/test_conversion.py +++ b/tests/convertors/test_conversion.py @@ -6,7 +6,8 @@ from unittest import TestCase import pytest -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() class TestCocoSplit(TestCase): diff --git a/tests/convertors/test_json_version_conversion.py b/tests/convertors/test_json_version_conversion.py index aa8982156..0b3927446 100644 --- a/tests/convertors/test_json_version_conversion.py +++ 
b/tests/convertors/test_json_version_conversion.py @@ -5,7 +5,8 @@ from pathlib import Path from unittest import TestCase -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() class TestVersionConversion(TestCase): diff --git a/tests/convertors/test_project_converter.py b/tests/convertors/test_project_converter.py index 411a07183..109021b46 100644 --- a/tests/convertors/test_project_converter.py +++ b/tests/convertors/test_project_converter.py @@ -5,7 +5,8 @@ from pathlib import Path from unittest import TestCase -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() class TestCocoSplit(TestCase): diff --git a/tests/integration/aggregations/test_df_processing.py b/tests/integration/aggregations/test_df_processing.py index 4be91039d..0cd64f720 100644 --- a/tests/integration/aggregations/test_df_processing.py +++ b/tests/integration/aggregations/test_df_processing.py @@ -4,7 +4,8 @@ import pytest -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/aggregations/test_docuement_annotation_to_df.py b/tests/integration/aggregations/test_docuement_annotation_to_df.py index cab6a5c7d..12a2118f2 100644 --- a/tests/integration/aggregations/test_docuement_annotation_to_df.py +++ b/tests/integration/aggregations/test_docuement_annotation_to_df.py @@ -5,7 +5,8 @@ from unittest import mock from unittest import TestCase -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from src.superannotate.logger import get_default_logger diff --git a/tests/integration/aggregations/test_video_annotation_to_df.py b/tests/integration/aggregations/test_video_annotation_to_df.py index 65b8e16b7..918259040 100644 --- a/tests/integration/aggregations/test_video_annotation_to_df.py +++ b/tests/integration/aggregations/test_video_annotation_to_df.py @@ -5,7 +5,8 @@ from unittest import mock from unittest import TestCase -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from src.superannotate.logger import get_default_logger diff --git a/tests/integration/annotation_classes/test_create_annotation_class.py b/tests/integration/annotation_classes/test_create_annotation_class.py index d42187ebd..770b1ac46 100644 --- a/tests/integration/annotation_classes/test_create_annotation_class.py +++ b/tests/integration/annotation_classes/test_create_annotation_class.py @@ -1,7 +1,8 @@ import os from pathlib import Path -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/annotations/test_annotation_adding.py b/tests/integration/annotations/test_annotation_adding.py index b154c76db..8c4ed4964 100644 --- a/tests/integration/annotations/test_annotation_adding.py +++ b/tests/integration/annotations/test_annotation_adding.py @@ -3,9 +3,11 @@ import tempfile from pathlib import Path -import src.superannotate as sa +from src.superannotate import SAClient from tests.integration.base import BaseTestCase +sa = SAClient() + class TestAnnotationAdding(BaseTestCase): PROJECT_NAME = "test_annotations_adding" diff --git a/tests/integration/annotations/test_annotation_class_new.py b/tests/integration/annotations/test_annotation_class_new.py index 8831874a8..c5bba28c6 100644 --- a/tests/integration/annotations/test_annotation_class_new.py +++ 
b/tests/integration/annotations/test_annotation_class_new.py @@ -2,7 +2,8 @@ from pathlib import Path from unittest import TestCase -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/annotations/test_annotation_classes.py b/tests/integration/annotations/test_annotation_classes.py index c0584fa49..06a2ad789 100644 --- a/tests/integration/annotations/test_annotation_classes.py +++ b/tests/integration/annotations/test_annotation_classes.py @@ -1,7 +1,8 @@ from urllib.parse import urlparse import os from pathlib import Path -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/annotations/test_annotation_delete.py b/tests/integration/annotations/test_annotation_delete.py index 9c852fb24..db1f2fb38 100644 --- a/tests/integration/annotations/test_annotation_delete.py +++ b/tests/integration/annotations/test_annotation_delete.py @@ -3,7 +3,8 @@ import pytest -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/annotations/test_annotation_upload_pixel.py b/tests/integration/annotations/test_annotation_upload_pixel.py index db974e15e..08d349e46 100644 --- a/tests/integration/annotations/test_annotation_upload_pixel.py +++ b/tests/integration/annotations/test_annotation_upload_pixel.py @@ -3,7 +3,8 @@ from pathlib import Path from unittest.mock import patch -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase import pytest diff --git a/tests/integration/annotations/test_annotation_upload_vector.py b/tests/integration/annotations/test_annotation_upload_vector.py index e7d0cef74..d0f00bd52 100644 --- a/tests/integration/annotations/test_annotation_upload_vector.py +++ b/tests/integration/annotations/test_annotation_upload_vector.py @@ -7,7 +7,8 @@ from unittest.mock import patch from unittest.mock import MagicMock -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/annotations/test_annotations_pre_processing.py b/tests/integration/annotations/test_annotations_pre_processing.py index ed82a9379..9653d658c 100644 --- a/tests/integration/annotations/test_annotations_pre_processing.py +++ b/tests/integration/annotations/test_annotations_pre_processing.py @@ -8,7 +8,8 @@ from unittest.mock import patch from unittest.mock import MagicMock -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from superannotate_schemas.schemas.base import CreationTypeEnum from tests.integration.base import BaseTestCase @@ -45,7 +46,7 @@ def test_annotation_last_action_and_creation_type(self, reporter): self.assertEqual(instance["creationType"], CreationTypeEnum.PRE_ANNOTATION.value) self.assertEqual( type(annotation["metadata"]["lastAction"]["email"]), - type(sa.get_default_controller().team_data.data.creator_id) + type(sa.controller.team_data.creator_id) ) self.assertEqual( type(annotation["metadata"]["lastAction"]["timestamp"]), diff --git a/tests/integration/annotations/test_annotations_upload_status_change.py b/tests/integration/annotations/test_annotations_upload_status_change.py index f64c3e095..d2facc855 100644 --- 
a/tests/integration/annotations/test_annotations_upload_status_change.py +++ b/tests/integration/annotations/test_annotations_upload_status_change.py @@ -5,7 +5,8 @@ from unittest.mock import patch from unittest.mock import MagicMock -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() import src.superannotate.lib.core as constances from tests.integration.base import BaseTestCase diff --git a/tests/integration/annotations/test_download_annotations.py b/tests/integration/annotations/test_download_annotations.py index 4223b9010..b32709515 100644 --- a/tests/integration/annotations/test_download_annotations.py +++ b/tests/integration/annotations/test_download_annotations.py @@ -5,10 +5,13 @@ import pytest -import src.superannotate as sa +from src.superannotate import SAClient from tests.integration.base import BaseTestCase +sa = SAClient() + + class TestDownloadAnnotations(BaseTestCase): PROJECT_NAME = "Test-download_annotations" FOLDER_NAME = "FOLDER_NAME" @@ -24,7 +27,6 @@ def folder_path(self): @pytest.mark.flaky(reruns=3) def test_download_annotations(self): - sa.init() sa.upload_images_from_folder_to_project( self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" ) @@ -45,7 +47,6 @@ def test_download_annotations(self): @pytest.mark.flaky(reruns=3) def test_download_annotations_from_folders(self): - sa.init() sa.create_folder(self.PROJECT_NAME, self.FOLDER_NAME) sa.create_folder(self.PROJECT_NAME, self.FOLDER_NAME_2) sa.create_annotation_classes_from_classes_json( @@ -58,6 +59,17 @@ def test_download_annotations_from_folders(self): _, _, _ = sa.upload_annotations_from_folder_to_project( f"{self.PROJECT_NAME}{'/' + folder if folder else ''}", self.folder_path ) + with tempfile.TemporaryDirectory() as temp_dir: + annotations_path = sa.download_annotations(f"{self.PROJECT_NAME}", temp_dir, recursive=True) + self.assertEqual(len(os.listdir(annotations_path)), 7) + + @pytest.mark.flaky(reruns=3) + def test_download_empty_annotations_from_folders(self): + sa.create_folder(self.PROJECT_NAME, self.FOLDER_NAME) + sa.create_folder(self.PROJECT_NAME, self.FOLDER_NAME_2) + sa.create_annotation_classes_from_classes_json( + self.PROJECT_NAME, f"{self.folder_path}/classes/classes.json" + ) with tempfile.TemporaryDirectory() as temp_dir: annotations_path = sa.download_annotations(f"{self.PROJECT_NAME}", temp_dir) - self.assertEqual(len(os.listdir(annotations_path)), 5) + self.assertEqual(len(os.listdir(annotations_path)), 1) \ No newline at end of file diff --git a/tests/integration/annotations/test_get_annotations.py b/tests/integration/annotations/test_get_annotations.py index d277a69dd..9dfae7102 100644 --- a/tests/integration/annotations/test_get_annotations.py +++ b/tests/integration/annotations/test_get_annotations.py @@ -8,7 +8,8 @@ from pydantic import parse_obj_as from superannotate_schemas.schemas.internal import VectorAnnotation -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase @@ -24,9 +25,8 @@ class TestGetAnnotations(BaseTestCase): def folder_path(self): return os.path.join(Path(__file__).parent.parent.parent, self.TEST_FOLDER_PATH) - @pytest.mark.flaky(reruns=3) + # @pytest.mark.flaky(reruns=3) def test_get_annotations(self): - sa.init() sa.upload_images_from_folder_to_project( self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" ) @@ -46,7 +46,6 @@ def test_get_annotations(self): @pytest.mark.flaky(reruns=3) def 
test_get_annotations_order(self): - sa.init() sa.upload_images_from_folder_to_project( self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" ) @@ -65,7 +64,6 @@ def test_get_annotations_order(self): @pytest.mark.flaky(reruns=3) def test_get_annotations_from_folder(self): - sa.init() sa.create_folder(self.PROJECT_NAME, self.FOLDER_NAME) sa.upload_images_from_folder_to_project( @@ -87,7 +85,6 @@ def test_get_annotations_from_folder(self): @pytest.mark.flaky(reruns=3) def test_get_annotations_all(self): - sa.init() sa.upload_images_from_folder_to_project( self.PROJECT_NAME, self.folder_path, annotation_status="InProgress" ) @@ -147,7 +144,6 @@ def annotations_path(self): return os.path.join(self.folder_path, self.ANNOTATIONS_PATH) def test_video_annotation_upload_root(self): - sa.init() sa.create_annotation_classes_from_classes_json(self.PROJECT_NAME, self.classes_path) _, _, _ = sa.attach_items( @@ -159,7 +155,6 @@ def test_video_annotation_upload_root(self): self.assertEqual(len(annotations), 2) def test_video_annotation_upload_folder(self): - sa.init() sa.create_annotation_classes_from_classes_json(self.PROJECT_NAME, self.classes_path) sa.create_folder(self.PROJECT_NAME, "folder") path = f"{self.PROJECT_NAME}/folder" diff --git a/tests/integration/annotations/test_get_annotations_per_frame.py b/tests/integration/annotations/test_get_annotations_per_frame.py index ad9e7e1f2..16bfaa96c 100644 --- a/tests/integration/annotations/test_get_annotations_per_frame.py +++ b/tests/integration/annotations/test_get_annotations_per_frame.py @@ -4,9 +4,11 @@ from os.path import dirname from pathlib import Path -import src.superannotate as sa +from src.superannotate import SAClient from tests.integration.base import BaseTestCase +sa = SAClient() + class TestGetAnnotations(BaseTestCase): PROJECT_NAME = "test attach video urls" @@ -36,8 +38,7 @@ def annotations_path(self): return os.path.join(self.folder_path, self.ANNOTATIONS_PATH) def test_video_annotation_upload(self): - sa.init() - sa.create_annotation_classes_from_classes_json(self.PROJECT_NAME, self.classes_path) + # sa.create_annotation_classes_from_classes_json(self.PROJECT_NAME, self.classes_path) _, _, _ = sa.attach_items( self.PROJECT_NAME, diff --git a/tests/integration/annotations/test_missing_annotation_upload.py b/tests/integration/annotations/test_missing_annotation_upload.py index e509ddc3c..73728b65c 100644 --- a/tests/integration/annotations/test_missing_annotation_upload.py +++ b/tests/integration/annotations/test_missing_annotation_upload.py @@ -1,7 +1,8 @@ import os from pathlib import Path -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/annotations/test_preannotation_upload.py b/tests/integration/annotations/test_preannotation_upload.py index 6a5e00e44..d16c35b53 100644 --- a/tests/integration/annotations/test_preannotation_upload.py +++ b/tests/integration/annotations/test_preannotation_upload.py @@ -2,7 +2,8 @@ import tempfile from pathlib import Path -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/annotations/test_text_annotation_upload.py b/tests/integration/annotations/test_text_annotation_upload.py index a44f51027..3f405d4fd 100644 --- a/tests/integration/annotations/test_text_annotation_upload.py +++ b/tests/integration/annotations/test_text_annotation_upload.py @@ -3,7 +3,8 @@ 
import json from pathlib import Path import pytest -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/annotations/test_uopload_annotations_without_classes.py b/tests/integration/annotations/test_uopload_annotations_without_classes.py index c9cc17db9..aa9c03d3c 100644 --- a/tests/integration/annotations/test_uopload_annotations_without_classes.py +++ b/tests/integration/annotations/test_uopload_annotations_without_classes.py @@ -8,7 +8,8 @@ from unittest.mock import patch from unittest.mock import MagicMock -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/annotations/test_upload_annotations_from_folder_to_project.py b/tests/integration/annotations/test_upload_annotations_from_folder_to_project.py index 175f95d0f..f46a392df 100644 --- a/tests/integration/annotations/test_upload_annotations_from_folder_to_project.py +++ b/tests/integration/annotations/test_upload_annotations_from_folder_to_project.py @@ -5,7 +5,8 @@ import json import pytest -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/annotations/test_video_annotation_upload.py b/tests/integration/annotations/test_video_annotation_upload.py index cc7c262eb..44795738c 100644 --- a/tests/integration/annotations/test_video_annotation_upload.py +++ b/tests/integration/annotations/test_video_annotation_upload.py @@ -5,8 +5,11 @@ import pytest -import src.superannotate as sa -from lib.core.reporter import Reporter +from src.superannotate import SAClient +from src.superannotate.lib.core.reporter import Reporter + +sa = SAClient() + from src.superannotate.lib.core.data_handlers import VideoFormatHandler from tests.integration.base import BaseTestCase diff --git a/tests/integration/base.py b/tests/integration/base.py index c49899957..e12af6cd4 100644 --- a/tests/integration/base.py +++ b/tests/integration/base.py @@ -1,6 +1,9 @@ from unittest import TestCase -import src.superannotate as sa +from src.superannotate import SAClient + + +sa = SAClient() class BaseTestCase(TestCase): diff --git a/tests/integration/classes/test_create_annotation_class.py b/tests/integration/classes/test_create_annotation_class.py index 779c412ce..7c20f3fa9 100644 --- a/tests/integration/classes/test_create_annotation_class.py +++ b/tests/integration/classes/test_create_annotation_class.py @@ -2,7 +2,8 @@ from os.path import dirname import tempfile -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase from tests import DATA_SET_PATH diff --git a/tests/integration/classes/test_tag_annotation_classes.py b/tests/integration/classes/test_tag_annotation_classes.py index 5c098e9e9..e86556c0e 100644 --- a/tests/integration/classes/test_tag_annotation_classes.py +++ b/tests/integration/classes/test_tag_annotation_classes.py @@ -1,6 +1,7 @@ import tempfile -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/folders/test_folders.py b/tests/integration/folders/test_folders.py index 433a17896..1e235fe84 100644 --- a/tests/integration/folders/test_folders.py +++ b/tests/integration/folders/test_folders.py @@ -4,7 +4,8 @@ import time from 
os.path import dirname -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase from tests import DATA_SET_PATH diff --git a/tests/integration/integrations/test_get_integrations.py b/tests/integration/integrations/test_get_integrations.py index 56673a9cc..e0800e2e2 100644 --- a/tests/integration/integrations/test_get_integrations.py +++ b/tests/integration/integrations/test_get_integrations.py @@ -2,7 +2,8 @@ from os.path import dirname import pytest -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/items/test_attach_items.py b/tests/integration/items/test_attach_items.py index 39adfe579..0fc6b18e3 100644 --- a/tests/integration/items/test_attach_items.py +++ b/tests/integration/items/test_attach_items.py @@ -1,7 +1,8 @@ import os from pathlib import Path -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/items/test_copy_items.py b/tests/integration/items/test_copy_items.py index 18a01faf2..1c2e627c3 100644 --- a/tests/integration/items/test_copy_items.py +++ b/tests/integration/items/test_copy_items.py @@ -2,7 +2,8 @@ from collections import Counter from pathlib import Path -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/items/test_get_item_metadata.py b/tests/integration/items/test_get_item_metadata.py index ca805b776..571229b4c 100644 --- a/tests/integration/items/test_get_item_metadata.py +++ b/tests/integration/items/test_get_item_metadata.py @@ -1,7 +1,8 @@ import os from pathlib import Path -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/items/test_move_items.py b/tests/integration/items/test_move_items.py index cf408665f..109704f28 100644 --- a/tests/integration/items/test_move_items.py +++ b/tests/integration/items/test_move_items.py @@ -1,7 +1,8 @@ import os from pathlib import Path -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/items/test_saqul_query.py b/tests/integration/items/test_saqul_query.py index 2d19e2394..fdf2d0b84 100644 --- a/tests/integration/items/test_saqul_query.py +++ b/tests/integration/items/test_saqul_query.py @@ -1,9 +1,11 @@ import os from pathlib import Path -import src.superannotate as sa +from src.superannotate import SAClient from tests.integration.base import BaseTestCase +sa = SAClient() + class TestEntitiesSearchVector(BaseTestCase): PROJECT_NAME = "TestEntitiesSearchVector" @@ -34,6 +36,16 @@ def test_query(self): self.assertEqual(len(entities), 1) assert all([entity["path"] == f"{self.PROJECT_NAME}/{self.FOLDER_NAME}" for entity in entities]) + try: + self.assertRaises( + Exception, sa.query(f"{self.PROJECT_NAME}/{self.FOLDER_NAME}", self.TEST_QUERY, subset="something") + ) + except Exception as e: + self.assertEqual( + str(e), + "Subset not found. Use the superannotate.get_subsets() function to get a list of the available subsets." 
+ ) + def test_validate_saqul_query(self): try: self.assertRaises(Exception, sa.query(self.PROJECT_NAME, self.TEST_INVALID_QUERY)) diff --git a/tests/integration/items/test_search_items.py b/tests/integration/items/test_search_items.py index 6ae414237..0c1051472 100644 --- a/tests/integration/items/test_search_items.py +++ b/tests/integration/items/test_search_items.py @@ -1,7 +1,8 @@ import os from pathlib import Path -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() import src.superannotate.lib.core as constances from tests.integration.base import BaseTestCase from tests.integration.items import IMAGE_EXPECTED_KEYS @@ -31,8 +32,9 @@ def test_search_items_metadata(self): assert len(sa.search_items(self.PROJECT_NAME, name_contains="1.jp")) == 1 assert len(sa.search_items(self.PROJECT_NAME, name_contains=".jpg")) == 4 assert len(sa.search_items(self.PROJECT_NAME, recursive=True)) == 4 - sa.set_image_annotation_status(self.PROJECT_NAME, self.IMAGE1_NAME, constances.AnnotationStatus.COMPLETED.name) - sa.set_image_annotation_status(self.PROJECT_NAME, self.IMAGE2_NAME, constances.AnnotationStatus.COMPLETED.name) + sa.set_annotation_statuses( + self.PROJECT_NAME, constances.AnnotationStatus.COMPLETED.name, [self.IMAGE1_NAME, self.IMAGE2_NAME] + ) assert len( sa.search_items(self.PROJECT_NAME, annotation_status=constances.AnnotationStatus.COMPLETED.name) ) == 2 diff --git a/tests/integration/items/test_set_annotation_statuses.py b/tests/integration/items/test_set_annotation_statuses.py index d18d4d7f5..ddfb123ff 100644 --- a/tests/integration/items/test_set_annotation_statuses.py +++ b/tests/integration/items/test_set_annotation_statuses.py @@ -1,7 +1,8 @@ import os from pathlib import Path -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from src.superannotate import AppException from src.superannotate.lib.core.usecases import SetAnnotationStatues from tests.integration.base import BaseTestCase @@ -64,18 +65,18 @@ def test_image_annotation_status_via_names(self): def test_image_annotation_status_via_invalid_names(self): sa.attach_items( - self.PROJECT_NAME, self.ATTACHMENT_LIST, annotation_status="InProgress" + self.PROJECT_NAME, self.ATTACHMENT_LIST, "InProgress" ) with self.assertRaisesRegexp(AppException, SetAnnotationStatues.ERROR_MESSAGE): sa.set_annotation_statuses( self.PROJECT_NAME, "QualityCheck", ["self.EXAMPLE_IMAGE_1", "self.EXAMPLE_IMAGE_2"] ) - def test_set_image_annotation_status(self): + def test_set_annotation_statuses(self): sa.attach_items( self.PROJECT_NAME, [self.ATTACHMENT_LIST[0]] ) - data = sa.set_image_annotation_status( - self.PROJECT_NAME, self.ATTACHMENT_LIST[0]["name"], annotation_status="Completed" + data = sa.set_annotation_statuses( + self.PROJECT_NAME, annotation_status="Completed", items=[self.ATTACHMENT_LIST[0]["name"]] ) assert data["annotation_status"] == "Completed" \ No newline at end of file diff --git a/tests/integration/mixpanel/test_individual_fuinctions.py b/tests/integration/mixpanel/test_individual_fuinctions.py new file mode 100644 index 000000000..685dede23 --- /dev/null +++ b/tests/integration/mixpanel/test_individual_fuinctions.py @@ -0,0 +1,30 @@ +import os +from unittest import TestCase +from unittest.mock import patch + +from src.superannotate import AppException +from src.superannotate import __version__ +from src.superannotate import class_distribution +from tests import DATA_SET_PATH + + +class TestDocumentUrls(TestCase): + PROJECT_NAME = "TEST_MIX" + 
PROJECT_DESCRIPTION = "Desc" + PROJECT_TYPE = "Vector" + TEST_FOLDER_PATH = "data_set" + + @property + def folder_path(self): + return os.path.join(DATA_SET_PATH, self.TEST_FOLDER_PATH) + + @patch("lib.app.interface.base_interface.Tracker._track") + def test_get_team_metadata(self, track_method): + try: + class_distribution(os.path.join(self.folder_path, "sample_project_vector"), "test") + except AppException: + pass + data = list(track_method.call_args)[0][2] + assert not data["Success"] + assert data["Version"] == __version__ + assert data["project_names"] == "test" diff --git a/tests/integration/mixpanel/test_mixpanel_decorator.py b/tests/integration/mixpanel/test_mixpanel_decorator.py new file mode 100644 index 000000000..86dd9811b --- /dev/null +++ b/tests/integration/mixpanel/test_mixpanel_decorator.py @@ -0,0 +1,139 @@ +import copy +import threading +from unittest import TestCase +from unittest.mock import patch + +from src.superannotate import SAClient +from src.superannotate import AppException +from src.superannotate import __version__ + + +class TestMixpanel(TestCase): + CLIENT = SAClient() + TEAM_DATA = CLIENT.get_team_metadata() + BLANK_PAYLOAD = { + "SDK": True, + "Team": TEAM_DATA["name"], + "Team Owner": TEAM_DATA["creator_id"], + "Version": __version__, + "Success": True + } + PROJECT_NAME = "TEST_MIX" + PROJECT_DESCRIPTION = "Desc" + PROJECT_TYPE = "Vector" + TEST_FOLDER_PATH = "data_set" + + @classmethod + def setUpClass(cls) -> None: + cls.tearDownClass() + print(cls.PROJECT_NAME) + cls._project = cls.CLIENT.create_project( + cls.PROJECT_NAME, cls.PROJECT_DESCRIPTION, cls.PROJECT_TYPE + ) + + @classmethod + def tearDownClass(cls) -> None: + cls._safe_delete_project(cls.PROJECT_NAME) + + + @classmethod + def _safe_delete_project(cls, project_name): + projects = cls.CLIENT.search_projects(project_name, return_metadata=True) + for project in projects: + try: + cls.CLIENT.delete_project(project) + except Exception: + raise + + @property + def default_payload(self): + return copy.copy(self.BLANK_PAYLOAD) + + @patch("lib.app.interface.base_interface.Tracker._track") + def test_get_team_metadata(self, track_method): + team = self.CLIENT.get_team_metadata() + team_owner = team["creator_id"] + result = list(track_method.call_args)[0] + payload = self.default_payload + assert result[0] == team_owner + assert result[1] == "get_team_metadata" + assert payload == list(track_method.call_args)[0][2] + + @patch("lib.app.interface.base_interface.Tracker._track") + def test_search_team_contributors(self, track_method): + kwargs = { + "email": "user@supernnotate.com", + "first_name": "first_name", + "last_name": "last_name", + "return_metadata": False} + self.CLIENT.search_team_contributors(**kwargs) + result = list(track_method.call_args)[0] + payload = self.default_payload + payload.update(kwargs) + assert result[1] == "search_team_contributors" + assert payload == list(track_method.call_args)[0][2] + + @patch("lib.app.interface.base_interface.Tracker._track") + def test_search_projects(self, track_method): + kwargs = { + "name": self.PROJECT_NAME, + "include_complete_image_count": True, + "status": "NotStarted", + "return_metadata": False} + self.CLIENT.search_projects(**kwargs) + result = list(track_method.call_args)[0] + payload = self.default_payload + payload.update(kwargs) + assert result[1] == "search_projects" + assert payload == list(track_method.call_args)[0][2] + + @patch("lib.app.interface.base_interface.Tracker._track") + def test_create_project(self, track_method): + 
kwargs = { + "project_name": self.PROJECT_NAME, + "project_description": self.PROJECT_DESCRIPTION, + "project_type": self.PROJECT_TYPE, + "settings": {"a": 1, "b": 2} + } + try: + self.CLIENT.create_project(**kwargs) + except AppException: + pass + result = list(track_method.call_args)[0] + payload = self.default_payload + payload["Success"] = False + payload.update(kwargs) + payload["settings"] = list(kwargs["settings"].keys()) + assert result[1] == "create_project" + assert payload == list(track_method.call_args)[0][2] + + @patch("lib.app.interface.base_interface.Tracker._track") + def test_create_project_multi_thread(self, track_method): + project_1 = self.PROJECT_NAME + "_1" + project_2 = self.PROJECT_NAME + "_2" + try: + kwargs_1 = { + "project_name": project_1, + "project_description": self.PROJECT_DESCRIPTION, + "project_type": self.PROJECT_TYPE, + } + kwargs_2 = { + "project_name": project_2, + "project_description": self.PROJECT_DESCRIPTION, + "project_type": self.PROJECT_TYPE, + } + thread_1 = threading.Thread(target=self.CLIENT.create_project, kwargs=kwargs_1) + thread_2 = threading.Thread(target=self.CLIENT.create_project, kwargs=kwargs_2) + thread_1.start() + thread_2.start() + thread_1.join() + thread_2.join() + r1, r2 = track_method.call_args_list + r1_pr_name = r1[0][2].pop("project_name") + r2_pr_name = r2[0][2].pop("project_name") + assert r1_pr_name == project_1 + assert r2_pr_name == project_2 + assert r1[0][2] == r2[0][2] + finally: + self._safe_delete_project(project_1) + self._safe_delete_project(project_2) \ No newline at end of file diff --git a/tests/integration/projects/test_add_contributors_to_project.py b/tests/integration/projects/test_add_contributors_to_project.py index c4c2f34c6..492aada02 100644 --- a/tests/integration/projects/test_add_contributors_to_project.py +++ b/tests/integration/projects/test_add_contributors_to_project.py @@ -1,17 +1,17 @@ import random import string -from unittest.mock import patch from unittest.mock import MagicMock from unittest.mock import PropertyMock +from unittest.mock import patch -import src.superannotate as sa -from src.superannotate import controller -from src.superannotate.lib.core.entities import TeamEntity +from src.superannotate import SAClient from src.superannotate.lib.core.entities import ProjectEntity +from src.superannotate.lib.core.entities import TeamEntity from src.superannotate.lib.core.entities import UserEntity - from tests.integration.base import BaseTestCase +sa = SAClient() + class TestProject(BaseTestCase): PROJECT_NAME = "add_contributors_to_project" @@ -23,6 +23,40 @@ class TestProject(BaseTestCase): def random_email(self): return f"{''.join(random.choice(string.ascii_letters) for _ in range(7))}@gmail.com" + @patch("lib.infrastructure.controller.Controller.get_team") + @patch("lib.infrastructure.controller.Controller.get_project_metadata") + @patch("lib.infrastructure.controller.Controller.backend_client", new_callable=PropertyMock) + def test_add_contributors(self, client, get_project_metadata_mock, get_team_mock): + client.return_value.share_project_bulk.return_value = dict(invalidUsers=[]) + + random_emails = [self.random_email for i in range(20)] + + team_users = [UserEntity(email=email, user_role=3) for email in random_emails[: 10]] + project_users = [dict(user_id=email, user_role=4) for email in random_emails[: 10]] + to_add_emails = random_emails[8: 18] + pending_users = [dict(email=email, user_role=3) for email in random_emails[15: 20]] + unverified_users = [dict(email=email, user_role=4) 
for email in random_emails[18: 20]] + + team_data = MagicMock() + project_data = MagicMock() + get_team_mock.return_value = team_data + team_data.data = TeamEntity( + uuid=sa.controller.team_id, + users=team_users, + pending_invitations=pending_users + ) + get_project_metadata_mock.return_value = project_data + project_data.data = dict( + project=ProjectEntity( + uuid=sa.controller.team_id, + users=project_users, + unverified_users=unverified_users, + ) + ) + added, skipped = sa.add_contributors_to_project(self.PROJECT_NAME, to_add_emails, "QA") + self.assertEqual(len(added), 3) + self.assertEqual(len(skipped), 7) + @patch("lib.infrastructure.controller.Controller.get_team") @patch("lib.infrastructure.controller.Controller.backend_client", new_callable=PropertyMock) def test_invite_contributors(self, client, get_team_mock): @@ -35,7 +69,7 @@ def test_invite_contributors(self, client, get_team_mock): team_data = MagicMock() get_team_mock.return_value = team_data team_data.data = TeamEntity( - uuid=controller.team_id, + uuid=sa.controller.team_id, users=team_users, pending_invitations=pending_users ) @@ -45,4 +79,4 @@ def test_invite_contributors(self, client, get_team_mock): self.assertEqual(len(skipped), 5) def test_(self): - sa.search_team_contributors(email="vaghinak@superannotate.com", first_name="Vaghinak") \ No newline at end of file + sa.search_team_contributors(email="vaghinak@superannotate.com", first_name="Vaghinak") diff --git a/tests/integration/projects/test_basic_project.py b/tests/integration/projects/test_basic_project.py index 03be9cab7..301464eab 100644 --- a/tests/integration/projects/test_basic_project.py +++ b/tests/integration/projects/test_basic_project.py @@ -5,7 +5,8 @@ import pytest -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests import DATA_SET_PATH from tests.integration.base import BaseTestCase diff --git a/tests/integration/projects/test_clone_project.py b/tests/integration/projects/test_clone_project.py index f26a2e179..5b8c87952 100644 --- a/tests/integration/projects/test_clone_project.py +++ b/tests/integration/projects/test_clone_project.py @@ -1,9 +1,10 @@ import os from unittest import TestCase import pytest -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests import DATA_SET_PATH -from src.superannotate import constances +import src.superannotate.lib.core as constances class TestCloneProject(TestCase): diff --git a/tests/integration/projects/test_create_project.py b/tests/integration/projects/test_create_project.py index decd336da..89a70d42b 100644 --- a/tests/integration/projects/test_create_project.py +++ b/tests/integration/projects/test_create_project.py @@ -1,6 +1,7 @@ from unittest import TestCase -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() class BaseTestCase(TestCase): diff --git a/tests/integration/projects/test_project_rename.py b/tests/integration/projects/test_project_rename.py index 5bbe501e5..3977369da 100644 --- a/tests/integration/projects/test_project_rename.py +++ b/tests/integration/projects/test_project_rename.py @@ -1,4 +1,5 @@ -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/settings/test_settings.py b/tests/integration/settings/test_settings.py index 420e444b0..7f3aff239 100644 --- a/tests/integration/settings/test_settings.py +++ 
b/tests/integration/settings/test_settings.py @@ -1,6 +1,7 @@ from unittest import TestCase -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from src.superannotate import AppException diff --git a/tests/integration/test_basic_images.py b/tests/integration/test_basic_images.py index be4b05eff..552854ba3 100644 --- a/tests/integration/test_basic_images.py +++ b/tests/integration/test_basic_images.py @@ -5,7 +5,8 @@ import pytest -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/test_benchmark.py b/tests/integration/test_benchmark.py index 5d1df2964..3f487e67b 100644 --- a/tests/integration/test_benchmark.py +++ b/tests/integration/test_benchmark.py @@ -3,7 +3,10 @@ from os.path import dirname import pytest -import src.superannotate as sa + +from src.superannotate import SAClient + +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py index f02bca960..cd333ee31 100644 --- a/tests/integration/test_cli.py +++ b/tests/integration/test_cli.py @@ -6,9 +6,11 @@ import pkg_resources -import src.superannotate as sa +from src.superannotate import SAClient from src.superannotate.lib.app.interface.cli_interface import CLIFacade +sa = SAClient() + try: CLI_VERSION = pkg_resources.get_distribution("superannotate").version except Exception: @@ -131,22 +133,23 @@ def test_vector_pre_annotation_folder_upload_download_cli(self): sa.create_annotation_classes_from_classes_json( self.PROJECT_NAME, f"{self.vector_folder_path}/classes/classes.json" ) - self.safe_run(self._cli.upload_images, self.PROJECT_NAME, folder=str(self.convertor_data_path), + self.safe_run(self._cli.upload_images, project=self.PROJECT_NAME, folder=str(self.convertor_data_path), extensions="jpg", set_annotation_status="QualityCheck") - self.safe_run(self._cli.upload_preannotations, self.PROJECT_NAME, folder=str(self.convertor_data_path), + self.safe_run(self._cli.upload_preannotations, project=self.PROJECT_NAME, folder=str(self.convertor_data_path), format="COCO", dataset_name="instances_test") - # tod add test def test_vector_annotation_folder_upload_download_cli(self): self._create_project() sa.create_annotation_classes_from_classes_json( self.PROJECT_NAME, f"{self.vector_folder_path}/classes/classes.json" ) - self.safe_run(self._cli.upload_images, self.PROJECT_NAME, str(self.convertor_data_path), extensions="jpg", + self.safe_run(self._cli.upload_images, project=self.PROJECT_NAME, folder=str(self.convertor_data_path), + extensions="jpg", set_annotation_status="QualityCheck") - self.safe_run(self._cli.upload_annotations, self.PROJECT_NAME, str(self.convertor_data_path), format="COCO", + self.safe_run(self._cli.upload_annotations, project=self.PROJECT_NAME, folder=str(self.convertor_data_path), + format="COCO", dataset_name="instances_test") count_in = len(list(self.vector_folder_path.glob("*.json"))) @@ -170,7 +173,7 @@ def test_attach_video_urls(self): def test_upload_videos(self): self._create_project() self.safe_run(self._cli.upload_videos, self.PROJECT_NAME, str(self.video_folder_path)) - self.assertEqual(5, len(sa.search_items(self.PROJECT_NAME))) + self.assertEqual(121, len(sa.search_items(self.PROJECT_NAME))) def test_attach_document_urls(self): self._create_project("Document") diff --git a/tests/integration/test_depricated_functions_document.py 
b/tests/integration/test_depricated_functions_document.py index 4beaf1810..eee0d1ae5 100644 --- a/tests/integration/test_depricated_functions_document.py +++ b/tests/integration/test_depricated_functions_document.py @@ -3,7 +3,7 @@ import pytest from unittest import TestCase -import src.superannotate as sa +from src.superannotate import SAClient from src.superannotate import AppException from src.superannotate.lib.core import LIMITED_FUNCTIONS from src.superannotate.lib.core import INVALID_PROJECT_TYPE_TO_PROCESS @@ -11,6 +11,9 @@ from src.superannotate.lib.core import DEPRICATED_DOCUMENT_VIDEO_MESSAGE +sa = SAClient() + + class TestDeprecatedFunctionsDocument(TestCase): PROJECT_NAME = "TestDeprecatedFunctionsDocument first froject" PROJECT_DESCRIPTION = "desc" @@ -80,10 +83,6 @@ def test_deprecated_functions(self): sa.download_image_annotations(self.PROJECT_NAME, self.UPLOAD_IMAGE_NAME, "./") except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.set_image_annotation_status(self.PROJECT_NAME, self.UPLOAD_IMAGE_NAME, "Completed") - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE, str(e)) try: sa.copy_image(self.PROJECT_NAME, self.UPLOAD_IMAGE_NAME, self.PROJECT_NAME_2) except AppException as e: @@ -120,14 +119,6 @@ def test_deprecated_functions(self): sa.get_project_workflow(self.PROJECT_NAME) except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.class_distribution(self.video_export_path, [self.PROJECT_NAME]) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE_DOCUMENT_VIDEO, str(e)) - try: - sa.convert_project_type(self.video_export_path, "./") - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE_DOCUMENT_VIDEO, str(e)) try: sa.prepare_export(self.PROJECT_NAME, include_fuse=True, only_pinned=True) except AppException as e: @@ -144,14 +135,6 @@ def test_deprecated_functions(self): sa.assign_images(self.PROJECT_NAME, [self.UPLOAD_IMAGE_NAME], "some user") except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.export_annotation( - "input_dir", "fromSuperAnnotate/panoptic_test", "COCO", "panoptic_test", self.PROJECT_TYPE, - "panoptic_segmentation" - ) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: sa.set_project_default_image_quality_in_editor(self.PROJECT_NAME,"original") except AppException as e: diff --git a/tests/integration/test_depricated_functions_video.py b/tests/integration/test_depricated_functions_video.py index 1831554c7..2540a85bd 100644 --- a/tests/integration/test_depricated_functions_video.py +++ b/tests/integration/test_depricated_functions_video.py @@ -3,7 +3,8 @@ from unittest import TestCase import pytest -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from src.superannotate import AppException from src.superannotate.lib.core import LIMITED_FUNCTIONS from src.superannotate.lib.core import INVALID_PROJECT_TYPE_TO_PROCESS @@ -75,10 +76,6 @@ def test_deprecated_functions(self): sa.download_image_annotations(self.PROJECT_NAME, self.UPLOAD_IMAGE_NAME, "./") except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.set_image_annotation_status(self.PROJECT_NAME, self.UPLOAD_IMAGE_NAME, "Completed") - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE, str(e)) try: sa.copy_image(self.PROJECT_NAME, self.UPLOAD_IMAGE_NAME, self.PROJECT_NAME_2) except AppException as e: @@ -115,14 +112,6 @@ def test_deprecated_functions(self): 
sa.set_project_default_image_quality_in_editor(self.PROJECT_NAME, "original") except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE_DOCUMENT_VIDEO, str(e)) - try: - sa.class_distribution(self.video_export_path, [self.PROJECT_NAME]) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE_DOCUMENT_VIDEO, str(e)) - try: - sa.convert_project_type(self.video_export_path, "./") - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE_DOCUMENT_VIDEO, str(e)) try: sa.prepare_export(self.PROJECT_NAME, include_fuse=True, only_pinned=True) except AppException as e: @@ -139,10 +128,3 @@ def test_deprecated_functions(self): sa.assign_images(self.PROJECT_NAME, [self.UPLOAD_IMAGE_NAME], "some@user.com") except AppException as e: self.assertIn(self.EXCEPTION_MESSAGE, str(e)) - try: - sa.export_annotation( - "input_dir", "fromSuperAnnotate/panoptic_test", "COCO", "panoptic_test", self.PROJECT_TYPE, - "panoptic_segmentation" - ) - except AppException as e: - self.assertIn(self.EXCEPTION_MESSAGE, str(e)) diff --git a/tests/integration/test_duplicate_image_upload.py b/tests/integration/test_duplicate_image_upload.py index 13730f913..9e8439422 100644 --- a/tests/integration/test_duplicate_image_upload.py +++ b/tests/integration/test_duplicate_image_upload.py @@ -1,9 +1,11 @@ import os from os.path import dirname -import src.superannotate as sa +from src.superannotate import SAClient from tests.integration.base import BaseTestCase +sa = SAClient() + class TestDuplicateImage(BaseTestCase): PROJECT_NAME = "duplicate_image" diff --git a/tests/integration/test_export_import.py b/tests/integration/test_export_import.py index d5370369f..ff1b91fd4 100644 --- a/tests/integration/test_export_import.py +++ b/tests/integration/test_export_import.py @@ -2,7 +2,8 @@ import tempfile from os.path import dirname -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/test_export_upload_s3.py b/tests/integration/test_export_upload_s3.py index cbd145c88..2b466913c 100644 --- a/tests/integration/test_export_upload_s3.py +++ b/tests/integration/test_export_upload_s3.py @@ -5,8 +5,12 @@ import boto3 import src.superannotate as sa +from src.superannotate import SAClient from tests.integration.base import BaseTestCase + +sa = SAClient() + s3_client = boto3.client("s3") diff --git a/tests/integration/test_fuse_gen.py b/tests/integration/test_fuse_gen.py index 45d5d4405..ffde96e1f 100644 --- a/tests/integration/test_fuse_gen.py +++ b/tests/integration/test_fuse_gen.py @@ -5,7 +5,8 @@ from unittest import TestCase import pytest import numpy as np -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from PIL import Image diff --git a/tests/integration/test_get_exports.py b/tests/integration/test_get_exports.py index 47a382af5..66f9f7c4b 100644 --- a/tests/integration/test_get_exports.py +++ b/tests/integration/test_get_exports.py @@ -3,10 +3,12 @@ import tempfile from os.path import dirname -import src.superannotate as sa -from src.superannotate import AppException +from src.superannotate import SAClient +from src.superannotate import export_annotation from tests.integration.base import BaseTestCase +sa = SAClient() + class TestGetExports(BaseTestCase): PROJECT_NAME = "get_exports" @@ -60,7 +62,7 @@ def test_convert_pixel_exported_data(self): with tempfile.TemporaryDirectory() as tmp_dir: sa.download_export(self.PROJECT_NAME, export["name"], tmp_dir) with 
tempfile.TemporaryDirectory() as converted_data_tmp_dir: - sa.export_annotation( + export_annotation( tmp_dir, converted_data_tmp_dir, "COCO", "export", "Pixel", "panoptic_segmentation" ) - self.assertEqual(1, len(list(glob.glob(converted_data_tmp_dir + "/*.json")))) \ No newline at end of file + self.assertEqual(1, len(list(glob.glob(converted_data_tmp_dir + "/*.json")))) diff --git a/tests/integration/test_image_quality.py b/tests/integration/test_image_quality.py index 84af247be..89a748090 100644 --- a/tests/integration/test_image_quality.py +++ b/tests/integration/test_image_quality.py @@ -4,7 +4,8 @@ from os.path import dirname import pytest -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase from src.superannotate import AppException diff --git a/tests/integration/test_interface.py b/tests/integration/test_interface.py index 96e4b578c..e4b779cee 100644 --- a/tests/integration/test_interface.py +++ b/tests/integration/test_interface.py @@ -4,10 +4,13 @@ import pytest -import src.superannotate as sa from src.superannotate import AppException +from src.superannotate import SAClient +from src.superannotate import export_annotation from tests.integration.base import BaseTestCase +sa = SAClient() + class TestInterface(BaseTestCase): PROJECT_NAME = "Interface test" @@ -92,7 +95,7 @@ def test_download_image_annotations(self): def test_search_project(self): sa.upload_images_from_folder_to_project(self.PROJECT_NAME, self.folder_path) - sa.set_image_annotation_status(self.PROJECT_NAME, self.EXAMPLE_IMAGE_1, "Completed") + sa.set_annotation_statuses(self.PROJECT_NAME, "Completed", [self.EXAMPLE_IMAGE_1]) data = sa.search_projects(self.PROJECT_NAME, return_metadata=True, include_complete_image_count=True) self.assertIsNotNone(data[0]['completed_images_count']) @@ -265,7 +268,7 @@ def test_export_annotation(self): ) sa.download_export(self.PROJECT_NAME, result, export_dir, True) with tempfile.TemporaryDirectory() as convert_path: - sa.export_annotation( + export_annotation( export_dir, convert_path, "COCO", "data_set_name", "Pixel", "panoptic_segmentation" ) pass diff --git a/tests/integration/test_limitations.py b/tests/integration/test_limitations.py index 2c69cfbd1..eda27b13c 100644 --- a/tests/integration/test_limitations.py +++ b/tests/integration/test_limitations.py @@ -2,7 +2,8 @@ from unittest.mock import patch from os.path import dirname -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from src.superannotate import AppException from src.superannotate.lib.core import UPLOAD_FOLDER_LIMIT_ERROR_MESSAGE from src.superannotate.lib.core import UPLOAD_PROJECT_LIMIT_ERROR_MESSAGE diff --git a/tests/integration/test_ml_funcs.py b/tests/integration/test_ml_funcs.py index f9d7b3536..941f3d1bf 100644 --- a/tests/integration/test_ml_funcs.py +++ b/tests/integration/test_ml_funcs.py @@ -4,7 +4,8 @@ from os.path import dirname import pytest -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/test_recursive_folder.py b/tests/integration/test_recursive_folder.py index d7d29649e..d5c466dc9 100644 --- a/tests/integration/test_recursive_folder.py +++ b/tests/integration/test_recursive_folder.py @@ -5,7 +5,8 @@ from pathlib import Path import pytest -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import 
BaseTestCase diff --git a/tests/integration/test_recursive_folder_pixel.py b/tests/integration/test_recursive_folder_pixel.py index 308ac882a..552416a85 100644 --- a/tests/integration/test_recursive_folder_pixel.py +++ b/tests/integration/test_recursive_folder_pixel.py @@ -1,4 +1,5 @@ -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/test_single_annotation_download.py b/tests/integration/test_single_annotation_download.py index 97343dd53..6e2f4a3ab 100644 --- a/tests/integration/test_single_annotation_download.py +++ b/tests/integration/test_single_annotation_download.py @@ -1,14 +1,16 @@ - import filecmp import json import os import tempfile from os.path import dirname + import pytest -import src.superannotate as sa +from src.superannotate import SAClient from tests.integration.base import BaseTestCase +sa = SAClient() + class TestSingleAnnotationDownloadUpload(BaseTestCase): PROJECT_NAME = "test_single_annotation" @@ -67,7 +69,7 @@ def test_annotation_download_upload_vector(self): ) ) # TODO: - #assert downloaded_json == uploaded_json + # assert downloaded_json == uploaded_json class TestSingleAnnotationDownloadUploadPixel(BaseTestCase): diff --git a/tests/integration/test_single_image_upload.py b/tests/integration/test_single_image_upload.py index 5959497c0..a709686b5 100644 --- a/tests/integration/test_single_image_upload.py +++ b/tests/integration/test_single_image_upload.py @@ -2,7 +2,8 @@ import os from os.path import dirname -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/test_team_metadata.py b/tests/integration/test_team_metadata.py index a3564f789..f471c07c0 100644 --- a/tests/integration/test_team_metadata.py +++ b/tests/integration/test_team_metadata.py @@ -1,6 +1,9 @@ -import src.superannotate as sa +from src.superannotate import SAClient + from tests.integration.base import BaseTestCase +sa = SAClient() + class TestTeam(BaseTestCase): PROJECT_NAME = "test_team" diff --git a/tests/integration/test_upload_images.py b/tests/integration/test_upload_images.py index bba4d14a3..99967505b 100644 --- a/tests/integration/test_upload_images.py +++ b/tests/integration/test_upload_images.py @@ -1,7 +1,8 @@ import os from os.path import dirname -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/tests/integration/test_upload_priority_scores.py b/tests/integration/test_upload_priority_scores.py index 70cb75329..ead902e0f 100644 --- a/tests/integration/test_upload_priority_scores.py +++ b/tests/integration/test_upload_priority_scores.py @@ -1,7 +1,8 @@ import os from pathlib import Path -import src.superannotate as sa +from src.superannotate import SAClient +sa = SAClient() from tests.integration.base import BaseTestCase diff --git a/src/superannotate/lib/app/mixp/__init__.py b/tests/integration/test_validate_upload_state.py similarity index 100% rename from src/superannotate/lib/app/mixp/__init__.py rename to tests/integration/test_validate_upload_state.py diff --git a/tests/integration/test_video.py b/tests/integration/test_video.py index 08452ee3a..2e7944cda 100644 --- a/tests/integration/test_video.py +++ b/tests/integration/test_video.py @@ -1,7 +1,8 @@ import os from os.path import dirname -import src.superannotate as sa +from src.superannotate import 
SAClient +sa = SAClient() from src.superannotate.lib.core.plugin import VideoPlugin from tests.integration.base import BaseTestCase import pytest diff --git a/tests/unit/test_controller_init.py b/tests/unit/test_controller_init.py deleted file mode 100644 index f9aece145..000000000 --- a/tests/unit/test_controller_init.py +++ /dev/null @@ -1,111 +0,0 @@ -import os -from os.path import join -import json -import pkg_resources -import tempfile -import pytest -from unittest import TestCase -from unittest.mock import mock_open -from unittest.mock import patch - - -from src.superannotate.lib.app.interface.cli_interface import CLIFacade -from tests.utils.helpers import catch_prints - - -try: - CLI_VERSION = pkg_resources.get_distribution("superannotate").version -except Exception: - CLI_VERSION = None - - -class CLITest(TestCase): - CONFIG_FILE_DATA = '{"main_endpoint": "https://amazonaws.com:3000","token": "c9c55ct=6085","ssl_verify": false}' - - @pytest.mark.skip(reason="Need to adjust") - @patch('builtins.input') - def test_init_update(self, input_mock): - input_mock.side_effect = ["y", "token"] - with patch('builtins.open', mock_open(read_data=self.CONFIG_FILE_DATA)) as config_file: - try: - with catch_prints() as out: - cli = CLIFacade() - cli.init() - except SystemExit: - input_mock.assert_called_with("Input the team SDK token from https://app.superannotate.com/team : ") - config_file().write.assert_called_once_with( - json.dumps( - {"main_endpoint": "https://api.devsuperannotate.com", "ssl_verify": False, "token": "token"}, - indent=4 - ) - ) - self.assertEqual(out.getvalue().strip(), "Configuration file successfully updated.") - - @pytest.mark.skip(reason="Need to adjust") - @patch('builtins.input') - def test_init_create(self, input_mock): - input_mock.side_effect = ["token"] - with patch('builtins.open', mock_open(read_data="{}")) as config_file: - try: - with catch_prints() as out: - cli = CLIFacade() - cli.init() - except SystemExit: - input_mock.assert_called_with("Input the team SDK token from https://app.superannotate.com/team : ") - config_file().write.assert_called_once_with( - json.dumps( - {"token": "token"}, - indent=4 - ) - ) - self.assertEqual(out.getvalue().strip(), "Configuration file successfully created.") - - -class SKDInitTest(TestCase): - TEST_TOKEN = "toke=123" - - VALID_JSON = { - "token": "a"*28 + "=1234" - } - INVALID_JSON = { - "token": "a" * 28 + "=1234asd" - } - FILE_NAME = "config.json" - FILE_NAME_2 = "config.json" - - def test_env_flow(self): - import superannotate as sa - os.environ.update({"SA_TOKEN": self.TEST_TOKEN}) - sa.init() - self.assertEqual(sa.get_default_controller()._token, self.TEST_TOKEN) - - def test_init_via_config_file(self): - with tempfile.TemporaryDirectory() as temp_dir: - token_path = f"{temp_dir}/config.json" - with open(token_path, "w") as temp_config: - json.dump({"token": self.TEST_TOKEN}, temp_config) - temp_config.close() - import src.superannotate as sa - sa.init(token_path) - - @patch("lib.infrastructure.controller.Controller.retrieve_configs") - def test_init_default_configs_open(self, retrieve_configs): - import src.superannotate as sa - try: - sa.init() - except Exception: - self.assertTrue(retrieve_configs.call_args[0], sa.constances.CONFIG_FILE_LOCATION) - - def test_init(self): - with tempfile.TemporaryDirectory() as temp_dir: - path = join(temp_dir, self.FILE_NAME) - with open(path, "w") as config: - json.dump(self.VALID_JSON, config) - import src.superannotate as sa - sa.init(path) - 
-            self.assertEqual(sa.get_default_controller().team_id, 1234)
-
-    def test_(self):
-        import src.superannotate as sa
-        sa.init("~/.superannotate/prod_config.json")
-        sa.search_projects()
\ No newline at end of file
diff --git a/tests/unit/test_enum_arguments_handeling.py b/tests/unit/test_enum_arguments_handeling.py
new file mode 100644
index 000000000..7ff71fbb8
--- /dev/null
+++ b/tests/unit/test_enum_arguments_handeling.py
@@ -0,0 +1,20 @@
+# from typing import Literal
+from pydantic.typing import Literal
+
+from superannotate import enums
+from superannotate import SAClient
+from superannotate.lib.app.interface.types import validate_arguments
+
+
+
+@validate_arguments
+def foo(status: enums.ProjectStatus):
+    return status
+
+
+def test_enum_arg():
+    SAClient()
+    assert foo(1) == 1
+    assert foo("NotStarted") == 1
+    assert foo(enums.ProjectStatus.NotStarted.name) == 1
+    assert foo(enums.ProjectStatus.NotStarted.value) == 1
diff --git a/tests/unit/test_init.py b/tests/unit/test_init.py
new file mode 100644
index 000000000..9b7fcaee7
--- /dev/null
+++ b/tests/unit/test_init.py
@@ -0,0 +1,21 @@
+import os
+
+from src.superannotate import SAClient
+from src.superannotate.lib.core import CONFIG_PATH
+from src.superannotate.lib.core import CONFIG
+from src.superannotate.lib.infrastructure.repositories import ConfigRepository
+
+
+def test_init_from_token():
+    config_repo = ConfigRepository(CONFIG_PATH)
+    main_endpoint = config_repo.get_one("main_endpoint").value
+    os.environ.update({"SA_URL": main_endpoint})
+    token = config_repo.get_one("token").value
+
+    sa_1 = SAClient(token=token)
+    sa_2 = SAClient(token=token)
+    sa_1.get_team_metadata()
+    sa_2.get_team_metadata()
+
+    assert len(CONFIG.SESSIONS) == 1
+
diff --git a/tests/unit/test_validators.py b/tests/unit/test_validators.py
index 3cbb613ea..a10421564 100644
--- a/tests/unit/test_validators.py
+++ b/tests/unit/test_validators.py
@@ -3,11 +3,12 @@
 import tempfile
 from os.path import dirname
 from unittest import TestCase
-from unittest.mock import patch
-
+from unittest.mock import patch
+
 from pydantic import ValidationError
-import src.superannotate as sa
+from src.superannotate import SAClient
+sa = SAClient()
 from superannotate_schemas.validators import AnnotationValidators
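
The test changes above converge on one pattern: drop sa.init(), build a single module-level client, and call the bulk item-level methods on it. Below is a minimal sketch of that pattern; the project and item names are placeholders invented for illustration, not values taken from the suite, and the call shapes mirror the ones exercised in the updated tests (SAClient with no arguments or with an explicit token as in tests/unit/test_init.py, create_project, set_annotation_statuses, search_items).

    from superannotate import SAClient

    # One client per module, mirroring the module-level "sa = SAClient()" used in the tests.
    # With no arguments it relies on stored credentials; a token can be passed explicitly,
    # as tests/unit/test_init.py does with SAClient(token=...).
    sa = SAClient()

    # Placeholder project; arguments follow create_project(name, description, type).
    sa.create_project("Example Project", "demo", "Vector")

    # Assumes the named items were already attached or uploaded to the project.
    # The bulk call replaces the removed per-image set_image_annotation_status usage:
    # set_annotation_statuses(project, annotation_status, items).
    sa.set_annotation_statuses("Example Project", "Completed", ["example_1.jpg", "example_2.jpg"])

    # search_items accepts an annotation_status filter, as in test_search_items_metadata.
    completed = sa.search_items("Example Project", annotation_status="Completed")
    assert len(completed) == 2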