diff --git a/docs/source/cli.rst b/docs/source/cli.rst index f4a9bfc20..c2f77e8f5 100644 --- a/docs/source/cli.rst +++ b/docs/source/cli.rst @@ -36,6 +36,21 @@ To initialize CLI (and SDK) with team token: ---------- + +.. _ref_create_server: + +Creating a server +~~~~~~~~~~~~~~~~~~ + +This will create a directory by the given name in your current or provided directory: + +.. code-block:: bash + + superannotatecli create-server --name --path + +---------- + + .. _ref_create_project: Creating a project diff --git a/docs/source/index.rst b/docs/source/index.rst index 05f9272d9..10b21ca92 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -17,6 +17,7 @@ tutorial.sdk.rst superannotate.sdk.rst + server.rst cli.rst LICENSE.rst diff --git a/docs/source/server.rst b/docs/source/server.rst new file mode 100644 index 000000000..41bd8ff6a --- /dev/null +++ b/docs/source/server.rst @@ -0,0 +1,122 @@ +.. _ref_server: + +SAServer Reference +====================================== + +.. contents:: + +The SAServer provides interface to create web API and run in development or production servers. + +This will create a directory by the given name in your current or provided directory: + +.. code-block:: bash + + superannotatecli create-server --name --path + +---------- + +Usage +---------------- + +SuperAnnotate Python SDK allows access to the platform without web browser: + +.. code-block:: python + + import random + from superannotate import SAClient + from superannotate import SAServer + + + app = SAServer() + sa_client = SAClient() + QA_EMAILS = [ + 'qa1@superannotate.com', 'qa2@superannotate.com', + 'qa3@superannotate.com', 'qa4@superannotate.com' + ] + + + @app.route("item_completed", methods=["POST"]) + def index(request): + """ + Listening webhooks on items completed events form Superannotate automation + and is randomly assigned to qa + """ + project_id, folder_id = request.data['project_id'], request.data['folder_id'] + project = sa_client.get_project_by_id(project_id) + folder = sa_client.get_folder_by_id(project_id=project_id, folder_id=folder_id) + sa_client.assign_items( + f"{project['name']}/{folder['name']}", + items=[request.data['name']], + user=random.choice(QA_EMAILS) + ) + + + if __name__ == '__main__': + app.run(host='0.0.0.0', port=5002) + +Interface +---------------- + +.. automethod:: superannotate.SAServer.route +.. automethod:: superannotate.SAServer.add_url_rule +.. automethod:: superannotate.SAServer.run + + +uWSGI +---------- + +`uWSGI`_ is a fast, compiled server suite with extensive configuration +and capabilities beyond a basic server. + +* It can be very performant due to being a compiled program. +* It is complex to configure beyond the basic application, and has so + many options that it can be difficult for beginners to understand. +* It does not support Windows (but does run on WSL). +* It requires a compiler to install in some cases. + +This page outlines the basics of running uWSGI. Be sure to read its +documentation to understand what features are available. + +.. _uWSGI: https://uwsgi-docs.readthedocs.io/en/latest/ + +uWSGI has multiple ways to install it. The most straightforward is to +install the ``pyuwsgi`` package, which provides precompiled wheels for +common platforms. However, it does not provide SSL support, which can be +provided with a reverse proxy instead. + +Install ``pyuwsgi``. + +.. code-block:: text + + $ pip install pyuwsgi + +If you have a compiler available, you can install the ``uwsgi`` package +instead. 
Or install the ``pyuwsgi`` package from sdist instead of wheel. +Either method will include SSL support. + +.. code-block:: text + + $ pip install uwsgi + + # or + $ pip install --no-binary pyuwsgi pyuwsgi + + +Running +------- + +The most basic way to run uWSGI is to tell it to start an HTTP server +and import your application. + +.. code-block:: text + + $ uwsgi --http 127.0.0.1:8000 --master -p 4 -w wsgi:app + + *** Starting uWSGI 2.0.20 (64bit) on [x] *** + *** Operational MODE: preforking *** + spawned uWSGI master process (pid: x) + spawned uWSGI worker 1 (pid: x, cores: 1) + spawned uWSGI worker 2 (pid: x, cores: 1) + spawned uWSGI worker 3 (pid: x, cores: 1) + spawned uWSGI worker 4 (pid: x, cores: 1) + spawned uWSGI http 1 (pid: x) \ No newline at end of file diff --git a/docs/source/superannotate.sdk.rst b/docs/source/superannotate.sdk.rst index 8cecf0c30..8713337cd 100644 --- a/docs/source/superannotate.sdk.rst +++ b/docs/source/superannotate.sdk.rst @@ -81,6 +81,8 @@ ______ .. automethod:: superannotate.SAClient.unassign_items .. automethod:: superannotate.SAClient.get_item_metadata .. automethod:: superannotate.SAClient.set_annotation_statuses +.. automethod:: superannotate.SAClient.set_approval_statuses +.. automethod:: superannotate.SAClient.set_approval ---------- diff --git a/pytest.ini b/pytest.ini index 1c2a50922..46f818c07 100644 --- a/pytest.ini +++ b/pytest.ini @@ -2,5 +2,5 @@ minversion = 3.7 log_cli=true python_files = test_*.py -;pytest_plugins = ['pytest_profiling'] +pytest_plugins = ['pytest_profiling'] ;addopts = -n auto --dist=loadscope \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 11136aa72..379fc1510 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,6 +2,7 @@ pydicom>=2.0.0 boto3>=1.14.53 requests==2.26.0 requests-toolbelt>=0.9.1 +aiohttp>=3.8.1 tqdm==4.64.0 pillow>=7.2.0 matplotlib>=3.3.1 @@ -9,16 +10,16 @@ xmltodict==0.12.0 opencv-python>=4.4.0.42 wheel==0.35.1 packaging>=20.4 -plotly==4.1.0 +plotly>=4.1.0 ffmpeg-python>=0.2.0 fire==0.4.0 mixpanel==4.8.3 pydantic>=1.10.2 -setuptools~=57.4.0 -aiohttp==3.8.1 +setuptools>=57.4.0 email-validator>=1.0.3 nest-asyncio==1.5.4 jsonschema==3.2.0 pandas>=1.1.4 aiofiles==0.8.0 - +Werkzeug==2.2.2 +Jinja2==3.0.3 diff --git a/src/superannotate/__init__.py b/src/superannotate/__init__.py index c7540442e..7836dc997 100644 --- a/src/superannotate/__init__.py +++ b/src/superannotate/__init__.py @@ -1,9 +1,8 @@ import os import sys +import typing - -__version__ = "4.4.8" - +__version__ = "4.4.9dev5" sys.path.append(os.path.split(os.path.realpath(__file__))[0]) @@ -19,6 +18,8 @@ from superannotate.lib.app.input_converters import export_annotation # noqa from superannotate.lib.app.input_converters import import_annotation # noqa from superannotate.lib.app.interface.sdk_interface import SAClient # noqa +from superannotate.lib.app.server import SAServer # noqa +from superannotate.lib.app.server.utils import setup_app # noqa from superannotate.lib.core import PACKAGE_VERSION_INFO_MESSAGE # noqa from superannotate.lib.core import PACKAGE_VERSION_MAJOR_UPGRADE # noqa from superannotate.lib.core import PACKAGE_VERSION_UPGRADE # noqa @@ -27,9 +28,18 @@ SESSIONS = {} + +def create_app(apps: typing.List[str] = None) -> SAServer: + setup_app(apps) + server = SAServer() + return server + + __all__ = [ "__version__", "SAClient", + "SAServer", + "create_app", # Utils "enums", "AppException", @@ -52,7 +62,7 @@ def log_version_info(): local_version = parse(__version__) if 
local_version.is_prerelease: logger.info(PACKAGE_VERSION_INFO_MESSAGE.format(__version__)) - req = requests.get("https://pypi.python.org/pypi/superannotate/json") + req = requests.get("https://pypi.org/pypi/superannotate/json") if req.ok: releases = req.json().get("releases", []) pip_version = parse("0") diff --git a/src/superannotate/lib/app/input_converters/sa_conversion.py b/src/superannotate/lib/app/input_converters/sa_conversion.py index ff1dac54d..e3115f4b1 100644 --- a/src/superannotate/lib/app/input_converters/sa_conversion.py +++ b/src/superannotate/lib/app/input_converters/sa_conversion.py @@ -1,3 +1,4 @@ +import itertools import json import shutil @@ -20,65 +21,63 @@ def copy_file(src_path, dst_path): def from_pixel_to_vector(json_paths, output_dir): img_names = [] + for json_path in json_paths: file_name = str(json_path.name).replace("___pixel.json", "___objects.json") mask_name = str(json_path).replace("___pixel.json", "___save.png") img = cv2.imread(mask_name) H, W, _ = img.shape - sa_json = json.load(open(json_path)) instances = sa_json["instances"] - idx = 0 + new_instances = [] + global_idx = itertools.count() sa_instances = [] + for instance in instances: if "parts" not in instance.keys(): if "type" in instance.keys() and instance["type"] == "meta": sa_instances.append(instance) continue - parts = instance["parts"] + if len(parts) > 1: + group_id = next(global_idx) + else: + group_id = 0 + from collections import defaultdict - polygons = [] for part in parts: color = list(hex_to_rgb(part["color"])) mask = np.zeros((H, W), dtype=np.uint8) mask[np.all((img == color[::-1]), axis=2)] = 255 - contours, _ = cv2.findContours( - mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE - ) - part_polygons = [] - for contour in contours: - segment = contour.flatten().tolist() - if len(segment) > 6: - part_polygons.append(segment) - polygons.append(part_polygons) - - for part_polygons in polygons: - if len(part_polygons) > 1: - idx += 1 - group_id = idx - else: - group_id = 0 - for polygon in part_polygons: + # child contour index hierarchy[0][[i][3] + contours, hierarchy = cv2.findContours( + mask, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE + ) + parent_child_map = defaultdict(list) + for idx, _hierarchy in enumerate(hierarchy[0]): + + if len(contours[idx].flatten().tolist()) <= 6: + continue + if _hierarchy[3] < 0: + parent_child_map[idx] = [] + else: + parent_child_map[_hierarchy[3]].append(idx) + + for outer, inners in parent_child_map.items(): + outer_points = contours[outer].flatten().tolist() + exclude_points = [contours[i].flatten().tolist() for i in inners] temp = instance.copy() del temp["parts"] temp["pointLabels"] = {} temp["groupId"] = group_id temp["type"] = "polygon" - temp["points"] = polygon - sa_instances.append(temp.copy()) - temp["type"] = "bbox" - temp["points"] = { - "x1": min(polygon[::2]), - "x2": max(polygon[::2]), - "y1": min(polygon[1::2]), - "y2": max(polygon[1::2]), - } - sa_instances.append(temp.copy()) + temp["points"] = outer_points + temp["exclude"] = exclude_points + new_instances.append(temp) - sa_json["instances"] = sa_instances + sa_json["instances"] = new_instances write_to_json(output_dir / file_name, sa_json) img_names.append(file_name.replace("___objects.json", "")) return img_names diff --git a/src/superannotate/lib/app/interface/cli_interface.py b/src/superannotate/lib/app/interface/cli_interface.py index 410e3c506..b4f2de3f6 100644 --- a/src/superannotate/lib/app/interface/cli_interface.py +++ b/src/superannotate/lib/app/interface/cli_interface.py @@ 
-1,9 +1,12 @@ import os +import shutil import sys import tempfile +from pathlib import Path from typing import Any from typing import Optional +import lib as sa_lib import lib.core as constances from lib import __file__ as lib_path from lib.app.input_converters.conversion import import_annotation @@ -255,3 +258,20 @@ def upload_videos( image_quality_in_editor=None, ) sys.exit(0) + + def create_server(self, name: str, path: str = None): + """ + This will create a directory by the given name in your current or provided directory. + """ + path = Path(os.path.expanduser(path if path else ".")) / name + if path.exists(): + raise Exception(f"Directory already exists {str(path.absolute())}") + path.mkdir(parents=True) + default_files_path = Path(sa_lib.__file__).parent / "app" / "server" + shutil.copy(default_files_path / "__app.py", path / "app.py") + shutil.copy(default_files_path / "__wsgi.py", path / "wsgi.py") + shutil.copy(default_files_path / "Dockerfile", path / "Dockerfile") + shutil.copy(default_files_path / "requirements.txt", path / "requirements.txt") + shutil.copy(default_files_path / "README.rst", path / "README.rst") + shutil.copy(default_files_path / "run.sh", path / "run.sh") + shutil.copytree(default_files_path / "deployment", path / "deployment") diff --git a/src/superannotate/lib/app/interface/sdk_interface.py b/src/superannotate/lib/app/interface/sdk_interface.py index a44dfd094..871e33a12 100644 --- a/src/superannotate/lib/app/interface/sdk_interface.py +++ b/src/superannotate/lib/app/interface/sdk_interface.py @@ -23,6 +23,7 @@ from lib.app.interface.types import AnnotationStatuses from lib.app.interface.types import AnnotationType from lib.app.interface.types import AnnotatorRole +from lib.app.interface.types import ApprovalStatuses from lib.app.interface.types import AttachmentArg from lib.app.interface.types import AttachmentDict from lib.app.interface.types import ClassType @@ -129,7 +130,6 @@ def get_item_by_id(self, project_id: int, item_id: int): :param item_id: the id of the item :type item_id: int - :return: item metadata :rtype: dict """ @@ -160,10 +160,13 @@ def search_team_contributors( :param email: filter by email :type email: str + :param first_name: filter by first name :type first_name: str + :param last_name: filter by last name :type last_name: str + :param return_metadata: return metadata of contributors instead of names :type return_metadata: bool @@ -252,7 +255,7 @@ def create_project( :param project_description: the new project's description :type project_description: str - :param project_type: the new project type, Vector or Pixel. + :param project_type: the new project type, Vector, Pixel, Video, Document, Tiled, PointCloud, Other. :type project_type: str :param settings: list of settings objects @@ -2330,7 +2333,7 @@ def search_items( ♦ “QualityCheck” \n ♦ “Returned” \n ♦ “Completed” \n - ♦ “Skippe + ♦ “Skip” \n :type annotation_status: str :param annotator_email: returns those items’ names that are assigned to the specified annotator. @@ -2567,13 +2570,13 @@ def set_annotation_statuses( :param project: project name or folder path (e.g., “project1/folder1”). :type project: str - :param annotation_status: annotation status to set, should be one of. - “NotStarted” - “InProgress” - “QualityCheck” - “Returned” - “Completed” - “Skipped” + :param annotation_status: annotation status to set, should be one of. 
\n + ♦ “NotStarted” \n + ♦ “InProgress” \n + ♦ “QualityCheck” \n + ♦ “Returned” \n + ♦ “Completed” \n + ♦ “Skipped” \n :type annotation_status: str :param items: item names to set the mentioned status for. If None, all the items in the project will be used. @@ -3018,3 +3021,33 @@ def add_items_to_subset( raise AppException(response.errors) return response.data + + def set_approval_statuses( + self, + project: NotEmptyStr, + approval_status: Union[ApprovalStatuses, None], + items: Optional[List[NotEmptyStr]] = None, + ): + """Sets annotation statuses of items + + :param project: project name or folder path (e.g., “project1/folder1”). + :type project: str + + :param approval_status: approval status to set, should be one of. \n + ♦ None \n + ♦ “Approved” \n + ♦ “Disapproved” \n + :type approval_status: str + + :param items: item names to set the mentioned status for. If None, all the items in the project will be used. + :type items: list of strs + """ + project, folder = self.controller.get_project_folder_by_path(project) + response = self.controller.items.set_approval_statuses( + project=project, + folder=folder, + approval_status=approval_status, + item_names=items, + ) + if response.errors: + raise AppException(response.errors) diff --git a/src/superannotate/lib/app/interface/types.py b/src/superannotate/lib/app/interface/types.py index cea32d521..fac2c4346 100644 --- a/src/superannotate/lib/app/interface/types.py +++ b/src/superannotate/lib/app/interface/types.py @@ -5,6 +5,7 @@ from typing import Union from lib.core.enums import AnnotationStatus +from lib.core.enums import ApprovalStatus from lib.core.enums import BaseTitledEnum from lib.core.enums import ClassTypeEnum from lib.core.enums import FolderStatus @@ -199,6 +200,22 @@ def validate(cls, value: Union[str]) -> Union[str]: return value +class ApprovalStatuses(StrictStr): + @classmethod + def validate(cls, value: Union[str]) -> Union[str]: + if value is None: + return value + if value.lower() not in ApprovalStatus.values() or not isinstance(value, str): + raise TypeError( + f"Available approval_status options are {', '.join(map(str, ApprovalStatus.titles()))}." + ) + return value + + @classmethod + def __get_validators__(cls): + yield cls.validate + + def validate_arguments(func): @wraps(func) def wrapped(self, *args, **kwargs): diff --git a/src/superannotate/lib/app/server/Dockerfile b/src/superannotate/lib/app/server/Dockerfile new file mode 100644 index 000000000..6136589ef --- /dev/null +++ b/src/superannotate/lib/app/server/Dockerfile @@ -0,0 +1,30 @@ +FROM tiangolo/uwsgi-nginx:python3.8 + + +# Install requirements +COPY requirements.txt /tmp/requirements.txt +RUN pip install --upgrade pip + +RUN pip install --no-cache-dir -r /tmp/requirements.txt + +# Add the app +COPY . /app +WORKDIR /app + +# Make /app/* available to be imported by Python globally to better support several use cases like Alembic migrations. +ENV PYTHONPATH=/app + +# Move the base entrypoint to reuse it +RUN mv /entrypoint.sh /uwsgi-nginx-entrypoint.sh + +# Copy the entrypoint that will generate Nginx additional configs +COPY deployment/entrypoint.sh /entrypoint.sh +COPY deployment/uwsgi.ini /uwsgi.ini +RUN chmod +x /entrypoint.sh + +ENTRYPOINT ["/entrypoint.sh"] + +# Run the start script provided by the parent image tiangolo/uwsgi-nginx. +# It will check for an /app/prestart.sh script (e.g. 
for migrations) +# And then will start Supervisor, which in turn will start Nginx and uWSGI +CMD ["/start.sh"] diff --git a/src/superannotate/lib/app/server/README.rst b/src/superannotate/lib/app/server/README.rst new file mode 100644 index 000000000..2ade3cf50 --- /dev/null +++ b/src/superannotate/lib/app/server/README.rst @@ -0,0 +1,13 @@ +================ +Superannotate Server +================ + +Structure +======= + +Usage +----------- + +.. code-block:: bash + + ./run.sh --token= diff --git a/src/superannotate/lib/app/server/__app.py b/src/superannotate/lib/app/server/__app.py new file mode 100644 index 000000000..b40410f93 --- /dev/null +++ b/src/superannotate/lib/app/server/__app.py @@ -0,0 +1,25 @@ +from superannotate import create_app +from superannotate import SAClient + +sa_client = SAClient() +app = create_app([]) + + +@app.route("/", methods=["GET"]) +def health_check(request): + return "Hello World!!!" + + +@app.route("/project_created", methods=["POST"]) +def index(request): + """ + Create default folders when project created. + """ + project_name = request.json["after"]["name"] + sa_client.create_folder(project_name, "default_folder_1") + sa_client.create_folder(project_name, "default_folder_2") + return "Default folders created." + + +if __name__ == "__main__": + app.run(host="0.0.0.0", port=5002) diff --git a/src/superannotate/lib/app/server/__init__.py b/src/superannotate/lib/app/server/__init__.py new file mode 100644 index 000000000..af01d3128 --- /dev/null +++ b/src/superannotate/lib/app/server/__init__.py @@ -0,0 +1,5 @@ +from lib.app.server.core import Response +from lib.app.server.core import SAServer + + +__all__ = ["SAServer", "Response"] diff --git a/src/superannotate/lib/app/server/__wsgi.py b/src/superannotate/lib/app/server/__wsgi.py new file mode 100644 index 000000000..bd9ac4fca --- /dev/null +++ b/src/superannotate/lib/app/server/__wsgi.py @@ -0,0 +1,5 @@ +from superannotate import create_app + +APPS = ["app"] + +app = create_app(APPS) diff --git a/src/superannotate/lib/app/server/core.py b/src/superannotate/lib/app/server/core.py new file mode 100644 index 000000000..29459d375 --- /dev/null +++ b/src/superannotate/lib/app/server/core.py @@ -0,0 +1,249 @@ +import json +import pathlib +import typing +from datetime import datetime + +from jinja2 import Environment +from jinja2 import FileSystemLoader +from superannotate.logger import get_server_logger +from werkzeug.exceptions import HTTPException +from werkzeug.routing import Map +from werkzeug.routing import Rule +from werkzeug.serving import run_simple +from werkzeug.wrappers import Request +from werkzeug.wrappers import Response as BaseResponse + +logger = get_server_logger() + + +class Response(BaseResponse): + ... + + +class SingletonMeta(type): + _instances = {} + + def __call__(cls, *args, **kwargs): + if cls not in cls._instances: + instance = super().__call__(*args, **kwargs) + cls._instances[cls] = instance + return cls._instances[cls] + + +class SAServer(metaclass=SingletonMeta): + def __init__(self): + self._url_map: Map = Map([]) + self._view_function_map: typing.Dict[str, typing.Callable] = {} + self.jinja_env = Environment( + loader=FileSystemLoader(str(pathlib.Path(__file__).parent / "templates")), + autoescape=True, + ) + + def route( + self, rule: str, methods: typing.List[str] = None, **options: typing.Any + ) -> typing.Any: + """Decorate a view function to register it with the given URL + rule and options. Calls :meth:`add_url_rule`, which has more + details about the implementation. 
+ + .. code-block:: python + + @route("/") + def index(): + return "Hello, World!" + + The endpoint name for the route defaults to the name of the view + function if the ``endpoint`` parameter isn't passed. + + The ``methods`` parameter defaults to ``["GET"]``. ``HEAD`` and + ``OPTIONS`` are added automatically. + + :param rule: The URL rule string. + :type rule: str. + + :param methods: Allowed HTTP methods. + :type rule: list of str + + :param options: Extra options passed to the + :class:`~werkzeug.routing.Rule` object. + :type options: list of any + + + """ + + def decorator(f): + endpoint = options.pop("endpoint", None) + options["methods"] = methods + if not endpoint: + endpoint = f.__name__ + self.add_url_rule(rule, endpoint, f, **options) + + return decorator + + def add_url_rule( + self, + rule: str, + endpoint: str = None, + view_func: typing.Callable = None, + **options: typing.Any + ): + """ + Register a rule for routing incoming requests and building + URLs. The :meth:`route` decorator is a shortcut to call this + with the ``view_func`` argument. These are equivalent: + + .. code-block:: python + + @app.route("/") + def index(): + ... + + .. code-block:: python + + def index(): + ... + + app.add_url_rule("/", view_func=index) + + See :ref:`url-route-registrations`. + + The endpoint name for the route defaults to the name of the view + function if the ``endpoint`` parameter isn't passed. An error + will be raised if a function has already been registered for the + endpoint. + + The ``methods`` parameter defaults to ``["GET"]``. ``HEAD`` is + always added automatically, and ``OPTIONS`` is added + automatically by default. + + ``view_func`` does not necessarily need to be passed, but if the + rule should participate in routing an endpoint name must be + associated with a view function at some point with the + :meth:`endpoint` decorator. + + .. code-block:: python + + app.add_url_rule("/", endpoint="index") + + @app.endpoint("index") + def index(): + ... + + If ``view_func`` has a ``required_methods`` attribute, those + methods are added to the passed and automatic methods. If it + has a ``provide_automatic_methods`` attribute, it is used as the + default if the parameter is not passed. + :param rule: The URL rule string. + :type rule: str + + :param endpoint: Endpoint name. + :type endpoint: str + + :param view_func: Handler function. + :type view_func: typing.Callable + + :param options: Extra options passed to the + :class:`~werkzeug.routing.Rule` object. 
+ :type options: list of any + """ + self._url_map.add(Rule(rule, endpoint=endpoint, **options)) + self._view_function_map[endpoint] = view_func + + def _dispatch_request(self, request): + """Dispatches the request.""" + adapter = self._url_map.bind_to_environ(request.environ) + try: + endpoint, values = adapter.match() + view_func = self._view_function_map.get(endpoint) + if not view_func: + return Response(status=404) + content = view_func(request, **values) + if isinstance(content, Response): + response = content + elif isinstance(content, (list, dict)): + response = Response( + json.dumps(content), content_type="application/json" + ) + else: + response = Response(content) + return response + except HTTPException as e: + return e + + def wsgi_app(self, environ, start_response): + """WSGI application that processes requests and returns responses.""" + request = Request(environ) + response = self._dispatch_request(request) + return_value = response(environ, start_response) + if not any(i in request.full_path for i in ("monitor", "logs")): + data = { + "date": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + "request": { + "method": request.method, + "path": request.url, + "headers": dict(request.headers.items()), + "data": request.data.decode("utf-8"), + }, + "response": { + "headers": dict(response.headers.items()) + if hasattr(response, "headers") + else {}, + "data": response.data.decode("utf-8") + if hasattr(response, "data") + else response.description, + "status_code": response.status_code + if hasattr(response, "status_code") + else response.code, + }, + } + logger.info(json.dumps(data)) + return return_value + + def __call__(self, environ, start_response): + """The WSGI server calls this method as the WSGI application.""" + return self.wsgi_app(environ, start_response) + + def run( + self, + host: str = "localhost", + port: int = 8000, + use_debugger=True, + use_reloader=True, + ssl_context=None, + **kwargs + ): + """Start a development server for a WSGI application. + + .. warning:: + + Do not use the development server when deploying to production. + It is intended for use only during local development. It is not + designed to be particularly efficient, stable, or secure. + + :param host: The host to bind to, for example ``'localhost'``. + Can be a domain, IPv4 or IPv6 address, or file path starting + with ``unix://`` for a Unix socket. + :param port: The port to bind to, for example ``8080``. Using ``0`` + tells the OS to pick a random free port. + :param use_reloader: Use a reloader process to restart the server + process when files are changed. + :param use_debugger: Use Werkzeug's debugger, which will show + formatted tracebacks on unhandled exceptions. + :param ssl_context: Configure TLS to serve over HTTPS. Can be an + :class:`ssl.SSLContext` object, a ``(cert_file, key_file)`` + tuple to create a typical context, or the string ``'adhoc'`` to + generate a temporary self-signed certificate. 
+ """ + run_simple( + host, + port, + self, + use_debugger=use_debugger, + use_reloader=use_reloader, + ssl_context=ssl_context, + **kwargs + ) + + def render_template(self, template_name, **context): + t = self.jinja_env.get_template(template_name) + return Response(t.render(context), mimetype="text/html") diff --git a/src/superannotate/lib/app/server/default_app.py b/src/superannotate/lib/app/server/default_app.py new file mode 100644 index 000000000..ecceeb13e --- /dev/null +++ b/src/superannotate/lib/app/server/default_app.py @@ -0,0 +1,58 @@ +import json + +from lib.app.server import SAServer +from lib.core import LOG_FILE_LOCATION + +app = SAServer() + +LOG_FILE = "/var/log/orchestra/consumer.log" + + +@app.route("/monitor", methods=["GET"]) +def monitor_view(request): + return app.render_template("monitor.html", **{}) + + +@app.route("/logs", methods=["GET"]) +def logs(request): + offset = request.args.get("offset", None) + if offset: + offset = int(offset) + limit = int(request.args.get("limit", 20)) + response = {"data": []} + + with open(f"{LOG_FILE_LOCATION}/sa_server.log") as log_file: + log_file.seek(0, 2) + if not offset: + offset = log_file.tell() + cursor = max(offset - 2048, 0) + while True: + log_file.seek(cursor, 0) + tmp_cursor = cursor + for line in log_file: + tmp_cursor += len(line) + if tmp_cursor > offset: + cursor = max(cursor - 2048, 0) + break + try: + response["data"].append(json.loads(line)) + except Exception as _: + ... + cursor = max(cursor - 2048, 0) + if len(response["data"]) >= limit or cursor == 0: + break + response["data"] = [] + response["offset"] = cursor + response["data"].reverse() + return response + + +# +# @app.route("/_log_stream", methods=["GET"]) +# def log_stream(request): +# def generate(): +# for line in Pygtail(LOG_FILE, every_n=1): +# yield "data:" + str(line) + "\n\n" +# time.sleep(0.5) +# +# return Response(generate(), mimetype="text/event-stream") diff --git a/src/superannotate/lib/app/server/deployment/entrypoint.sh b/src/superannotate/lib/app/server/deployment/entrypoint.sh new file mode 100644 index 000000000..3dddb4b13 --- /dev/null +++ b/src/superannotate/lib/app/server/deployment/entrypoint.sh @@ -0,0 +1,46 @@ +#! 
/usr/bin/env sh +set -e + +/uwsgi-nginx-entrypoint.sh + +# Get the URL for static files from the environment variable +USE_STATIC_URL=${STATIC_URL:-'/static'} +# Get the absolute path of the static files from the environment variable +USE_STATIC_PATH=${STATIC_PATH:-'/app/static'} +# Get the listen port for Nginx, default to 80 +USE_LISTEN_PORT=${LISTEN_PORT:-80} + +if [ -f /app/nginx.conf ]; then + cp /app/nginx.conf /etc/nginx/nginx.conf +else + content_server='server {\n' + content_server=$content_server" listen ${USE_LISTEN_PORT};\n" + content_server=$content_server' location / {\n' + content_server=$content_server' try_files $uri @app;\n' + content_server=$content_server' }\n' + content_server=$content_server' location @app {\n' + content_server=$content_server' include uwsgi_params;\n' + content_server=$content_server' uwsgi_pass unix:///tmp/uwsgi.sock;\n' + content_server=$content_server' }\n' + content_server=$content_server" location $USE_STATIC_URL {\n" + content_server=$content_server" alias $USE_STATIC_PATH;\n" + content_server=$content_server' }\n' + # If STATIC_INDEX is 1, serve / with /static/index.html directly (or the static URL configured) + if [ "$STATIC_INDEX" = 1 ] ; then + content_server=$content_server' location = / {\n' + content_server=$content_server" index $USE_STATIC_URL/index.html;\n" + content_server=$content_server' }\n' + fi + content_server=$content_server'}\n' + # Save generated server /etc/nginx/conf.d/nginx.conf + printf "$content_server" > /etc/nginx/conf.d/nginx.conf +fi + +# For Alpine: +# Explicitly add installed Python packages and uWSGI Python packages to PYTHONPATH +# Otherwise uWSGI can't import Flask +if [ -n "$ALPINEPYTHON" ] ; then + export PYTHONPATH=$PYTHONPATH:/usr/local/lib/$ALPINEPYTHON/site-packages:/usr/lib/$ALPINEPYTHON/site-packages +fi + +exec "$@" diff --git a/src/superannotate/lib/app/server/deployment/uwsgi.ini b/src/superannotate/lib/app/server/deployment/uwsgi.ini new file mode 100644 index 000000000..e1b76d4fe --- /dev/null +++ b/src/superannotate/lib/app/server/deployment/uwsgi.ini @@ -0,0 +1,3 @@ +[uwsgi] +psqi = wsgi +callable = app diff --git a/src/superannotate/lib/app/server/requirements.txt b/src/superannotate/lib/app/server/requirements.txt new file mode 100644 index 000000000..560cc128c --- /dev/null +++ b/src/superannotate/lib/app/server/requirements.txt @@ -0,0 +1 @@ +superannotate diff --git a/src/superannotate/lib/app/server/run.sh b/src/superannotate/lib/app/server/run.sh new file mode 100755 index 000000000..cce3f972b --- /dev/null +++ b/src/superannotate/lib/app/server/run.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +######################################################## + +## Shell Script to Build and Run Docker Image + +######################################################## + + +echo "build the docker image" +sudo docker build . -t sa_server +echo "built docker images and proceeding to delete existing container" +result=$(docker ps -q -f name=sa_server) +if [[ $? 
-eq 0 ]]; then + echo "Container exists" + sudo docker container rm -f sa_server + echo "Deleted the existing docker container" +else + echo "No such container" +fi +echo "Deploying the updated container" +#sudo docker run -d sa_server -p 80:80 +sudo docker run sa_server +echo "Deploying the container" diff --git a/src/superannotate/lib/app/server/templates/monitor.html b/src/superannotate/lib/app/server/templates/monitor.html new file mode 100644 index 000000000..7c3074629 --- /dev/null +++ b/src/superannotate/lib/app/server/templates/monitor.html @@ -0,0 +1,207 @@ + + + + + + + + Document + + + + + + +
+ <!-- monitor.html template body (panels: "All Requests", "Request", "Response"); markup not captured in this patch view -->
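For context (not part of the patch itself): the monitor page above is backed by the ``/logs`` endpoint that ``default_app.py`` registers, which accepts ``offset`` and ``limit`` query parameters and returns a JSON payload with ``data`` and ``offset`` keys. A minimal client-side sketch, assuming the development-server defaults of ``localhost:8000`` (host, port, and the sample field access are assumptions based on the log structure written by ``SAServer.wsgi_app``):

.. code-block:: python

    import requests

    BASE_URL = "http://localhost:8000"  # assumed: SAServer development-server defaults

    # Fetch the most recent request/response log entries exposed by the /logs endpoint.
    page = requests.get(f"{BASE_URL}/logs", params={"limit": 20}).json()
    for entry in page["data"]:
        print(entry["date"], entry["request"]["method"], entry["request"]["path"])

    # The returned "offset" is a file cursor; pass it back to page through older entries.
    older = requests.get(
        f"{BASE_URL}/logs", params={"limit": 20, "offset": page["offset"]}
    ).json()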
+ + + + + diff --git a/src/superannotate/lib/app/server/utils.py b/src/superannotate/lib/app/server/utils.py new file mode 100644 index 000000000..187c1cae7 --- /dev/null +++ b/src/superannotate/lib/app/server/utils.py @@ -0,0 +1,9 @@ +import typing +from importlib import import_module + + +def setup_app(apps: typing.List[str] = None): + if apps: + apps.extend(["superannotate.lib.app.server.default_app"]) + for path in apps: + import_module(path) diff --git a/src/superannotate/lib/core/__init__.py b/src/superannotate/lib/core/__init__.py index 7011fe69d..c6a095fad 100644 --- a/src/superannotate/lib/core/__init__.py +++ b/src/superannotate/lib/core/__init__.py @@ -2,6 +2,7 @@ from superannotate.lib.core.config import Config from superannotate.lib.core.enums import AnnotationStatus +from superannotate.lib.core.enums import ApprovalStatus from superannotate.lib.core.enums import FolderStatus from superannotate.lib.core.enums import ImageQuality from superannotate.lib.core.enums import ProjectStatus @@ -15,8 +16,8 @@ CONFIG_PATH = "~/.superannotate/config.json" CONFIG_FILE_LOCATION = expanduser(CONFIG_PATH) -LOG_FILE_LOCATION = expanduser("~/.superannotate/sa.log") -BACKEND_URL = "https://api.annotate.online" +LOG_FILE_LOCATION = expanduser("~/.superannotate") +BACKEND_URL = "https://api.superannotate.com" DEFAULT_IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "tif", "tiff", "webp", "bmp"] DEFAULT_FILE_EXCLUDE_PATTERNS = ["___save.png", "___fuse.png"] @@ -121,6 +122,7 @@ SegmentationStatus, ImageQuality, AnnotationStatus, + ApprovalStatus, CONFIG_FILE_LOCATION, CONFIG_PATH, BACKEND_URL, diff --git a/src/superannotate/lib/core/enums.py b/src/superannotate/lib/core/enums.py index 782ca678a..d3222966b 100644 --- a/src/superannotate/lib/core/enums.py +++ b/src/superannotate/lib/core/enums.py @@ -12,7 +12,7 @@ def __get__(self, instance, owner): class BaseTitledEnum(int, Enum): def __new__(cls, title, value): - obj = int.__new__(cls, value) + obj = super().__new__(cls, value) obj._value_ = value obj.__doc__ = title obj._type = "titled_enum" @@ -40,7 +40,7 @@ def get_name(cls, value): @classmethod def get_value(cls, name): for enum in list(cls): - if enum.__doc__.lower() == name.lower(): + if enum.__doc__ and name and enum.__doc__.lower() == name.lower(): if isinstance(enum.value, int): if enum.value < 0: return "" @@ -48,7 +48,7 @@ def get_value(cls, name): @classmethod def values(cls): - return [enum.__doc__.lower() for enum in list(cls)] + return [enum.__doc__.lower() if enum else None for enum in list(cls)] @classmethod def titles(cls): @@ -64,6 +64,12 @@ def __hash__(self): return hash(self.name) +class ApprovalStatus(BaseTitledEnum): + NONE = None, 0 + DISAPPROVED = "Disapproved", 1 + APPROVED = "Approved", 2 + + class AnnotationTypes(str, Enum): BBOX = "bbox" EVENT = "event" @@ -170,9 +176,3 @@ class SegmentationStatus(BaseTitledEnum): IN_PROGRESS = "InProgress", 2 COMPLETED = "Completed", 3 FAILED = "Failed", 4 - - -class ApprovalStatus(BaseTitledEnum): - NONE = None, 0 - DISAPPROVED = "disapproved", 1 - APPROVED = "approved", 2 diff --git a/src/superannotate/lib/core/service_types.py b/src/superannotate/lib/core/service_types.py index c766c1654..11f981776 100644 --- a/src/superannotate/lib/core/service_types.py +++ b/src/superannotate/lib/core/service_types.py @@ -1,5 +1,4 @@ from typing import Any -from typing import Callable from typing import Dict from typing import List from typing import Optional @@ -9,7 +8,6 @@ from pydantic import BaseModel from pydantic import Extra from pydantic import 
Field -from pydantic import parse_obj_as class Limit(BaseModel): @@ -91,53 +89,14 @@ class UploadCustomFieldValues(BaseModel): class ServiceResponse(BaseModel): status: Optional[int] reason: Optional[str] - content: Optional[Union[bytes, str]] - data: Optional[Any] + content: Optional[Union[bytes, str]] = None + data: Optional[Any] = None count: Optional[int] = 0 _error: Optional[str] = None class Config: extra = Extra.allow - def __init__( - self, response=None, content_type=None, dispatcher: Callable = None, data=None - ): - if response is None: - super().__init__(data=data, status=200) - return - data = { - "status": response.status_code, - "reason": response.reason, - "content": response.content, - } - try: - response_json = response.json() - except Exception: - response_json = dict() - if not response.ok: - error = response_json.get("error") - if not error: - error = response_json.get("errors", "Unknown Error") - data["_error"] = error - super().__init__(**data) - return - if dispatcher: - _data = response_json - response_json = dispatcher(_data) - data.update(_data) - try: - if isinstance(response_json, dict): - data["count"] = response_json.get("count", None) - - if content_type and content_type is not self.__class__: - data["data"] = parse_obj_as(content_type, response_json) - else: - data["data"] = response_json - except Exception: - data["data"] = {} - - super().__init__(**data) - @property def status_code(self): return self.status @@ -152,17 +111,16 @@ def ok(self): def error(self): if self._error: return self._error - default_message = self.reason if self.reason else "Unknown Error" - if isinstance(self.data, dict) and "error" in self.data: - return self.data.get("error", default_message) - else: - return getattr(self.data, "error", default_message) + return self.data def set_error(self, value: Union[dict, str]): if isinstance(value, dict) and "error" in value: self._error = value["error"] self._error = value + def __str__(self): + return f"Status: {self.status_code}, Error {self.error}" + class ImageResponse(ServiceResponse): data: entities.ImageEntity = None @@ -196,8 +154,12 @@ class ModelListResponse(ServiceResponse): data: List[entities.AnnotationClassEntity] = None -class IntegrationResponse(ServiceResponse): - data: List[entities.IntegrationEntity] = None +class _IntegrationResponse(ServiceResponse): + integrations: List[entities.IntegrationEntity] = [] + + +class IntegrationListResponse(ServiceResponse): + data: _IntegrationResponse class AnnotationClassListResponse(ServiceResponse): diff --git a/src/superannotate/lib/core/serviceproviders.py b/src/superannotate/lib/core/serviceproviders.py index f7ac2ce80..fd24f699e 100644 --- a/src/superannotate/lib/core/serviceproviders.py +++ b/src/superannotate/lib/core/serviceproviders.py @@ -13,7 +13,7 @@ from lib.core.service_types import DownloadMLModelAuthDataResponse from lib.core.service_types import FolderListResponse from lib.core.service_types import FolderResponse -from lib.core.service_types import IntegrationResponse +from lib.core.service_types import IntegrationListResponse from lib.core.service_types import ItemListResponse from lib.core.service_types import ModelListResponse from lib.core.service_types import ProjectListResponse @@ -293,6 +293,16 @@ def set_statuses( ) -> ServiceResponse: raise NotImplementedError + @abstractmethod + def set_approval_statuses( + self, + project: entities.ProjectEntity, + folder: entities.FolderEntity, + item_names: List[str], + approval_status: int, + ) -> ServiceResponse: + raise 
NotImplementedError + @abstractmethod def delete_multiple( self, project: entities.ProjectEntity, item_ids: List[int] @@ -466,7 +476,7 @@ def list(self, condition: Condition = None) -> ModelListResponse: class BaseIntegrationService(SuperannotateServiceProvider): @abstractmethod - def list(self) -> IntegrationResponse: + def list(self) -> IntegrationListResponse: raise NotImplementedError @abstractmethod diff --git a/src/superannotate/lib/core/usecases/annotations.py b/src/superannotate/lib/core/usecases/annotations.py index 34917974c..b92fb7c47 100644 --- a/src/superannotate/lib/core/usecases/annotations.py +++ b/src/superannotate/lib/core/usecases/annotations.py @@ -53,7 +53,6 @@ BIG_FILE_THRESHOLD = 15 * 1024 * 1024 ANNOTATION_CHUNK_SIZE_MB = 10 * 1024 * 1024 URI_THRESHOLD = 4 * 1024 - 120 -nest_asyncio.apply() @dataclass @@ -376,6 +375,7 @@ def execute(self): len(items_to_upload), description="Uploading Annotations" ) try: + nest_asyncio.apply() asyncio.run(self.run_workers(items_to_upload)) except Exception: logger.debug(traceback.format_exc()) @@ -463,7 +463,7 @@ def get_name_path_mappings(annotation_paths): for item_path in annotation_paths: name_path_mappings[ - UploadAnnotationsFromFolderUseCase.extract_name(Path(item_path).name) + UploadAnnotationsFromFolderUseCase.extract_name(Path(item_path)) ] = item_path return name_path_mappings @@ -560,12 +560,14 @@ def chunks(data, size: int = 10000): yield {k: data[k] for k in islice(it, size)} @staticmethod - def extract_name(value: str): - return os.path.basename( - value.replace(constants.PIXEL_ANNOTATION_POSTFIX, "") - .replace(constants.VECTOR_ANNOTATION_POSTFIX, "") - .replace(constants.ATTACHED_VIDEO_ANNOTATION_POSTFIX, ""), - ) + def extract_name(value: Path): + if constants.VECTOR_ANNOTATION_POSTFIX in value.name: + path = value.name.replace(constants.VECTOR_ANNOTATION_POSTFIX, "") + elif constants.PIXEL_ANNOTATION_POSTFIX in value.name: + path = value.name.replace(constants.PIXEL_ANNOTATION_POSTFIX, "") + else: + path = value.stem + return path def get_existing_name_item_mapping( self, name_path_mappings: Dict[str, str] @@ -718,6 +720,7 @@ def execute(self): except KeyError: missing_annotations.append(name) try: + nest_asyncio.apply() asyncio.run(self.run_workers(items_to_upload)) except Exception: logger.debug(traceback.format_exc()) @@ -915,6 +918,7 @@ def execute(self): json.dump(annotation_json, annotation_file) size = annotation_file.tell() annotation_file.seek(0) + nest_asyncio.apply() if size > BIG_FILE_THRESHOLD: uploaded = asyncio.run( self._service_provider.annotations.upload_big_annotation( @@ -1107,6 +1111,7 @@ def execute(self): ) small_annotations = [x["name"] for x in items["small"]] try: + nest_asyncio.apply() annotations = asyncio.run( self.run_workers(items["large"], small_annotations) ) @@ -1353,14 +1358,6 @@ def download_annotation_classes(self, path: str): def get_items_count(path: str): return sum([len(files) for r, d, files in os.walk(path)]) - @staticmethod - def coroutine_wrapper(coroutine): - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - count = loop.run_until_complete(coroutine) - loop.close() - return count - async def download_big_annotations(self, queue_idx, export_path): while True: cur_queue = self._big_file_queues[queue_idx] @@ -1461,6 +1458,7 @@ def execute(self): if not folders: folders.append(self._folder) with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor: + nest_asyncio.apply() futures = [] for folder in folders: if not self._item_names: diff --git 
a/src/superannotate/lib/core/usecases/images.py b/src/superannotate/lib/core/usecases/images.py index 81ae652c8..9d25a2cbd 100644 --- a/src/superannotate/lib/core/usecases/images.py +++ b/src/superannotate/lib/core/usecases/images.py @@ -654,9 +654,12 @@ def __init__( ) def validate_project_type(self): - if self._project.type in constances.LIMITED_FUNCTIONS: + if ( + self._project.type in constances.LIMITED_FUNCTIONS + or self._project.upload_state == constances.UploadState.EXTERNAL.value + ): raise AppValidationException( - constances.LIMITED_FUNCTIONS[self._project.type] + "The feature does not support projects containing attached URLs." ) def validate_variant_type(self): diff --git a/src/superannotate/lib/core/usecases/integrations.py b/src/superannotate/lib/core/usecases/integrations.py index f175a2aff..da60e2ff5 100644 --- a/src/superannotate/lib/core/usecases/integrations.py +++ b/src/superannotate/lib/core/usecases/integrations.py @@ -17,7 +17,7 @@ def __init__(self, reporter: Reporter, service_provider: BaseServiceProvider): self._service_provider = service_provider def execute(self) -> Response: - integrations = self._service_provider.integrations.list().data + integrations = self._service_provider.integrations.list().data.integrations integrations = list(sorted(integrations, key=lambda x: x.createdAt)) integrations.reverse() self._response.data = integrations @@ -49,7 +49,7 @@ def _upload_path(self): def execute(self) -> Response: integrations: List[ IntegrationEntity - ] = self._service_provider.integrations.list().data + ] = self._service_provider.integrations.list().data.integrations integration_name_lower = self._integration.name.lower() integration = next( (i for i in integrations if i.name.lower() == integration_name_lower), None diff --git a/src/superannotate/lib/core/usecases/items.py b/src/superannotate/lib/core/usecases/items.py index 483749ddf..a62ffc1c5 100644 --- a/src/superannotate/lib/core/usecases/items.py +++ b/src/superannotate/lib/core/usecases/items.py @@ -1,15 +1,15 @@ import copy import traceback from collections import defaultdict -from concurrent.futures import as_completed from concurrent.futures import ThreadPoolExecutor +from concurrent.futures import as_completed from typing import Dict from typing import List from typing import Optional import superannotate.lib.core as constants -from lib.core.conditions import Condition from lib.core.conditions import CONDITION_EQ as EQ +from lib.core.conditions import Condition from lib.core.entities import AttachmentEntity from lib.core.entities import BaseItemEntity from lib.core.entities import DocumentEntity @@ -43,7 +43,7 @@ def __init__(self, item_id, project, service_provider): super().__init__() def execute( - self, + self, ): try: @@ -65,13 +65,13 @@ def execute( class GetItem(BaseReportableUseCase): def __init__( - self, - reporter: Reporter, - project: ProjectEntity, - folder: FolderEntity, - service_provider: BaseServiceProvider, - item_name: str, - include_custom_metadata: bool, + self, + reporter: Reporter, + project: ProjectEntity, + folder: FolderEntity, + service_provider: BaseServiceProvider, + item_name: str, + include_custom_metadata: bool, ): super().__init__(reporter) self._project = project @@ -82,8 +82,8 @@ def __init__( def validate_project_type(self): if ( - self._project.type == constants.ProjectType.PIXEL.value - and self._include_custom_metadata + self._project.type == constants.ProjectType.PIXEL.value + and self._include_custom_metadata ): raise 
AppException(constants.METADATA_DEPRICATED_FOR_PIXEL) @@ -109,10 +109,10 @@ def serialize_entity(entity: BaseItemEntity, project: ProjectEntity): def execute(self) -> Response: if self.is_valid(): condition = ( - Condition("name", self._item_name, EQ) - & Condition("project_id", self._project.id, EQ) - & Condition("folder_id", self._folder.id, EQ) - & Condition("includeCustomMetadata", self._include_custom_metadata, EQ) + Condition("name", self._item_name, EQ) + & Condition("project_id", self._project.id, EQ) + & Condition("folder_id", self._folder.id, EQ) + & Condition("includeCustomMetadata", self._include_custom_metadata, EQ) ) response = self._service_provider.items.list(condition) if not response.ok: @@ -130,13 +130,13 @@ def execute(self) -> Response: class QueryEntitiesUseCase(BaseReportableUseCase): def __init__( - self, - reporter: Reporter, - project: ProjectEntity, - folder: FolderEntity, - service_provider: BaseServiceProvider, - query: str, - subset: str = None, + self, + reporter: Reporter, + project: ProjectEntity, + folder: FolderEntity, + service_provider: BaseServiceProvider, + query: str, + subset: str = None, ): super().__init__(reporter) self._project = project @@ -182,6 +182,9 @@ def execute(self) -> Response: (_sub for _sub in response.data if _sub.name == self._subset), None, ) + else: + self._response.errors = response.error + return self._response if not subset: self._response.errors = AppException( "Subset not found. Use the superannotate." @@ -215,13 +218,13 @@ def execute(self) -> Response: class ListItems(BaseUseCase): def __init__( - self, - project: ProjectEntity, - folder: FolderEntity, - service_provider: BaseServiceProvider, - search_condition: Condition, - recursive: bool = False, - include_custom_metadata: bool = False, + self, + project: ProjectEntity, + folder: FolderEntity, + service_provider: BaseServiceProvider, + search_condition: Condition, + recursive: bool = False, + include_custom_metadata: bool = False, ): super().__init__() self._project = project @@ -237,8 +240,8 @@ def validate_recursive_case(self): def validate_project_type(self): if ( - self._project.type == constants.ProjectType.PIXEL.value - and self._include_custom_metadata + self._project.type == constants.ProjectType.PIXEL.value + and self._include_custom_metadata ): raise AppException(constants.METADATA_DEPRICATED_FOR_PIXEL) @@ -285,12 +288,12 @@ class AssignItemsUseCase(BaseUseCase): CHUNK_SIZE = 500 def __init__( - self, - service_provider: BaseServiceProvider, - project: ProjectEntity, - folder: FolderEntity, - item_names: list, - user: str, + self, + service_provider: BaseServiceProvider, + project: ProjectEntity, + folder: FolderEntity, + item_names: list, + user: str, ): super().__init__() self._project = project @@ -300,7 +303,7 @@ def __init__( self._service_provider = service_provider def validate_item_names( - self, + self, ): self._item_names = list(set(self._item_names)) @@ -313,7 +316,7 @@ def execute(self): project=self._project, folder=self._folder, user=self._user, - item_names=self._item_names[i : i + self.CHUNK_SIZE], # noqa: E203 + item_names=self._item_names[i: i + self.CHUNK_SIZE], # noqa: E203 ) if not response.ok and response.error: # User not found self._response.errors += response.error @@ -330,11 +333,11 @@ class UnAssignItemsUseCase(BaseUseCase): CHUNK_SIZE = 500 def __init__( - self, - service_provider: BaseServiceProvider, - project: ProjectEntity, - folder: FolderEntity, - item_names: list, + self, + service_provider: BaseServiceProvider, + project: 
ProjectEntity, + folder: FolderEntity, + item_names: list, ): super().__init__() self._project = project @@ -348,7 +351,7 @@ def execute(self): response = self._service_provider.projects.un_assign_items( project=self._project, folder=self._folder, - item_names=self._item_names[i : i + self.CHUNK_SIZE], # noqa: E203 + item_names=self._item_names[i: i + self.CHUNK_SIZE], # noqa: E203 ) if not response.ok: self._response.errors = AppException( @@ -362,14 +365,14 @@ class AttachItems(BaseReportableUseCase): CHUNK_SIZE = 500 def __init__( - self, - reporter: Reporter, - project: ProjectEntity, - folder: FolderEntity, - attachments: List[AttachmentEntity], - annotation_status: str, - service_provider: BaseServiceProvider, - upload_state_code: int = constants.UploadState.EXTERNAL.value, + self, + reporter: Reporter, + project: ProjectEntity, + folder: FolderEntity, + attachments: List[AttachmentEntity], + annotation_status: str, + service_provider: BaseServiceProvider, + upload_state_code: int = constants.UploadState.EXTERNAL.value, ): super().__init__(reporter) self._project = project @@ -400,8 +403,8 @@ def validate_limitations(self): elif attachments_count > response.data.project_limit.remaining_image_count: raise AppValidationException(constants.ATTACH_PROJECT_LIMIT_ERROR_MESSAGE) elif ( - response.data.user_limit - and attachments_count > response.data.user_limit.remaining_image_count + response.data.user_limit + and attachments_count > response.data.user_limit.remaining_image_count ): raise AppValidationException(constants.ATTACH_USER_LIMIT_ERROR_MESSAGE) @@ -419,7 +422,7 @@ def execute(self) -> Response: attached = [] self.reporter.start_progress(self.attachments_count, "Attaching URLs") for i in range(0, self.attachments_count, self.CHUNK_SIZE): - attachments = self._attachments[i : i + self.CHUNK_SIZE] # noqa: E203 + attachments = self._attachments[i: i + self.CHUNK_SIZE] # noqa: E203 response = self._service_provider.items.list_by_names( project=self._project, folder=self._folder, @@ -465,14 +468,14 @@ class CopyItems(BaseReportableUseCase): CHUNK_SIZE = 500 def __init__( - self, - reporter: Reporter, - project: ProjectEntity, - from_folder: FolderEntity, - to_folder: FolderEntity, - item_names: List[str], - service_provider: BaseServiceProvider, - include_annotations: bool, + self, + reporter: Reporter, + project: ProjectEntity, + from_folder: FolderEntity, + to_folder: FolderEntity, + item_names: List[str], + service_provider: BaseServiceProvider, + include_annotations: bool, ): super().__init__(reporter) self._project = project @@ -515,7 +518,7 @@ def execute(self): cand_items = self._service_provider.items.list_by_names( project=self._project, folder=self._to_folder, - names=items[i : i + self.CHUNK_SIZE], # noqa + names=items[i: i + self.CHUNK_SIZE], # noqa ).data if isinstance(cand_items, dict): continue @@ -530,7 +533,7 @@ def execute(self): return self._response if items_to_copy: for i in range(0, len(items_to_copy), self.CHUNK_SIZE): - chunk_to_copy = items_to_copy[i : i + self.CHUNK_SIZE] # noqa: E203 + chunk_to_copy = items_to_copy[i: i + self.CHUNK_SIZE] # noqa: E203 response = self._service_provider.items.copy_multiple( project=self._project, from_folder=self._from_folder, @@ -555,7 +558,7 @@ def execute(self): cand_items = self._service_provider.items.list_by_names( project=self._project, folder=self._to_folder, - names=items[i : i + self.CHUNK_SIZE], # noqa + names=items[i: i + self.CHUNK_SIZE], # noqa ) if isinstance(cand_items, dict): continue @@ -580,13 +583,13 @@ 
class MoveItems(BaseReportableUseCase): CHUNK_SIZE = 1000 def __init__( - self, - reporter: Reporter, - project: ProjectEntity, - from_folder: FolderEntity, - to_folder: FolderEntity, - item_names: List[str], - service_provider: BaseServiceProvider, + self, + reporter: Reporter, + project: ProjectEntity, + from_folder: FolderEntity, + to_folder: FolderEntity, + item_names: List[str], + service_provider: BaseServiceProvider, ): super().__init__(reporter) self._project = project @@ -634,7 +637,7 @@ def execute(self): project=self._project, from_folder=self._from_folder, to_folder=self._to_folder, - item_names=items[i : i + self.CHUNK_SIZE], # noqa: E203 + item_names=items[i: i + self.CHUNK_SIZE], # noqa: E203 ) if response.ok and response.data.get("done"): moved_images.extend(response.data["done"]) @@ -654,13 +657,13 @@ class SetAnnotationStatues(BaseReportableUseCase): ERROR_MESSAGE = "Failed to change status" def __init__( - self, - reporter: Reporter, - project: ProjectEntity, - folder: FolderEntity, - annotation_status: str, - service_provider: BaseServiceProvider, - item_names: List[str] = None, + self, + reporter: Reporter, + project: ProjectEntity, + folder: FolderEntity, + annotation_status: str, + service_provider: BaseServiceProvider, + item_names: List[str] = None, ): super().__init__(reporter) self._project = project @@ -682,7 +685,7 @@ def validate_items(self): return existing_items = [] for i in range(0, len(self._item_names), self.CHUNK_SIZE): - search_names = self._item_names[i : i + self.CHUNK_SIZE] # noqa + search_names = self._item_names[i: i + self.CHUNK_SIZE] # noqa response = self._service_provider.items.list_by_names( project=self._project, folder=self._folder, @@ -706,7 +709,7 @@ def execute(self): status_changed = self._service_provider.items.set_statuses( project=self._project, folder=self._folder, - item_names=self._item_names[i : i + self.CHUNK_SIZE], # noqa: E203, + item_names=self._item_names[i: i + self.CHUNK_SIZE], # noqa: E203, annotation_status=self._annotation_status_code, ) if not status_changed: @@ -715,15 +718,96 @@ def execute(self): return self._response +class SetApprovalStatues(BaseReportableUseCase): + CHUNK_SIZE = 3000 + ERROR_MESSAGE = "Failed to change approval status." + + def __init__( + self, + reporter: Reporter, + project: ProjectEntity, + folder: FolderEntity, + approval_status: str, + service_provider: BaseServiceProvider, + item_names: List[str] = None, + ): + super().__init__(reporter) + self._project = project + self._folder = folder + self._item_names = item_names + self._approval_status_code = constants.ApprovalStatus.get_value(approval_status) + self._service_provider = service_provider + + def validate_items(self): + if not self._item_names: + condition = Condition("project_id", self._project.id, EQ) & Condition( + "folder_id", self._folder.id, EQ + ) + self._item_names = [ + item.name for item in self._service_provider.items.list(condition).data + ] + return + else: + _tmp = set(self._item_names) + unique, total = len(_tmp), len(self._item_names) + if unique < total: + logger.info( + f"Dropping duplicates. Found {unique}/{total} unique items." 
+ ) + self._item_names = list(_tmp) + existing_items = [] + for i in range(0, len(self._item_names), self.CHUNK_SIZE): + search_names = self._item_names[i: i + self.CHUNK_SIZE] # noqa + response = self._service_provider.items.list_by_names( + project=self._project, + folder=self._folder, + names=search_names, + ) + if not response.ok: + raise AppValidationException(response.error) + cand_items = response.data + existing_items += cand_items + if not existing_items: + raise AppValidationException("No items found.") + if existing_items: + self._item_names = list( + {i.name for i in existing_items}.intersection(set(self._item_names)) + ) + + def execute(self): + if self.is_valid(): + total_items = 0 + for i in range(0, len(self._item_names), self.CHUNK_SIZE): + response = self._service_provider.items.set_approval_statuses( + project=self._project, + folder=self._folder, + item_names=self._item_names[i: i + self.CHUNK_SIZE], # noqa: E203, + approval_status=self._approval_status_code, + ) + if not response.ok: + if response.error == 'Unsupported project type.': + self._response.errors = f"The function is not supported for" \ + f" {constants.ProjectType.get_name(self._project.type)} projects." + else: + self._response.errors = self.ERROR_MESSAGE + return self._response + total_items += len(response.data) + if total_items: + logger.info( + f"Successfully updated {total_items}/{len(self._item_names)} item(s)" + ) + return self._response + + class DeleteItemsUseCase(BaseUseCase): CHUNK_SIZE = 1000 def __init__( - self, - project: ProjectEntity, - folder: FolderEntity, - service_provider: BaseServiceProvider, - item_names: List[str] = None, + self, + project: ProjectEntity, + folder: FolderEntity, + service_provider: BaseServiceProvider, + item_names: List[str] = None, ): super().__init__() self._project = project @@ -754,7 +838,7 @@ def execute(self): for i in range(0, len(item_ids), self.CHUNK_SIZE): self._service_provider.items.delete_multiple( project=self._project, - item_ids=item_ids[i : i + self.CHUNK_SIZE], # noqa: E203 + item_ids=item_ids[i: i + self.CHUNK_SIZE], # noqa: E203 ) logger.info( f"Items deleted in project {self._project.name}{'/' + self._folder.name if not self._folder.is_root else ''}" @@ -767,13 +851,13 @@ class AddItemsToSubsetUseCase(BaseUseCase): CHUNK_SIZE = 5000 def __init__( - self, - reporter: Reporter, - project: ProjectEntity, - subset_name: str, - items: List[dict], - service_provider: BaseServiceProvider, - root_folder: FolderEntity, + self, + reporter: Reporter, + project: ProjectEntity, + subset_name: str, + items: List[dict], + service_provider: BaseServiceProvider, + root_folder: FolderEntity, ): self.reporter = reporter self.project = project @@ -787,7 +871,7 @@ def __init__( super().__init__() def __filter_duplicates( - self, + self, ): def uniqueQ(item, seen): result = True @@ -809,7 +893,7 @@ def uniqueQ(item, seen): return uniques def __filter_invalid_items( - self, + self, ): def validQ(item): if "id" in item: @@ -824,7 +908,7 @@ def validQ(item): return filtered_items def __separate_to_paths( - self, + self, ): for item in self.items: if "id" in item: @@ -840,13 +924,13 @@ def __separate_to_paths( # so that we don't query them later. 
# Otherwise include folder in path object in order to later run a query - removeables = [] + removables = [] for path, value in self.path_separated.items(): project, folder = extract_project_folder(path) if project != self.project.name: - removeables.append(path) + removables.append(path) continue # If no folder was provided in the path use "root" @@ -872,13 +956,13 @@ def __separate_to_paths( break # If the folder did not exist add to skipped if not folder_found: - removeables.append(path) + removables.append(path) except Exception as e: - removeables.append(path) + removables.append(path) # Removing completely incorrect paths and their items - for item in removeables: + for item in removables: self.results["skipped"].extend(self.path_separated[item]["items"]) self.path_separated.pop(item) @@ -937,13 +1021,13 @@ def __distribute_to_results(self, item_id, response, item): self.results["failed"].append(item) def validate_items( - self, + self, ): filtered_items = self.__filter_duplicates() if len(filtered_items) != len(self.items): self.reporter.log_info( - f"Dropping duplicates. Found {len(filtered_items)} / {len(self.items)} unique items" + f"Dropping duplicates. Found {len(filtered_items)} / {len(self.items)} unique items." ) self.items = filtered_items self.items = self.__filter_invalid_items() @@ -955,7 +1039,7 @@ def validate_project(self): raise AppException(response.error) def execute( - self, + self, ): if self.is_valid(): @@ -970,6 +1054,7 @@ def execute( ids = future.result() self.item_ids.extend(ids) except Exception: + raise logger.debug(traceback.format_exc()) subsets = self._service_provider.subsets.list(self.project).data @@ -993,7 +1078,7 @@ def execute( for i in range(0, len(self.item_ids), self.CHUNK_SIZE): tmp_response = self._service_provider.subsets.add_items( project=self.project, - item_ids=self.item_ids[i : i + self.CHUNK_SIZE], # noqa + item_ids=self.item_ids[i: i + self.CHUNK_SIZE], # noqa subset=subset, ) @@ -1016,7 +1101,7 @@ def execute( for path, value in self.path_separated.items(): for item in value: item_id = item.pop( - "id" + "id", None ) # Need to remove it, since its added artificially self.__distribute_to_results(item_id, response, item) diff --git a/src/superannotate/lib/infrastructure/controller.py b/src/superannotate/lib/infrastructure/controller.py index ecbc7c540..e02edfaca 100644 --- a/src/superannotate/lib/infrastructure/controller.py +++ b/src/superannotate/lib/infrastructure/controller.py @@ -482,6 +482,23 @@ def set_annotation_statuses( ) return use_case.execute() + def set_approval_statuses( + self, + project: ProjectEntity, + folder: FolderEntity, + approval_status: str, + item_names: List[str] = None, + ): + use_case = usecases.SetApprovalStatues( + Reporter(), + project=project, + folder=folder, + approval_status=approval_status, + item_names=item_names, + service_provider=self.service_provider, + ) + return use_case.execute() + def update(self, project: ProjectEntity, item: BaseItemEntity): use_case = usecases.UpdateItemUseCase( project=project, service_provider=self.service_provider, item=item diff --git a/src/superannotate/lib/infrastructure/serviceprovider.py b/src/superannotate/lib/infrastructure/serviceprovider.py index cb04a2257..7e000488b 100644 --- a/src/superannotate/lib/infrastructure/serviceprovider.py +++ b/src/superannotate/lib/infrastructure/serviceprovider.py @@ -2,7 +2,6 @@ from typing import List import lib.core as constants -import requests from lib.core import entities from lib.core.conditions import Condition from 
lib.core.service_types import DownloadMLModelAuthDataResponse @@ -253,7 +252,7 @@ def saqul_query( if query: data["query"] = query items = [] - response = requests.Response() + response = None for _ in range(0, self.MAX_ITEMS_COUNT, self.SAQUL_CHUNK_SIZE): response = self.client.request( self.URL_SAQUL_QUERY, "post", params=params, data=data @@ -263,8 +262,14 @@ def saqul_query( response_items = response.data items.extend(response_items) if len(response_items) < self.SAQUL_CHUNK_SIZE: - service_response = ServiceResponse(response) - service_response.data = items - return service_response + break data["image_index"] += self.SAQUL_CHUNK_SIZE - return ServiceResponse(response) + + if response: + response = ServiceResponse(status=response.status_code, data=items) + if not response.ok: + response.set_error(response.error) + response = ServiceResponse(status=response.status_code, data=items) + else: + response = ServiceResponse(status=200, data=[]) + return response diff --git a/src/superannotate/lib/infrastructure/services/http_client.py b/src/superannotate/lib/infrastructure/services/http_client.py index e6cfc9f10..1d0fb9fbe 100644 --- a/src/superannotate/lib/infrastructure/services/http_client.py +++ b/src/superannotate/lib/infrastructure/services/http_client.py @@ -6,7 +6,6 @@ from contextlib import contextmanager from functools import lru_cache from typing import Any -from typing import Callable from typing import Dict from typing import List @@ -20,7 +19,6 @@ from superannotate import __version__ from superannotate.logger import get_default_logger - logger = get_default_logger() @@ -84,6 +82,7 @@ def safe_api(): return safe_api def _request(self, url, method, session, retried=0, **kwargs): + with self.safe_api(): req = requests.Request( method=method, @@ -117,13 +116,12 @@ def request( headers=None, params=None, retried=0, - item_type=None, content_type=ServiceResponse, files=None, - dispatcher: Callable = None, + dispatcher: str = None, ) -> ServiceResponse: - kwargs = {"params": {"team_id": self.team_id}} _url = self._get_url(url) + kwargs = {"params": {"team_id": self.team_id}} if data: kwargs["data"] = json.dumps(data, cls=PydanticEncoder) if params: @@ -135,7 +133,7 @@ def request( response = self._request(_url, method, session=session, retried=0, **kwargs) if files: session.headers.update(self.default_headers) - return content_type(response, dispatcher=dispatcher) + return self.serialize_response(response, content_type, dispatcher) def paginate( self, @@ -143,7 +141,6 @@ def paginate( item_type: Any = None, chunk_size: int = 2000, query_params: Dict[str, Any] = None, - dispatcher: str = "data", ) -> ServiceResponse: offset = 0 total = [] @@ -152,29 +149,24 @@ def paginate( while True: _url = f"{url}{splitter}offset={offset}" _response = self.request( - _url, - method="get", - item_type=List[item_type], - params=query_params, + _url, method="get", params=query_params, dispatcher="data" ) if _response.ok: - if isinstance(_response.data, dict): - data = _response.data.get(dispatcher) - else: - data = _response.data - if data: - total.extend(data) + if _response.data: + total.extend(_response.data) else: break - data_len = len(data) + data_len = len(_response.data) offset += data_len if data_len < chunk_size or _response.count - offset < 0: break else: break + if item_type: response = ServiceResponse( - data=pydantic.parse_obj_as(List[item_type], total) + status=_response.status, + data=pydantic.parse_obj_as(List[item_type], total), ) else: response = ServiceResponse(data=total) @@ 
-182,3 +174,36 @@ def paginate( response.set_error(_response.error) response.status = _response.status return response + + @staticmethod + def serialize_response( + response: requests.Response, content_type, dispatcher: str = None + ) -> ServiceResponse: + data = { + "status": response.status_code, + } + try: + data_json = response.json() + if not response.ok: + if response.status_code in (502, 504): + data[ + "_error" + ] = "Our service is currently unavailable, please try again later." + else: + data["_error"] = data_json.get( + "error", data_json.get("errors", "Unknown Error") + ) + else: + if dispatcher: + if dispatcher in data_json: + data["data"] = data_json.pop(dispatcher) + else: + data["data"] = data_json + data_json = {} + data.update(data_json) + else: + data["data"] = data_json + return content_type(**data) + except json.decoder.JSONDecodeError: + data["reason"] = response.reason + return content_type(**data) diff --git a/src/superannotate/lib/infrastructure/services/integration.py b/src/superannotate/lib/infrastructure/services/integration.py index 4c05b35ca..0b605cab2 100644 --- a/src/superannotate/lib/infrastructure/services/integration.py +++ b/src/superannotate/lib/infrastructure/services/integration.py @@ -1,5 +1,5 @@ from lib.core import entities -from lib.core.service_types import IntegrationResponse +from lib.core.service_types import IntegrationListResponse from lib.core.serviceproviders import BaseIntegrationService @@ -12,8 +12,7 @@ def list(self): res = self.client.request( self.URL_LIST, "get", - content_type=IntegrationResponse, - dispatcher=lambda x: x["integrations"], + content_type=IntegrationListResponse, ) return res diff --git a/src/superannotate/lib/infrastructure/services/item.py b/src/superannotate/lib/infrastructure/services/item.py index 198b848bd..5837701d1 100644 --- a/src/superannotate/lib/infrastructure/services/item.py +++ b/src/superannotate/lib/infrastructure/services/item.py @@ -29,6 +29,7 @@ class ItemService(BaseItemService): URL_COPY_PROGRESS = "images/copy-image-progress" URL_DELETE_ITEMS = "image/delete/images" URL_SET_ANNOTATION_STATUSES = "image/updateAnnotationStatusBulk" + URL_SET_APPROVAL_STATUSES = "/items/bulk/change" URL_GET_BY_ID = "image/{image_id}" PROJECT_TYPE_RESPONSE_MAP = { @@ -200,6 +201,23 @@ def set_statuses( }, ) + def set_approval_statuses( + self, + project: entities.ProjectEntity, + folder: entities.FolderEntity, + item_names: List[str], + approval_status: int, + ): + return self.client.request( + self.URL_SET_APPROVAL_STATUSES, + "post", + params={"project_id": project.id, "folder_id": folder.id}, + data={ + "item_names": item_names, + "change_actions": {"APPROVAL_STATUS": approval_status}, + }, + ) + def delete_multiple(self, project: entities.ProjectEntity, item_ids: List[int]): return self.client.request( self.URL_DELETE_ITEMS, diff --git a/src/superannotate/logger.py b/src/superannotate/logger.py index 502f1f6f7..fb0288c8e 100644 --- a/src/superannotate/logger.py +++ b/src/superannotate/logger.py @@ -2,7 +2,6 @@ import os from logging import Formatter from logging.handlers import RotatingFileHandler -from os.path import expanduser import superannotate.lib.core as constances @@ -10,6 +9,33 @@ loggers = {} +def get_server_logger(): + global loggers + if loggers.get("sa_server"): + return loggers.get("sa_server") + else: + logger = logging.getLogger("sa_server") + logger.propagate = False + logger.setLevel(logging.INFO) + stream_handler = logging.StreamHandler() + logger.addHandler(stream_handler) + try: + 
log_file_path = os.path.join(constances.LOG_FILE_LOCATION, "sa_server.log") + if os.access(log_file_path, os.W_OK): + file_handler = RotatingFileHandler( + log_file_path, + maxBytes=5 * 1024 * 1024, + backupCount=2, + mode="a", + ) + logger.addHandler(file_handler) + except OSError: + pass + finally: + loggers["sa_server"] = logger + return logger + + def get_default_logger(): global loggers if loggers.get("sa"): @@ -24,11 +50,11 @@ def get_default_logger(): # logger.handlers[0] = stream_handler logger.addHandler(stream_handler) try: - log_file_path = expanduser(constances.LOG_FILE_LOCATION) + log_file_path = os.path.join(constances.LOG_FILE_LOCATION, "sa.log") open(log_file_path, "w").close() if os.access(log_file_path, os.W_OK): file_handler = RotatingFileHandler( - expanduser(constances.LOG_FILE_LOCATION), + log_file_path, maxBytes=5 * 1024 * 1024, backupCount=5, mode="a", diff --git a/tests/data_set/pixel_with_holes/1.jpg b/tests/data_set/pixel_with_holes/1.jpg new file mode 100644 index 000000000..0ae101bc8 Binary files /dev/null and b/tests/data_set/pixel_with_holes/1.jpg differ diff --git a/tests/data_set/pixel_with_holes/1.jpg___fuse.png b/tests/data_set/pixel_with_holes/1.jpg___fuse.png new file mode 100644 index 000000000..e9e1c51ca Binary files /dev/null and b/tests/data_set/pixel_with_holes/1.jpg___fuse.png differ diff --git a/tests/data_set/pixel_with_holes/1.jpg___pixel.json b/tests/data_set/pixel_with_holes/1.jpg___pixel.json new file mode 100644 index 000000000..c44c4a543 --- /dev/null +++ b/tests/data_set/pixel_with_holes/1.jpg___pixel.json @@ -0,0 +1 @@ +{"metadata":{"lastAction":{"email":"vaghinak@superannotate.com","timestamp":1673510498284},"width":500,"height":365,"name":"1.jpg","projectId":339870,"isPredicted":false,"isSegmented":false,"status":"Completed","pinned":false,"annotatorEmail":null,"qaEmail":null},"comments":[],"tags":[],"instances":[{"classId":2592034,"probability":100,"visible":true,"attributes":[],"parts":[{"color":"#00000f"},{"color":"#00002d"}],"error":null,"locked":false,"createdAt":"2023-01-12T08:03:15.225Z","createdBy":{"email":"vaghinak@superannotate.com","role":"Admin"},"creationType":"Manual","updatedAt":"2023-01-12T08:04:55.686Z","updatedBy":{"email":"vaghinak@superannotate.com","role":"Admin"},"className":"asd"},{"classId":2592034,"probability":100,"visible":true,"attributes":[],"parts":[{"color":"#00003c"}],"error":null,"locked":false,"createdAt":"2023-01-12T08:04:13.118Z","createdBy":{"email":"vaghinak@superannotate.com","role":"Admin"},"creationType":"Manual","updatedAt":"2023-01-12T08:04:52.119Z","updatedBy":{"email":"vaghinak@superannotate.com","role":"Admin"},"className":"asd"},{"classId":2592034,"probability":100,"visible":true,"attributes":[],"parts":[{"color":"#00004b"}],"error":null,"locked":false,"createdAt":"2023-01-12T08:04:23.153Z","createdBy":{"email":"vaghinak@superannotate.com","role":"Admin"},"creationType":"Manual","updatedAt":"2023-01-12T08:04:50.635Z","updatedBy":{"email":"vaghinak@superannotate.com","role":"Admin"},"className":"asd"},{"classId":2592034,"probability":100,"visible":true,"attributes":[],"parts":[{"color":"#00005a"}],"error":null,"locked":false,"createdAt":"2023-01-12T08:04:38.966Z","createdBy":{"email":"vaghinak@superannotate.com","role":"Admin"},"creationType":"Manual","updatedAt":"2023-01-12T08:04:49.691Z","updatedBy":{"email":"vaghinak@superannotate.com","role":"Admin"},"className":"asd"}]} \ No newline at end of file diff --git a/tests/data_set/pixel_with_holes/1.jpg___save.png 
b/tests/data_set/pixel_with_holes/1.jpg___save.png new file mode 100644 index 000000000..9b9ca5095 Binary files /dev/null and b/tests/data_set/pixel_with_holes/1.jpg___save.png differ diff --git a/tests/data_set/pixel_with_holes/2.webp b/tests/data_set/pixel_with_holes/2.webp new file mode 100644 index 000000000..a5d966d4f Binary files /dev/null and b/tests/data_set/pixel_with_holes/2.webp differ diff --git a/tests/data_set/pixel_with_holes/2.webp___pixel.json b/tests/data_set/pixel_with_holes/2.webp___pixel.json new file mode 100644 index 000000000..fd2918b35 --- /dev/null +++ b/tests/data_set/pixel_with_holes/2.webp___pixel.json @@ -0,0 +1 @@ +{"metadata":{"lastAction":{"email":"vaghinak@superannotate.com","timestamp":1673521182450},"width":1300,"height":1300,"name":"2.webp","projectId":339870,"isPredicted":false,"isSegmented":false,"status":"Completed","pinned":false,"annotatorEmail":null,"qaEmail":null},"comments":[],"tags":[],"instances":[{"classId":2592034,"probability":100,"visible":true,"attributes":[],"parts":[{"color":"#0000a5"}],"error":null,"locked":false,"createdAt":"2023-01-12T11:00:06.454Z","createdBy":{"email":"vaghinak@superannotate.com","role":"Admin"},"creationType":"Manual","updatedAt":"2023-01-12T11:00:35.733Z","updatedBy":{"email":"vaghinak@superannotate.com","role":"Admin"},"className":"asd"}]} \ No newline at end of file diff --git a/tests/data_set/pixel_with_holes/2.webp___save.png b/tests/data_set/pixel_with_holes/2.webp___save.png new file mode 100644 index 000000000..886ca34c9 Binary files /dev/null and b/tests/data_set/pixel_with_holes/2.webp___save.png differ diff --git a/tests/data_set/pixel_with_holes/classes/classes.json b/tests/data_set/pixel_with_holes/classes/classes.json new file mode 100644 index 000000000..888850000 --- /dev/null +++ b/tests/data_set/pixel_with_holes/classes/classes.json @@ -0,0 +1,12 @@ +[ + { + "id": 2592034, + "project_id": 339870, + "name": "asd", + "color": "#b4a2df", + "type": "object", + "createdAt": "2023-01-12T07:58:03.000Z", + "updatedAt": "2023-01-12T07:58:03.000Z", + "attribute_groups": [] + } +] \ No newline at end of file diff --git a/tests/integration/integrations/test_get_integrations.py b/tests/integration/integrations/test_get_integrations.py index 7efaf3a48..49028da51 100644 --- a/tests/integration/integrations/test_get_integrations.py +++ b/tests/integration/integrations/test_get_integrations.py @@ -21,7 +21,7 @@ class TestGetIntegrations(BaseTestCase): def folder_path(self): return os.path.join(dirname(dirname(__file__)), self.TEST_FOLDER_PATH) - @pytest.mark.skip("Need to adjust") + # @pytest.mark.skip("Need to adjust") def test_get(self): integrations = sa.get_integrations() integrations = sa.attach_items_from_integrated_storage(self.PROJECT_NAME, integrations[0]["name"]) diff --git a/tests/integration/items/test_set_approval_statuses.py b/tests/integration/items/test_set_approval_statuses.py new file mode 100644 index 000000000..adff1ba10 --- /dev/null +++ b/tests/integration/items/test_set_approval_statuses.py @@ -0,0 +1,94 @@ +import os +from pathlib import Path + +from src.superannotate import AppException +from src.superannotate import SAClient +from src.superannotate.lib.core.usecases import SetApprovalStatues +from tests.integration.base import BaseTestCase + +sa = SAClient() + + +class TestSetApprovalStatuses(BaseTestCase): + PROJECT_NAME = "TestSetApprovalStatuses" + PROJECT_DESCRIPTION = "TestSetApprovalStatuses" + PROJECT_TYPE = "Document" + FOLDER_NAME = "test_folder" + CSV_PATH = 
"data_set/attach_urls.csv" + EXAMPLE_IMAGE_1 = "6022a74d5384c50017c366b3" + EXAMPLE_IMAGE_2 = "6022a74b5384c50017c366ad" + ATTACHMENT_LIST = [ + { + "url": "https://drive.google.com/uc?export=download&id=1vwfCpTzcjxoEA4hhDxqapPOVvLVeS7ZS", + "name": "6022a74d5384c50017c366b3" + }, + { + "url": "https://drive.google.com/uc?export=download&id=1geS2YtQiTYuiduEirKVYxBujHJaIWA3V", + "name": "6022a74b5384c50017c366ad" + }, + { + "url": "1SfGcn9hdkVM35ZP0S93eStsE7Ti4GtHU", + "path": "123" + }, + { + "url": "https://drive.google.com/uc?export=download&id=1geS2YtQiTYuiduEirKVYxBujHJaIWA3V", + "name": "6022a74b5384c50017c366ad" + }, + ] + + @property + def scv_path(self): + return os.path.join(Path(__file__).parent.parent.parent, self.CSV_PATH) + + def test_image_approval_status(self): + sa.attach_items( + self.PROJECT_NAME, self.ATTACHMENT_LIST + ) + + sa.set_approval_statuses( + self.PROJECT_NAME, "Approved", + ) + for image in sa.search_items(self.PROJECT_NAME): + self.assertEqual(image["approval_status"], "Approved") + + def test_image_approval_status_via_names(self): + sa.attach_items( + self.PROJECT_NAME, self.ATTACHMENT_LIST + ) + + sa.set_approval_statuses( + self.PROJECT_NAME, "Approved", [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2] + ) + + for image_name in [self.EXAMPLE_IMAGE_1, self.EXAMPLE_IMAGE_2]: + metadata = sa.get_item_metadata(self.PROJECT_NAME, image_name) + self.assertEqual(metadata["approval_status"], "Approved") + + def test_image_approval_status_via_invalid_names(self): + sa.attach_items( + self.PROJECT_NAME, self.ATTACHMENT_LIST, "InProgress" + ) + with self.assertRaisesRegexp(AppException, SetApprovalStatues.ERROR_MESSAGE): + sa.set_approval_statuses( + self.PROJECT_NAME, "Approved", ["self.EXAMPLE_IMAGE_1", "self.EXAMPLE_IMAGE_2"] + ) + + def test_set_approval_statuses(self): + sa.attach_items( + self.PROJECT_NAME, [self.ATTACHMENT_LIST[0]] + ) + sa.set_approval_statuses( + self.PROJECT_NAME, approval_status=None, items=[self.ATTACHMENT_LIST[0]["name"]] + ) + data = sa.search_items(self.PROJECT_NAME)[0] + assert data["approval_status"] is None + + def test_set_invalid_approval_statuses(self): + sa.attach_items( + self.PROJECT_NAME, [self.ATTACHMENT_LIST[0]] + ) + with self.assertRaisesRegexp(AppException, 'Available approval_status options are None, Disapproved, Approved.'): + sa.set_approval_statuses( + self.PROJECT_NAME, approval_status="aaa", items=[self.ATTACHMENT_LIST[0]["name"]] + ) + diff --git a/tests/integration/test_benchmark.py b/tests/integration/test_benchmark.py index b5672cc67..e0417690e 100644 --- a/tests/integration/test_benchmark.py +++ b/tests/integration/test_benchmark.py @@ -31,7 +31,7 @@ def folder_path(self): def export_path(self): return os.path.join(dirname(dirname(__file__)), self.TEST_EXPORT_ROOT) - @pytest.mark.flaky(reruns=2) + @pytest.mark.skip("Need to adjust") def test_benchmark(self): sa.create_folder(self.PROJECT_NAME, self.TEST_FOLDER_NAME) annotation_types = ["polygon", "bbox", "point"] diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py index f43f27000..2f84350fb 100644 --- a/tests/integration/test_cli.py +++ b/tests/integration/test_cli.py @@ -166,3 +166,10 @@ def test_attach_document_urls(self): self._create_project("Document") self.safe_run(self._cli.attach_document_urls, self.PROJECT_NAME, str(self.video_csv_path)) self.assertEqual(3, len(sa.search_items(self.PROJECT_NAME))) + + def test_create_server(self): + with tempfile.TemporaryDirectory() as temp_dir: + self._cli.create_server('test', temp_dir) + # 
self._cli.create_server('testo', '/Users/vaghinak.basentsyan/www/for_fun') + assert (Path(temp_dir) / 'test' / 'app.py').exists() + assert (Path(temp_dir) / 'test' / 'wsgi.py').exists() diff --git a/tests/integration/test_convert_project_type.py b/tests/integration/test_convert_project_type.py new file mode 100644 index 000000000..1aef442ae --- /dev/null +++ b/tests/integration/test_convert_project_type.py @@ -0,0 +1,29 @@ +import json +import os +import tempfile +from pathlib import Path +from unittest import TestCase + +from src.superannotate import convert_project_type +from tests import DATA_SET_PATH + + +class TestConvertProjectType(TestCase): + TEST_FOLDER_PATH = 'pixel_with_holes' + FIRST_IMAGE = '1.jpg' + SECOND_IMAGE = '2.webp' + + @property + def folder_path(self): + return os.path.join(DATA_SET_PATH, self.TEST_FOLDER_PATH) + + def test_convert_pixel_with_holes_to_vector(self): + with tempfile.TemporaryDirectory() as temp_dir: + convert_project_type(self.folder_path, temp_dir) + + assert len(list(Path(temp_dir).glob("*"))) == 5 + annotation_files = [i.name for i in Path(temp_dir).glob("*___objects.json")] + assert len(annotation_files) == 2 + with open(os.path.join(temp_dir, f"{self.SECOND_IMAGE}___objects.json")) as file: + data = json.load(file) + assert len(data['instances'][0]['exclude']) == 4 diff --git a/tests/unit/test_usecases.py b/tests/unit/test_usecases.py index 5c8db613f..8a8eeda8a 100644 --- a/tests/unit/test_usecases.py +++ b/tests/unit/test_usecases.py @@ -30,5 +30,4 @@ def test_validate_should_be_called(self): validate_method.assert_called() def test_validate_should_fill_errors(self): - print(self.use_case.execute().errors) assert len(self.use_case.execute().errors) == 2
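
The hunks above wire bulk approval-status updates end to end: the ``SetApprovalStatues`` use case, ``Controller.set_approval_statuses``, ``ItemService.set_approval_statuses`` (a POST to ``/items/bulk/change`` with an ``APPROVAL_STATUS`` change action), and the integration tests. A minimal usage sketch, assuming the public call signature exercised in ``tests/integration/items/test_set_approval_statuses.py``; the project path is a hypothetical placeholder and the item names are taken from that test:

.. code-block:: python

    from superannotate import SAClient

    sa = SAClient()

    # Approve two specific items in a folder. "Medical Batch 1/batch_1" is a
    # placeholder project/folder path; the names mirror the integration test.
    sa.set_approval_statuses(
        "Medical Batch 1/batch_1",
        approval_status="Approved",
        items=["6022a74d5384c50017c366b3", "6022a74b5384c50017c366ad"],
    )

    # Per the tests, omitting `items` updates every item in the folder, and
    # approval_status=None clears the status again (valid values there are
    # None, "Disapproved" and "Approved").
    sa.set_approval_statuses("Medical Batch 1/batch_1", approval_status=None)

The use case sends the names in batches of ``CHUNK_SIZE`` (3000) and reports unsupported project types with a dedicated error message instead of the generic "Failed to change approval status." one.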
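
``MoveItems``, ``SetAnnotationStatues``, ``SetApprovalStatues`` and ``DeleteItemsUseCase`` all drive the backend with the same ``items[i: i + CHUNK_SIZE]`` slicing loop. A standalone sketch of that pattern, with ``send_chunk`` as a hypothetical stand-in for the service-provider call:

.. code-block:: python

    # Generic form of the chunked-request loops used by the use cases above;
    # CHUNK_SIZE varies per use case (1000, 3000 or 5000 in this patch).
    from typing import Callable, List

    CHUNK_SIZE = 3000


    def process_in_chunks(items: List[str], send_chunk: Callable[[List[str]], bool]) -> int:
        """Send `items` in CHUNK_SIZE slices; return how many were submitted."""
        submitted = 0
        for i in range(0, len(items), CHUNK_SIZE):
            chunk = items[i: i + CHUNK_SIZE]
            if not send_chunk(chunk):
                # The use cases stop and record an error on the first failed chunk.
                break
            submitted += len(chunk)
        return submitted


    if __name__ == "__main__":
        # Toy usage: "send" seven items and count how many went through.
        print(process_in_chunks([f"item_{n}.jpg" for n in range(7)], lambda chunk: True))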
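
The new ``pixel_with_holes`` fixtures and ``tests/integration/test_convert_project_type.py`` cover pixel-to-vector conversion where a mask contains holes, which end up in the instance's ``exclude`` list. A small sketch of the same call, assuming ``convert_project_type`` is importable from the top-level package as the test does:

.. code-block:: python

    import json
    import tempfile
    from pathlib import Path

    from superannotate import convert_project_type

    # Source: a Pixel-project export with classes/classes.json and *___pixel.json
    # files; this path points at the fixture added in this patch.
    src = "tests/data_set/pixel_with_holes"

    with tempfile.TemporaryDirectory() as dst:
        convert_project_type(src, dst)
        data = json.loads(Path(dst, "2.webp___objects.json").read_text())
        # Holes in the pixel mask are stored as "exclude" polygons on the instance.
        print(len(data["instances"][0]["exclude"]))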