diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 73eb042dd..5a835dcec 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -6,6 +6,19 @@ History
 All release highlights of this project will be documented in this file.
 
+4.4.26 - Oct 29, 2024
+________________________
+
+**Added**
+
+ - ``SAClient.copy_items`` and ``SAClient.move_items``: added the ability to copy/move item categories, plus a ``duplicate_strategy`` argument ("skip", "replace", "replace_annotations_only").
+
+**Updated**
+
+ - Fixed ``SAClient.get_annotations()`` to handle annotations that contain all UTF-8 characters.
+ - Renamed the project type GenAI to Multimodal.
+
+
 4.4.25 - Oct 7, 2024
 ________________________
 
diff --git a/src/superannotate/__init__.py b/src/superannotate/__init__.py
index d691f8b54..5648fb49c 100644
--- a/src/superannotate/__init__.py
+++ b/src/superannotate/__init__.py
@@ -3,7 +3,7 @@
 import sys
 
-__version__ = "4.4.25"
+__version__ = "4.4.26"
 
 os.environ.update({"sa_version": __version__})
 sys.path.append(os.path.split(os.path.realpath(__file__))[0])
@@ -18,7 +18,7 @@
 from lib.app.input_converters import convert_project_type
 from lib.app.input_converters import export_annotation
 from lib.app.input_converters import import_annotation
-from lib.app.interface.sdk_interface import SAClient
+from superannotate.lib.app.interface.sdk_interface import SAClient
 
 SESSIONS = {}
diff --git a/src/superannotate/lib/app/interface/sdk_interface.py b/src/superannotate/lib/app/interface/sdk_interface.py
index a9cea019d..baecfba2f 100644
--- a/src/superannotate/lib/app/interface/sdk_interface.py
+++ b/src/superannotate/lib/app/interface/sdk_interface.py
@@ -79,7 +79,7 @@
     "Document",
     "Tiled",
     "PointCloud",
-    "GenAI",
+    "Multimodal",
 ]
 
 
@@ -307,7 +307,7 @@ def create_project(
         :param project_description: the new project's description
         :type project_description: str
 
-        :param project_type: the new project type, Vector, Pixel, Video, Document, Tiled, PointCloud, GenAI.
+        :param project_type: the new project type: Vector, Pixel, Video, Document, Tiled, PointCloud, or Multimodal.
         :type project_type: str
 
         :param settings: list of settings objects
@@ -1215,11 +1215,11 @@ def prepare_export(
         :param only_pinned: enable only pinned output in export. This option disables all other types of output.
         :type only_pinned: bool
 
-        :param kwargs:
-            Arbitrary kwargs:
+        :param kwargs: Arbitrary keyword arguments:
 
-            - integration_name: can be provided which will be used as a storage to store export file
-            - format: can be CSV for the Gen AI projects
+            - integration_name: The name of the platform integration used as the storage for the export file.
+            - format: The format in which the data will be exported for Multimodal projects.
+              It can be either CSV or JSON. If None, the data will be exported in the default JSON format.
 
         :return: metadata object of the prepared export
         :rtype: dict
@@ -1232,7 +1232,7 @@
                 project = "Project Name",
                 folder_names = ["Folder 1", "Folder 2"],
                 annotation_statuses = ["Completed","QualityCheck"],
-                export_type = "CSV"
+                format = "CSV"
             )
             client.download_export("Project Name", export, "path_to_download")
@@ -2318,7 +2318,7 @@ def invite_contributors_to_team(
 
     def get_annotations(
         self,
-        project: Union[int, NotEmptyStr],
+        project: Union[NotEmptyStr, int],
         items: Optional[Union[List[NotEmptyStr], List[int]]] = None,
     ):
         """Returns annotations for the given list of items.
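The UTF-8 fix mentioned in the changelog relies on buffering the streamed response bytes until they form complete UTF-8 text before JSON-decoding them, so a multi-byte character split across two network chunks is never decoded partially (see the ``StreamedAnnotations`` changes further down in this diff). The following is only a minimal, self-contained sketch of that buffering idea; the helper name ``iter_streamed_json``, the simplified delimiter handling, and the sample payload are illustrative, not part of the SDK::

    import json

    DELIMITER = "\n;)\n"  # record separator between streamed annotation objects (simplified)

    def iter_streamed_json(byte_chunks):
        """Yield JSON objects from an iterable of byte chunks, UTF-8 safely."""
        decoder = json.JSONDecoder()
        pending = b""  # raw bytes waiting for the rest of a UTF-8 sequence
        text = ""      # decoded text that may hold zero or more JSON objects
        for chunk in byte_chunks:
            pending += chunk
            try:
                text += pending.decode("utf-8")
                pending = b""
            except UnicodeDecodeError:
                continue  # incomplete multi-byte character, wait for more bytes
            while True:
                text = text.lstrip()
                if text.startswith(";)"):  # leftover of the "\n;)\n" delimiter
                    text = text[2:].lstrip()
                if not text:
                    break
                try:
                    obj, end = decoder.raw_decode(text)
                except json.JSONDecodeError:
                    break  # incomplete JSON object, wait for more bytes
                yield obj
                text = text[end:]

    # Example: two records, split in the middle of a multi-byte character.
    payload = ('{"name": "ֆայլ.jpg"}' + DELIMITER + '{"name": "b.jpg"}').encode("utf-8")
    print(list(iter_streamed_json([payload[:11], payload[11:]])))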
@@ -2663,8 +2663,8 @@ def search_items( def list_items( self, - project: Union[int, str], - folder: Optional[Union[int, str]] = None, + project: Union[NotEmptyStr, int], + folder: Optional[Union[NotEmptyStr, int]] = None, *, include: List[Literal["custom_metadata"]] = None, **filters, @@ -2672,14 +2672,14 @@ def list_items( """ Search items by filtering criteria. - :param project: The project name, project ID, or folder path (e.g., "project1/folder1") to search within. + :param project: The project name, project ID, or folder path (e.g., "project1") to search within. This can refer to the root of the project or a specific subfolder. - :type project: Union[int, str] + :type project: Union[NotEmptyStr, int] :param folder: The folder name or ID to search within. If None, the search will be done in the root folder of the project. If both “project” and “folder” specify folders, the “project” value will take priority. - :type folder: Union[int, str], optional + :type folder: Union[NotEmptyStr, int], optional :param include: Specifies additional fields to include in the response. @@ -2694,8 +2694,8 @@ def list_items( Supported operations: - - __ne: Value is in the list. - - __in: Value is not equal. + - __ne: Value is not equal. + - __in: Value is in the list. - __notin: Value is not in the list. - __contains: Value has the substring. - __starts: Value starts with the prefix. @@ -2710,6 +2710,9 @@ def list_items( - name__contains: str - name__starts: str - name__ends: str + - annotation_status: str + - annotation_status__in: list[str] + - annotation_status__ne: list[str] - approval_status: Literal["Approved", "Disapproved", None] - assignments__user_id: str - assignments__user_id__ne: str @@ -2927,26 +2930,46 @@ def copy_items( source: Union[NotEmptyStr, dict], destination: Union[NotEmptyStr, dict], items: Optional[List[NotEmptyStr]] = None, - include_annotations: Optional[bool] = True, + include_annotations: bool = True, + duplicate_strategy: Literal[ + "skip", "replace", "replace_annotations_only" + ] = "skip", ): """Copy images in bulk between folders in a project - :param source: project name or folder path to select items from (e.g., “project1/folder1”). + :param source: project name (root) or folder path to pick items from (e.g., “project1/folder1”). :type source: str - :param destination: project name (root) or folder path to place copied items. + :param destination: project name (root) or folder path to place copied items (e.g., “project1/folder2”). :type destination: str :param items: names of items to copy. If None, all items from the source directory will be copied. :type items: list of str - :param include_annotations: enables annotations copy + :param include_annotations: enables the copying of item data, including annotations, status, priority score, + approval state, and category. If set to False, only the items will be copied without additional data. :type include_annotations: bool + :param duplicate_strategy: Specifies the strategy for handling duplicate items in the destination. + The default value is "skip". + + - "skip": skips duplicate items in the destination and continues with the next item. + - "replace": replaces the annotations, status, priority score, approval state, and category of duplicate items. + - "replace_annotations_only": replaces only the annotations of duplicate items, + leaving other data (status, priority score, approval state, and category) unchanged. 
+ + :type duplicate_strategy: Literal["skip", "replace", "replace_annotations_only"] + :return: list of skipped item names :rtype: list of strs """ + if not include_annotations and duplicate_strategy != "skip": + duplicate_strategy = "skip" + logger.warning( + "Copy operation continuing without annotations and metadata due to include_annotations=False." + ) + project_name, source_folder = extract_project_folder(source) to_project_name, destination_folder = extract_project_folder(destination) if project_name != to_project_name: @@ -2960,6 +2983,7 @@ def copy_items( to_folder=to_folder, item_names=items, include_annotations=include_annotations, + duplicate_strategy=duplicate_strategy, ) if response.errors: raise AppException(response.errors) @@ -2971,18 +2995,31 @@ def move_items( source: Union[NotEmptyStr, dict], destination: Union[NotEmptyStr, dict], items: Optional[List[NotEmptyStr]] = None, + duplicate_strategy: Literal[ + "skip", "replace", "replace_annotations_only" + ] = "skip", ): """Move images in bulk between folders in a project - :param source: project name or folder path to pick items from (e.g., “project1/folder1”). + :param source: project name (root) or folder path to pick items from (e.g., “project1/folder1”). :type source: str - :param destination: project name (root) or folder path to move items to. + :param destination: project name (root) or folder path to move items to (e.g., “project1/folder2”). :type destination: str :param items: names of items to move. If None, all items from the source directory will be moved. :type items: list of str + :param duplicate_strategy: Specifies the strategy for handling duplicate items in the destination. + The default value is "skip". + + - "skip": skips duplicate items in the destination and continues with the next item. + - "replace": replaces the annotations, status, priority score, approval state, and category of duplicate items. + - "replace_annotations_only": replaces only the annotations of duplicate items, + leaving other data (status, priority score, approval state, and category) unchanged. 
+ + :type duplicate_strategy: Literal["skip", "replace", "replace_annotations_only"] + :return: list of skipped item names :rtype: list of strs """ @@ -3000,6 +3037,7 @@ def move_items( from_folder=source_folder, to_folder=destination_folder, item_names=items, + duplicate_strategy=duplicate_strategy, ) if response.errors: raise AppException(response.errors) diff --git a/src/superannotate/lib/core/entities/project.py b/src/superannotate/lib/core/entities/project.py index 1c26162f8..d17f6e6b2 100644 --- a/src/superannotate/lib/core/entities/project.py +++ b/src/superannotate/lib/core/entities/project.py @@ -163,6 +163,7 @@ class TeamEntity(BaseModel): users: Optional[List[UserEntity]] pending_invitations: Optional[List[Any]] creator_id: Optional[str] + owner_id: Optional[str] class Config: extra = Extra.ignore diff --git a/src/superannotate/lib/core/enums.py b/src/superannotate/lib/core/enums.py index 8e24ac5c8..387bb6018 100644 --- a/src/superannotate/lib/core/enums.py +++ b/src/superannotate/lib/core/enums.py @@ -108,7 +108,7 @@ class ProjectType(BaseTitledEnum): TILED = "Tiled", 5 OTHER = "Other", 6 POINT_CLOUD = "PointCloud", 7 - GEN_AI = "GenAI", 8 + MULTIMODAL = "Multimodal", 8 UNSUPPORTED_TYPE_1 = "UnsupportedType", 9 UNSUPPORTED_TYPE_2 = "UnsupportedType", 10 diff --git a/src/superannotate/lib/core/service_types.py b/src/superannotate/lib/core/service_types.py index 08dc94289..3c34a550f 100644 --- a/src/superannotate/lib/core/service_types.py +++ b/src/superannotate/lib/core/service_types.py @@ -246,5 +246,5 @@ class SettingsListResponse(ServiceResponse): ProjectType.PIXEL: ImageResponse, ProjectType.DOCUMENT: DocumentResponse, ProjectType.POINT_CLOUD: PointCloudResponse, - ProjectType.GEN_AI: ImageResponse, + ProjectType.MULTIMODAL: ImageResponse, } diff --git a/src/superannotate/lib/core/serviceproviders.py b/src/superannotate/lib/core/serviceproviders.py index 57a2cbf24..05dc2637c 100644 --- a/src/superannotate/lib/core/serviceproviders.py +++ b/src/superannotate/lib/core/serviceproviders.py @@ -5,6 +5,7 @@ from typing import Callable from typing import Dict from typing import List +from typing import Literal from lib.core import entities from lib.core.conditions import Condition @@ -287,10 +288,30 @@ def copy_multiple( ) -> ServiceResponse: raise NotImplementedError + @abstractmethod + def copy_move_multiple( + self, + project: entities.ProjectEntity, + from_folder: entities.FolderEntity, + to_folder: entities.FolderEntity, + item_names: List[str], + duplicate_strategy: Literal["skip", "replace", "replace_annotations_only"], + operation: Literal["copy", "move"], + include_annotations: bool = True, + include_pin: bool = False, + ) -> ServiceResponse: + raise NotImplementedError + @abstractmethod def await_copy(self, project: entities.ProjectEntity, poll_id: int, items_count): raise NotImplementedError + @abstractmethod + def await_copy_move( + self, project: entities.ProjectEntity, poll_id: int, items_count + ): + raise NotImplementedError + @abstractmethod def set_statuses( self, diff --git a/src/superannotate/lib/core/usecases/items.py b/src/superannotate/lib/core/usecases/items.py index b22d403bf..fc7c0e5de 100644 --- a/src/superannotate/lib/core/usecases/items.py +++ b/src/superannotate/lib/core/usecases/items.py @@ -33,6 +33,7 @@ from lib.core.usecases.folders import SearchFoldersUseCase from lib.infrastructure.utils import divide_to_chunks from lib.infrastructure.utils import extract_project_folder +from typing_extensions import Literal logger = logging.getLogger("sa") @@ 
-537,6 +538,159 @@ def execute(self):
         return self._response
 
 
+class CopyMoveItems(BaseReportableUseCase):
+    """
+    Copy/Move items in bulk between folders in a project.
+    Return skipped item names.
+    """
+
+    def __init__(
+        self,
+        reporter: Reporter,
+        project: ProjectEntity,
+        from_folder: FolderEntity,
+        to_folder: FolderEntity,
+        item_names: List[str],
+        service_provider: BaseServiceProvider,
+        include_annotations: bool,
+        duplicate_strategy: Literal["skip", "replace", "replace_annotations_only"],
+        operation: Literal["copy", "move"],
+        chunk_size: int = 1000,
+    ):
+        super().__init__(reporter)
+        self._project = project
+        self._from_folder = from_folder
+        self._to_folder = to_folder
+        self._item_names = item_names
+        self._service_provider = service_provider
+        self._include_annotations = include_annotations
+        self._duplicate_strategy = duplicate_strategy
+        self._operation = operation
+        self._chunk_size = chunk_size
+
+    def _validate_limitations(self, items_count):
+        response = self._service_provider.get_limitations(
+            project=self._project,
+            folder=self._to_folder,
+        )
+        if not response.ok:
+            raise AppValidationException(response.error)
+        if self._operation == "copy":
+            folder_limit_err_msg = constants.COPY_FOLDER_LIMIT_ERROR_MESSAGE
+            project_limit_err_msg = constants.COPY_PROJECT_LIMIT_ERROR_MESSAGE
+        else:
+            folder_limit_err_msg = constants.MOVE_FOLDER_LIMIT_ERROR_MESSAGE
+            project_limit_err_msg = constants.MOVE_PROJECT_LIMIT_ERROR_MESSAGE
+        if items_count > response.data.folder_limit.remaining_image_count:
+            raise AppValidationException(folder_limit_err_msg)
+        if items_count > response.data.project_limit.remaining_image_count:
+            raise AppValidationException(project_limit_err_msg)
+
+    def validate_item_names(self):
+        if self._item_names:
+            provided_items_count = len(self._item_names)
+            self._item_names = list(set(self._item_names))
+            if len(self._item_names) < provided_items_count:
+                self.reporter.log_info(
+                    f"Dropping duplicates. Found {len(self._item_names)}/{provided_items_count} unique items."
+ ) + + def execute(self): + if self.is_valid(): + if self._item_names: + items = self._item_names + else: + res = self._service_provider.item_service.list( + self._project.id, self._from_folder.id, EmptyQuery() + ) + if res.error: + raise AppException(res.error) + items = [i.name for i in res.data] + try: + self._validate_limitations(len(items)) + except AppValidationException as e: + self._response.errors = e + return self._response + skipped_items = [] + if self._duplicate_strategy == "skip": + existing_items = [] + for i in range(0, len(items), self._chunk_size): + query = Filter( + "name", items[i : i + self._chunk_size], OperatorEnum.IN + ) # noqa + res = self._service_provider.item_service.list( + self._project.id, self._to_folder.id, query + ) + if res.error: + raise AppException(res.error) + if not res.data: + continue + existing_items += res.data + duplications = [item.name for item in existing_items] + items_to_processing = list(set(items) - set(duplications)) + skipped_items.extend(duplications) + else: + items_to_processing = items + if items_to_processing: + for i in range(0, len(items_to_processing), self._chunk_size): + chunk_to_process = items_to_processing[ + i : i + self._chunk_size + ] # noqa: E203 + response = self._service_provider.items.copy_move_multiple( + project=self._project, + from_folder=self._from_folder, + to_folder=self._to_folder, + item_names=chunk_to_process, + include_annotations=self._include_annotations, + duplicate_strategy=self._duplicate_strategy, + operation=self._operation, + ) + if not response.ok or not response.data.get("poll_id"): + skipped_items.extend(chunk_to_process) + continue + try: + self._service_provider.items.await_copy_move( + project=self._project, + poll_id=response.data["poll_id"], + items_count=len(chunk_to_process), + ) + except BackendError as e: + self._response.errors = AppException(e) + return self._response + existing_items = [] + for i in range(0, len(items_to_processing), self._chunk_size): + res = self._service_provider.item_service.list( + self._project.id, + self._to_folder.id, + Filter( + "name", + items_to_processing[i : i + self._chunk_size], + OperatorEnum.IN, + ), # noqa + ) + if res.error: + raise AppException(res.error) + + existing_items += res.data + + existing_item_names_set = {item.name for item in existing_items} + items_to_processing_names_set = set(items_to_processing) + processed_items = existing_item_names_set.intersection( + items_to_processing_names_set + ) + skipped_items.extend( + list(items_to_processing_names_set - processed_items) + ) + operation_processing_map = {"copy": "Copied", "move": "Moved"} + self.reporter.log_info( + f"{operation_processing_map[self._operation]} {len(processed_items)}/{len(items)} item(s) from " + f"{self._project.name}{'' if self._from_folder.is_root else f'/{self._from_folder.name}'} to " + f"{self._project.name}{'' if self._to_folder.is_root else f'/{self._to_folder.name}'}" + ) + self._response.data = list(set(skipped_items)) + return self._response + + class SetAnnotationStatues(BaseReportableUseCase): CHUNK_SIZE = 500 ERROR_MESSAGE = "Failed to change status" diff --git a/src/superannotate/lib/core/usecases/models.py b/src/superannotate/lib/core/usecases/models.py index aaa629d4a..749e68c52 100644 --- a/src/superannotate/lib/core/usecases/models.py +++ b/src/superannotate/lib/core/usecases/models.py @@ -82,7 +82,10 @@ def validate_export_type(self): raise AppValidationException( "COCO format is not supported for this project." 
) - elif self._export_type == 3 and self._project.type != ProjectType.GEN_AI.value: + elif ( + self._export_type == 3 + and self._project.type != ProjectType.MULTIMODAL.value + ): raise AppValidationException( "CSV format is not supported for this project." ) diff --git a/src/superannotate/lib/infrastructure/controller.py b/src/superannotate/lib/infrastructure/controller.py index ea99a337b..658cb743d 100644 --- a/src/superannotate/lib/infrastructure/controller.py +++ b/src/superannotate/lib/infrastructure/controller.py @@ -6,6 +6,7 @@ from typing import Callable from typing import Dict from typing import List +from typing import Literal from typing import Optional from typing import Tuple from typing import Union @@ -28,6 +29,7 @@ from lib.core.entities import UserEntity from lib.core.entities.classes import AnnotationClassEntity from lib.core.entities.integrations import IntegrationEntity +from lib.core.enums import ProjectType from lib.core.exceptions import AppException from lib.core.jsx_conditions import EmptyQuery from lib.core.jsx_conditions import Filter @@ -58,6 +60,7 @@ class ItemFilters(TypedDict, total=False): name__ends: Optional[str] annotation_status: Optional[str] annotation_status__in: Optional[List[str]] + annotation_status__ne: Optional[List[str]] approval_status: Optional[str] approval_status__in: Optional[List[str]] approval_status__ne: Optional[str] @@ -540,18 +543,33 @@ def copy_multiple( project: ProjectEntity, from_folder: FolderEntity, to_folder: FolderEntity, + duplicate_strategy: Literal["skip", "replace", "replace_annotations_only"], item_names: List[str] = None, - include_annotations: bool = False, + include_annotations: bool = True, ): - use_case = usecases.CopyItems( - reporter=Reporter(), - project=project, - from_folder=from_folder, - to_folder=to_folder, - item_names=item_names, - service_provider=self.service_provider, - include_annotations=include_annotations, - ) + if project.type == ProjectType.PIXEL: + use_case = usecases.CopyItems( + reporter=Reporter(), + project=project, + from_folder=from_folder, + to_folder=to_folder, + item_names=item_names, + service_provider=self.service_provider, + include_annotations=include_annotations, + ) + else: + use_case = usecases.CopyMoveItems( + reporter=Reporter(), + project=project, + from_folder=from_folder, + to_folder=to_folder, + item_names=item_names, + service_provider=self.service_provider, + include_annotations=include_annotations, + duplicate_strategy=duplicate_strategy, + operation="copy", + chunk_size=500, + ) return use_case.execute() def move_multiple( @@ -559,16 +577,31 @@ def move_multiple( project: ProjectEntity, from_folder: FolderEntity, to_folder: FolderEntity, + duplicate_strategy: Literal["skip", "replace", "replace_annotations_only"], item_names: List[str] = None, ): - use_case = usecases.MoveItems( - reporter=Reporter(), - project=project, - from_folder=from_folder, - to_folder=to_folder, - item_names=item_names, - service_provider=self.service_provider, - ) + if project.type == ProjectType.PIXEL: + use_case = usecases.MoveItems( + reporter=Reporter(), + project=project, + from_folder=from_folder, + to_folder=to_folder, + item_names=item_names, + service_provider=self.service_provider, + ) + else: + use_case = usecases.CopyMoveItems( + reporter=Reporter(), + project=project, + from_folder=from_folder, + to_folder=to_folder, + item_names=item_names, + service_provider=self.service_provider, + duplicate_strategy=duplicate_strategy, + include_annotations=True, + operation="move", + 
chunk_size=500, + ) return use_case.execute() def set_annotation_statuses( @@ -909,6 +942,10 @@ def __init__(self, config: ConfigEntity): self.subsets = SubsetManager(self.service_provider) self.integrations = IntegrationManager(self.service_provider) + @property + def org_id(self): + return self._team.owner_id + @property def current_user(self): return self._user diff --git a/src/superannotate/lib/infrastructure/serviceprovider.py b/src/superannotate/lib/infrastructure/serviceprovider.py index 38a91dadd..9c66b8839 100644 --- a/src/superannotate/lib/infrastructure/serviceprovider.py +++ b/src/superannotate/lib/infrastructure/serviceprovider.py @@ -1,3 +1,4 @@ +import base64 import datetime from typing import List @@ -36,6 +37,7 @@ class ServiceProvider(BaseServiceProvider): URL_FOLDERS_IMAGES = "images-folders" URL_INVITE_CONTRIBUTORS = "api/v1/team/{}/inviteUsers" URL_ANNOTATION_UPLOAD_PATH_TOKEN = "images/getAnnotationsPathsAndTokens" + URL_CREATE_WORKFLOW = "api/v1/workflows/submit" def __init__(self, client: HttpClient): self.enum_mapping = {"approval_status": ApprovalStatus.get_mapping()} @@ -254,3 +256,17 @@ def invite_contributors(self, team_id: int, team_role: int, emails: List[str]): "post", data=dict(emails=emails, team_role=team_role), ) + + def create_custom_workflow(self, org_id: str, data: dict): + return self.client.request( + url=self.URL_CREATE_WORKFLOW, + method="post", + headers={ + "x-sa-entity-context": base64.b64encode( + f'{{"team_id":{self.client.team_id},"organization_id":"{org_id}"}}'.encode( + "utf-8" + ) + ).decode() + }, + data=data, + ) diff --git a/src/superannotate/lib/infrastructure/services/item.py b/src/superannotate/lib/infrastructure/services/item.py index 87c093b93..e64195dd7 100644 --- a/src/superannotate/lib/infrastructure/services/item.py +++ b/src/superannotate/lib/infrastructure/services/item.py @@ -1,6 +1,7 @@ import time from typing import Dict from typing import List +from typing import Literal from lib.core import entities from lib.core.exceptions import AppException @@ -19,6 +20,7 @@ class ItemService(BaseItemService): URL_COPY_PROGRESS = "images/copy-image-progress" URL_DELETE_ITEMS = "image/delete/images" URL_SET_APPROVAL_STATUSES = "/items/bulk/change" + URL_COPY_MOVE_MULTIPLE = "images/copy-move-images-folders" def update(self, project: entities.ProjectEntity, item: entities.BaseItemEntity): return self.client.request( @@ -112,6 +114,62 @@ def move_multiple( }, ) + def copy_move_multiple( + self, + project: entities.ProjectEntity, + from_folder: entities.FolderEntity, + to_folder: entities.FolderEntity, + item_names: List[str], + duplicate_strategy: Literal["skip", "replace", "replace_annotations_only"], + operation: Literal["copy", "move"], + include_annotations: bool = True, + include_pin: bool = False, + ): + """ + Returns poll id. 
+ """ + duplicate_behaviour_map = { + "skip": "skip_duplicates", + "replace": "replace_all", + "replace_annotations_only": "replace_annotation", + } + return self.client.request( + self.URL_COPY_MOVE_MULTIPLE, + "post", + params={"project_id": project.id}, + data={ + "is_folder_copy": False, + "image_names": item_names, + "destination_folder_id": to_folder.id, + "source_folder_id": from_folder.id, + "include_annotations": include_annotations, + "keep_pin_status": include_pin, + "duplicate_behaviour": duplicate_behaviour_map[duplicate_strategy], + "operate_function": operation, + }, + ) + + def await_copy_move( + self, project: entities.ProjectEntity, poll_id: int, items_count + ): + try: + await_time = 60 + items_count * 0.3 # time for waiting backend processing + timeout_start = time.time() + while time.time() < timeout_start + await_time: + response = self.client.request( + self.URL_COPY_PROGRESS, + "get", + params={"project_id": project.id, "poll_id": poll_id}, + ) + if not response.ok: + return response + progress = response.data.get("progress") + if progress == "finished": + break + time.sleep(4) + except (AppException, Exception) as e: + raise BackendError(e) + def set_statuses( self, project: entities.ProjectEntity, diff --git a/src/superannotate/lib/infrastructure/services/work_management.py b/src/superannotate/lib/infrastructure/services/work_management.py index 4a9a02142..95fb138d1 100644 --- a/src/superannotate/lib/infrastructure/services/work_management.py +++ b/src/superannotate/lib/infrastructure/services/work_management.py @@ -13,6 +13,8 @@ class WorkManagementService(BaseWorkManagementService): URL_LIST = "workflows" URL_LIST_STATUSES = "workflows/{workflow_id}/workflowstatuses" URL_LIST_ROLES = "workflows/{workflow_id}/workflowroles" + URL_CREATE_ROLE = "roles" + URL_CREATE_STATUS = "statuses" def get_workflow(self, pk: int) -> WorkflowEntity: response = self.list_workflows(Filter("id", pk, OperatorEnum.EQ)) @@ -65,3 +67,31 @@ def list_workflow_roles(self, project_id: int, workflow_id: int): "join": "role", }, ) + + def create_custom_role(self, org_id: str, data: dict): + return self.client.request( + url=self.URL_CREATE_ROLE, + method="post", + headers={ + "x-sa-entity-context": base64.b64encode( + f'{{"team_id":{self.client.team_id},"organization_id":"{org_id}"}}'.encode( + "utf-8" + ) + ).decode() + }, + data=data, + ) + + def create_custom_status(self, org_id: str, data: dict): + return self.client.request( + url=self.URL_CREATE_STATUS, + method="post", + headers={ + "x-sa-entity-context": base64.b64encode( + f'{{"team_id":{self.client.team_id},"organization_id":"{org_id}"}}'.encode( + "utf-8" + ) + ).decode() + }, + data=data, + ) diff --git a/src/superannotate/lib/infrastructure/stream_data_handler.py b/src/superannotate/lib/infrastructure/stream_data_handler.py index 2dc7098e5..cbb77c025 100644 --- a/src/superannotate/lib/infrastructure/stream_data_handler.py +++ b/src/superannotate/lib/infrastructure/stream_data_handler.py @@ -1,5 +1,6 @@ import copy import json +import logging import os import typing from typing import Callable @@ -13,6 +14,8 @@ total=_seconds, sock_connect=_seconds, sock_read=_seconds ) +logger = logging.getLogger("sa") + class StreamedAnnotations: DELIMITER = b"\\n;)\\n" @@ -46,21 +49,32 @@ async def fetch( data: dict = None, params: dict = None, ): - kwargs = {"params": params, "json": {}} - if "folder_id" in kwargs["params"]: - kwargs["json"] = {"folder_id": kwargs["params"].pop("folder_id")} + kwargs = {"params": params, "json": data} if 
data: kwargs["json"].update(data) response = await session.request(method, url, **kwargs, timeout=TIMEOUT) # noqa - buffer = b"" + if not response.ok: + logger.error(response.text) + buffer = "" + line_groups = b"" + decoder = json.JSONDecoder() async for line in response.content.iter_any(): - slices = (buffer + line).split(self.DELIMITER) - for _slice in slices[:-1]: - yield self.get_json(_slice) - buffer = slices[-1] - if buffer: - yield self.get_json(buffer) - self._reporter.update_progress() + line_groups += line + try: + buffer += line_groups.decode("utf-8") + line_groups = b"" + except UnicodeDecodeError: + continue + while buffer: + try: + json_obj, index = decoder.raw_decode(buffer) + yield json_obj + buffer = buffer[index + len(self.DELIMITER) :].lstrip() + except json.decoder.JSONDecodeError as e: + logger.debug( + f"Failed to parse buffer, buffer_len: {len(buffer)}// buffer_end: ...{buffer[-100:]}, error: {e}" + ) + break async def list_annotations( self, diff --git a/tests/applicatoin/custom_workflow.py b/tests/applicatoin/custom_workflow.py index 538e6c9f4..0ad970732 100644 --- a/tests/applicatoin/custom_workflow.py +++ b/tests/applicatoin/custom_workflow.py @@ -1,4 +1,5 @@ import contextvars +import json import os import re import time @@ -24,9 +25,39 @@ class TestWorkflow(TestCase): CLASSES_PATH = "sample_project_vector/classes/classes.json" ANNOTATIONS_PATH = "sample_project_vector" PROJECT_TYPE = "Vector" + CUSTOM_WORKFLOW = "application/custom_workflow_payload.json" @classmethod def setUpClass(cls, *args, **kwargs): + + # setup custom role + sa.controller.service_provider.work_management.create_custom_role( + org_id=sa.controller.org_id, + data={ + "name": "CustomRole", + "description": "Test custom role", + "rolePermissions": [{"permission_id": 11}, {"permission_id": 12}], + }, + ) + + # setup custom status + sa.controller.service_provider.work_management.create_custom_status( + org_id=sa.controller.org_id, + data={ + "name": "CustomStatus", + "icon_id": 7, + "shortcut_id": 7, + "description": "test status", + }, + ) + + # setup custom workflow + with open(os.path.join(DATA_SET_PATH, cls.CUSTOM_WORKFLOW)) as f: + sa.controller.service_provider.create_custom_workflow( + org_id=sa.controller.org_id, + data=json.load(f), + ) + cls.tearDownClass() cls._project = sa.create_project( cls.PROJECT_NAME, cls.PROJECT_DESCRIPTION, cls.PROJECT_TYPE, workflow="ttp" @@ -37,12 +68,9 @@ def tearDownClass(cls) -> None: try: projects = sa.search_projects(cls.PROJECT_NAME, return_metadata=True) for project in projects: - try: - sa.delete_project(project) - except Exception as e: - print(str(e)) - except Exception as e: - print(str(e)) + sa.delete_project(project) + except Exception: + pass @property def classes_path(self): diff --git a/tests/data_set/application/custom_workflow_payload.json b/tests/data_set/application/custom_workflow_payload.json new file mode 100644 index 000000000..624b87e41 --- /dev/null +++ b/tests/data_set/application/custom_workflow_payload.json @@ -0,0 +1,80 @@ +{ + "name": "ttp", + "description": "mock workflow for test", + "raw_config": { + "roles": [ + "Annotator", + "QA", + "CustomRole" + ], + "statuses": [ + "NotStarted", + "QualityCheck", + "Returned", + "Skipped", + "Completed", + "CustomStatus" + ], + "transitions": [ + { + "to": "QualityCheck", + "from": "NotStarted", + "name": "move to in progress", + "allowed_roles": [ + "Annotator" + ] + }, + { + "to": "QualityCheck", + "from": "Returned", + "name": "move to quality check", + "allowed_roles": [ + 
"Annotator", + "QA" + ] + }, + { + "to": "Returned", + "from": "QualityCheck", + "name": "move back to in progress", + "allowed_roles": [ + "Annotator", + "QA" + ] + }, + { + "to": "Completed", + "from": "QualityCheck", + "name": "move to completed", + "allowed_roles": [ + "QA" + ] + } + ], + "start_status": "Returned", + "allowed_resources_to_roles": { + "QA": { + "view_items": { + "statuses": [ + "QualityCheck" + ] + } + }, + "Annotator": { + "view_items": { + "statuses": [ + "NotStarted", + "Returned" + ] + } + }, + "CustomRole": { + "view_items": { + "statuses": [ + "CustomStatus" + ] + } + } + } + } +} \ No newline at end of file diff --git a/tests/integration/annotations/validations/test_gen_ai_annotation_validation.py b/tests/integration/annotations/validations/test_gen_ai_annotation_validation.py index 90b4adfde..f9d9f8d4d 100644 --- a/tests/integration/annotations/validations/test_gen_ai_annotation_validation.py +++ b/tests/integration/annotations/validations/test_gen_ai_annotation_validation.py @@ -7,7 +7,7 @@ class TestVectorValidators(TestCase): - PROJECT_TYPE = "GenAi" + PROJECT_TYPE = "Multimodal" @patch("builtins.print") def test_validate_annotation_without_metadata(self, mock_print): diff --git a/tests/integration/items/test_copy_items.py b/tests/integration/items/test_copy_items.py index cd26f323e..a8eee95a5 100644 --- a/tests/integration/items/test_copy_items.py +++ b/tests/integration/items/test_copy_items.py @@ -1,5 +1,8 @@ +import json import os +import tempfile from collections import Counter +from os.path import join from pathlib import Path from src.superannotate import AppException @@ -13,10 +16,26 @@ class TestCopyItems(BaseTestCase): PROJECT_NAME = "TestCopyItemsVector" PROJECT_DESCRIPTION = "TestCopyItemsVector" PROJECT_TYPE = "Vector" - IMAGE_NAME = "test_image" + IMAGE_NAME = "example_image_1.jpg" + IMAGE_NAME_2 = "example_image_2.jpg" FOLDER_1 = "folder_1" FOLDER_2 = "folder_2" CSV_PATH = "data_set/attach_urls.csv" + TEST_FOLDER_PATH = "data_set/sample_project_vector" + ATTACHMENT = [ + { + "url": "https://drive.google.com/uc?export=download&id=1vwfCpTzcjxoEA4hhDxqapPOVvLVeS7ZS", + "name": IMAGE_NAME, + }, + { + "url": "https://drive.google.com/uc?export=download&id=1vwfCpTzcjxoEA4hhDxqapPOVvLVeS7Zw", + "name": IMAGE_NAME_2, + }, + ] + + @property + def folder_path(self): + return os.path.join(Path(__file__).parent.parent.parent, self.TEST_FOLDER_PATH) @property def scv_path(self): @@ -32,6 +51,28 @@ def test_copy_items_from_root(self): assert len(skipped_items) == 0 assert len(sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}")) == 7 + def test_copy_items_from_root_with_annotations(self): + uploaded, _, _ = sa.attach_items(self.PROJECT_NAME, self.ATTACHMENT) + assert len(uploaded) == 2 + annotation_path = join(self.folder_path, f"{self.IMAGE_NAME}___objects.json") + sa.upload_image_annotations(self.PROJECT_NAME, self.IMAGE_NAME, annotation_path) + sa.create_folder(self.PROJECT_NAME, self.FOLDER_1) + skipped_items = sa.copy_items( + self.PROJECT_NAME, f"{self.PROJECT_NAME}/{self.FOLDER_1}" + ) + assert len(skipped_items) == 0 + assert len(sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}")) == 2 + with tempfile.TemporaryDirectory() as tmp_dir: + sa.download_image_annotations( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", self.IMAGE_NAME, tmp_dir + ) + origin_annotation = json.load(open(annotation_path)) + annotation = json.load(open(join(tmp_dir, f"{self.IMAGE_NAME}.json"))) + self.assertEqual( + len([i["attributes"] for i in annotation["instances"]]), + 
len([i["attributes"] for i in origin_annotation["instances"]]), + ) + def test_copy_items_from_not_existing_folder(self): with self.assertRaisesRegexp(AppException, "Folder not found."): sa.copy_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}", self.PROJECT_NAME) @@ -68,18 +109,65 @@ def test_skipped_count(self): def test_copy_items_wrong_items_list(self): uploaded, _, _ = sa.attach_items( self.PROJECT_NAME, - [ - { - "url": "https://drive.google.com/uc?export=download&id=1vwfCpTzcjxoEA4hhDxqapPOVvLVeS7ZS", - "name": self.IMAGE_NAME, - } - ], + self.ATTACHMENT, + ) + sa.set_approval_statuses(self.PROJECT_NAME, "Approved", items=[self.IMAGE_NAME]) + sa.set_annotation_statuses( + self.PROJECT_NAME, "Completed", items=[self.IMAGE_NAME] ) sa.create_folder(self.PROJECT_NAME, self.FOLDER_1) skipped_items = sa.copy_items( self.PROJECT_NAME, f"{self.PROJECT_NAME}/{self.FOLDER_1}", - items=["as", "asd"], + items=["as", "asd", self.IMAGE_NAME], ) + items = sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}") + assert len(items) == 1 + assert items[0]["name"] == self.IMAGE_NAME + assert items[0]["annotation_status"] == "Completed" + assert items[0]["approval_status"] == "Approved" assert Counter(skipped_items) == Counter(["as", "asd"]) - assert len(sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}")) == 0 + + def test_copy_duplicated_items_without_data_with_replace_strategy(self): + sa.create_folder(self.PROJECT_NAME, self.FOLDER_1) + sa.create_folder(self.PROJECT_NAME, self.FOLDER_2) + uploaded, _, _ = sa.attach_items( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", self.ATTACHMENT + ) + assert len(uploaded) == 2 + sa.set_approval_statuses( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", + "Approved", + items=[self.IMAGE_NAME, self.IMAGE_NAME_2], + ) + sa.set_annotation_statuses( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", + "Completed", + items=[self.IMAGE_NAME, self.IMAGE_NAME_2], + ) + + uploaded_2, _, _ = sa.attach_items( + f"{self.PROJECT_NAME}/{self.FOLDER_2}", self.ATTACHMENT + ) + assert len(uploaded_2) == 2 + + with self.assertLogs("sa", level="WARNING") as cm: + skipped_items = sa.copy_items( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", + f"{self.PROJECT_NAME}/{self.FOLDER_2}", + include_annotations=False, + duplicate_strategy="replace", + ) + assert ( + "WARNING:sa:Copy operation continuing without annotations and metadata" + " due to include_annotations=False." 
== cm.output[0] + ) + assert len(skipped_items) == 2 + folder_1_items = sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}") + folder_2_items = sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_2}") + assert len(folder_1_items) == 2 + assert len(folder_2_items) == 2 + + folder_2_items = sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_2}") + assert folder_2_items[0]["annotation_status"] == "NotStarted" + assert not folder_2_items[0]["approval_status"] diff --git a/tests/integration/items/test_list_items.py b/tests/integration/items/test_list_items.py index d9181942b..7d2cdabf9 100644 --- a/tests/integration/items/test_list_items.py +++ b/tests/integration/items/test_list_items.py @@ -1,4 +1,6 @@ import os +import random +import string from pathlib import Path from src.superannotate import AppException @@ -47,3 +49,15 @@ def test_invalid_filter(self): sa.list_items(self.PROJECT_NAME, assignments__user_role="Dummy") with self.assertRaisesRegexp(AppException, "Invalid status provided."): sa.list_items(self.PROJECT_NAME, annotation_status="Dummy") + + def test_list_items_URL_limit(self): + items_for_attache = [] + item_names = [] + for i in range(125): + name = f"{''.join(random.choice(string.ascii_letters + string.digits) for _ in range(120))}" + item_names.append(name) + items_for_attache.append({"name": name, "url": f"{name}-{i}"}) + + sa.attach_items(self.PROJECT_NAME, items_for_attache) + items = sa.list_items(self.PROJECT_NAME, name__in=item_names) + assert len(items) == 125 diff --git a/tests/integration/items/test_move_items.py b/tests/integration/items/test_move_items.py index 24b03a32e..00232eb4e 100644 --- a/tests/integration/items/test_move_items.py +++ b/tests/integration/items/test_move_items.py @@ -1,4 +1,7 @@ +import json import os +import tempfile +from os.path import join from pathlib import Path from src.superannotate import SAClient @@ -11,10 +14,27 @@ class TestMoveItems(BaseTestCase): PROJECT_NAME = "TestMoveItemsVector" PROJECT_DESCRIPTION = "TestCopyItemsVector" PROJECT_TYPE = "Vector" - IMAGE_NAME = "test_image" + IMAGE_NAME = "example_image_1.jpg" + IMAGE_NAME_2 = "example_image_2.jpg" FOLDER_1 = "folder_1" FOLDER_2 = "folder_2" CSV_PATH = "data_set/attach_urls.csv" + TEST_FOLDER_PATH = "data_set/sample_project_vector" + + ATTACHMENT = [ + { + "url": "https://drive.google.com/uc?export=download&id=1vwfCpTzcjxoEA4hhDxqapPOVvLVeS7ZS", + "name": IMAGE_NAME, + }, + { + "url": "https://drive.google.com/uc?export=download&id=1vwfCpTzcjxoEA4hhDxqapPOVvLVeS7Zw", + "name": IMAGE_NAME_2, + }, + ] + + @property + def folder_path(self): + return os.path.join(Path(__file__).parent.parent.parent, self.TEST_FOLDER_PATH) @property def scv_path(self): @@ -34,13 +54,144 @@ def test_move_items_from_folder(self): sa.create_folder(self.PROJECT_NAME, self.FOLDER_1) sa.create_folder(self.PROJECT_NAME, self.FOLDER_2) uploaded, _, _ = sa.attach_items( - f"{self.PROJECT_NAME}/{self.FOLDER_1}", self.scv_path + f"{self.PROJECT_NAME}/{self.FOLDER_1}", self.ATTACHMENT ) - assert len(uploaded) == 7 + annotation_path = join(self.folder_path, f"{self.IMAGE_NAME}___objects.json") + sa.upload_image_annotations( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", self.IMAGE_NAME, annotation_path + ) + + assert len(uploaded) == 2 skipped_items = sa.move_items( f"{self.PROJECT_NAME}/{self.FOLDER_1}", f"{self.PROJECT_NAME}/{self.FOLDER_2}", ) assert len(skipped_items) == 0 - assert len(sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_2}")) == 7 + assert 
len(sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_2}")) == 2 assert len(sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}")) == 0 + with tempfile.TemporaryDirectory() as tmp_dir: + sa.download_image_annotations( + f"{self.PROJECT_NAME}/{self.FOLDER_2}", self.IMAGE_NAME, tmp_dir + ) + origin_annotation = json.load(open(annotation_path)) + annotation = json.load(open(join(tmp_dir, f"{self.IMAGE_NAME}.json"))) + self.assertEqual( + len([i["attributes"] for i in annotation["instances"]]), + len([i["attributes"] for i in origin_annotation["instances"]]), + ) + + def test_move_items_from_folder_with_replace(self): + sa.create_folder(self.PROJECT_NAME, self.FOLDER_1) + sa.create_folder(self.PROJECT_NAME, self.FOLDER_2) + uploaded, _, _ = sa.attach_items( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", self.ATTACHMENT + ) + assert len(uploaded) == 2 + sa.set_approval_statuses( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", + "Approved", + items=[self.IMAGE_NAME, self.IMAGE_NAME_2], + ) + sa.set_annotation_statuses( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", + "Completed", + items=[self.IMAGE_NAME, self.IMAGE_NAME_2], + ) + + uploaded_2, _, _ = sa.attach_items( + f"{self.PROJECT_NAME}/{self.FOLDER_2}", self.ATTACHMENT + ) + assert len(uploaded_2) == 2 + folder_2_items = sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_2}") + assert folder_2_items[0]["annotation_status"] == "NotStarted" + assert not folder_2_items[0]["approval_status"] + + skipped_items = sa.move_items( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", + f"{self.PROJECT_NAME}/{self.FOLDER_2}", + duplicate_strategy="replace", + ) + assert len(skipped_items) == 0 + folder_1_items = sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}") + folder_2_items = sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_2}") + assert len(folder_1_items) == 0 + assert len(folder_2_items) == 2 + + folder_2_items = sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_2}") + assert folder_2_items[0]["annotation_status"] == "Completed" + assert folder_2_items[0]["approval_status"] == "Approved" + + def test_move_items_from_folder_with_replace_annotations_only(self): + sa.create_folder(self.PROJECT_NAME, self.FOLDER_1) + sa.create_folder(self.PROJECT_NAME, self.FOLDER_2) + uploaded, _, _ = sa.attach_items( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", self.ATTACHMENT + ) + assert len(uploaded) == 2 + sa.set_approval_statuses( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", + "Approved", + items=[self.IMAGE_NAME, self.IMAGE_NAME_2], + ) + sa.set_annotation_statuses( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", + "Completed", + items=[self.IMAGE_NAME, self.IMAGE_NAME_2], + ) + annotation_path = join(self.folder_path, f"{self.IMAGE_NAME}___objects.json") + sa.upload_image_annotations( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", self.IMAGE_NAME, annotation_path + ) + + uploaded_2, _, _ = sa.attach_items( + f"{self.PROJECT_NAME}/{self.FOLDER_2}", self.ATTACHMENT + ) + assert len(uploaded_2) == 2 + folder_2_items = sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_2}") + assert folder_2_items[0]["annotation_status"] == "NotStarted" + assert not folder_2_items[0]["approval_status"] + + skipped_items = sa.move_items( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", + f"{self.PROJECT_NAME}/{self.FOLDER_2}", + duplicate_strategy="replace_annotations_only", + ) + assert len(skipped_items) == 0 + folder_1_items = sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_1}") + folder_2_items = sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_2}") + assert len(folder_1_items) == 0 + assert 
len(folder_2_items) == 2 + + folder_2_items = sa.search_items(f"{self.PROJECT_NAME}/{self.FOLDER_2}") + assert folder_2_items[0]["annotation_status"] == "NotStarted" + assert not folder_2_items[0]["approval_status"] + with tempfile.TemporaryDirectory() as tmp_dir: + sa.download_image_annotations( + f"{self.PROJECT_NAME}/{self.FOLDER_2}", self.IMAGE_NAME, tmp_dir + ) + origin_annotation = json.load(open(annotation_path)) + annotation = json.load(open(join(tmp_dir, f"{self.IMAGE_NAME}.json"))) + self.assertEqual( + len([i["attributes"] for i in annotation["instances"]]), + len([i["attributes"] for i in origin_annotation["instances"]]), + ) + + def test_move_items_from_folder_with_skip(self): + sa.create_folder(self.PROJECT_NAME, self.FOLDER_1) + sa.create_folder(self.PROJECT_NAME, self.FOLDER_2) + uploaded, _, _ = sa.attach_items( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", self.ATTACHMENT + ) + assert len(uploaded) == 2 + + uploaded_2, _, _ = sa.attach_items( + f"{self.PROJECT_NAME}/{self.FOLDER_2}", self.ATTACHMENT + ) + assert len(uploaded_2) == 2 + + skipped_items = sa.move_items( + f"{self.PROJECT_NAME}/{self.FOLDER_1}", + f"{self.PROJECT_NAME}/{self.FOLDER_2}", + duplicate_strategy="skip", + ) + assert len(skipped_items) == 2 diff --git a/tests/integration/projects/test_basic_project.py b/tests/integration/projects/test_basic_project.py index 800ec9052..0639a735a 100644 --- a/tests/integration/projects/test_basic_project.py +++ b/tests/integration/projects/test_basic_project.py @@ -11,9 +11,9 @@ sa = SAClient() -class TestGenAIProjectBasic(BaseTestCase): - PROJECT_NAME = "TestGenAICreate" - PROJECT_TYPE = "GenAI" +class TestMultimodalProjectBasic(BaseTestCase): + PROJECT_NAME = "TestMultimodalCreate" + PROJECT_TYPE = "Multimodal" PROJECT_DESCRIPTION = "DESCRIPTION" ANNOTATION_PATH = ( "data_set/sample_project_vector/example_image_1.jpg___objects.json" diff --git a/tests/integration/projects/test_create_project.py b/tests/integration/projects/test_create_project.py index 43cad9592..6749b5d2d 100644 --- a/tests/integration/projects/test_create_project.py +++ b/tests/integration/projects/test_create_project.py @@ -91,7 +91,7 @@ def test_create_project_datetime(self): def test_create_project_with_wrong_type(self): with self.assertRaisesRegexp( AppException, - "Available values are 'Vector', 'Pixel', 'Video', 'Document', 'Tiled', 'PointCloud', 'GenAI'.", + "Available values are 'Vector', 'Pixel', 'Video', 'Document', 'Tiled', 'PointCloud', 'Multimodal'.", ): sa.create_project(self.PROJECT, "desc", "wrong_type")
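Taken together, the SDK surface added in this release can be exercised roughly as follows. The project and folder names below are placeholders, and the behaviour follows the docstrings above (note that ``duplicate_strategy`` falls back to "skip" with a warning when ``include_annotations=False``)::

    from superannotate import SAClient

    sa = SAClient()

    # Copy items; duplicates in the destination get their annotations, status,
    # priority score, approval state, and category replaced.
    skipped = sa.copy_items(
        "Project X/batch_1",   # placeholder source folder path
        "Project X/batch_2",   # placeholder destination folder path
        include_annotations=True,
        duplicate_strategy="replace",
    )

    # Move items; duplicates in the destination are left untouched (the default).
    skipped = sa.move_items(
        "Project X/batch_1",
        "Project X/batch_2",
        duplicate_strategy="skip",
    )

    # New list_items filters added in this release.
    completed = sa.list_items(
        "Project X", annotation_status__in=["Completed", "QualityCheck"]
    )

    # Export a Multimodal project as CSV (the "format" kwarg replaces "export_type").
    export = sa.prepare_export("Multimodal Project", format="CSV")
    sa.download_export("Multimodal Project", export, "path_to_download")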