Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions src/superannotate/lib/core/entities/classes.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,8 @@ class AttributeGroup(BaseModel):

class Config:
use_enum_values = True
exclude_unset = True
exclude_none = True

def __hash__(self):
return hash(f"{self.id}{self.class_id}{self.name}")
Expand Down
40 changes: 25 additions & 15 deletions src/superannotate/lib/core/usecases/annotations.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ class UploadAnnotationsUseCase(BaseReportableUseCase):
STATUS_CHANGE_CHUNK_SIZE = 100
AUTH_DATA_CHUNK_SIZE = 500
THREADS_COUNT = 4
URI_THRESHOLD = 4 * 1024 - 120

@dataclass
class AnnotationToUpload:
Expand Down Expand Up @@ -317,26 +318,35 @@ async def _upload_small_annotations(self, chunk) -> Report:

async def upload_small_annotations(self):
chunk = []

async def upload(_chunk):
try:
report = await self._upload_small_annotations(chunk)
self._report.failed_annotations.extend(report.failed_annotations)
self._report.missing_classes.extend(report.missing_classes)
self._report.missing_attr_groups.extend(report.missing_attr_groups)
self._report.missing_attrs.extend(report.missing_attrs)
except Exception:
import traceback
traceback.print_exc()
self._report.failed_annotations.extend([i.name for i in chunk])

while True:
item = await self._small_files_queue.get()
self._small_files_queue.task_done()
if not item:
self._small_files_queue.put_nowait(None)
break
chunk.append(item)
if sys.getsizeof(chunk) >= self.CHUNK_SIZE_MB:
report = await self._upload_small_annotations(chunk)
self._report.failed_annotations.extend(report.failed_annotations)
self._report.missing_classes.extend(report.missing_classes)
self._report.missing_attr_groups.extend(report.missing_attr_groups)
self._report.missing_attrs.extend(report.missing_attrs)
if (
sys.getsizeof(chunk) >= self.CHUNK_SIZE_MB or
sum([len(i.name) for i in chunk]) >= self.URI_THRESHOLD - len(chunk) * 14
):
await upload(chunk)
chunk = []

if chunk:
report = await self._upload_small_annotations(chunk)
self._report.failed_annotations.extend(report.failed_annotations)
self._report.missing_classes.extend(report.missing_classes)
self._report.missing_attr_groups.extend(report.missing_attr_groups)
self._report.missing_attrs.extend(report.missing_attrs)
await upload(chunk)

async def _upload_big_annotation(self, item) -> Tuple[str, bool]:
try:
Expand Down Expand Up @@ -583,7 +593,7 @@ def set_defaults(team_id, annotation_data: dict, project_type: int):
default_data = {}
annotation_data["metadata"]["lastAction"] = {
"email": team_id,
"timestamp": int(time.time())
"timestamp": int(round(time.time() * 1000))
}
instances = annotation_data.get("instances", [])
if project_type in constants.ProjectType.images:
Expand All @@ -594,14 +604,14 @@ def set_defaults(team_id, annotation_data: dict, project_type: int):
instance["meta"] = {
**default_data,
**instance["meta"],
"creationType": "Preannotation",
} # noqa
"creationType": "Preannotation", # noqa
}
else:
for idx, instance in enumerate(instances):
instances[idx] = {
**default_data,
**instance,
"creationType": "Preannotation",
"creationType": "Preannotation", # noqa
}
return annotation_data

Expand Down
1 change: 1 addition & 0 deletions src/superannotate/lib/core/usecases/images.py
Original file line number Diff line number Diff line change
Expand Up @@ -1795,6 +1795,7 @@ def execute(self):
headers=annotation_json_creds["headers"],
)
if not response.ok:
# TODO remove
logger.warning("Couldn't load annotations.")
self._response.data = (None, None)
return self._response
Expand Down
44 changes: 22 additions & 22 deletions src/superannotate/lib/core/usecases/projects.py
Original file line number Diff line number Diff line change
Expand Up @@ -574,31 +574,42 @@ def execute(self):
)
annotation_classes_entity_mapping = defaultdict(AnnotationClassEntity)
annotation_classes_created = False
if self._include_annotation_classes:
if self._include_settings:
self.reporter.log_info(
f"Cloning annotation classes from {self._project.name} to {self._project_to_create.name}."
f"Cloning settings from {self._project.name} to {self._project_to_create.name}."
)
try:
self._copy_annotation_classes(
annotation_classes_entity_mapping, project
)
annotation_classes_created = True
self._copy_settings(project)
except (AppException, RequestException) as e:
self.reporter.log_warning(
f"Failed to clone annotation classes from {self._project.name} to {self._project_to_create.name}."
f"Failed to clone settings from {self._project.name} to {self._project_to_create.name}."
)
self.reporter.log_debug(str(e), exc_info=True)

if self._include_contributors:
self.reporter.log_info(
f"Cloning contributors from {self._project.name} to {self._project_to_create.name}."
)
try:
self._copy_include_contributors(project)
except (AppException, RequestException) as e:
self.reporter.log_warning(
f"Failed to clone contributors from {self._project.name} to {self._project_to_create.name}."
)
self.reporter.log_debug(str(e), exc_info=True)

if self._include_settings:
if self._include_annotation_classes:
self.reporter.log_info(
f"Cloning settings from {self._project.name} to {self._project_to_create.name}."
f"Cloning annotation classes from {self._project.name} to {self._project_to_create.name}."
)
try:
self._copy_settings(project)
self._copy_annotation_classes(
annotation_classes_entity_mapping, project
)
annotation_classes_created = True
except (AppException, RequestException) as e:
self.reporter.log_warning(
f"Failed to clone settings from {self._project.name} to {self._project_to_create.name}."
f"Failed to clone annotation classes from {self._project.name} to {self._project_to_create.name}."
)
self.reporter.log_debug(str(e), exc_info=True)

Expand Down Expand Up @@ -626,17 +637,6 @@ def execute(self):
f"Failed to workflow from {self._project.name} to {self._project_to_create.name}."
)
self.reporter.log_debug(str(e), exc_info=True)
if self._include_contributors:
self.reporter.log_info(
f"Cloning contributors from {self._project.name} to {self._project_to_create.name}."
)
try:
self._copy_include_contributors(project)
except (AppException, RequestException) as e:
self.reporter.log_warning(
f"Failed to clone contributors from {self._project.name} to {self._project_to_create.name}."
)
self.reporter.log_debug(str(e), exc_info=True)

self._response.data = self._projects.get_one(
uuid=project.id, team_id=project.team_id
Expand Down
21 changes: 11 additions & 10 deletions src/superannotate/lib/infrastructure/services.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
import aiohttp
import requests.packages.urllib3
from pydantic import BaseModel
from pydantic import parse_obj_as
from requests.exceptions import HTTPError

import lib.core as constance
Expand Down Expand Up @@ -1374,15 +1375,16 @@ async def upload_annotations(
items_name_file_map: Dict[str, io.StringIO],
) -> UploadAnnotationsResponse:
url = urljoin(
"self.assets_provider_url",
f"{self.URL_UPLOAD_ANNOTATIONS}?{'&'.join(f'image_names[]={item_name}' for item_name in items_name_file_map.keys())}",
self.assets_provider_url,
# "https://0ef1-178-160-196-42.ngrok.io/api/v1.01/",
(f"{self.URL_UPLOAD_ANNOTATIONS}?{'&'.join(f'image_names[]={item_name}' for item_name in items_name_file_map.keys())}"),
)

headers = copy.copy(self.default_headers)
del headers["Content-Type"]
async with aiohttp.ClientSession(
headers=headers,
connector=aiohttp.TCPConnector(ssl=self._verify_ssl),
connector=aiohttp.TCPConnector(ssl=self._verify_ssl)
) as session:
data = aiohttp.FormData()

Expand All @@ -1400,13 +1402,12 @@ async def upload_annotations(
},
data=data
)
from pydantic import parse_obj_as
data_json = await _response.json()
response = ServiceResponse()
response.status = _response.status
response._content = await _response.text()
response.data = parse_obj_as(UploadAnnotationsResponse, data_json)
return response
data_json = await _response.json()
response = ServiceResponse()
response.status = _response.status
response._content = await _response.text()
response.data = parse_obj_as(UploadAnnotationsResponse, data_json)
return response

async def upload_big_annotation(
self,
Expand Down
9 changes: 3 additions & 6 deletions tests/integration/annotations/test_preannotation_upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,22 +18,19 @@ def folder_path(self):
return os.path.join(Path(__file__).parent.parent.parent, self.TEST_FOLDER_PATH)

def test_pre_annotation_folder_upload_download(self):
sa.upload_images_from_folder_to_project(
self.PROJECT_NAME, self.folder_path, annotation_status="InProgress"
)
self._attach_items()
sa.create_annotation_classes_from_classes_json(
self.PROJECT_NAME, f"{self.folder_path}/classes/classes.json"
)
_, _, _ = sa.upload_annotations_from_folder_to_project(
uploaded, _, _ = sa.upload_annotations_from_folder_to_project(
self.PROJECT_NAME, self.folder_path
)
assert len(uploaded) == 4
count_in = len(list(Path(self.folder_path).glob("*.json")))
images = sa.search_items(self.PROJECT_NAME)
with tempfile.TemporaryDirectory() as tmp_dir:
for image in images:
image_name = image["name"]
sa.download_image_annotations(self.PROJECT_NAME, image_name, tmp_dir)

count_out = len(list(Path(tmp_dir).glob("*.json")))

self.assertEqual(count_in, count_out)
Original file line number Diff line number Diff line change
Expand Up @@ -35,3 +35,24 @@ def test_validate_instances(self, mock_print):
"instances[0] invalid type\n"
"instances[1] 'points' is a required property"
)

@patch('builtins.print')
def test_validate_create_dby(self, mock_print):
    """Vector payload with an empty ``created_by`` dict must fail validation."""
    # First instance carries an empty created_by; second lacks "points".
    payload = {
        "metadata": {"name": "12"},
        "instances": [
            {"type": "bbox", "created_by": {}},
            {"type": "bbox"},
        ],
    }
    is_valid = sa.validate_annotations("vector", payload)
    assert not is_valid
    # The validator reports per-instance errors through print().
    mock_print.assert_any_call(
        "instances[0] invalid type\n"
        "instances[1] 'points' is a required property"
    )
24 changes: 0 additions & 24 deletions tests/integration/test_get_exports.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,27 +42,3 @@ def test_get_exports(self):
exports_new = sa.get_exports(self.PROJECT_NAME)

assert len(exports_new) == len(exports_old) + 1


class TestPixelExportConvert(BaseTestCase):
    """End-to-end check: export a Pixel project and convert it to COCO panoptic."""

    PROJECT_NAME = "Pixel_Export"
    PROJECT_DESCRIPTION = "Desc"
    PROJECT_TYPE = "Pixel"
    TEST_FOLDER_PTH = "data_set"
    TEST_FOLDER_PATH = "data_set/sample_project_pixel"

    @property
    def folder_path(self):
        # Fixture data lives two directories above this test module.
        tests_root = dirname(dirname(__file__))
        return os.path.join(tests_root, self.TEST_FOLDER_PATH)

    def test_convert_pixel_exported_data(self):
        # Seed the project with images and annotations, then prepare an export.
        sa.upload_images_from_folder_to_project(self.PROJECT_NAME, self.folder_path)
        sa.upload_annotations_from_folder_to_project(self.PROJECT_NAME, self.folder_path)
        export = sa.prepare_export(self.PROJECT_NAME)
        with tempfile.TemporaryDirectory() as export_dir:
            sa.download_export(self.PROJECT_NAME, export["name"], export_dir)
            with tempfile.TemporaryDirectory() as converted_data_tmp_dir:
                # Convert the downloaded export into COCO panoptic-segmentation format.
                export_annotation(
                    export_dir,
                    converted_data_tmp_dir,
                    "COCO",
                    "export",
                    "Pixel",
                    "panoptic_segmentation",
                )
                converted_jsons = list(glob.glob(converted_data_tmp_dir + "/*.json"))
                self.assertEqual(1, len(converted_jsons))