2 changes: 0 additions & 2 deletions src/superannotate/lib/core/serviceproviders.py
@@ -345,7 +345,6 @@ async def download_big_annotation(
         self,
         project: entities.ProjectEntity,
         download_path: str,
-        postfix: str,
         item: entities.BaseItemEntity,
         callback: Callable = None,
     ):
@@ -358,7 +357,6 @@ async def download_small_annotations(
         folder: entities.FolderEntity,
         reporter: Reporter,
         download_path: str,
-        postfix: str,
         item_ids: List[int],
         callback: Callable = None,
     ):
19 changes: 1 addition & 18 deletions src/superannotate/lib/core/usecases/annotations.py
@@ -11,7 +11,6 @@
 import traceback
 import typing
 from dataclasses import dataclass
-from datetime import datetime
 from itertools import islice
 from operator import itemgetter
 from pathlib import Path
@@ -1637,13 +1636,6 @@ def validate_destination(self):
     def destination(self) -> Path:
         return Path(self._destination if self._destination else "")
 
-    def get_postfix(self):
-        if self._project.type == constants.ProjectType.VECTOR:
-            return "___objects.json"
-        elif self._project.type == constants.ProjectType.PIXEL.value:
-            return "___pixel.json"
-        return ".json"
-
     def download_annotation_classes(self, path: str):
         response = self._service_provider.annotation_classes.list(
             Condition("project_id", self._project.id, EQ)
@@ -1678,12 +1670,10 @@ async def download_big_annotations(self, export_path):
             item = await self._big_file_queue.get()
             self._big_file_queue.task_done()
             if item:
-                postfix = self.get_postfix()
                 await self._service_provider.annotations.download_big_annotation(
                     project=self._project,
                     item=item,
                     download_path=f"{export_path}{'/' + self._folder.name if not self._folder.is_root else ''}",
-                    postfix=postfix,
                     callback=self._callback,
                 )
             else:
@@ -1693,14 +1683,12 @@ async def download_small_annotations(
     async def download_small_annotations(
         self, item_ids: List[int], export_path, folder: FolderEntity
     ):
-        postfix = self.get_postfix()
         await self._service_provider.annotations.download_small_annotations(
             project=self._project,
             folder=folder,
             item_ids=item_ids,
             reporter=self.reporter,
             download_path=f"{export_path}{'/' + self._folder.name if not self._folder.is_root else ''}",
-            postfix=postfix,
             callback=self._callback,
         )
 
@@ -1738,12 +1726,7 @@ async def run_workers(
 
     def execute(self):
         if self.is_valid():
-            export_path = str(
-                self.destination
-                / Path(
-                    f"{self._project.name} {datetime.now().strftime('%B %d %Y %H_%M')}"
-                )
-            )
+            export_path = str(self.destination)
             logger.info(
                 f"Downloading the annotations of the requested items to {export_path}\nThis might take a while…"
             )
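Taken together, the use-case changes drop the timestamped "<project name> <date>" export subfolder and the project-type-specific file postfix: exported annotations now land directly under the caller-supplied destination (plus the folder name for non-root folders) and always carry a plain ".json" extension. A minimal sketch of the before/after path composition, using made-up values for the destination, project, and item names:

```python
from datetime import datetime
from pathlib import Path

destination = Path("./exports")   # caller-supplied download destination (illustrative)
project_name = "My Project"       # illustrative project name
item_name = "image_1.jpg"         # illustrative item name

# Before this change: a timestamped project subfolder plus a project-type postfix,
# e.g. "___objects.json" for Vector projects or "___pixel.json" for Pixel projects.
old_path = (
    destination
    / f"{project_name} {datetime.now().strftime('%B %d %Y %H_%M')}"
    / f"{item_name}___objects.json"
)

# After this change: the file goes straight into the destination with a ".json" suffix.
new_path = destination / f"{item_name}.json"

print(old_path)
print(new_path)
```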
2 changes: 1 addition & 1 deletion src/superannotate/lib/core/usecases/items.py
@@ -1078,7 +1078,7 @@ def execute(
         response = None
 
         unique_item_ids = list(set(self.item_ids))
-        processed_items = len(unique_item_ids) + len(self.results['skipped'])
+        processed_items = len(unique_item_ids) + len(self.results["skipped"])
         if self._provided_item_count > processed_items:
             self.reporter.log_info(
                 f"Dropping duplicates. Found {processed_items} / {self._provided_item_count} unique items."
5 changes: 1 addition & 4 deletions src/superannotate/lib/infrastructure/services/annotation.py
@@ -177,7 +177,6 @@ async def download_big_annotation(
         self,
         project: entities.ProjectEntity,
         download_path: str,
-        postfix: str,
         item: entities.BaseItemEntity,
         callback: Callable = None,
     ):
@@ -208,7 +207,7 @@ async def download_big_annotation(
                 res = await start_response.json()
                 Path(download_path).mkdir(exist_ok=True, parents=True)
 
-                dest_path = Path(download_path) / (item_name + postfix)
+                dest_path = Path(download_path) / (item_name + ".json")
                 with open(dest_path, "w") as fp:
                     if callback:
                         res = callback(res)
@@ -220,7 +219,6 @@ async def download_small_annotations(
         folder: entities.FolderEntity,
         reporter: Reporter,
         download_path: str,
-        postfix: str,
         item_ids: List[int],
         callback: Callable = None,
     ):
@@ -242,7 +240,6 @@
             data=item_ids,
             params=query_params,
             download_path=download_path,
-            postfix=postfix,
         )
 
     async def upload_small_annotations(
6 changes: 2 additions & 4 deletions src/superannotate/lib/infrastructure/stream_data_handler.py
@@ -95,7 +95,6 @@ async def download_annotations(
         method: str,
         url: str,
         download_path,
-        postfix,
         data: typing.List[int],
         params: dict = None,
     ):
@@ -119,16 +118,15 @@
                 )
                 self._store_annotation(
                     download_path,
-                    postfix,
                     annotation,
                     self._callback,
                 )
                 self._items_downloaded += 1
 
     @staticmethod
-    def _store_annotation(path, postfix, annotation: dict, callback: Callable = None):
+    def _store_annotation(path, annotation: dict, callback: Callable = None):
         os.makedirs(path, exist_ok=True)
-        with open(f"{path}/{annotation['metadata']['name']}{postfix}", "w") as file:
+        with open(f"{path}/{annotation['metadata']['name']}.json", "w") as file:
             annotation = callback(annotation) if callback else annotation
             json.dump(annotation, file)
 
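For reference, the simplified stream handler helper can be exercised on its own roughly like this; the sample annotation dict and output directory are made up, and the real method receives annotations streamed from the backend:

```python
import json
import os

def _store_annotation(path, annotation: dict, callback=None):
    # Same behaviour as the updated static method: every annotation is
    # written as "<item name>.json", with an optional callback applied first.
    os.makedirs(path, exist_ok=True)
    with open(f"{path}/{annotation['metadata']['name']}.json", "w") as file:
        annotation = callback(annotation) if callback else annotation
        json.dump(annotation, file)

_store_annotation("./out", {"metadata": {"name": "image_1.jpg"}, "instances": []})
# -> ./out/image_1.jpg.json
```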