diff --git a/README.md b/README.md
index e3e4fd5..686cd28 100644
--- a/README.md
+++ b/README.md
@@ -20,6 +20,7 @@ _If you are using FastLabel prototype, please install version 0.2.2._
   - [YOLO](#yolo)
   - [Pascal VOC](#pascal-voc)
   - [labelme](#labelme)
+  - [Segmentation](#segmentation)

 ## Installation

@@ -63,7 +64,7 @@ Supported following project types:

 #### Create Task

-- Create a new task.
+Create a new task.

 ```python
 task_id = client.create_image_task(
@@ -73,7 +74,7 @@ task_id = client.create_image_task(
 )
 ```

-- Create a new task with pre-defined annotations. (Class should be configured on your project in advance)
+Create a new task with pre-defined annotations. (Class should be configured on your project in advance)

 ```python
 task_id = client.create_image_task(
@@ -103,13 +104,13 @@ task_id = client.create_image_task(

 #### Find Task

-- Find a single task.
+Find a single task.

 ```python
 task = client.find_image_task(task_id="YOUR_TASK_ID")
 ```

-- Find a single task by name.
+Find a single task by name.

 ```python
 tasks = client.find_image_task_by_name(project="YOUR_PROJECT_SLUG", task_name="YOUR_TASK_NAME")
@@ -117,7 +118,7 @@ tasks = client.find_image_task_by_name(project="YOUR_PROJECT_SLUG", task_name="Y

 #### Get Tasks

-- Get tasks. (Up to 1000 tasks)
+Get tasks. (Up to 1000 tasks)

 ```python
 tasks = client.get_image_tasks(project="YOUR_PROJECT_SLUG")
@@ -133,7 +134,7 @@ tasks = client.get_image_tasks(
 )
 ```

-- Get a large size of tasks. (Over 1000 tasks)
+Get a large number of tasks. (Over 1000 tasks)

 ```python
 import time
@@ -157,7 +158,7 @@ while True:

 #### Response

-- Example of a single image task object
+Example of a single image task object

 ```python
 {
@@ -201,7 +202,7 @@ Supported following project types:

 #### Create Task

-- Create a new task.
+Create a new task.

 ```python
 task_id = client.create_image_classification_task(
@@ -219,7 +220,7 @@ task_id = client.create_image_classification_task(

 #### Find Task

-- Find a single task.
+Find a single task.

 ```python
 task = client.find_image_classification_task(task_id="YOUR_TASK_ID")
@@ -227,7 +228,7 @@ task = client.find_image_classification_task(task_id="YOUR_TASK_ID")

 #### Get Tasks

-- Get tasks. (Up to 1000 tasks)
+Get tasks. (Up to 1000 tasks)

 ```python
 tasks = client.get_image_classification_tasks(project="YOUR_PROJECT_SLUG")
@@ -235,7 +236,7 @@ tasks = client.get_image_classification_tasks(project="YOUR_PROJECT_SLUG")

 #### Response

-- Example of a single image classification task object
+Example of a single image classification task object

 ```python
 {
@@ -274,7 +275,7 @@ Supported following project types:

 #### Create Task

-- Create a new task.
+Create a new task.

 ```python
 task = client.create_multi_image_task(
@@ -309,7 +310,7 @@ task = client.create_multi_image_task(

 #### Find Task

-- Find a single task.
+Find a single task.

 ```python
 task = client.find_multi_image_task(task_id="YOUR_TASK_ID")
@@ -317,7 +318,7 @@ task = client.find_multi_image_task(task_id="YOUR_TASK_ID")

 #### Get Tasks

-- Get tasks.
+Get tasks.

 ```python
 tasks = client.get_multi_image_tasks(project="YOUR_PROJECT_SLUG")
@@ -325,7 +326,7 @@ tasks = client.get_multi_image_tasks(project="YOUR_PROJECT_SLUG")

 #### Response

-- Example of a single task object
+Example of a single task object

 ```python
 {
@@ -379,7 +380,7 @@ Supported following project types:

 #### Create Task

-- Create a new task.
+Create a new task.
 ```python
 task_id = client.create_video_task(
@@ -389,7 +390,7 @@ task_id = client.create_video_task(
 )
 ```

-- Create a new task with pre-defined annotations. (Class should be configured on your project in advance)
+Create a new task with pre-defined annotations. (Class should be configured on your project in advance)

 ```python
 task_id = client.create_video_task(
@@ -436,7 +437,7 @@ task_id = client.create_video_task(

 #### Find Task

-- Find a single task.
+Find a single task.

 ```python
 task = client.find_video_task(task_id="YOUR_TASK_ID")
@@ -444,7 +445,7 @@ task = client.find_video_task(task_id="YOUR_TASK_ID")

 #### Get Tasks

-- Get tasks. (Up to 10 tasks)
+Get tasks. (Up to 10 tasks)

 ```python
 tasks = client.get_video_tasks(project="YOUR_PROJECT_SLUG")
@@ -452,7 +453,7 @@ tasks = client.get_video_tasks(project="YOUR_PROJECT_SLUG")

 #### Response

-- Example of a single image classification task object
+Example of a single video task object

 ```python
 {
@@ -518,7 +519,7 @@ APIs for update and delete are same over all tasks.

 #### Update Task

-- Update a single task status and tags.
+Update a single task status and tags.

 ```python
 task_id = client.update_task(
@@ -530,7 +531,7 @@ task_id = client.update_task(

 #### Delete Task

-- Delete a single task.
+Delete a single task.

 ```python
 client.delete_task(task_id="YOUR_TASK_ID")
@@ -539,21 +540,21 @@ client.delete_task(task_id="YOUR_TASK_ID")
 #### Get Tasks Id and Name map

 ```python
-map = client.get_task_id_name_map(project="YOUR_PROJECT_SLUG")
+id_name_map = client.get_task_id_name_map(project="YOUR_PROJECT_SLUG")
 ```

 ## Annotation

 ### Create Annotaion

-- Create a new annotation.
+Create a new annotation.

 ```python
 annotation_id = client.create_annotation(
     project="YOUR_PROJECT_SLUG", type="bbox", value="cat", title="Cat")
 ```

-- Create a new annotation with color and attributes.
+Create a new annotation with color and attributes.

 ```python
 attributes = [
@@ -582,7 +583,7 @@ annotation_id = client.create_annotation(
     project="YOUR_PROJECT_SLUG", type="bbox", value="cat", title="Cat", color="#FF0000", attributes=attributes)
 ```

-- Create a new classification annotation.
+Create a new classification annotation.

 ```python
 annotation_id = client.create_classification_annotation(
@@ -591,19 +592,19 @@ annotation_id = client.create_classification_annotation(

 ### Find Annotation

-- Find an annotation.
+Find an annotation.

 ```python
 annotation = client.find_annotation(annotation_id="YOUR_ANNOTATION_ID")
 ```

-- Find an annotation by value.
+Find an annotation by value.

 ```python
 annotation = client.find_annotation_by_value(project="YOUR_PROJECT_SLUG", value="cat")
 ```

-- Find an annotation by value in classification project.
+Find an annotation by value in classification project.

 ```python
 annotation = client.find_annotation_by_value(
@@ -612,7 +613,7 @@ annotation = client.find_annotation_by_value(

 ### Get Annotations

-- Get annotations. (Up to 1000 annotations)
+Get annotations. (Up to 1000 annotations)

 ```python
 annotations = client.get_annotations(project="YOUR_PROJECT_SLUG")
@@ -620,7 +621,7 @@ annotations = client.get_annotations(project="YOUR_PROJECT_SLUG")

 ### Response

-- Example of an annotation object
+Example of an annotation object

 ```python
 {
@@ -657,14 +658,14 @@ annotations = client.get_annotations(project="YOUR_PROJECT_SLUG")

 ### Update Annotation

-- Update an annotation.
+Update an annotation.
 ```python
 annotation_id = client.update_annotation(
     annotation_id="YOUR_ANNOTATION_ID", value="cat2", title="Cat2", color="#FF0000")
 ```

-- Update an annotation with attributes.
+Update an annotation with attributes.

 ```python
 attributes = [
@@ -695,7 +696,7 @@ annotation_id = client.update_annotation(
     annotation_id="YOUR_ANNOTATION_ID", value="cat2", title="Cat2", color="#FF0000", attributes=attributes)
 ```

-- Update a classification annotation.
+Update a classification annotation.

 ```python
 annotation_id = client.update_classification_annotation(
@@ -704,7 +705,7 @@ annotation_id = client.update_classification_annotation(

 ### Delete Annotation

-- Delete an annotation.
+Delete an annotation.

 ```python
 client.delete_annotation(annotation_id="YOUR_ANNOTATION_ID")
@@ -714,7 +715,7 @@ client.delete_annotation(annotation_id="YOUR_ANNOTATION_ID")

 ### Create Project

-- Create a new project.
+Create a new project.

 ```python
 project_id = client.create_project(
@@ -723,13 +724,13 @@ project_id = client.create_project(

 ### Find Project

-- Find a project.
+Find a project.

 ```python
 project = client.find_project(project_id="YOUR_PROJECT_ID")
 ```

-- Find a project by slug.
+Find a project by slug.

 ```python
 project = client.find_project_by_slug(slug="YOUR_PROJECT_SLUG")
@@ -737,7 +738,7 @@ project = client.find_project_by_slug(slug="YOUR_PROJECT_SLUG")

 ### Get Projects

-- Get projects. (Up to 1000 projects)
+Get projects. (Up to 1000 projects)

 ```python
 projects = client.get_projects()
@@ -745,7 +746,7 @@ projects = client.get_projects()

 ### Response

-- Example of a project object
+Example of a project object

 ```python
 {
@@ -764,7 +765,7 @@ projects = client.get_projects()

 ### Update Project

-- Update a project.
+Update a project.

 ```python
 project_id = client.update_project(
@@ -773,7 +774,7 @@ project_id = client.update_project(

 ### Delete Project

-- Delete a project.
+Delete a project.

 ```python
 client.delete_project(project_id="YOUR_PROJECT_ID")
@@ -785,14 +786,14 @@ Supporting bbox or polygon annotation type.

 ### COCO

-- Get tasks and export as a [COCO format](https://cocodataset.org/#format-data) file.
+Get tasks and export as a [COCO format](https://cocodataset.org/#format-data) file.

 ```python
 tasks = client.get_image_tasks(project="YOUR_PROJECT_SLUG")
 client.export_coco(tasks)
 ```

-- Export with specifying output directory.
+Export to a specified output directory.

 ```python
 client.export_coco(tasks=tasks, output_dir="YOUR_DIRECTROY")
@@ -800,7 +801,7 @@ client.export_coco(tasks=tasks, output_dir="YOUR_DIRECTROY")

 ### YOLO

-- Get tasks and export as YOLO format files.
+Get tasks and export as YOLO format files.

 ```python
 tasks = client.get_image_tasks(project="YOUR_PROJECT_SLUG")
@@ -809,7 +810,7 @@ client.export_yolo(tasks)

 ### Pascal VOC

-- Get tasks and export as Pascal VOC format files.
+Get tasks and export as Pascal VOC format files.

 ```python
 tasks = client.get_image_tasks(project="YOUR_PROJECT_SLUG")
@@ -818,13 +819,34 @@ client.export_pascalvoc(tasks)

 ### labelme

-- Get tasks and export as labelme format files.
+Get tasks and export as labelme format files.

 ```python
 tasks = client.get_image_tasks(project="YOUR_PROJECT_SLUG")
 client.export_labelme(tasks)
 ```

+### Segmentation
+
+Get tasks and export them as index-color instance/semantic segmentation PNG files.
+Only the following annotation types are supported.
+
+- bbox
+- polygon
+- segmentation (Hollowed points are not supported.)
+
+```python
+tasks = client.get_image_tasks(project="YOUR_PROJECT_SLUG")
+client.export_instance_segmentation(tasks)
+```
+
+```python
+tasks = client.get_image_tasks(project="YOUR_PROJECT_SLUG")
+client.export_semantic_segmentation(tasks)
+```
+
+> Please check const.COLOR_PALETTE for index colors.
+
 ## API Docs

 Check [this](https://api.fastlabel.ai/docs/) for further information.
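The README addition above only shows the default calls. Both new exporters also accept `output_dir` and `pallete` keyword arguments (spelled as in `fastlabel/__init__.py` below), so a custom destination or palette can be used. A minimal sketch, reusing the `client` object from the earlier README examples; the directory name is a placeholder:

```python
from fastlabel import const  # COLOR_PALETTE is added in this change

tasks = client.get_image_tasks(project="YOUR_PROJECT_SLUG")

# Write semantic masks somewhere other than the default output/semantic_segmentation.
client.export_semantic_segmentation(tasks, output_dir="YOUR_DIRECTORY")

# A palette is a flat [R, G, B, R, G, B, ...] list; index 0 is the background.
# Start from the default palette and make instance 1 pure red (an arbitrary tweak).
palette = list(const.COLOR_PALETTE)
palette[3:6] = [255, 0, 0]
client.export_instance_segmentation(tasks, pallete=palette)  # the parameter is spelled "pallete"
```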
diff --git a/examples/create_image_task.py b/examples/create_image_task.py
index d26515d..a7c5b95 100644
--- a/examples/create_image_task.py
+++ b/examples/create_image_task.py
@@ -14,7 +14,7 @@
         "attributes": [
             {
                 "key": "kind",
-                "value": "三毛猫"
+                "value": "Scottish Fold"
             }
         ],
         "points": [
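The implementation that follows writes `P`-mode (index-color) PNGs via Pillow. A small sketch for sanity-checking an exported mask — listing which palette indices (instances or classes) actually occur — assuming Pillow is installed (it is pinned in `requirements.txt` below) and that a mask has already been exported to the hypothetical path used here:

```python
from collections import Counter

from PIL import Image

mask = Image.open("output/instance_segmentation/sample.png")  # hypothetical exported file
assert mask.mode == "P"  # index-color image: one palette index per pixel

counts = Counter(mask.getdata())  # pixels per palette index; index 0 is the background
palette = mask.getpalette()       # flat [R, G, B, ...] list, as written by putpalette()
for index, n_pixels in sorted(counts.items()):
    r, g, b = palette[index * 3:index * 3 + 3]
    print(f"index {index}: rgb=({r}, {g}, {b}), {n_pixels} px")
```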
(Optional) + """ + classes = [] + for task in tasks: + for annotation in task["annotations"]: + classes.append(annotation["value"]) + classes = list(set(classes)) + classes.sort() + + tasks = converters.to_pixel_coordinates(tasks) + for task in tasks: + self.__export_index_color_image(task=task, output_dir=output_dir, pallete=pallete, is_instance_segmentation=False, classes=classes) + + def __export_index_color_image(self, task: list, output_dir: str, pallete: list[int], is_instance_segmentation: bool = True, classes: list = []) -> None: + image = Image.new("RGB", (task["width"], task["height"]), 0) + image = image.convert('P') + image.putpalette(pallete) + draw = ImageDraw.Draw(image) + + index = 1 + for annotation in task["annotations"]: + color = index if is_instance_segmentation else classes.index(annotation["value"]) + 1 + if annotation["type"] == AnnotationType.segmentation.value: + for region in annotation["points"]: + for points in region: + pillow_draw_points = self.__get_pillow_draw_points(points) + draw.polygon(pillow_draw_points, fill=color) + # hollowd points are not supported + break + elif annotation["type"] == AnnotationType.polygon.value: + pillow_draw_points = self.__get_pillow_draw_points(annotation["points"]) + draw.polygon(pillow_draw_points, fill=color) + elif annotation["type"] == AnnotationType.bbox.value: + pillow_draw_points = self.__get_pillow_draw_points(annotation["points"]) + draw.polygon(pillow_draw_points, fill=color) + else: + continue + index += 1 + + image_path = os.path.join(output_dir, utils.get_basename(task["name"]) + ".png") + os.makedirs(os.path.dirname(image_path), exist_ok=True) + image.save(image_path) + + def __get_pillow_draw_points(self, points: list[int]) -> list[int]: + """ + Convert points to pillow draw points. Diagonal points are not supported. 
+ """ + x_points = [] + x_points.append(points[0]) + x_points.append(points[1]) + for i in range(int(len(points) / 2)): + if i == 0: + continue + x = points[i * 2] + y = points[i * 2 + 1] + if y > x_points[(i - 1) * 2 + 1]: + x_points[(i - 1) * 2] = x_points[(i - 1) * 2] - 1 + x = x - 1 + x_points.append(x) + x_points.append(y) + + y_points = [] + y_points.append(points[0]) + y_points.append(points[1]) + for i in range(int(len(points) / 2)): + if i == 0: + continue + x = points[i * 2] + y = points[i * 2 + 1] + if x < y_points[(i - 1) * 2]: + y_points[(i - 1) * 2 + 1] = y_points[(i - 1) * 2 + 1] - 1 + y = y - 1 + y_points.append(x) + y_points.append(y) + + new_points = [] + for i in range(int(len(points) / 2)): + new_points.append(x_points[i * 2]) + new_points.append(y_points[i * 2 + 1]) + return new_points + + # Annotation def find_annotation(self, annotation_id: str) -> dict: diff --git a/fastlabel/const.py b/fastlabel/const.py new file mode 100644 index 0000000..297bc28 --- /dev/null +++ b/fastlabel/const.py @@ -0,0 +1,12 @@ +from enum import Enum + +# only 57 types +COLOR_PALETTE = [0, 0, 0, 228, 26, 28, 55, 126, 184, 77, 175, 74, 152, 78, 163, 255, 127, 0, 255, 255, 51, 166, 86, 40, 247, 129, 191, 153, 153, 153, 102, 194, 165, 252, 141, 98, 141, 160, 203, 231, 138, 195, 166, 216, 84, 255, 217, 47, 229, 196, 148, 179, 179, 179, 141, 211, 199, 255, 255, 179, 190, 186, 218, 251, 128, 114, 128, 177, 211, 253, 180, 98, 179, 222, 105, 252, 205, 229, 217, 217, 217, 188, 128, 189, 204, 235, 197, 255, 237, 111, 166, 206, 227, 31, 120, 180, 178, 223, 138, 51, 160, 44, 251, 154, 153, 227, 26, 28, 253, 191, 111, 255, 127, 0, 202, 178, 214, 106, 61, 154, 255, 255, 153, 177, 89, 40, 127, 201, 127, 190, 174, 212, 253, 192, 134, 255, 255, 153, 56, 108, 176, 240, 2, 127, 191, 91, 22, 102, 102, 102, 27, 158, 119, 217, 95, 2, 117, 112, 179, 231, 41, 138, 102, 166, 30, 230, 171, 2, 166, 118, 29, 102, 102, 102] + +class AnnotationType(Enum): + bbox = "bbox" + polygon = "polygon" + keypoint = "keypoint" + line = "line" + segmentation = "segmentation" + classification = "classification" \ No newline at end of file diff --git a/fastlabel/converters.py b/fastlabel/converters.py index 2ee6c0e..e3ab609 100644 --- a/fastlabel/converters.py +++ b/fastlabel/converters.py @@ -1,18 +1,9 @@ -from enum import Enum from concurrent.futures import ThreadPoolExecutor +import copy import geojson import numpy as np - - -class AnnotationType(Enum): - bbox = "bbox" - polygon = "polygon" - keypoint = "keypoint" - classification = "classification" - line = "line" - segmentation = "segmentation" - +from fastlabel.const import AnnotationType # COCO @@ -274,6 +265,78 @@ def __to_labelme_shape_type(annotation_type: str) -> str: return None +def to_pixel_coordinates(tasks: list) -> list: + """ + Remove diagonal coordinates and return pixel outline coordinates. + Only support bbox, polygon, and segmentation annotation types. 
+ """ + tasks = copy.deepcopy(tasks) + for task in tasks: + for annotation in task["annotations"]: + if annotation["type"] == AnnotationType.segmentation.value: + new_regions = [] + for region in annotation["points"]: + new_region = [] + for points in region: + new_points = __get_pixel_coordinates(points) + new_region.append(new_points) + new_regions.append(new_region) + annotation["points"] = new_regions + elif annotation["type"] == AnnotationType.polygon.value: + new_points = __get_pixel_coordinates(annotation["points"]) + annotation["points"] = new_points + elif annotation["type"] == AnnotationType.bbox.value: + points = annotation["points"] + points = [int(point) for point in points] + xmin = min([points[0], points[2]]) + ymin = min([points[1], points[3]]) + xmax = max([points[0], points[2]]) + ymax = max([points[1], points[3]]) + annotation["points"] = [ + xmin, ymin, + xmax, ymin, + xmax, ymax, + xmin, ymax, + ] + else: + continue + return tasks + +def __get_pixel_coordinates(points: list[int or float]) -> list[int]: + """ + Remove diagonal coordinates and return pixel outline coordinates. + """ + if len(points) == 0: + return + + new_points = [] + new_points.append(int(points[0])) + new_points.append(int(points[1])) + for i in range(int(len(points) / 2)): + if i == 0: + continue + + prev_x = int(points[(i-1) * 2]) + prev_y = int(points[(i-1) * 2 + 1]) + x = int(points[i * 2]) + y = int(points[i * 2 + 1]) + + if prev_x == x or prev_y == y: + # just add x, y coordinates if not diagonal + new_points.append(x) + new_points.append(y) + else: + # remove diagonal + xdiff = x - prev_x + ydiff = y - prev_y + mindiff = min([abs(xdiff), abs(ydiff)]) + for i in range(mindiff): + new_points.append(int(prev_x + int(xdiff / mindiff * i))) + new_points.append(int(prev_y + int(ydiff / mindiff * (i + 1)))) + new_points.append(int(prev_x + int(xdiff / mindiff * (i + 1)))) + new_points.append(int(prev_y + int(ydiff / mindiff * (i + 1)))) + return new_points + def __coco2pascalvoc(coco: dict) -> list: pascalvoc = [] diff --git a/requirements.txt b/requirements.txt index 066af15..f74a742 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,5 @@ requests==2.25.1 numpy==1.20.2 geojson==2.5.0 -xmltodict==0.12.0 \ No newline at end of file +xmltodict==0.12.0 +Pillow==8.3.1 \ No newline at end of file diff --git a/setup.py b/setup.py index df9259e..dc25c76 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ setuptools.setup( name="fastlabel", - version="0.9.5", + version="0.9.6", author="eisuke-ueta", author_email="eisuke.ueta@fastlabel.ai", description="The official Python SDK for FastLabel API, the Data Platform for AI",