annotation import #163
Changes from all commits
f78f815, 5233ff3, d769697, 74635e5, c39697a, 7b96596, c9e8856, 73d10eb, 23c8ac1, a2a6c1e, 773ef56, a2cd057, 3e38d89, b778959
New file (@@ -0,0 +1,126 @@):

```python
from labelbox import Client
from typing import Dict, Any, Tuple
from skimage import measure
from io import BytesIO
from PIL import Image
import numpy as np
import uuid


def create_boxes_ndjson(datarow_id: str, schema_id: str, top: float, left: float,
                        bottom: float, right: float) -> Dict[str, Any]:
    """
    Creates a bounding box annotation.
    * https://docs.labelbox.com/data-model/en/index-en#bounding-box

    Args:
        datarow_id (str): id of the data row (in this case an image) to add this annotation to
        schema_id (str): id of the bbox tool in the current ontology
        top, left, bottom, right (float): pixel coordinates of the bbox
    Returns:
        ndjson representation of a bounding box
    """
    return {
        "uuid": str(uuid.uuid4()),
        "schemaId": schema_id,
        "dataRow": {
            "id": datarow_id
        },
        "bbox": {
            "top": int(top),
            "left": int(left),
            "height": int(bottom - top),
            "width": int(right - left)
        }
    }


def create_polygon_ndjson(datarow_id: str, schema_id: str,
                          segmentation_mask: np.ndarray) -> Dict[str, Any]:
    """
    Creates a polygon annotation from a segmentation mask.
    * https://docs.labelbox.com/data-model/en/index-en#polygon

    Args:
        datarow_id (str): id of the data row (in this case an image) to add this annotation to
        schema_id (str): id of the polygon tool in the current ontology
        segmentation_mask (np.ndarray): segmentation mask of size (image_h, image_w)
            - The mask is converted into a polygon since polygons aren't directly inferred.
    Returns:
        ndjson representation of a polygon
    """
    contours = measure.find_contours(segmentation_mask, 0.5)
    # Note that complex polygons (multiple or nested contours) could break;
    # only the first contour is used here.
    pts = contours[0].astype(np.int32)
    pts = np.roll(pts, 1, axis=-1)  # (row, col) -> (x, y)
    pts = [{'x': int(x), 'y': int(y)} for x, y in pts]
    return {
        "uuid": str(uuid.uuid4()),
        "schemaId": schema_id,
        "dataRow": {
            "id": datarow_id
        },
        "polygon": pts
    }


def create_mask_ndjson(client: Client, datarow_id: str, schema_id: str,
                       segmentation_mask: np.ndarray,
                       color: Tuple[int, int, int]) -> Dict[str, Any]:
    """
    Creates a segmentation mask annotation for an object in the image.
    * https://docs.labelbox.com/data-model/en/index-en#segmentation-mask

    Args:
        client (labelbox.Client): Labelbox client used for uploading the seg mask to cloud storage
        datarow_id (str): id of the data row (in this case an image) to add this annotation to
        schema_id (str): id of the segmentation tool in the current ontology
        segmentation_mask (np.ndarray): segmentation mask of size (image_h, image_w)
        color (Tuple[int, int, int]): RGB color used to convert the binary mask into a 3-channel colorized mask
    Returns:
        ndjson representation of a segmentation mask
    """
    colorize = np.concatenate(
        [segmentation_mask[..., np.newaxis] * c for c in color], axis=2)
    img_bytes = BytesIO()
    Image.fromarray(colorize).save(img_bytes, format="PNG")
    # * Use your own signed urls so that you can re-sign the data.
    # * This is just to make the demo work.
    url = client.upload_data(content=img_bytes.getvalue(), sign=True)
    return {
        "uuid": str(uuid.uuid4()),
        "schemaId": schema_id,
        "dataRow": {
            "id": datarow_id
        },
        "mask": {
            "instanceURI": url,
            "colorRGB": color
        }
    }


def create_point_ndjson(datarow_id: str, schema_id: str, top: float, left: float,
                        bottom: float, right: float) -> Dict[str, Any]:
    """
    Creates a point annotation at the centroid of a bounding box.
    * https://docs.labelbox.com/data-model/en/index-en#point

    Args:
        datarow_id (str): id of the data row (in this case an image) to add this annotation to
        schema_id (str): id of the point tool in the current ontology
        top, left, bottom, right (float): pixel coordinates of the bbox
            - The model doesn't directly predict points, so we use the centroid of the predicted bounding box.
    Returns:
        ndjson representation of a point
    """
    return {
        "uuid": str(uuid.uuid4()),
        "schemaId": schema_id,
        "dataRow": {
            "id": datarow_id
        },
        "point": {
            "x": int((left + right) / 2.),
            "y": int((top + bottom) / 2.),
        }
    }
```
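Taken together, these helpers produce the per-annotation dicts that an annotation import consumes. Below is a minimal sketch of how they might be combined into an import payload, assuming the helpers above are in scope; the data row id and schema ids are placeholders, and writing a local `.ndjson` file is just one way to hand the payload off.

```python
import json

# Hypothetical ids -- substitute values from your own project and ontology.
DATAROW_ID = "<datarow-id>"
BBOX_SCHEMA_ID = "<bbox-tool-schema-id>"
POINT_SCHEMA_ID = "<point-tool-schema-id>"

# Build one annotation of each kind for a single image.
annotations = [
    create_boxes_ndjson(DATAROW_ID, BBOX_SCHEMA_ID,
                        top=10, left=20, bottom=110, right=220),
    create_point_ndjson(DATAROW_ID, POINT_SCHEMA_ID,
                        top=10, left=20, bottom=110, right=220),
]

# Serialize to newline-delimited JSON (ndjson), one annotation per line.
with open("predictions.ndjson", "w") as f:
    for annotation in annotations:
        f.write(json.dumps(annotation) + "\n")
```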
The second changed file updates `Client.execute` and `Client.create_model`:

```diff
@@ -79,7 +79,13 @@ def __init__(self,

     @retry.Retry(predicate=retry.if_exception_type(
         labelbox.exceptions.InternalServerError))
-    def execute(self, query, params=None, timeout=30.0, experimental=False):
+    def execute(self,
+                query=None,
+                params=None,
+                data=None,
+                files=None,
+                timeout=30.0,
+                experimental=False):
         """ Sends a request to the server for the execution of the
         given query.

@@ -89,6 +95,8 @@ def execute(self, query, params=None, timeout=30.0, experimental=False):
         Args:
             query (str): The query to execute.
             params (dict): Query parameters referenced within the query.
+            data (str): json string containing the query to execute
+            files (dict): file arguments for request
             timeout (float): Max allowed time for query execution,
                 in seconds.
         Returns:

@@ -107,8 +115,9 @@ def execute(self, query, params=None, timeout=30.0, experimental=False):
                 most likely due to connection issues.
             labelbox.exceptions.LabelboxError: If an unknown error of any
                 kind occurred.
+            ValueError: If query and data are both None.
         """
-        logger.debug("Query: %s, params: %r", query, params)
+        logger.debug("Query: %s, params: %r, data %r", query, params, data)

         # Convert datetimes to UTC strings.
         def convert_value(value):
```
```diff
@@ -117,19 +126,35 @@ def convert_value(value):
                 value = value.strftime("%Y-%m-%dT%H:%M:%SZ")
             return value

-        if params is not None:
-            params = {
-                key: convert_value(value) for key, value in params.items()
-            }
-
-        data = json.dumps({'query': query, 'variables': params}).encode('utf-8')
+        if query is not None:
+            if params is not None:
+                params = {
+                    key: convert_value(value) for key, value in params.items()
+                }
+            data = json.dumps({
+                'query': query,
+                'variables': params
+            }).encode('utf-8')
```
Review comment on lines +134 to +137

Contributor: If …

Contributor (author): Yeah, this should be fine. Here is a minimal example you can try that works without setting `params`.
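The example itself is not included in the capture; a plausible minimal sketch of such a call, assuming a simple read-only query with no variables (the query text and API key are placeholders):

```python
from labelbox import Client

client = Client(api_key="<api-key>")  # placeholder credentials

# No `params` passed -- execute() serializes {'query': ..., 'variables': None}.
result = client.execute("query { organization { id name } }")
print(result)
```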
```diff
+        elif data is None:
+            raise ValueError("query and data cannot both be none")
         try:
-            response = requests.post(self.endpoint.replace('/graphql', '/_gql')
-                                     if experimental else self.endpoint,
-                                     data=data,
-                                     headers=self.headers,
-                                     timeout=timeout)
+            request = {
+                'url':
+                    self.endpoint.replace('/graphql', '/_gql')
+                    if experimental else self.endpoint,
+                'data':
+                    data,
+                'headers':
+                    self.headers,
+                'timeout':
+                    timeout
+            }
+            if files:
+                request.update({'files': files})
+                request['headers'] = {
+                    'Authorization': self.headers['Authorization']
+                }
+            response = requests.post(**request)
             logger.debug("Response: %s", response.text)
         except requests.exceptions.Timeout as e:
             raise labelbox.exceptions.TimeoutError(str(e))
```
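For illustration only, a hedged sketch of the new pre-serialized `data` path: equivalent to calling `execute(query)` but with the caller doing the JSON encoding. The query text below is a placeholder, not one taken from this PR. When `files` is supplied as well, the dict is handed straight to `requests.post` and the headers are reduced to `Authorization` so that `requests` can set the multipart content type itself.

```python
import json

# Pre-serialized request body; query=None, so execute() posts it as-is.
body = json.dumps({
    "query": "query { organization { id name } }",
    "variables": None,
})

result = client.execute(data=body)
```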
```diff
@@ -548,4 +573,14 @@ def create_model(self, name, ontology_id):
             InvalidAttributeError: If the Model type does not contain
                 any of the attribute names given in kwargs.
         """
-        return self._create(Model, {"name": name, "ontology_id": ontology_id})
+        query_str = """mutation createModelPyApi($name: String!, $ontologyId: ID!){
+            createModel(data: {name : $name, ontologyId : $ontologyId}){
+                %s
+            }
+        }""" % query.results_query_part(Model)
+
+        result = self.execute(query_str, {
+            "name": name,
+            "ontologyId": ontology_id
+        })
+        return Model(self, result['createModel'])
```
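A brief usage sketch of the reworked `create_model`; the model name and ontology id are placeholders, and the attributes printed are illustrative:

```python
# ontology_id must reference an existing ontology in your organization.
model = client.create_model(name="detector-v1", ontology_id="<ontology-id>")
print(model.uid, model.name)
```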