54 changes: 29 additions & 25 deletions labelbox/client.py
@@ -32,7 +32,7 @@ class Client:

def __init__(self, api_key=None,
endpoint='https://api.labelbox.com/graphql'):
""" Create and initialize a Labelbox Client.
""" Creates and initializes a Labelbox Client.

Args:
api_key (str): API key. If None, the key is obtained from
@@ -63,19 +63,20 @@ def execute(self, query, params=None, timeout=10.0):
in appropriate labelbox.exceptions.LabelboxError subtypes.

Args:
query (str): the query to execute.
params (dict): query parameters referenced within the query.
query (str): The query to execute.
params (dict): Query parameters referenced within the query.
timeout (float): Max allowed time for query execution,
in seconds.
Return:
Returns:
dict, parsed JSON response.
Raises:
labelbox.exceptions.AuthenticationError: If authentication
failed.
labelbox.exceptions.InvalidQueryError: If `query` is not
syntactically or semantically valid (checked server-side).
labelbox.exceptions.ApiLimitError: If the server API limit was
exceeded. Check Labelbox documentation to see API limits.
exceeded. See "How to import data" in the online documentation
to see API limits.
labelbox.exceptions.TimeoutError: If response was not received
in `timeout` seconds.
labelbox.exceptions.NetworkError: If an unknown error occurred
@@ -172,12 +173,13 @@ def check_errors(keywords, *path):

def upload_data(self, data):
""" Uploads the given data (bytes) to Labelbox.

Args:
data (bytes): the data to upload.
Return:
data (bytes): The data to upload.
Returns:
str, the URL of uploaded data.
Raises:
labelbox.exceptions.LabelboxError: if upload failes.
labelbox.exceptions.LabelboxError: If upload failed.
"""
request_data = {
"operations": json.dumps({
@@ -213,7 +215,7 @@ def _get_single(self, db_object_type, uid):
Args:
db_object_type (type): DbObject subclass.
uid (str): Unique ID of the row.
Return:
Returns:
Object of `db_object_type`.
Raises:
labelbox.exceptions.ResourceNotFoundError: If there is no object
@@ -230,9 +232,10 @@ def _get_single(self, db_object_type, uid):

def get_project(self, project_id):
""" Gets a single Project with the given ID.

Args:
project_id (str): Unique ID of the Project.
Return:
Returns:
The sought Project.
Raises:
labelbox.exceptions.ResourceNotFoundError: If there is no
@@ -242,9 +245,10 @@ def get_project(self, project_id):

def get_dataset(self, dataset_id):
""" Gets a single Dataset with the given ID.

Args:
dataset_id (str): Unique ID of the Dataset.
Return:
Returns:
The sought Dataset.
Raises:
labelbox.exceptions.ResourceNotFoundError: If there is no
@@ -267,7 +271,7 @@ def _get_all(self, db_object_type, where):
db_object_type (type): DbObject subclass.
where (Comparison, LogicalOperation or None): The `where` clause
for filtering.
Return:
Returns:
An iterable of `db_object_type` instances.
"""
not_deleted = db_object_type.deleted == False
@@ -284,7 +288,7 @@ def get_projects(self, where=None):
Args:
where (Comparison, LogicalOperation or None): The `where` clause
for filtering.
Return:
Returns:
An iterable of Projects (typically a PaginatedCollection).
"""
return self._get_all(Project, where)
@@ -295,7 +299,7 @@ def get_datasets(self, where=None):
Args:
where (Comparison, LogicalOperation or None): The `where` clause
for filtering.
Return:
Returns:
An iterable of Datasets (typically a PaginatedCollection).
"""
return self._get_all(Dataset, where)
@@ -306,23 +310,23 @@ def get_labeling_frontends(self, where=None):
Args:
where (Comparison, LogicalOperation or None): The `where` clause
for filtering.
Return:
Returns:
An iterable of LabelingFrontends (typically a PaginatedCollection).
"""
return self._get_all(LabelingFrontend, where)

def _create(self, db_object_type, data):
""" Creates a object on the server. Attribute values are
""" Creates an object on the server. Attribute values are
passed as keyword arguments:

Args:
db_object_type (type): A DbObjectType subtype.
data (dict): Keys are attributes or their names (in Python,
snake-case convention) and values are desired attribute values.
Return:
a new object of the given DB object type.
Returns:
A new object of the given DB object type.
Raises:
InvalidAttributeError: in case the DB object type does not contain
InvalidAttributeError: If the DB object type does not contain
any of the attribute names given in `data`.
"""
# Convert string attribute names to Field or Relationship objects.
@@ -347,10 +351,10 @@ def create_dataset(self, **kwargs):
Keyword arguments with new Dataset attribute values.
Keys are attribute names (in Python, snake-case convention) and
values are desired attribute values.
Return:
a new Dataset object.
Returns:
A new Dataset object.
Raises:
InvalidAttributeError: in case the Dataset type does not contain
InvalidAttributeError: If the Dataset type does not contain
any of the attribute names given in kwargs.
"""
return self._create(Dataset, kwargs)
@@ -364,10 +368,10 @@ def create_project(self, **kwargs):
Keyword arguments with new Project attribute values.
Keys are attribute names (in Python, snake-case convention) and
values are desired attribute values.
Return:
a new Project object.
Returns:
A new Project object.
Raises:
InvalidAttributeError: in case the Project type does not contain
InvalidAttributeError: If the Project type does not contain
any of the attribute names given in kwargs.
"""
return self._create(Project, kwargs)
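
For orientation, a minimal usage sketch of the Client methods whose docstrings are touched above. It assumes the package exposes Client at the top level and that Dataset rows have a name attribute; both are illustrative assumptions, not something this diff establishes.

from labelbox import Client

client = Client(api_key="<YOUR_API_KEY>")               # api_key=None falls back to the lookup described above
project = client.get_project("<project-uid>")           # raises ResourceNotFoundError for an unknown ID
datasets = client.get_datasets()                         # iterable of Datasets, typically a PaginatedCollection
dataset = client.create_dataset(name="Example data")     # keyword args map to Dataset attributes (snake-case); "name" assumed
data_url = client.upload_data(b"raw bytes")              # returns the URL of the uploaded data
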
4 changes: 3 additions & 1 deletion labelbox/schema/data_row.py
@@ -23,6 +23,7 @@ class DataRow(DbObject, Updateable, BulkDeletable):
@staticmethod
def bulk_delete(data_rows):
""" Deletes all the given DataRows.

Args:
data_rows (list of DataRow): The DataRows to delete.
"""
@@ -35,11 +36,12 @@ def __init__(self, *args, **kwargs):

def create_metadata(self, meta_type, meta_value):
""" Creates an asset metadata for this DataRow.

Args:
meta_type (str): Asset metadata type, must be one of:
VIDEO, IMAGE, TEXT.
meta_value (str): Asset metadata value.
Return:
Returns:
AssetMetadata DB object.
"""
meta_type_param = "metaType"
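
Continuing that sketch, a hedged example of the DataRow helpers documented above; the import path and the external-ID lookup (documented under dataset.py below) are assumptions.

from labelbox import DataRow                                   # import path assumed

data_row = dataset.data_row_for_external_id("image-001")       # `dataset` from the sketch above; ID is illustrative
meta = data_row.create_metadata("TEXT", "a free-form note")    # meta_type must be one of VIDEO, IMAGE, TEXT
DataRow.bulk_delete([data_row])                                 # deletes every DataRow in the given list
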
13 changes: 7 additions & 6 deletions labelbox/schema/dataset.py
@@ -24,6 +24,7 @@ class Dataset(DbObject, Updateable, Deletable):

def create_data_row(self, **kwargs):
""" Creates a single DataRow belonging to this dataset.

Kwargs:
Key-value arguments containing new `DataRow` data.
At a minimum they must contain `row_data`. The value for
@@ -63,17 +64,17 @@ def create_data_rows(self, items):
Args:
items (iterable of (dict or str)): See above for details.

Return:
Returns:
Task representing the data import on the server side. The Task
can be used for inspecting task progress and waiting until it's done.

Raise:
InvalidQueryError: if the `items` parameter does not conform to
Raises:
InvalidQueryError: If the `items` parameter does not conform to
the specification above or if the server did not accept the
DataRow creation request (unknown reason).
ResourceNotFoundError: if unable to retrieve the Task for the
ResourceNotFoundError: If unable to retrieve the Task for the
import process. This could imply that the import failed.
InvalidAttributeError: if there are fields in `items` not valid for
InvalidAttributeError: If there are fields in `items` not valid for
a DataRow.
"""
file_upload_thread_count = 20
@@ -153,7 +154,7 @@ def data_row_for_external_id(self, external_id):
A single `DataRow` with the given ID.

Raises:
labelbox.exceptions.ResourceNotFoundError: if there is no `DataRow`
labelbox.exceptions.ResourceNotFoundError: If there is no `DataRow`
in this `DataSet` with the given external ID, or if there are
multiple `DataRows` for it.
"""
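
A hedged sketch of the bulk-import flow documented above, continuing from the Client example; only row_data is required by the docstrings, while the external_id key, the local-file string form, and the Task's wait_till_done() method are assumptions.

row = dataset.create_data_row(row_data="https://example.com/image-1.jpg")       # row_data is the documented minimum
task = dataset.create_data_rows([
    {"row_data": "https://example.com/image-2.jpg", "external_id": "image-2"},  # external_id key assumed
    "/local/path/image-3.jpg",                                                   # plain string assumed to be a file path
])
task.wait_till_done()                                   # Task supports waiting until the import finishes (method name assumed)
found = dataset.data_row_for_external_id("image-2")     # raises ResourceNotFoundError if missing or ambiguous
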
5 changes: 4 additions & 1 deletion labelbox/schema/label.py
@@ -28,13 +28,15 @@ def __init__(self, *args, **kwargs):
@staticmethod
def bulk_delete(labels):
""" Deletes all the given Labels.

Args:
labels (list of Label): The Labels to delete.
"""
BulkDeletable._bulk_delete(labels, False)

def create_review(self, **kwargs):
""" Creates a Review for this label.

Kwargs:
Review attributes. At a minimum a `Review.score` field
value must be provided.
@@ -45,7 +47,8 @@ def create_benchmark(self):

def create_benchmark(self):
""" Creates a Benchmark for this Label.
Return:

Returns:
The newly created Benchmark.
"""
label_id_param = "labelId"
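
A short, hedged sketch of the Label helpers documented above; the import path, the score value, and the use of project.labels() (documented under project.py below) are illustrative.

from labelbox import Label                       # import path assumed

label = next(iter(project.labels()))             # take any Label belonging to the project
review = label.create_review(score=1)            # Review.score is the documented minimum
benchmark = label.create_benchmark()             # returns the newly created Benchmark
Label.bulk_delete([label])                       # deletes every Label in the given list
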
29 changes: 19 additions & 10 deletions labelbox/schema/project.py
@@ -43,6 +43,7 @@ class Project(DbObject, Updateable, Deletable):

def create_label(self, **kwargs):
""" Creates a label on this Project.

Kwargs:
Label attributes. At the minimum the label `DataRow`.
"""
@@ -107,9 +108,10 @@ def labels(self, datasets=None, order_by=None):
def export_labels(self, timeout_seconds=60):
""" Calls the server-side Label exporting that generates a JSON
payload, and returns the URL to that payload.

Args:
timeout_seconds (float): Max waiting time, in seconds.
Return:
Returns:
URL of the data file with this Project's labels. If the server
didn't generate during the `timeout_seconds` period, None
is returned.
@@ -136,6 +138,7 @@ def export_labels(self, timeout_seconds=60):

def labeler_performance(self):
""" Returns the labeler performances for this Project.

Returns:
A PaginatedCollection of LabelerPerformance objects.
"""
@@ -161,9 +164,10 @@ def create_labeler_performance(client, result):

def review_metrics(self, net_score):
""" Returns this Project's review metrics.

Args:
net_score (None or Review.NetScore): Indicates desired metric.
Return:
Returns:
int, aggregation count of reviews for given net_score.
"""
if net_score not in (None,) + tuple(Entity.Review.NetScore):
@@ -180,8 +184,10 @@ def review_metrics(self, net_score):

def setup(self, labeling_frontend, labeling_frontend_options):
""" Finalizes the Project setup.

Args:
labeling_frontend (LabelingFrontend): The labeling frontend to use.
labeling_frontend (LabelingFrontend): Which UI to use to label the
data.
labeling_frontend_options (dict or str): Labeling frontend options,
a.k.a. project ontology. If given a `dict` it will be converted
to `str` using `json.dumps`.
@@ -210,8 +216,8 @@ def set_labeling_parameter_overrides(self, data):
Args:
data (iterable): An iterable of tuples. Each tuple must contain
(DataRow, priority, numberOfLabels) for the new override.
Return:
bool indicating if the operation was a success.
Returns:
bool, indicates if the operation was a success.
"""
data_str = ",\n".join(
"{dataRow: {id: \"%s\"}, priority: %d, numLabels: %d }" % (
@@ -226,10 +232,11 @@ def set_labeling_parameter_overrides(self, data):

def unset_labeling_parameter_overrides(self, data_rows):
""" Removes labeling parameter overrides to this project.

Args:
data_rows (iterable): An iterable of DataRows.
Return:
bool indicating if the operation was a success.
Returns:
bool, indicates if the operation was a success.
"""
id_param = "projectId"
query_str = """mutation UnsetLabelingParameterOverridesPyApi($%s: ID!){
@@ -241,7 +248,8 @@ def unset_labeling_parameter_overrides(self, data_rows):
return res["project"]["unsetLabelingParameterOverrides"]["success"]

def upsert_review_queue(self, quota_factor):
""" Reinitiate the review queue for this project.
""" Reinitiates the review queue for this project.

Args:
quota_factor (float): Which part (percentage) of the queue
to reinitiate. Between 0 and 1.
@@ -257,11 +265,12 @@ def upsert_review_queue(self, quota_factor):


def extend_reservations(self, queue_type):
""" Extend all the current reservations for the current user on the given
""" Extends all the current reservations for the current user on the given
queue type.

Args:
queue_type (str): Either "LabelingQueue" or "ReviewQueue"
Return:
Returns:
int, the number of reservations that were extended.
"""
if queue_type not in ("LabelingQueue", "ReviewQueue"):
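
A hedged walkthrough of the Project methods documented above, reusing client, project and data_row from the earlier sketches; the ontology dict passed to setup() is a placeholder.

frontend = next(iter(client.get_labeling_frontends()))              # pick any available LabelingFrontend
project.setup(frontend, {"tools": []})                               # a dict is converted to str via json.dumps
ok = project.set_labeling_parameter_overrides([(data_row, 5, 3)])    # (DataRow, priority, numberOfLabels)
export_url = project.export_labels(timeout_seconds=120)              # None if the payload was not ready in time
reviews = project.review_metrics(None)                               # aggregated review count for the given net_score
project.upsert_review_queue(0.5)                                      # reinitiates half of the review queue
extended = project.extend_reservations("LabelingQueue")              # or "ReviewQueue"; returns the extension count
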
4 changes: 3 additions & 1 deletion labelbox/schema/webhook.py
@@ -28,6 +28,7 @@ class Webhook(DbObject, Updateable):
@staticmethod
def create(client, topics, url, secret, project):
""" Creates a Webhook.

Args:
client (Client): The Labelbox client used to connect
to the server.
@@ -39,7 +40,7 @@ def create(client, topics, url, secret, project):
project (Project or None): The project for which notifications
should be sent. If None notifications are sent for all
events in your organization.
Return:
Returns:
A newly created Webhook.
"""
project_str = "" if project is None \
@@ -58,6 +59,7 @@ def update(self, topics=None, url=None, status=None):

def update(self, topics=None, url=None, status=None):
""" Updates this Webhook.

Args:
topics (list of str): The new topics value, optional.
url (str): The new URL value, optional.
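
Finally, a hedged sketch of the Webhook API documented above; the import path and the topic name are illustrative assumptions.

from labelbox import Webhook                           # import path assumed

webhook = Webhook.create(
    client,
    topics=["LABEL_CREATED"],                          # topic name is illustrative
    url="https://example.com/labelbox-hook",
    secret="shared-secret",
    project=project,                                   # pass None to receive events for the whole organization
)
webhook.update(url="https://example.com/new-hook")     # topics, url and status are each optional
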