6 changes: 5 additions & 1 deletion   labelbox/client.py
@@ -1178,9 +1178,13 @@ def _format_failed_rows(rows: List[str],
             errors.extend(
                 _format_failed_rows(data['deletedDataRowGlobalKeys'],
                                     "Data Row deleted"))
+
+        # Invalid results may contain empty string, so we must filter
+        # them prior to checking for PARTIAL_SUCCESS
+        filtered_results = list(filter(lambda r: r != '', results))
         if not errors:
             status = CollectionJobStatus.SUCCESS.value
-        elif errors and results:
+        elif errors and len(filtered_results) > 0:
             status = CollectionJobStatus.PARTIAL_SUCCESS.value
         else:
             status = CollectionJobStatus.FAILURE.value
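
For context, a minimal standalone sketch of the status resolution this hunk produces; the CollectionJobStatus values and the shape of errors/results are assumed from the surrounding code and the tests below, not copied verbatim from the SDK:

from enum import Enum

class CollectionJobStatus(Enum):
    # Assumed to mirror the SDK enum referenced in the hunk above.
    SUCCESS = "SUCCESS"
    PARTIAL_SUCCESS = "PARTIAL SUCCESS"
    FAILURE = "FAILURE"

def resolve_status(errors, results):
    # Unresolved entries come back as empty strings, so drop them before
    # deciding between PARTIAL_SUCCESS and FAILURE.
    filtered_results = list(filter(lambda r: r != '', results))
    if not errors:
        return CollectionJobStatus.SUCCESS.value
    elif errors and len(filtered_results) > 0:
        return CollectionJobStatus.PARTIAL_SUCCESS.value
    else:
        return CollectionJobStatus.FAILURE.value

# One bad global key: errors is non-empty, results keep a '' placeholder,
# so the job is reported as a partial success rather than a failure.
assert resolve_status([{'error': 'Data Row not found'}],
                      ['', 'some-data-row-uid']) == "PARTIAL SUCCESS"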
9 changes: 4 additions & 5 deletions   labelbox/schema/task.py
@@ -63,9 +63,7 @@ def wait_till_done(self, timeout_seconds=300) -> None:
         check_frequency = 2  # frequency of checking, in seconds
         while True:
             if self.status != "IN_PROGRESS":
-                if self.status == "FAILED" or (self.status == "COMPLETE" and
-                                               self.failed_data_rows
-                                               is not None):
+                if self.errors is not None:
                     logger.warning(
                         "There are errors present. Please look at `task.errors` for more details"
                     )
@@ -87,7 +85,7 @@ def errors(self) -> Optional[Dict[str, Any]]:
             result = self._fetch_remote_json()
             return result["error"]
         elif self.status == "COMPLETE":
-            return self.failed_data_rows()
+            return self.failed_data_rows
         return None
 
     @property
@@ -105,11 +103,12 @@ def result(self) -> List[Dict[str, Any]]:
                 'global_key': data_row.get('globalKey'),
             } for data_row in result['createdDataRows']]
 
+    @property
     def failed_data_rows(self) -> Optional[Dict[str, Any]]:
         """ Fetch data rows which failed to be created for an import task.
         """
         result = self._fetch_remote_json()
-        if result.get("errors") is not None:
+        if len(result.get("errors", [])) > 0:
             return result["errors"]
         else:
             return None
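
A hedged usage sketch of the reworked Task surface: failed_data_rows is now a property (no call parentheses), and a bulk import containing bad rows finishes as COMPLETE with the per-row problems exposed through errors/failed_data_rows. The dataset setup is elided and the payload shape is assumed from the updated tests below:

task = dataset.create_data_rows(rows)  # some rows valid, some invalid
task.wait_till_done()

# The task no longer flips to FAILED for row-level problems.
assert task.status == "COMPLETE"

if task.errors is not None:
    # Accessed as a property, not called as a method.
    for failure in task.failed_data_rows:
        print(failure['message'])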
5 changes: 5 additions & 0 deletions   tests/integration/test_data_row_media_attributes.py
@@ -1,5 +1,10 @@
+from time import sleep
+
+
 def test_export_empty_media_attributes(configured_project_with_label):
     project, _, _, _ = configured_project_with_label
+    # Wait for exporter to retrieve latest labels
+    sleep(10)
     labels = project.label_generator()
     label = next(labels)
     assert label.data.media_attributes == {}
3 changes: 3 additions & 0 deletions   tests/integration/test_data_row_metadata.py
@@ -1,4 +1,5 @@
 from datetime import datetime
+from time import sleep
 
 import pytest
 import uuid
@@ -70,6 +71,8 @@ def make_metadata(dr_id) -> DataRowMetadata:
 
 def test_export_empty_metadata(configured_project_with_label):
     project, _, _, _ = configured_project_with_label
+    # Wait for exporter to retrieve latest labels
+    sleep(10)
     labels = project.label_generator()
     label = next(labels)
     assert label.data.metadata == []
6 changes: 4 additions & 2 deletions   tests/integration/test_data_rows.py
@@ -352,7 +352,8 @@ def test_create_data_rows_with_invalid_metadata(dataset, image_url):
         DataRow.metadata_fields: fields
     }])
     task.wait_till_done()
-    assert task.status == "FAILED"
+    assert task.status == "COMPLETE"
+    assert len(task.failed_data_rows) > 0
 
 
 def test_create_data_rows_with_metadata_missing_value(dataset, image_url):
@@ -632,7 +633,8 @@ def test_data_row_bulk_creation_with_same_global_keys(dataset, sample_image):
     }])
 
     task.wait_till_done()
-    assert task.status == "FAILED"
+    assert task.status == "COMPLETE"
+    assert len(task.failed_data_rows) > 0
     assert len(list(dataset.data_rows())) == 0
 
     task = dataset.create_data_rows([{
5 changes: 3 additions & 2 deletions   tests/integration/test_global_keys.py
@@ -251,9 +251,10 @@ def test_get_data_row_ids_for_invalid_global_keys(client, dataset, image_url):
     assert res['status'] == "PARTIAL SUCCESS"
 
     assert len(res['errors']) == 1
-    assert len(res['results']) == 1
+    assert len(res['results']) == 2
 
     assert res['errors'][0]['error'] == "Data Row not found"
     assert res['errors'][0]['global_key'] == gk_1
 
-    assert res['results'][0] == dr_2.uid
+    assert res['results'][0] == ''
+    assert res['results'][1] == dr_2.uid
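
The behavior these assertions pin down: results now stays positionally aligned with the requested global keys, with an empty string standing in for each key that could not be resolved (which is also why client.py above filters empty strings before computing the status). A caller that only wants resolved ids can filter them out; the method name and two-key request are assumed from the test above:

res = client.get_data_row_ids_for_global_keys([gk_1, gk_2])  # gk_1 is invalid

# Same length as the request, with '' marking the unresolved key.
assert len(res['results']) == 2
valid_ids = [uid for uid in res['results'] if uid != '']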
9 changes: 4 additions & 5 deletions   tests/integration/test_task.py
@@ -23,12 +23,11 @@ def test_task_errors(dataset, image_url):
     ])
     assert task in client.get_user().created_tasks()
     task.wait_till_done()
-    assert task.status == "FAILED"
+    assert task.status == "COMPLETE"
+    assert len(task.failed_data_rows) > 0
     assert task.errors is not None
-    assert 'message' in task.errors
-    with pytest.raises(Exception) as exc_info:
-        task.result
-    assert str(exc_info.value).startswith("Job failed. Errors : {")
+    assert 'message' in task.errors[0]
+    assert len(task.result) == 0
 
 
 def test_task_success_json(dataset, image_url):