6 changes: 3 additions & 3 deletions labelbox/data/serialization/ndjson/objects.py
@@ -353,7 +353,7 @@ def from_common(cls, segments: List[DICOMObjectAnnotation], data: VideoData,
segments = [NDDicomSegment.from_common(segment) for segment in segments]

return cls(segments=segments,
-dataRow=DataRow(id=data.uid),
+dataRow=DataRow(id=data.uid, global_key=data.global_key),
name=name,
schema_id=feature_schema_id,
uuid=extra.get('uuid'),
@@ -465,7 +465,7 @@ def from_common(cls,
confidence: Optional[float] = None) -> "NDDocumentEntity":

return cls(text_selections=document_entity.text_selections,
-dataRow=DataRow(id=data.uid),
+dataRow=DataRow(id=data.uid, global_key=data.global_key),
name=name,
schema_id=feature_schema_id,
uuid=extra.get('uuid'),
@@ -494,7 +494,7 @@ def from_common(
return cls(location=Location(start=conversation_entity.start,
end=conversation_entity.end),
message_id=conversation_entity.message_id,
-dataRow=DataRow(id=data.uid),
+dataRow=DataRow(id=data.uid, global_key=data.global_key),
name=name,
schema_id=feature_schema_id,
uuid=extra.get('uuid'),
15 changes: 15 additions & 0 deletions tests/data/assets/ndjson/conversation_entity_import_global_key.json
@@ -0,0 +1,15 @@
[{
"location": {
"start": 67,
"end": 128
},
"messageId": "some-message-id",
"uuid": "5ad9c52f-058d-49c8-a749-3f20b84f8cd4",
"dataRow": {
"globalKey": "05e8ee85-072e-4eb2-b30a-501dee9b0d9d"
},
"name": "some-text-entity",
"schemaId": "cl6xnuwt95lqq07330tbb3mfd",
"classifications": [],
"confidence": 0.53
}]
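The new asset pairs a conversation entity with a dataRow that carries only a globalKey. A minimal sketch of the round trip the new tests run against it; the path matches the entry added to the parametrize list in test_global_key.py further down, and the converter calls mirror the new test_conversation.py:

```python
import json

from labelbox.data.serialization.ndjson.converter import NDJsonConverter

# Assumed path: taken from the parametrize list added in test_global_key.py.
ASSET = 'tests/data/assets/ndjson/conversation_entity_import_global_key.json'

with open(ASSET, 'r') as f:
    data = json.load(f)

# Deserialize the NDJSON payload into annotation types, then serialize it back.
labels = list(NDJsonConverter.deserialize(data))
res = list(NDJsonConverter.serialize(labels))

# The round trip should reproduce the payload, including the globalKey dataRow.
assert res == data
```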
25 changes: 0 additions & 25 deletions tests/data/assets/ndjson/pdf_document_entity_import.json

This file was deleted.

28 changes: 27 additions & 1 deletion tests/data/assets/ndjson/pdf_import.json
@@ -97,4 +97,30 @@
"height": 203.83000000000004,
"width": 0.37999999999999545
}
-}]
+},
{
"uuid": "f6879f59-d2b5-49c2-aceb-d9e8dc478673",
"dataRow": {
"id": "ckrb1sf1i1g7i0ybcdc6oc8ct"
},
"name": "named_entity",
"schemaId": "cl6xnuwt95lqq07330tbb3mfd",
"classifications": [],
"textSelections": [
{
"groupId": "2f4336f4-a07e-4e0a-a9e1-5629b03b719b",
"tokenIds": [
"3f984bf3-1d61-44f5-b59a-9658a2e3440f",
"3bf00b56-ff12-4e52-8cc1-08dbddb3c3b8",
"6e1c3420-d4b7-4c5a-8fd6-ead43bf73d80",
"87a43d32-af76-4a1d-b262-5c5f4d5ace3a",
"e8606e8a-dfd9-4c49-a635-ad5c879c75d0",
"67c7c19e-4654-425d-bf17-2adb8cf02c30",
"149c5e80-3e07-49a7-ab2d-29ddfe6a38fa",
"b0e94071-2187-461e-8e76-96c58738a52c"
],
"page": 1
}
]
}
]
27 changes: 26 additions & 1 deletion tests/data/assets/ndjson/pdf_import_global_key.json
@@ -97,4 +97,29 @@
"height": 203.83000000000004,
"width": 0.37999999999999545
}
-}]
+},
{
"uuid": "f6879f59-d2b5-49c2-aceb-d9e8dc478673",
"dataRow": {
"globalKey": "05e8ee85-072e-4eb2-b30a-501dee9b0d9d"
},
"name": "named_entity",
"classifications": [],
"textSelections": [
{
"groupId": "2f4336f4-a07e-4e0a-a9e1-5629b03b719b",
"tokenIds": [
"3f984bf3-1d61-44f5-b59a-9658a2e3440f",
"3bf00b56-ff12-4e52-8cc1-08dbddb3c3b8",
"6e1c3420-d4b7-4c5a-8fd6-ead43bf73d80",
"87a43d32-af76-4a1d-b262-5c5f4d5ace3a",
"e8606e8a-dfd9-4c49-a635-ad5c879c75d0",
"67c7c19e-4654-425d-bf17-2adb8cf02c30",
"149c5e80-3e07-49a7-ab2d-29ddfe6a38fa",
"b0e94071-2187-461e-8e76-96c58738a52c"
],
"page": 1
}
]
}
]
24 changes: 24 additions & 0 deletions tests/data/assets/ndjson/pdf_import_name_only.json
@@ -90,4 +90,28 @@
"height": 203.83000000000004,
"width": 0.37999999999999545
}
},
{
"uuid": "f6879f59-d2b5-49c2-aceb-d9e8dc478673",
"dataRow": {
"id": "ckrb1sf1i1g7i0ybcdc6oc8ct"
},
"name": "named_entity",
"classifications": [],
"textSelections": [
{
"groupId": "2f4336f4-a07e-4e0a-a9e1-5629b03b719b",
"tokenIds": [
"3f984bf3-1d61-44f5-b59a-9658a2e3440f",
"3bf00b56-ff12-4e52-8cc1-08dbddb3c3b8",
"6e1c3420-d4b7-4c5a-8fd6-ead43bf73d80",
"87a43d32-af76-4a1d-b262-5c5f4d5ace3a",
"e8606e8a-dfd9-4c49-a635-ad5c879c75d0",
"67c7c19e-4654-425d-bf17-2adb8cf02c30",
"149c5e80-3e07-49a7-ab2d-29ddfe6a38fa",
"b0e94071-2187-461e-8e76-96c58738a52c"
],
"page": 1
}
]
}]
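With pdf_document_entity_import.json deleted, the named_entity annotation and its textSelections now live inside each pdf_import*.json asset, so the PDF entity case is exercised by the existing round-trip tests. Those tests compare round_dict(x) on both sides rather than raw dicts; the helper's definition is not shown in this diff, so the version below is a hypothetical stand-in used only to illustrate why plain equality would be too strict for the float bbox coordinates:

```python
import json

from labelbox.data.serialization.ndjson.converter import NDJsonConverter


def round_dict(data):
    # Hypothetical stand-in for the round_dict helper referenced by
    # test_document.py and test_global_key.py: round float leaves so that
    # precision drift in bbox coordinates (e.g. 0.37999999999999545) does
    # not fail the comparison.
    if isinstance(data, dict):
        return {key: round_dict(value) for key, value in data.items()}
    if isinstance(data, (list, tuple)):
        return [round_dict(item) for item in data]
    if isinstance(data, float):
        return round(data)
    return data


# Assumed path: one of the three PDF assets updated above.
with open('tests/data/assets/ndjson/pdf_import.json', 'r') as f:
    data = json.load(f)

labels = list(NDJsonConverter.deserialize(data))
res = list(NDJsonConverter.serialize(labels))
assert [round_dict(x) for x in res] == [round_dict(x) for x in data]
```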
2 changes: 0 additions & 2 deletions tests/data/serialization/ndjson/test_classification.py
@@ -1,8 +1,6 @@
import json
-from labelbox.data.serialization.ndjson.classification import NDRadio

from labelbox.data.serialization.ndjson.converter import NDJsonConverter
-from labelbox.data.serialization.ndjson.objects import NDLine


def test_classification():
17 changes: 17 additions & 0 deletions tests/data/serialization/ndjson/test_conversation.py
@@ -0,0 +1,17 @@
import json

import pytest

from labelbox.data.serialization.ndjson.converter import NDJsonConverter


@pytest.mark.parametrize("filename", [
    "tests/data/assets/ndjson/conversation_entity_import.json",
    "tests/data/assets/ndjson/conversation_entity_without_confidence_import.json"
])
def test_conversation_entity_import(filename: str):
    with open(filename, 'r') as file:
        data = json.load(file)
    res = list(NDJsonConverter.deserialize(data))
    res = list(NDJsonConverter.serialize(res))
    assert res == data
55 changes: 55 additions & 0 deletions tests/data/serialization/ndjson/test_dicom.py
@@ -19,6 +19,9 @@

label = lb_types.Label(data=lb_types.DicomData(uid="test-uid"),
                       annotations=dicom_polyline_annotations)
label_with_global_key = lb_types.Label(
    data=lb_types.DicomData(global_key="test-global-key"),
    annotations=dicom_polyline_annotations)

label_ndjson = {
    'classifications': [],
@@ -51,26 +54,78 @@
    }],
}

label_ndjson_with_global_key = {
    'classifications': [],
    'dataRow': {
        'globalKey': 'test-global-key'
    },
    'name': 'dicom_polyline',
    'groupKey': 'axial',
    'segments': [{
        'keyframes': [{
            'frame': 2,
            'line': [
                {
                    'x': 680.0,
                    'y': 100.0
                },
                {
                    'x': 100.0,
                    'y': 190.0
                },
                {
                    'x': 190.0,
                    'y': 220.0
                },
            ]
        }]
    }],
}


def test_serialize_dicom_polyline_annotation():
    serialized_label = next(NDJsonConverter().serialize([label]))
    serialized_label.pop('uuid')
    assert serialized_label == label_ndjson


def test_serialize_dicom_polyline_annotation_with_global_key():
    serialized_label = next(
        NDJsonConverter().serialize([label_with_global_key]))
    serialized_label.pop('uuid')
    assert serialized_label == label_ndjson_with_global_key


def test_deserialize_dicom_polyline_annotation():
    deserialized_label = next(NDJsonConverter().deserialize([label_ndjson]))
    deserialized_label.annotations[0].extra.pop('uuid')
    assert deserialized_label == label


def test_deserialize_dicom_polyline_annotation_with_global_key():
    deserialized_label = next(
        NDJsonConverter().deserialize([label_ndjson_with_global_key]))
    deserialized_label.annotations[0].extra.pop('uuid')
    assert deserialized_label == label_with_global_key


def test_serialize_deserialize_dicom_polyline_annotation():
    labels = list(NDJsonConverter.deserialize([label_ndjson]))
    res = list(NDJsonConverter.serialize(labels))
    res[0].pop('uuid')
    assert res == [label_ndjson]


def test_serialize_deserialize_dicom_polyline_annotation_with_global_key():
    labels = list(NDJsonConverter.deserialize([label_ndjson_with_global_key]))
    res = list(NDJsonConverter.serialize(labels))
    res[0].pop('uuid')
    assert res == [label_ndjson_with_global_key]


def test_deserialize_nd_dicom_segments():
    nd_dicom_segments = NDDicomSegments(**label_ndjson)
    assert isinstance(nd_dicom_segments, NDDicomSegments)
14 changes: 0 additions & 14 deletions tests/data/serialization/ndjson/test_document.py
@@ -28,20 +28,6 @@ def test_pdf():
    f.close()


-def test_pdf_document_entity():
-    """
-    Tests a pdf file with bbox annotations only
-    """
-    with open('tests/data/assets/ndjson/pdf_document_entity_import.json',
-              'r') as f:
-        data = json.load(f)
-    res = list(NDJsonConverter.deserialize(data))
-    res = list(NDJsonConverter.serialize(res))
-    assert [round_dict(x) for x in res] == [round_dict(x) for x in data]
-
-    f.close()
-
-
def test_pdf_with_name_only():
    """
    Tests a pdf file with bbox annotations only
3 changes: 2 additions & 1 deletion tests/data/serialization/ndjson/test_global_key.py
@@ -24,7 +24,8 @@ def round_dict(data):
    'tests/data/assets/ndjson/classification_import_global_key.json',
    'tests/data/assets/ndjson/metric_import_global_key.json',
    'tests/data/assets/ndjson/polyline_import_global_key.json',
-    'tests/data/assets/ndjson/text_entity_import_global_key.json'
+    'tests/data/assets/ndjson/text_entity_import_global_key.json',
+    'tests/data/assets/ndjson/conversation_entity_import_global_key.json',
])
def test_many_types(filename: str):
    with open(filename, 'r') as f:
12 changes: 0 additions & 12 deletions tests/data/serialization/ndjson/test_text_entity.py
@@ -16,15 +16,3 @@ def test_text_entity_import(filename: str):
    res = list(NDJsonConverter.deserialize(data))
    res = list(NDJsonConverter.serialize(res))
    assert res == data
-
-
-@pytest.mark.parametrize("filename", [
-    "tests/data/assets/ndjson/conversation_entity_import.json",
-    "tests/data/assets/ndjson/conversation_entity_without_confidence_import.json"
-])
-def test_conversation_entity_import(filename: str):
-    with open(filename, 'r') as file:
-        data = json.load(file)
-    res = list(NDJsonConverter.deserialize(data))
-    res = list(NDJsonConverter.serialize(res))
-    assert res == data