157 changes: 89 additions & 68 deletions examples/prediction_upload/video_predictions.ipynb
@@ -41,13 +41,14 @@
"- Polyline\n",
"- Classification - radio\n",
"- Classification - checklist\n",
"- Classification - free text\n",
"- Nested classifications \n",
"\n",
"**NOT** supported:\n",
"- Polygons \n",
"- Segmentation masks\n",
"- Free form text classifications\n",
"\n",
"Please note that this list of unsupported annotations only refers to limitations for importing annotations. For example, when using the Labelbox editor, segmentation masks can be created and edited on video assets.\n"
"- Polygons [not supported in video editor or model]\n",
"- Raster segmentation masks [not supported in model] \n",
"- Vector segmentation masks [not supported in video editor]\n",
"\n"
],
"cell_type": "markdown"
},
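Note for readers of this diff: the frame-level types listed above map onto the `lb_types` video annotation classes used later in the notebook. Below is a minimal sketch of one of them, a keyframe bounding-box prediction; it assumes the notebook's usual `import labelbox.types as lb_types`, and the tool name and coordinates are placeholders that must match your own ontology.

```python
# Minimal sketch (assumption): one keyframe bounding box on frame 13.
# "bbox_video" must match a BBOX tool in the ontology; coordinates are placeholders.
import labelbox.types as lb_types

bbox_prediction = [
    lb_types.VideoObjectAnnotation(
        name="bbox_video",
        keyframe=True,        # this frame is a keyframe of the segment
        frame=13,             # frame number within the video
        segment_index=0,      # contiguous segment this keyframe belongs to
        value=lb_types.Rectangle(
            start=lb_types.Point(x=146.0, y=98.0),  # top-left corner
            end=lb_types.Point(x=382.0, y=341.0),   # bottom-right corner
        ),
    )
]
```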
@@ -680,45 +681,6 @@
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"source": [
"##### Raster Segmentation ########\n",
"\n",
"instance_uri = \"https://storage.googleapis.com/labelbox-datasets/video-sample-data/mask_example.png\"\n",
"\n",
"\n",
"\n",
"video_mask_prediction=[\n",
" lb_types.VideoMaskAnnotation(\n",
" frames=[\n",
" lb_types.MaskFrame(index=10, instance_uri=instance_uri)\n",
" ],\n",
" instances=[\n",
" lb_types.MaskInstance(color_rgb=(255,255,255), name=\"video_mask\")\n",
" ] \n",
" )\n",
"]\n",
"\n",
"video_mask_prediction_ndjson = {\n",
" \"masks\": {\n",
" \"frames\": [{\n",
" \"index\": 10,\n",
" \"instanceURI\": instance_uri\n",
" }],\n",
" \"instances\": [\n",
" {\n",
" \"colorRGB\": (255, 255, 255),\n",
" \"name\": \"video_mask\",\n",
" }\n",
" ]\n",
" }\n",
"}"
],
"cell_type": "code",
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"source": [
@@ -763,7 +725,23 @@
"print(\"Failed data rows: \",task.failed_data_rows)"
],
"cell_type": "code",
"outputs": [],
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"There are errors present. Please look at `task.errors` for more details\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Errors: Duplicate global keys found: sample-video-2.mp4\n",
"Failed data rows: [{'message': 'Duplicate global keys found: sample-video-2.mp4', 'failedDataRows': [{'globalKey': 'sample-video-2.mp4', 'rowData': 'https://storage.googleapis.com/labelbox-datasets/video-sample-data/sample-video-2.mp4', 'attachmentInputs': []}]}]\n"
]
}
],
"execution_count": null
},
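The error output above comes from re-running the data-row creation cell with a global key that already exists in the workspace. A small sketch of one way around it (not part of the original notebook) is to make the global key unique per run:

```python
# Sketch (assumption): suffix the global key with a UUID so repeated runs
# do not trip "Duplicate global keys found".
import uuid

global_key = f"sample-video-2.mp4-{uuid.uuid4()}"

asset = {
    "row_data": "https://storage.googleapis.com/labelbox-datasets/video-sample-data/sample-video-2.mp4",
    "global_key": global_key,
}

task = dataset.create_data_rows([asset])   # `dataset` is created earlier in the notebook
task.wait_till_done()
print("Errors:", task.errors)
print("Failed data rows:", task.failed_data_rows)
```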
{
@@ -782,7 +760,6 @@
" lb.Tool(tool=lb.Tool.Type.BBOX, name=\"bbox_video\"),\n",
" lb.Tool(tool=lb.Tool.Type.POINT, name=\"point_video\"),\n",
" lb.Tool(tool=lb.Tool.Type.LINE, name=\"line_video_frame\"),\n",
" lb.Tool(tool=lb.Tool.Type.RASTER_SEGMENTATION, name=\"video_mask\"),\n",
" lb.Tool(\n",
" tool=lb.Tool.Type.BBOX, name=\"bbox_class\",\n",
" classifications=[\n",
@@ -872,7 +849,7 @@
"\n",
"ontology = client.create_ontology(\"Ontology Video Annotations\", \n",
" ontology_builder.asdict(), \n",
" # media_type=lb.MediaType.Video\n",
" media_type=lb.MediaType.Video\n",
" )"
],
"cell_type": "code",
@@ -912,7 +889,18 @@
"model_run.upsert_data_rows(global_keys=[global_key])"
],
"cell_type": "code",
"outputs": [],
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 90,
"metadata": {},
"output_type": "execute_result"
}
],
"execution_count": null
},
{
@@ -947,7 +935,6 @@
" frame_bbox_with_checklist_subclass_prediction,\n",
" global_radio_prediction,\n",
" global_checklist_prediction,\n",
" video_mask_prediction,\n",
" text_prediction\n",
" ]\n",
"\n",
@@ -988,7 +975,6 @@
" frame_bbox_with_checklist_subclass_prediction_ndjson,\n",
" global_radio_classification_ndjson,\n",
" global_checklist_classification_ndjson,\n",
" video_mask_prediction_ndjson,\n",
" text_prediction_ndjson\n",
"]: \n",
" annotation.update({\n",
@@ -1022,7 +1008,16 @@
"print(\"Status of uploads: \", upload_job_prediction.statuses)"
],
"cell_type": "code",
"outputs": [],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Errors: []\n",
"Status of uploads: [{'uuid': 'e3145fa9-42b8-466f-9ac5-0130aeab1060', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': '04e035b4-3083-4408-9a67-9cb52cbe027b', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': 'aafaeda7-8ba3-4df1-8e42-a02fe72add94', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': '956687f4-091c-4dc0-9c84-0b4e35ae451b', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': 'b7c6e33e-2cc4-46be-8a1f-d920cabf115b', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': 'daff0aff-8834-4e80-97f5-cf2d38684c5c', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': 'a611c275-39d8-47aa-808c-969692eb1698', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': '9ed7be94-bf89-432b-99ff-c834f31087f0', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': '6ed2ef7c-e83a-48e6-8073-c291779c7497', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': '14a4950f-0835-49bb-a968-81c41cda6869', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': '5bd23f6c-ab87-44b0-a32b-49c84f72b06c', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}]\n"
]
}
],
"execution_count": null
},
{
@@ -1051,7 +1046,15 @@
"project.setup_editor(ontology)"
],
"cell_type": "code",
"outputs": [],
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Default createProject behavior will soon be adjusted to prefer batch projects. Pass in `queue_mode` parameter explicitly to opt-out for the time being.\n"
]
}
],
"execution_count": null
},
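The stderr warning captured above is the SDK's deprecation notice about the default project queue mode. A sketch of one way to address it, assuming the `lb.QueueMode` enum available in SDK versions of this era, is to pass `queue_mode` explicitly when creating the project (names here are placeholders):

```python
# Sketch (assumption): create the project with an explicit queue mode so the
# deprecation warning does not fire; batch mode matches the batch workflow below.
project = client.create_project(
    name="video_prediction_demo",          # placeholder project name
    queue_mode=lb.QueueMode.Batch,
    media_type=lb.MediaType.Video,
)
project.setup_editor(ontology)
```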
{
@@ -1071,7 +1074,18 @@
")"
],
"cell_type": "code",
"outputs": [],
"outputs": [
{
"data": {
"text/plain": [
"<Batch ID: 28e7da10-f660-11ed-9f2b-f9c234af8129>"
]
},
"execution_count": 95,
"metadata": {},
"output_type": "execute_result"
}
],
"execution_count": null
},
{
@@ -1368,18 +1382,6 @@
"]\n",
"\n",
"\n",
"instance_uri = \"https://storage.googleapis.com/labelbox-datasets/video-sample-data/mask_example.png\"\n",
"video_mask_annotation=[\n",
" lb_types.VideoMaskAnnotation(\n",
" frames=[\n",
" lb_types.MaskFrame(index=10, instance_uri=instance_uri)\n",
" ],\n",
" instances=[\n",
" lb_types.MaskInstance(color_rgb=(255,255,255), name=\"video_mask\")\n",
" ] \n",
" )\n",
"]\n",
"\n",
"text_annotation = [lb_types.ClassificationAnnotation(\n",
" name=\"free_text\", # must match your ontology feature's name\n",
" value=lb_types.Text(answer=\"sample text\")\n",
@@ -1416,7 +1418,6 @@
" polyline_annotation,\n",
" global_checklist_annotation,\n",
" global_radio_annotation,\n",
" video_mask_annotation,\n",
" nested_checklist_annotation,\n",
" nested_radio_annotation,\n",
" text_annotation\n",
@@ -1457,7 +1458,16 @@
"print(\"Status of uploads: \", upload_job_annotation.statuses)"
],
"cell_type": "code",
"outputs": [],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Errors: []\n",
"Status of uploads: [{'uuid': 'c5ddce8f-c672-49d7-bc43-f4cc5afbe0f2', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': '9b2bf816-6f22-4b05-818a-d200a2061a94', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': 'f6bf224d-a295-484b-8078-16e49e7583ec', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': '0602a136-8383-49c0-be64-36ea1499cd31', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': '306df4bd-a22d-48c7-bfee-fb0e8695d965', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': '60ec1dfc-64bb-4354-98f0-67aec7794bac', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': '6fd8c58d-883d-4fdb-9bdc-598c663e0ad4', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': '200c256f-b6cd-4469-987a-e9d773dc5715', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': 'b1c3ccbe-21fd-4c41-aeee-e2df9732cd5f', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': '7682c660-147a-4cf8-9b3a-a15859100142', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}, {'uuid': '4e80a895-3722-49e9-8d00-15eded343a60', 'dataRow': {'id': 'clfco73at0080079n5dhm9y3a', 'globalKey': 'sample-video-2.mp4'}, 'status': 'SUCCESS'}]\n"
]
}
],
"execution_count": null
},
{
@@ -1474,7 +1484,18 @@
"model_run.upsert_labels(project_id=project.uid)"
],
"cell_type": "code",
"outputs": [],
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 99,
"metadata": {},
"output_type": "execute_result"
}
],
"execution_count": null
},
{
Expand Down