From 3b37a12191b67607e907ddae93807dd552d29dc2 Mon Sep 17 00:00:00 2001
From: lmoehlenbrock <120418910+lmoehlenbrock@users.noreply.github.com>
Date: Tue, 24 Jan 2023 13:54:26 -0600
Subject: [PATCH] Added Colab notebook for video prediction import
---
.../prediction_upload/video_predictions.ipynb | 1257 +++++++++++++++++
1 file changed, 1257 insertions(+)
create mode 100644 examples/prediction_upload/video_predictions.ipynb
diff --git a/examples/prediction_upload/video_predictions.ipynb b/examples/prediction_upload/video_predictions.ipynb
new file mode 100644
index 000000000..2a50a34ff
--- /dev/null
+++ b/examples/prediction_upload/video_predictions.ipynb
@@ -0,0 +1,1257 @@
+{
+ "nbformat": 4,
+ "nbformat_minor": 0,
+ "metadata": {
+ "colab": {
+ "provenance": [],
+ "collapsed_sections": [
+ "RgBYFUxa-VGT",
+ "6FZyvnrqSGuc",
+ "viFHCnBeTD1Y",
+ "T-ZHWWI3JgmX"
+ ]
+ },
+ "kernelspec": {
+ "name": "python3",
+ "display_name": "Python 3"
+ },
+ "language_info": {
+ "name": "python"
+ }
+ },
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "a6a048e8-b5fe-418b-aec4-829b5b6802e5"
+ },
+ "source": [
+ "
\n",
+ " \n",
+ " | "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "51cf1362-1cde-4749-aac7-5fb94473baa7"
+ },
+ "source": [
+ "\n",
+ " \n",
+ " | \n",
+ "\n",
+ "\n",
+ " \n",
+ " | "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "# Video Prediction Import \n",
+ "* This notebook walks you through the process of uploading model predictions to a Model Run. This notebook provides an example for each supported prediction type for video assets.\n",
+ "\n",
+ "A Model Run is a container for the predictions, annotations and metrics of a specific experiment in your ML model development cycle.\n",
+ "\n",
+ "**Supported annotations that can be uploaded through the SDK**\n",
+ "- Bounding box\n",
+ "- Point\n",
+ "- Polyline\n",
+ "- Classification - radio\n",
+ "- Classification - checklist\n",
+ "\n",
+ "**NOT** supported:\n",
+ "- Polygons \n",
+ "- Segmentation masks\n",
+ "- Free form text classifications\n",
+ "\n",
+ "Please note that this list of unsupported annotations only refers to limitations for importing annotations. For example, when using the Labelbox editor, segmentation masks can be created and edited on video assets.\n"
+ ],
+ "metadata": {
+ "id": "9znxMjDYGi0Y"
+ }
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Setup"
+ ],
+ "metadata": {
+ "id": "UtJHIuE8HDRI"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "!pip install -q 'labelbox[data]'"
+ ],
+ "metadata": {
+ "id": "cm8xMaLbGb7v",
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "outputId": "5c29838c-4c03-4903-851f-807967cf3e79"
+ },
+ "execution_count": 2,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "\u001b[?25l \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m0.0/185.5 KB\u001b[0m \u001b[31m?\u001b[0m eta \u001b[36m-:--:--\u001b[0m\r\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m185.5/185.5 KB\u001b[0m \u001b[31m5.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25h Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
+ "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m7.8/7.8 MB\u001b[0m \u001b[31m104.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+ "\u001b[?25h Building wheel for pygeotile (setup.py) ... \u001b[?25l\u001b[?25hdone\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "from labelbox.schema.ontology import OntologyBuilder, Tool, Classification, Option\n",
+ "from labelbox import Client, MALPredictionImport, LabelImport\n",
+ "from labelbox.data.serialization import NDJsonConverter\n",
+ "from labelbox.schema.media_type import MediaType\n",
+ "from labelbox.data.annotation_types import (\n",
+ " Label, ImageData, ObjectAnnotation, MaskData,\n",
+ " Rectangle, Point, Line, Mask,\n",
+ " Radio, Checklist,\n",
+ " ClassificationAnnotation, ClassificationAnswer\n",
+ ")\n",
+ "import uuid\n",
+ "import numpy as np\n",
+ "from labelbox.schema.queue_mode import QueueMode"
+ ],
+ "metadata": {
+ "id": "NIq-6M9kHKSs"
+ },
+ "execution_count": 3,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Replace with your API Key \n",
+ "Guides on [Create an API key](https://docs.labelbox.com/docs/create-an-api-key)"
+ ],
+ "metadata": {
+ "id": "pZ2rBqY8HQoe"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "API_KEY = None\n",
+ "client = Client(API_KEY)"
+ ],
+ "metadata": {
+ "id": "z7ZLKLYLHP__"
+ },
+ "execution_count": 4,
+ "outputs": []
+ },
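+ {
+ "cell_type": "markdown",
+ "source": [
+ "If you prefer not to hardcode the key, a minimal sketch of an alternative (assuming you have set a `LABELBOX_API_KEY` environment variable, an illustrative name rather than one the SDK requires) is to read it from the environment:"
+ ],
+ "metadata": {}
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "import os\n",
+ "\n",
+ "# Read the API key from an environment variable so it is not stored in the notebook.\n",
+ "# LABELBOX_API_KEY is an assumed variable name used only for this example.\n",
+ "API_KEY = os.environ.get(\"LABELBOX_API_KEY\", API_KEY)\n",
+ "client = Client(API_KEY)"
+ ],
+ "metadata": {},
+ "execution_count": null,
+ "outputs": []
+ },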
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Supported Predictions\n",
+ "- Only NDJSON annotations are supported with video assets\n",
+ "- Confidence scores are currently not supported for segment or frame annotations, which are required for bounding box, point, and line for video assets. For this tutorial, only the radio and checklist annotations will have confidence scores."
+ ],
+ "metadata": {
+ "id": "RgBYFUxa-VGT"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "########### Radio Classification ###########\n",
+ "\n",
+ "# NDJSON\n",
+ "radio_prediction_ndjson = {\n",
+ " 'name': 'radio_question',\n",
+ " 'answer': {'name': 'second_radio_answer', 'confidence': 0.5}\n",
+ "} "
+ ],
+ "metadata": {
+ "id": "FJhCAqeR-cUJ"
+ },
+ "execution_count": 86,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "########## Nested Classification ##########\n",
+ "\n",
+ "### Radio #### \n",
+ "\n",
+ "# NDJSON \n",
+ "nested_radio_prediction_ndjson = {\n",
+ " \"name\": \"nested_radio_question\",\n",
+ " \"answer\": {\"name\": \"first_radio_answer\", 'confidence': 0.5 },\n",
+ " \"classifications\" : [\n",
+ " {\n",
+ " 'name': 'sub_radio_question', \n",
+ " 'answer': {'name': 'first_sub_radio_answer', 'confidence': 0.5 }\n",
+ " }\n",
+ " ]\n",
+ "}\n",
+ "\n",
+ "### Checklist #### \n",
+ "\n",
+ "nested_checklist_prediction_ndjson = {\n",
+ " \"name\": \"nested_checklist_question\",\n",
+ " \"answer\": [{\"name\": \"first_checklist_answer\",\n",
+ " 'confidence': 0.5,\n",
+ " \"classifications\" : [\n",
+ " {\n",
+ " \"name\": \"sub_checklist_question\", \n",
+ " \"answer\": {\"name\": \"first_sub_checklist_answer\", 'confidence': 0.5 }\n",
+ " }\n",
+ " ]\n",
+ "}]\n",
+ "}\n",
+ "\n",
+ "\n"
+ ],
+ "metadata": {
+ "id": "R--ZQXHkkYFd"
+ },
+ "execution_count": 87,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "############ Checklist question ############\n",
+ "\n",
+ "# NDJSON\n",
+ "checklist_prediction_ndjson = {\n",
+ " 'name': 'checklist_question',\n",
+ " 'answer': [\n",
+ " {'name': 'first_checklist_answer', 'confidence': 0.5},\n",
+ " {'name': 'second_checklist_answer', 'confidence': 0.5}\n",
+ " ]\n",
+ "}"
+ ],
+ "metadata": {
+ "id": "TY4ejoZZkeZn"
+ },
+ "execution_count": 88,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "####### Bounding box #######\n",
+ "\n",
+ "#NDJSON \n",
+ "bbox_prediction_ndjson = {\n",
+ " \"name\" : \"bbox_video\",\n",
+ " \"segments\" : [{\n",
+ " \"keyframes\" : [\n",
+ " {\n",
+ " \"frame\": 1,\n",
+ " \"bbox\" : {\n",
+ " \"top\": 146.0,\n",
+ " \"left\": 98.0,\n",
+ " \"height\": 382.0,\n",
+ " \"width\": 341.0\n",
+ " } \n",
+ " },\n",
+ " {\n",
+ " \"frame\": 2,\n",
+ " \"bbox\" : {\n",
+ " \"top\": 146.0,\n",
+ " \"left\": 98.0,\n",
+ " \"height\": 382.0,\n",
+ " \"width\": 341.0\n",
+ " }\n",
+ " },\n",
+ " {\n",
+ " \"frame\": 3,\n",
+ " \"bbox\" : {\n",
+ " \"top\": 146.0,\n",
+ " \"left\": 98.0,\n",
+ " \"height\": 382.0,\n",
+ " \"width\": 341.0\n",
+ " }\n",
+ " },\n",
+ " {\n",
+ " \"frame\": 4,\n",
+ " \"bbox\" : {\n",
+ " \"top\": 146.0,\n",
+ " \"left\": 98.0,\n",
+ " \"height\": 382.0,\n",
+ " \"width\": 341.0\n",
+ " }\n",
+ " },\n",
+ " {\n",
+ " \"frame\": 5,\n",
+ " \"bbox\" : {\n",
+ " \"top\": 146.0,\n",
+ " \"left\": 98.0,\n",
+ " \"height\": 382.0,\n",
+ " \"width\": 341.0\n",
+ " }\n",
+ " }\n",
+ " ]\n",
+ " }\n",
+ " ]\n",
+ "}\n"
+ ],
+ "metadata": {
+ "id": "hMRgfWFykvm7"
+ },
+ "execution_count": 106,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "####### Bounding box with nested classification #######\n",
+ "\n",
+ "## NDJSON\n",
+ "#Confidence scores are not supported for bounding box, point, or line annotations,\n",
+ "#but they can be used for radio and checklist classifications, even when they're\n",
+ "#nested under a bounding box, point, or line annotation as demonstrated here.\n",
+ "\n",
+ "bbox_with_radio_subclass_prediction_ndjson = {\n",
+ " \"name\": \"bbox_with_radio_subclass\", \n",
+ " \"segments\" : [{\n",
+ " \"keyframes\" : [\n",
+ " {\n",
+ " \"frame\": 13,\n",
+ " \"bbox\" : {\n",
+ " \"top\": 146.0,\n",
+ " \"left\": 98.0,\n",
+ " \"height\": 382.0,\n",
+ " \"width\": 341.0\n",
+ " },\n",
+ " \"classifications\": [{\n",
+ " \"schemaId\": \"cldae51x50uph08zb3lqg4dgh\",\n",
+ " \"answer\": \n",
+ " {\"schemaId\":\"cldae51x50upi08zb5gzd8fw3\", 'confidence': 0.5} \n",
+ " }] \n",
+ " },\n",
+ " {\n",
+ " \"frame\": 14,\n",
+ " \"bbox\" : {\n",
+ " \"top\": 146.0,\n",
+ " \"left\": 98.0,\n",
+ " \"height\": 382.0,\n",
+ " \"width\": 341.0\n",
+ " },\n",
+ " \"classifications\": [{\n",
+ " \"schemaId\": \"cldae51x50uph08zb3lqg4dgh\",\n",
+ " \"answer\": \n",
+ " {\"schemaId\":\"cldae51x50upi08zb5gzd8fw3\", 'confidence': 0.5}\n",
+ " }] \n",
+ " },\n",
+ " {\n",
+ " \"frame\": 15,\n",
+ " \"bbox\" : {\n",
+ " \"top\": 146.0,\n",
+ " \"left\": 98.0,\n",
+ " \"height\": 382.0,\n",
+ " \"width\": 341.0\n",
+ " },\n",
+ " \"classifications\": [{\n",
+ " \"schemaId\": \"cldae51x50uph08zb3lqg4dgh\",\n",
+ " \"answer\": \n",
+ " {\"schemaId\":\"cldae51x50upi08zb5gzd8fw3\", 'confidence': 0.5}\n",
+ " }]\n",
+ " }\n",
+ " ]\n",
+ " }\n",
+ " ]\n",
+ " \n",
+ "\n",
+ "}"
+ ],
+ "metadata": {
+ "id": "bmDBu_YHkymO"
+ },
+ "execution_count": 120,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "######## Point ########\n",
+ "\n",
+ "# NDJSON\n",
+ "point_prediction_ndjson = {\n",
+ " \"name\": \"point_video\", \n",
+ " \"segments\": [{\n",
+ " \"keyframes\": [{\n",
+ " \"frame\": 17,\n",
+ " \"point\" : {\n",
+ " \"x\": 660.134 ,\n",
+ " \"y\": 407.926\n",
+ " },\n",
+ " }]\n",
+ " }] \n",
+ "}"
+ ],
+ "metadata": {
+ "id": "Xil6CUrqk63g"
+ },
+ "execution_count": 91,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "###### Polyline ######\n",
+ "\n",
+ "# NDJSON\n",
+ "polyline_prediction_ndjson = {\n",
+ " \"name\": \"line_video_frame\", \n",
+ " \"segments\": [\n",
+ " {\n",
+ " \"keyframes\": [\n",
+ " {\n",
+ " \"frame\": 5,\n",
+ " \"line\": [{\n",
+ " \"x\": 680,\n",
+ " \"y\": 100\n",
+ " },{\n",
+ " \"x\": 100,\n",
+ " \"y\": 190\n",
+ " },{\n",
+ " \"x\": 190,\n",
+ " \"y\": 220\n",
+ " }],\n",
+ " },\n",
+ " {\n",
+ " \"frame\": 12,\n",
+ " \"line\": [{\n",
+ " \"x\": 680,\n",
+ " \"y\": 280\n",
+ " },{\n",
+ " \"x\": 300,\n",
+ " \"y\": 380\n",
+ " },{\n",
+ " \"x\": 400,\n",
+ " \"y\": 460\n",
+ " }],\n",
+ " },\n",
+ " {\n",
+ " \"frame\": 20,\n",
+ " \"line\": [{\n",
+ " \"x\": 680,\n",
+ " \"y\": 180\n",
+ " },{\n",
+ " \"x\": 100,\n",
+ " \"y\": 200\n",
+ " },{\n",
+ " \"x\": 200,\n",
+ " \"y\": 260\n",
+ " }],\n",
+ " }\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"keyframes\": [\n",
+ " {\n",
+ " \"frame\": 24,\n",
+ " \"line\": [{\n",
+ " \"x\": 300,\n",
+ " \"y\": 310\n",
+ " },{\n",
+ " \"x\": 330,\n",
+ " \"y\": 430\n",
+ " }],\n",
+ " },\n",
+ " {\n",
+ " \"frame\": 45,\n",
+ " \"line\": [{\n",
+ " \"x\": 600,\n",
+ " \"y\": 810\n",
+ " },{\n",
+ " \"x\": 900,\n",
+ " \"y\": 930\n",
+ " }],\n",
+ " }\n",
+ " ]\n",
+ " }\n",
+ " ]\n",
+ "}\n"
+ ],
+ "metadata": {
+ "id": "gu0ITN_flCAq"
+ },
+ "execution_count": 92,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Step 1: Import data rows into Catalog"
+ ],
+ "metadata": {
+ "id": "U-o15yu9IPDo"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# send a sample image as batch to the project\n",
+ "test_img_url = {\n",
+ " \"row_data\": \"https://storage.googleapis.com/labelbox-datasets/video-sample-data/sample-video-2.mp4\",\n",
+ " \"global_key\": str(uuid.uuid4())\n",
+ "}\n",
+ "dataset = client.create_dataset(name=\"video_prediction_demo\")\n",
+ "data_row = dataset.create_data_row(test_img_url)\n",
+ "print(data_row)"
+ ],
+ "metadata": {
+ "id": "HjH9gTV8IBG9",
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "outputId": "f05014e9-f61a-4da4-b1e1-20e721a4a890"
+ },
+ "execution_count": 93,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "\n"
+ ]
+ }
+ ]
+ },
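+ {
+ "cell_type": "markdown",
+ "source": [
+ "If you have many videos to import, a minimal sketch of a bulk alternative (reusing the same sample video URL with a fresh global key; note it adds a second, unlabeled data row to the dataset) is `dataset.create_data_rows`, which accepts a list of payloads and runs asynchronously:"
+ ],
+ "metadata": {}
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# Bulk upload: create_data_rows takes a list of payloads and returns a task\n",
+ "task = dataset.create_data_rows([{\n",
+ "    \"row_data\": \"https://storage.googleapis.com/labelbox-datasets/video-sample-data/sample-video-2.mp4\",\n",
+ "    \"global_key\": str(uuid.uuid4())\n",
+ "}])\n",
+ "task.wait_till_done()\n",
+ "print(task.status)"
+ ],
+ "metadata": {},
+ "execution_count": null,
+ "outputs": []
+ },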
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Step 2: Create/select an Ontology for your model predictions\n",
+ "Your project should have the correct ontology setup with all the tools and classifications supported for your annotations, and the tool names and classification instructions should match the name/instructions fields in your annotations to ensure the correct feature schemas are matched.\n"
+ ],
+ "metadata": {
+ "id": "oy0umzuNIceP"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "ontology_builder = OntologyBuilder(\n",
+ " classifications=[ # List of Classification objects\n",
+ " Classification( # Radio classification given the name \"text\" with two options: \"first_radio_answer\" and \"second_radio_answer\"\n",
+ " class_type=Classification.Type.RADIO, \n",
+ " instructions=\"radio_question\", \n",
+ " options=[\n",
+ " Option(value=\"first_radio_answer\"),\n",
+ " Option(value=\"second_radio_answer\")\n",
+ " ]\n",
+ " ),\n",
+ " Classification( # Checklist classification given the name \"text\" with two options: \"first_checklist_answer\" and \"second_checklist_answer\"\n",
+ " class_type=Classification.Type.CHECKLIST, \n",
+ " instructions=\"checklist_question\", \n",
+ " options=[\n",
+ " Option(value=\"first_checklist_answer\"),\n",
+ " Option(value=\"second_checklist_answer\") \n",
+ " ]\n",
+ " ), \n",
+ " Classification(\n",
+ " class_type=Classification.Type.RADIO, \n",
+ " instructions=\"nested_radio_question\",\n",
+ " options=[\n",
+ " Option(\"first_radio_answer\",\n",
+ " options=[\n",
+ " Classification(\n",
+ " class_type=Classification.Type.RADIO,\n",
+ " instructions=\"sub_radio_question\",\n",
+ " options=[Option(\"first_sub_radio_answer\")]\n",
+ " )\n",
+ " ]\n",
+ " )\n",
+ " ] \n",
+ " ),\n",
+ " Classification(\n",
+ " class_type=Classification.Type.CHECKLIST, \n",
+ " instructions=\"nested_checklist_question\",\n",
+ " options=[\n",
+ " Option(value=\"first_checklist_answer\",\n",
+ " options=[\n",
+ " Classification(\n",
+ " class_type=Classification.Type.CHECKLIST, \n",
+ " instructions=\"sub_checklist_question\", \n",
+ " options=[Option(\"first_sub_checklist_answer\")]\n",
+ " )\n",
+ " ]\n",
+ " )\n",
+ " ]\n",
+ " ) \n",
+ " ],\n",
+ " tools=[ # List of Tool objects\n",
+ " Tool( # Bounding Box tool given the name \"box\"\n",
+ " tool=Tool.Type.BBOX, \n",
+ " name=\"bbox_video\"), \n",
+ " Tool( # Bounding Box tool given the name \"box\"\n",
+ " tool=Tool.Type.BBOX, \n",
+ " name=\"bbox_with_radio_subclass\",\n",
+ " classifications=[\n",
+ " Classification(\n",
+ " class_type=Classification.Type.RADIO,\n",
+ " instructions=\"sub_radio_question\",\n",
+ " options=[\n",
+ " Option(value=\"first_sub_radio_answer\")\n",
+ " ]\n",
+ " ),\n",
+ " ]\n",
+ " ),\n",
+ " \t Tool( # Point tool given the name \"point\"\n",
+ " tool=Tool.Type.POINT, \n",
+ " name=\"point_video\"), \n",
+ " Tool( # Polyline tool given the name \"line\"\n",
+ " tool=Tool.Type.LINE, \n",
+ " name=\"line_video_frame\")]\n",
+ ")\n",
+ "\n",
+ "ontology = client.create_ontology(\"Video Prediction Import Demo\", ontology_builder.asdict(), media_type=MediaType.Video)"
+ ],
+ "metadata": {
+ "id": "Kt4XWWqgIiWk"
+ },
+ "execution_count": 6,
+ "outputs": []
+ },
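+ {
+ "cell_type": "markdown",
+ "source": [
+ "The `bbox_with_radio_subclass` payloads above reference hardcoded `schemaId` values that belong to the original author's organization. A minimal sketch (assuming the ontology created above, and that the normalized ontology exposes `featureSchemaId` fields) of how you could look up the equivalent ids in your own organization:"
+ ],
+ "metadata": {}
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# Walk the normalized ontology to find the feature schema ids of the\n",
+ "# sub-classification (and its option) nested under bbox_with_radio_subclass\n",
+ "for tool in ontology.normalized[\"tools\"]:\n",
+ "    if tool[\"name\"] == \"bbox_with_radio_subclass\":\n",
+ "        for classification in tool[\"classifications\"]:\n",
+ "            print(\"classification:\", classification[\"featureSchemaId\"])\n",
+ "            for option in classification[\"options\"]:\n",
+ "                print(\"option:\", option[\"featureSchemaId\"])"
+ ],
+ "metadata": {},
+ "execution_count": null,
+ "outputs": []
+ },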
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Step 3: Create a Model and Model Run"
+ ],
+ "metadata": {
+ "id": "ZjN8jxHvIvHP"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# create Model\n",
+ "model = client.create_model(name=\"video_model_run_\" + str(uuid.uuid4()), \n",
+ " ontology_id=ontology.uid)\n",
+ "# create Model Run\n",
+ "model_run = model.create_model_run(\"iteration 1\")"
+ ],
+ "metadata": {
+ "id": "8n-AvzdiOR6d"
+ },
+ "execution_count": 107,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Step 4: Send data rows to the Model Run"
+ ],
+ "metadata": {
+ "id": "NX6L0axRJN5J"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "model_run.upsert_data_rows([data_row.uid])"
+ ],
+ "metadata": {
+ "id": "6sngCgIwJSae",
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "outputId": "1ac4ce7f-9121-4f24-d4e2-cd97628c26d5"
+ },
+ "execution_count": 108,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ "True"
+ ]
+ },
+ "metadata": {},
+ "execution_count": 108
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Step 5. Create the predictions payload\n",
+ "\n",
+ "Create the annotations payload using the snippets of [code here](https://docs.labelbox.com/reference/import-video-annotations).\n",
+ "\n",
+ "Labelbox only supports NDJSON annotation payloads for importing video annotations.\n"
+ ],
+ "metadata": {
+ "id": "6FZyvnrqSGuc"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "label_ndjson = []\n",
+ "for prediction in [\n",
+ " radio_prediction_ndjson,\n",
+ " checklist_prediction_ndjson,\n",
+ " bbox_prediction_ndjson, \n",
+ " bbox_with_radio_subclass_prediction_ndjson,\n",
+ " point_prediction_ndjson,\n",
+ " polyline_prediction_ndjson, \n",
+ " nested_radio_prediction_ndjson,\n",
+ " nested_checklist_prediction_ndjson\n",
+ "]:\n",
+ " prediction.update({\n",
+ " 'uuid': str(uuid.uuid4()),\n",
+ " 'dataRow': {'id': data_row.uid},\n",
+ " })\n",
+ " label_ndjson.append(prediction)"
+ ],
+ "metadata": {
+ "id": "F-Y7sSyAV3tn"
+ },
+ "execution_count": 109,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Step 6. Upload the predictions payload to the Model Run "
+ ],
+ "metadata": {
+ "id": "viFHCnBeTD1Y"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# Upload the prediction label to the Model Run\n",
+ "upload_job_prediction = model_run.add_predictions(\n",
+ " name=\"prediction_upload_job\"+str(uuid.uuid4()),\n",
+ " predictions=label_ndjson)\n",
+ "\n",
+ "# Errors will appear for annotation uploads that failed.\n",
+ "print(\"Errors:\", upload_job_prediction.errors)"
+ ],
+ "metadata": {
+ "id": "uCI8pLTITQNG",
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "outputId": "043b7f57-4f13-4afa-e5dc-e8bda70211f8"
+ },
+ "execution_count": 110,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Errors: []\n"
+ ]
+ }
+ ]
+ },
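+ {
+ "cell_type": "markdown",
+ "source": [
+ "Besides `errors`, the import job exposes a per-annotation `statuses` list, which can serve as a quick sanity check that every prediction was accepted:"
+ ],
+ "metadata": {}
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# Inspect the per-prediction statuses of the completed import job\n",
+ "print(\"Statuses:\", upload_job_prediction.statuses)"
+ ],
+ "metadata": {},
+ "execution_count": null,
+ "outputs": []
+ },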
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Step 7: Send annotations to the Model Run \n",
+ "To send annotations to a Model Run, we must first import them into a project, create a label payload and then send them to the Model Run."
+ ],
+ "metadata": {
+ "id": "T-ZHWWI3JgmX"
+ }
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "##### 7.1. Create a labelbox project"
+ ],
+ "metadata": {
+ "id": "CYRiqHr2O_aL"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# Create a Labelbox project\n",
+ "project = client.create_project(name=\"video_prediction_demo\", \n",
+ " queue_mode=QueueMode.Batch,\n",
+ " # Quality Settings setup \n",
+ " auto_audit_percentage=1,\n",
+ " auto_audit_number_of_labels=1,\n",
+ " media_type=MediaType.Video)\n",
+ "project.setup_editor(ontology)"
+ ],
+ "metadata": {
+ "id": "jEtoDiDrPFvI"
+ },
+ "execution_count": 111,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "##### 7.2. Create a batch to send to the project "
+ ],
+ "metadata": {
+ "id": "7FEyC-nBPPuD"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "project.create_batch(\n",
+ " \"batch_video_prediction_demo\", # Each batch in a project must have a unique name\n",
+ " dataset.export_data_rows(), # A list of data rows or data row ids\n",
+ " 5 # priority between 1(Highest) - 5(lowest)\n",
+ ")"
+ ],
+ "metadata": {
+ "id": "WRr5tdVEPXXy",
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "outputId": "17415a52-82d0-487a-824e-e63c59beaef6"
+ },
+ "execution_count": 112,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "execution_count": 112
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "##### 7.3 Create the annotations payload"
+ ],
+ "metadata": {
+ "id": "FTGAI730UlZ3"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "########### Radio Classification ###########\n",
+ "\n",
+ "radio_annotation_ndjson = {\n",
+ " 'name': 'radio_question',\n",
+ " 'answer': {'name': 'second_radio_answer'}\n",
+ "} \n",
+ "\n",
+ "########## Nested Classification ##########\n",
+ "\n",
+ "### Radio #### \n",
+ "\n",
+ "nested_radio_annotation_ndjson = {\n",
+ " \"name\": \"nested_radio_question\",\n",
+ " \"answer\": {\"name\": \"first_radio_answer\"},\n",
+ " \"classifications\" : [\n",
+ " {\n",
+ " 'name': 'sub_radio_question', \n",
+ " 'answer': {'name': 'first_sub_radio_answer'}\n",
+ " }\n",
+ " ]\n",
+ "}\n",
+ "\n",
+ "### Checklist #### \n",
+ "\n",
+ "nested_checklist_annotation_ndjson = {\n",
+ " \"name\": \"nested_checklist_question\",\n",
+ " \"answer\": [{\"name\": \"first_checklist_answer\",\n",
+ " \"classifications\" : [\n",
+ " {\n",
+ " \"name\": \"sub_checklist_question\", \n",
+ " \"answer\": {\"name\": \"first_sub_checklist_answer\"}\n",
+ " }\n",
+ " ]\n",
+ "}]\n",
+ "}\n",
+ "\n",
+ "############ Checklist question ############\n",
+ "\n",
+ "checklist_annotation_ndjson = {\n",
+ " 'name': 'checklist_question',\n",
+ " 'answer': [\n",
+ " {'name': 'first_checklist_answer'},\n",
+ " {'name': 'second_checklist_answer'}\n",
+ " ]\n",
+ "}\n",
+ "\n",
+ "####### Bounding box #######\n",
+ "\n",
+ "bbox_annotation_ndjson = {\n",
+ " \"name\" : \"bbox_video\",\n",
+ " \"segments\" : [{\n",
+ " \"keyframes\" : [\n",
+ " {\n",
+ " \"frame\": 1,\n",
+ " \"bbox\" : {\n",
+ " \"top\": 146.0,\n",
+ " \"left\": 98.0,\n",
+ " \"height\": 382.0,\n",
+ " \"width\": 341.0\n",
+ " } \n",
+ " },\n",
+ " {\n",
+ " \"frame\": 2,\n",
+ " \"bbox\" : {\n",
+ " \"top\": 146.0,\n",
+ " \"left\": 98.0,\n",
+ " \"height\": 382.0,\n",
+ " \"width\": 341.0\n",
+ " }\n",
+ " },\n",
+ " {\n",
+ " \"frame\": 3,\n",
+ " \"bbox\" : {\n",
+ " \"top\": 146.0,\n",
+ " \"left\": 98.0,\n",
+ " \"height\": 382.0,\n",
+ " \"width\": 341.0\n",
+ " }\n",
+ " },\n",
+ " {\n",
+ " \"frame\": 4,\n",
+ " \"bbox\" : {\n",
+ " \"top\": 146.0,\n",
+ " \"left\": 98.0,\n",
+ " \"height\": 382.0,\n",
+ " \"width\": 341.0\n",
+ " }\n",
+ " },\n",
+ " {\n",
+ " \"frame\": 5,\n",
+ " \"bbox\" : {\n",
+ " \"top\": 146.0,\n",
+ " \"left\": 98.0,\n",
+ " \"height\": 382.0,\n",
+ " \"width\": 341.0\n",
+ " }\n",
+ " }\n",
+ " ]\n",
+ " }\n",
+ " ]\n",
+ "}\n",
+ "\n",
+ "####### Bounding box with nested classification #######\n",
+ "\n",
+ "bbox_with_radio_subclass_annotation_ndjson = {\n",
+ " \"name\": \"bbox_with_radio_subclass\", \n",
+ " \"segments\" : [{\n",
+ " \"keyframes\" : [\n",
+ " {\n",
+ " \"frame\": 13,\n",
+ " \"bbox\" : {\n",
+ " \"top\": 146.0,\n",
+ " \"left\": 98.0,\n",
+ " \"height\": 382.0,\n",
+ " \"width\": 341.0\n",
+ " },\n",
+ " \"classifications\": [{\n",
+ " \"schemaId\": \"cldae51x50uph08zb3lqg4dgh\",\n",
+ " \"answer\": \n",
+ " {\"schemaId\":\"cldae51x50upi08zb5gzd8fw3\"}\n",
+ " }] \n",
+ " },\n",
+ " {\n",
+ " \"frame\": 14,\n",
+ " \"bbox\" : {\n",
+ " \"top\": 146.0,\n",
+ " \"left\": 98.0,\n",
+ " \"height\": 382.0,\n",
+ " \"width\": 341.0\n",
+ " },\n",
+ " \"classifications\": [{\n",
+ " \"schemaId\": \"cldae51x50uph08zb3lqg4dgh\",\n",
+ " \"answer\": \n",
+ " {\"schemaId\":\"cldae51x50upi08zb5gzd8fw3\"}\n",
+ " }] \n",
+ " },\n",
+ " {\n",
+ " \"frame\": 15,\n",
+ " \"bbox\" : {\n",
+ " \"top\": 146.0,\n",
+ " \"left\": 98.0,\n",
+ " \"height\": 382.0,\n",
+ " \"width\": 341.0\n",
+ " },\n",
+ " \"classifications\": [{\n",
+ " \"schemaId\": \"cldae51x50uph08zb3lqg4dgh\",\n",
+ " \"answer\": \n",
+ " {\"schemaId\":\"cldae51x50upi08zb5gzd8fw3\"}\n",
+ " }]\n",
+ " }\n",
+ " ]\n",
+ " }\n",
+ " ]\n",
+ "}\n",
+ "\n",
+ "\n",
+ " ######## Point ########\n",
+ "\n",
+ "point_annotation_ndjson = {\n",
+ " \"name\": \"point_video\", \n",
+ " \"segments\": [{\n",
+ " \"keyframes\": [{\n",
+ " \"frame\": 17,\n",
+ " \"point\" : {\n",
+ " \"x\": 660.134 ,\n",
+ " \"y\": 407.926\n",
+ " },\n",
+ " }]\n",
+ " }] \n",
+ "}\n",
+ "\n",
+ "###### Polyline ######\n",
+ "\n",
+ "polyline_annotation_ndjson = {\n",
+ " \"name\": \"line_video_frame\", \n",
+ " \"segments\": [\n",
+ " {\n",
+ " \"keyframes\": [\n",
+ " {\n",
+ " \"frame\": 5,\n",
+ " \"line\": [{\n",
+ " \"x\": 680,\n",
+ " \"y\": 100\n",
+ " },{\n",
+ " \"x\": 100,\n",
+ " \"y\": 190\n",
+ " },{\n",
+ " \"x\": 190,\n",
+ " \"y\": 220\n",
+ " }],\n",
+ " },\n",
+ " {\n",
+ " \"frame\": 12,\n",
+ " \"line\": [{\n",
+ " \"x\": 680,\n",
+ " \"y\": 280\n",
+ " },{\n",
+ " \"x\": 300,\n",
+ " \"y\": 380\n",
+ " },{\n",
+ " \"x\": 400,\n",
+ " \"y\": 460\n",
+ " }],\n",
+ " },\n",
+ " {\n",
+ " \"frame\": 20,\n",
+ " \"line\": [{\n",
+ " \"x\": 680,\n",
+ " \"y\": 180\n",
+ " },{\n",
+ " \"x\": 100,\n",
+ " \"y\": 200\n",
+ " },{\n",
+ " \"x\": 200,\n",
+ " \"y\": 260\n",
+ " }],\n",
+ " }\n",
+ " ]\n",
+ " },\n",
+ " {\n",
+ " \"keyframes\": [\n",
+ " {\n",
+ " \"frame\": 24,\n",
+ " \"line\": [{\n",
+ " \"x\": 300,\n",
+ " \"y\": 310\n",
+ " },{\n",
+ " \"x\": 330,\n",
+ " \"y\": 430\n",
+ " }],\n",
+ " },\n",
+ " {\n",
+ " \"frame\": 45,\n",
+ " \"line\": [{\n",
+ " \"x\": 600,\n",
+ " \"y\": 810\n",
+ " },{\n",
+ " \"x\": 900,\n",
+ " \"y\": 930\n",
+ " }],\n",
+ " }\n",
+ " ]\n",
+ " }\n",
+ " ]\n",
+ "}"
+ ],
+ "metadata": {
+ "id": "A8_HVvu9Uvfl"
+ },
+ "execution_count": 117,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "##### 7.4. Create the label object"
+ ],
+ "metadata": {
+ "id": "8QwmguFvPltl"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# Create a Label object by identifying the applicable data row in Labelbox and providing a list of annotations\n",
+ "ndjson_annotation = []\n",
+ "for annot in [\n",
+ " radio_annotation_ndjson, \n",
+ " checklist_annotation_ndjson, \n",
+ " bbox_annotation_ndjson, \n",
+ " point_annotation_ndjson, \n",
+ " polyline_annotation_ndjson,\n",
+ " nested_radio_annotation_ndjson,\n",
+ " nested_checklist_prediction_ndjson\n",
+ "]:\n",
+ " annot.update({\n",
+ " 'uuid': str(uuid.uuid4()),\n",
+ " 'dataRow': {'id': data_row.uid},\n",
+ " })\n",
+ " ndjson_annotation.append(annot) "
+ ],
+ "metadata": {
+ "id": "9gD_alThQA3G"
+ },
+ "execution_count": 118,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "##### 7.5. Upload annotations to the project using Label Import"
+ ],
+ "metadata": {
+ "id": "nGVNQlvPQ-kF"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "upload_job_annotation = LabelImport.create_from_objects(\n",
+ " client = client,\n",
+ " project_id = project.uid,\n",
+ " name=\"video_annotations_import_\" + str(uuid.uuid4()),\n",
+ " labels=ndjson_annotation)\n",
+ "\n",
+ "upload_job_annotation.wait_until_done()\n",
+ "# Errors will appear for annotation uploads that failed.\n",
+ "print(\"Errors:\", upload_job_annotation.errors)\n"
+ ],
+ "metadata": {
+ "id": "HYh9AzrlRYX-",
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "outputId": "eae9f53c-9dce-4958-bc03-e63d788527a1"
+ },
+ "execution_count": 119,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stderr",
+ "text": [
+ "WARNING:labelbox.schema.annotation_import:\n",
+ " Confidence scores are not supported in Label Import.\n",
+ " Corresponding confidence score values will be ignored.\n",
+ " \n"
+ ]
+ },
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Errors: []\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "##### 7.6. Send the annotations to the Model Run"
+ ],
+ "metadata": {
+ "id": "Y3rgM-5cRrxM"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# get the labels id from the project\n",
+ "label_ids = [x['ID'] for x in project.export_labels(download=True)]\n",
+ "model_run.upsert_labels(label_ids)"
+ ],
+ "metadata": {
+ "id": "i2BrS8CcSBzo",
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "outputId": "e82b9c98-a6e3-4696-bc8e-d84dd8f96378"
+ },
+ "execution_count": 116,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ "True"
+ ]
+ },
+ "metadata": {},
+ "execution_count": 116
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Optional deletions for cleanup \n"
+ ],
+ "metadata": {
+ "id": "DMtOfWWDWFbJ"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "#upload_job\n",
+ "# project.delete()\n",
+ "# dataset.delete()"
+ ],
+ "metadata": {
+ "id": "aAhkyvJlWK1p"
+ },
+ "execution_count": null,
+ "outputs": []
+ }
+ ]
+}
\ No newline at end of file