From 8368e0d41346a96af6cd3d3642692d48aa3dd2e5 Mon Sep 17 00:00:00 2001
From: Andrea Ovalle <74880762+ovalle15@users.noreply.github.com>
Date: Thu, 9 Feb 2023 14:45:00 -0500
Subject: [PATCH] Ontology updates and added nested classification

---
 examples/annotation_import/image.ipynb | 333 ++++++++++++++-----------
 1 file changed, 186 insertions(+), 147 deletions(-)

diff --git a/examples/annotation_import/image.ipynb b/examples/annotation_import/image.ipynb
index 1388f6a2a..87cdf093d 100644
--- a/examples/annotation_import/image.ipynb
+++ b/examples/annotation_import/image.ipynb
@@ -82,7 +82,7 @@
 },
 {
 "cell_type": "code",
- "execution_count": 30,
+ "execution_count": 45,
 "id": "4d63074b-2379-48af-b9d6-2a66190f03c4",
 "metadata": {
 "id": "4d63074b-2379-48af-b9d6-2a66190f03c4"
 },
@@ -94,26 +94,19 @@
 },
 {
 "cell_type": "code",
- "execution_count": 31,
+ "execution_count": 46,
 "id": "01fca8c9-0680-4a9c-a11e-1b49f31e9121",
 "metadata": {
 "id": "01fca8c9-0680-4a9c-a11e-1b49f31e9121"
 },
 "outputs": [],
 "source": [
- "from labelbox.schema.ontology import OntologyBuilder, Tool, Classification, Option\n",
- "from labelbox import Client, MALPredictionImport, LabelImport\n",
- "from labelbox.data.annotation_types import (\n",
- " Label, ImageData, ObjectAnnotation, MaskData,\n",
- " Rectangle, Point, Line, Mask, Polygon,\n",
- " Radio, Checklist, Text,\n",
- " ClassificationAnnotation, ClassificationAnswer\n",
- ")\n",
- "from labelbox.data.serialization import NDJsonConverter\n",
- "from labelbox.schema.media_type import MediaType\n",
+ "import labelbox as lb\n",
+ "import labelbox.data.annotation_types as lb_types\n",
+ "import labelbox.data.serialization as lb_serializers\n",
+ "import labelbox.schema.queue_mode as lb_queue_mode\n",
 "import uuid\n",
- "import numpy as np\n",
- "from labelbox.schema.queue_mode import QueueMode\n"
+ "import numpy as np\n"
 ]
 },
 {
@@ -130,14 +123,14 @@
 },
 {
 "cell_type": "code",
- "execution_count": 32,
+ "execution_count": 47,
 "metadata": {
 "id": "wsbqcuCqfZmg"
 },
 "outputs": [],
 "source": [
 "API_KEY = None\n",
- "client = Client(API_KEY)"
+ "client = lb.Client(API_KEY)"
 ],
 "id": "wsbqcuCqfZmg"
 },
@@ -167,11 +160,12 @@
 "########### Radio Classification ###########\n",
 "\n",
 "# Python annotation\n",
- "radio_annotation = ClassificationAnnotation(\n",
+ "radio_annotation = lb_types.ClassificationAnnotation(\n",
 " name=\"radio_question\", \n",
- " value=Radio(answer = ClassificationAnswer(name = \"second_radio_answer\"))\n",
+ " value=lb_types.Radio(answer = lb_types.ClassificationAnswer(name = \"second_radio_answer\"))\n",
 ")\n",
 "\n",
+ "\n",
 "# NDJSON\n",
 "radio_annotation_ndjson = {\n",
 " 'name': 'radio_question',\n",
@@ -182,13 +176,13 @@
 "id": "v5wL6oojz9Ge"
 },
 "id": "v5wL6oojz9Ge",
- "execution_count": 33,
+ "execution_count": 48,
 "outputs": []
 },
 {
 "cell_type": "code",
 "source": [
- "########## Nested Radio Classification is only supported with NDJSON tools##########\n",
+ "########## Nested Radio and Checklist Classifications are only supported with NDJSON tools ##########\n",
 "\n",
 "# NDJSON \n",
 "nested_radio_annotation_ndjson = {\n",
 " \"name\": \"nested_radio_question\",\n",
 " \"answer\": {\"name\": \"first_radio_answer\"},\n",
 " \"classifications\" : [\n",
 " {'name': 'sub_radio_question', 'answer': {'name': 'first_sub_radio_answer'}}\n",
 " ]\n",
@@ -197,13 +191,28 @@
+ "}\n",
+ "\n",
+ "\n",
+ "\n",
+ "nested_checklist_annotation_ndjson = {\n",
+ " \"name\": \"nested_checklist_question\",\n",
+ " \"answer\": [{\n",
+ " \"name\": \"first_checklist_answer\", \n",
+ " \"classifications\" : [\n",
+ " {\n",
+ " \"name\": \"sub_checklist_question\", \n",
+ " \"answer\": {\"name\": \"first_sub_checklist_answer\"}\n",
+ " } \n",
+ " ] \n",
+ " }]\n",
 "}\n"
 ],
 "metadata": {
 "id": "I75K-wx7_sDs"
 },
 "id": "I75K-wx7_sDs",
- "execution_count": 34,
+ "execution_count": 49,
 "outputs": []
 },
 {
@@ -212,9 +221,19 @@
 "############ Checklist question ############\n",
 "\n",
 "# Python Annotations\n",
- "checklist_annotation = ClassificationAnnotation(\n",
+ "checklist_annotation = lb_types.ClassificationAnnotation(\n",
 " name=\"checklist_question\", # must match your ontology feature's name\n",
- " value=Checklist(answer = [ClassificationAnswer(name = \"first_checklist_answer\"), ClassificationAnswer(name = \"second_checklist_answer\")])\n",
+ " value=lb_types.Checklist(\n",
+ " answer = [\n",
+ " lb_types.ClassificationAnswer(\n",
+ " name = \"first_checklist_answer\"\n",
+ " ), \n",
+ " lb_types.ClassificationAnswer(\n",
+ " name = \"second_checklist_answer\"\n",
+ " )\n",
+ " ]\n",
+ " )\n",
 " )\n",
 "\n",
 "# NDJSON\n",
@@ -230,7 +249,7 @@
 "id": "b2UjSoYez9I1"
 },
 "id": "b2UjSoYez9I1",
- "execution_count": 35,
+ "execution_count": 50,
 "outputs": []
 },
 {
@@ -239,9 +258,9 @@
 "############# Free text Classification #############\n",
 "\n",
 "# Python annotation\n",
- "text_annotation = ClassificationAnnotation(\n",
+ "text_annotation = lb_types.ClassificationAnnotation(\n",
 " name=\"free_text\", # must match your ontology feature's name\n",
- " value=Text(answer=\"sample text\")\n",
+ " value=lb_types.Text(answer=\"sample text\")\n",
 ")\n",
 "\n",
 "\n",
@@ -255,7 +274,7 @@
 "id": "qGSXRtKpz9LQ"
 },
 "id": "qGSXRtKpz9LQ",
- "execution_count": 36,
+ "execution_count": 51,
 "outputs": []
 },
 {
@@ -265,11 +284,11 @@
 "\n",
 "\n",
 "# Python Annotation \n",
- "bbox_annotation = ObjectAnnotation(\n",
+ "bbox_annotation = lb_types.ObjectAnnotation(\n",
 " name = \"bounding_box\", # must match your ontology feature's name\n",
- " value = Rectangle(\n",
- " start=Point(x=977, y=1690), # Top left\n",
- " end=Point(x=330, y=225), # Bottom right\n",
+ " value = lb_types.Rectangle(\n",
+ " start=lb_types.Point(x=977, y=1690), # Top left\n",
+ " end=lb_types.Point(x=330, y=225), # Bottom right\n",
 " ),\n",
 ")\n",
 "\n",
@@ -288,23 +307,24 @@
 "id": "xCU4JRP0z9Nh"
 },
 "id": "xCU4JRP0z9Nh",
- "execution_count": 37,
+ "execution_count": 52,
 "outputs": []
 },
 {
 "cell_type": "code",
 "source": [
 "# Bounding box with nested classification\n",
- "bbox_with_radio_subclass_annotation = ObjectAnnotation(\n",
- " name=\"bbox_with_radio_subclass\", # must match your ontology feature's name\n",
- " value=Rectangle(\n",
- " start=Point(x=933, y=541), # Top left\n",
- " end=Point(x=191, y=330), # Bottom right\n",
+ "bbox_with_radio_subclass_annotation = lb_types.ObjectAnnotation(\n",
+ " name=\"bbox_with_radio_subclass\", # must match your ontology feature's name\n",
+ " confidence=0.5,\n",
+ " value=lb_types.Rectangle(\n",
+ " start=lb_types.Point(x=933, y=541), # Top left\n",
+ " end=lb_types.Point(x=191, y=330), # Bottom right\n",
 " ),\n",
 " classifications=[\n",
- " \tClassificationAnnotation(\n",
+ " \tlb_types.ClassificationAnnotation(\n",
 " \tname=\"sub_radio_question\",\n",
- " \t\tvalue=Radio(answer=ClassificationAnswer(name=\"first_sub_radio_answer\"))\n",
+ " \t\tvalue=lb_types.Radio(answer=lb_types.ClassificationAnswer(name=\"first_sub_radio_answer\", confidence=0.5))\n",
 " )\n",
 " ]\n",
 ")\n",
@@ -331,25 +351,27 @@
 "id": "gAIzsxEjLmhv"
 },
 "id": "gAIzsxEjLmhv",
- "execution_count": 38,
+ "execution_count": 53,
 "outputs": []
 },
 {
 "cell_type": "code",
 "source": [
 "########## Polygon ##########\n",
- "# Python Anotation \n",
- "polygon_annotation = ObjectAnnotation(\n",
- " name = \"polygon\", # must match your ontology feature's name\n",
- " value=Polygon( # Coordinates for the verticies of your polygon\n",
- " points=[Point(x=1489.581,y=183.934),Point(x=2278.306,y=256.885),Point(x=2428.197,y=200.437),Point(x=2560.0,y=335.419),\n",
- " Point(x=2557.386,y=503.165),Point(x=2320.596,y=503.103),Point(x=2156.083, y=628.943),Point(x=2161.111,y=785.519),\n",
- " Point(x=2002.115, y=894.647),Point(x=1838.456,y=877.874),Point(x=1436.53,y=874.636),Point(x=1411.403,y=758.579),\n",
- " Point(x=1353.853,y=751.74),Point(x=1345.264, y=453.461),Point(x=1426.011,y=421.129)]\n",
+ "# Python Annotation Types \n",
+ "polygon_annotation = lb_types.ObjectAnnotation(\n",
+ " name = \"polygon\", # must match your ontology feature's name \n",
+ " confidence = 0.5, \n",
+ " value=lb_types.Polygon( # Coordinates for the vertices of your polygon\n",
+ " points=[lb_types.Point(x=1489.581,y=183.934), lb_types.Point(x=2278.306,y=256.885), lb_types.Point(x=2428.197,y=200.437), lb_types.Point(x=2560.0,y=335.419),\n",
+ " lb_types.Point(x=2557.386,y=503.165), lb_types.Point(x=2320.596,y=503.103), lb_types.Point(x=2156.083, y=628.943), lb_types.Point(x=2161.111,y=785.519),\n",
+ " lb_types.Point(x=2002.115, y=894.647), lb_types.Point(x=1838.456,y=877.874), lb_types.Point(x=1436.53,y=874.636), lb_types.Point(x=1411.403,y=758.579),\n",
+ " lb_types.Point(x=1353.853,y=751.74), lb_types.Point(x=1345.264, y=453.461), lb_types.Point(x=1426.011,y=421.129)]\n",
 " ),\n",
 ")\n",
 "\n",
 "\n",
+ "\n",
 "# NDJSON\n",
 "\n",
 "polygon_annotation_ndjson = {\n",
@@ -378,7 +400,7 @@
 "id": "jRwfE4MFz9Ph"
 },
 "id": "jRwfE4MFz9Ph",
- "execution_count": 39,
+ "execution_count": 54,
 "outputs": []
 },
 {
@@ -386,25 +408,26 @@
 "source": [
 "######### Mask #########\n",
 "\n",
+ "\n",
 "# Python \n",
 "# Identifying what values in the numpy array correspond to the mask annotation\n",
 "color = (0, 0, 0)\n",
 "\n",
 "# convert a polygon to mask\n",
 "im_height, im_width = 100,100 #need to provide the height and width of image.\n",
- "mask_data = MaskData(arr=\n",
+ "mask_data = lb_types.MaskData(arr=\n",
 " polygon_annotation.value.draw(height=im_height,width=im_width,color=color))\n",
 "\n",
 "# convert a 2D array to 3D array\n",
 "arr_2d = np.zeros((100,100), dtype='uint8')\n",
- "mask_data = MaskData.from_2D_arr(arr_2d)\n",
+ "mask_data = lb_types.MaskData.from_2D_arr(arr_2d)\n",
 "\n",
 "# a 3D array where 3rd axis is RGB values.\n",
- "mask_data = MaskData(arr= np.zeros([400,450,3],dtype='uint8'))\n",
+ "mask_data = lb_types.MaskData(arr= np.zeros([400,450,3],dtype='uint8'))\n",
 "\n",
- "mask_annotation = ObjectAnnotation(\n",
+ "mask_annotation = lb_types.ObjectAnnotation(\n",
 " name = \"mask\", # must match your ontology feature's name\n",
- " value = Mask(mask=mask_data, color=color),\n",
+ " value=lb_types.Mask(mask=mask_data, color=color),\n",
 ")\n",
 "\n",
 "\n",
@@ -421,7 +444,7 @@
 "id": "39vz-tYsz9Ry"
 },
 "id": "39vz-tYsz9Ry",
- "execution_count": 40,
+ "execution_count": 55,
 "outputs": []
 },
 {
@@ -430,12 +453,12 @@
 "######## Point Annotation ########\n",
 "\n",
 "# Python Annotation\n",
- "point_annotation = ObjectAnnotation(\n",
+ "point_annotation = lb_types.ObjectAnnotation(\n",
 " name = \"point\", # must match your ontology feature's name\n",
- " value = Point(x=1166.606, y=1441.768),\n",
+ " confidence=0.5,\n",
+ " value = lb_types.Point(x=1166.606, y=1441.768),\n",
 ")\n",
 "\n",
- "\n",
 "# NDJSON\n",
 "point_annotation_ndjson = {\n",
 " 'name': 'point',\n",
 " 'classifications': [],\n",
 " 'point': {'x': 1166.606, 'y': 1441.768}\n",
 "}"
 ],
 "metadata": {
 "id": "UelSiWN2z9Tg"
 },
 "id": "UelSiWN2z9Tg",
- "execution_count": 41,
+ "execution_count": 56,
 "outputs": []
 },
 {
@@ -458,14 +481,14 @@
 "\n",
 "# Python Annotation \n",
 "\n",
- "polyline_annotation = ObjectAnnotation(\n",
+ "polyline_annotation = lb_types.ObjectAnnotation(\n",
 " name = \"polyline\", # must match your ontology feature's name\n",
- " value=Line( # Coordinates for the keypoints in your polyline\n",
- " points=[Point(x=2534.353, y=249.471),Point(x=2429.492, y=182.092),Point(x=2294.322, y=221.962),Point(x=2224.491, y=180.463),Point(x=2136.123, y=204.716),\n",
- " Point(x=1712.247, y=173.949),Point(x=1703.838, y=84.438),Point(x=1579.772, y=82.61),Point(x=1583.442, y=167.552),\n",
- " Point(x=1478.869, y=164.903),Point(x=1418.941, y=318.149),Point(x=1243.128, y=400.815),Point(x=1022.067, y=319.007),\n",
- " Point(x=892.367, y=379.216),Point(x=670.273, y=364.408),Point(x=613.114, y=288.16),Point(x=377.559, y=238.251),\n",
- " Point(x=368.087, y=185.064),Point(x=246.557, y=167.286),Point(x=236.648, y=285.61),Point(x=90.929, y=326.412)]\n",
+ " value=lb_types.Line( # Coordinates for the keypoints in your polyline\n",
+ " points=[lb_types.Point(x=2534.353, y=249.471), lb_types.Point(x=2429.492, y=182.092), lb_types.Point(x=2294.322, y=221.962), lb_types.Point(x=2224.491, y=180.463), lb_types.Point(x=2136.123, y=204.716),\n",
+ " lb_types.Point(x=1712.247, y=173.949), lb_types.Point(x=1703.838, y=84.438), lb_types.Point(x=1579.772, y=82.61), lb_types.Point(x=1583.442, y=167.552),\n",
+ " lb_types.Point(x=1478.869, y=164.903), lb_types.Point(x=1418.941, y=318.149), lb_types.Point(x=1243.128, y=400.815), lb_types.Point(x=1022.067, y=319.007),\n",
+ " lb_types.Point(x=892.367, y=379.216), lb_types.Point(x=670.273, y=364.408), lb_types.Point(x=613.114, y=288.16), lb_types.Point(x=377.559, y=238.251),\n",
+ " lb_types.Point(x=368.087, y=185.064), lb_types.Point(x=246.557, y=167.286), lb_types.Point(x=236.648, y=285.61), lb_types.Point(x=90.929, y=326.412)]\n",
 " ),\n",
 ")\n",
 "\n",
@@ -502,7 +525,7 @@
 "id": "mrjb8qY3z9VY"
 },
 "id": "mrjb8qY3z9VY",
- "execution_count": 42,
+ "execution_count": 57,
 "outputs": []
 },
 {
@@ -549,25 +572,25 @@
 "colab": {
 "base_uri": "https://localhost:8080/"
 },
- "outputId": "2c447152-e7d8-4a42-889b-0a6344ad371d"
+ "outputId": "0a916784-78a4-404a-96b1-66ae842780cf"
 },
 "id": "xJ3DZLv_LGsK",
- "execution_count": 45,
+ "execution_count": 58,
 "outputs": [
 {
 "output_type": "stream",
 "name": "stdout",
 "text": [
 "\n"
 ]
 },
 {
 "output_type": "execute_result",
 "data": {
 "text/plain": [
 ]
 },
 "metadata": {},
- "execution_count": 45
+ "execution_count": 58
 }
 ]
 },
@@ -601,84 +624,97 @@
 {
 "cell_type": "code",
 "source": [
- "\n",
- "ontology_builder = OntologyBuilder(\n",
+ "ontology_builder = lb.OntologyBuilder(\n",
 " classifications=[ # List of Classification objects\n",
- " Classification( # Radio classification given the name \"text\" with two options: \"first_radio_answer\" and \"second_radio_answer\"\n",
- " class_type=Classification.Type.RADIO, \n",
- " instructions=\"radio_question\", \n",
+ " lb.Classification( # Radio classification given the name \"radio_question\" with two options: \"first_radio_answer\" and \"second_radio_answer\"\n",
+ " class_type=lb.Classification.Type.RADIO,\n",
+ " name=\"radio_question\", \n",
 " options=[\n",
- " Option(value=\"first_radio_answer\"),\n",
- " Option(value=\"second_radio_answer\")\n",
+ " lb.Option(value=\"first_radio_answer\"),\n",
+ " lb.Option(value=\"second_radio_answer\")\n",
 " ]\n",
 " ),\n",
- " Classification( # Checklist classification given the name \"text\" with two options: \"first_checklist_answer\" and \"second_checklist_answer\"\n",
- " class_type=Classification.Type.CHECKLIST, \n",
- " instructions=\"checklist_question\", \n",
+ " lb.Classification( # Checklist classification given the name \"checklist_question\" with two options: \"first_checklist_answer\" and \"second_checklist_answer\"\n",
+ " class_type=lb.Classification.Type.CHECKLIST,\n",
+ " name=\"checklist_question\", \n",
 " options=[\n",
- " Option(value=\"first_checklist_answer\"),\n",
- " Option(value=\"second_checklist_answer\") \n",
+ " lb.Option(value=\"first_checklist_answer\"),\n",
+ " lb.Option(value=\"second_checklist_answer\")\n",
 " ]\n",
 " ), \n",
- " Classification( # Text classification given the name \"text\"\n",
- " class_type=Classification.Type.TEXT,\n",
- " instructions=\"free_text\"\n",
- " ),\n",
- " Classification(\n",
- " class_type=Classification.Type.RADIO, \n",
- " instructions=\"nested_radio_question\",\n",
- " options=[\n",
- " Option(\"first_radio_answer\",\n",
- " options=[\n",
- " Classification(\n",
- " class_type=Classification.Type.RADIO,\n",
- " instructions=\"sub_radio_question\",\n",
- " options=[Option(\"first_sub_radio_answer\")]\n",
- " )\n",
- " ]\n",
+ " lb.Classification( # Text classification given the name \"free_text\"\n",
+ " class_type=lb.Classification.Type.TEXT,\n",
+ " name=\"free_text\"\n",
+ " ),\n",
+ " lb.Classification(\n",
+ " class_type=lb.Classification.Type.RADIO,\n",
+ " name=\"nested_radio_question\",\n",
+ " options=[\n",
+ " lb.Option(\"first_radio_answer\",\n",
+ " options=[\n",
+ " lb.Classification(\n",
+ " class_type=lb.Classification.Type.RADIO,\n",
+ " name=\"sub_radio_question\",\n",
+ " options=[lb.Option(\"first_sub_radio_answer\")]\n",
+ " )\n",
+ " ]\n",
+ " )\n",
+ " ] \n",
+ " ),\n",
+ " lb.Classification(\n",
+ " class_type=lb.Classification.Type.CHECKLIST,\n",
+ " name=\"nested_checklist_question\",\n",
+ " options=[\n",
+ " lb.Option(\"first_checklist_answer\",\n",
+ " options=[\n",
+ " lb.Classification(\n",
+ " class_type=lb.Classification.Type.CHECKLIST,\n",
+ " name=\"sub_checklist_question\", \n",
+ " options=[lb.Option(\"first_sub_checklist_answer\")]\n",
 " )\n",
- " ] \n",
- " ) \n",
- " \n",
- " ],\n",
+ " ]\n",
+ " )\n",
+ " ]\n",
+ " ), \n",
+ " ],\n",
 " tools=[ # List of Tool objects\n",
- " Tool( # Bounding Box tool given the name \"box\"\n",
- " tool=Tool.Type.BBOX, \n",
+ " lb.Tool( # Bounding Box tool given the name \"bounding_box\"\n",
+ " tool=lb.Tool.Type.BBOX,\n",
 " name=\"bounding_box\"), \n",
- " Tool( # Bounding Box tool given the name \"box\"\n",
- " tool=Tool.Type.BBOX, \n",
+ " lb.Tool( # Bounding Box tool with a nested classification given the name \"bbox_with_radio_subclass\"\n",
+ " tool=lb.Tool.Type.BBOX,\n",
 " name=\"bbox_with_radio_subclass\",\n",
 " classifications=[\n",
- " Classification(\n",
- " class_type=Classification.Type.RADIO,\n",
- " instructions=\"sub_radio_question\",\n",
+ " lb.Classification(\n",
+ " class_type=lb.Classification.Type.RADIO,\n",
+ " name=\"sub_radio_question\",\n",
 " options=[\n",
- " Option(value=\"first_sub_radio_answer\")\n",
+ " lb.Option(value=\"first_sub_radio_answer\")\n",
 " ]\n",
 " ),\n",
 " ]\n",
 " ), \n",
- " Tool( # Polygon tool given the name \"polygon\"\n",
- " tool=Tool.Type.POLYGON, \n",
+ " lb.Tool( # Polygon tool given the name \"polygon\"\n",
+ " tool=lb.Tool.Type.POLYGON,\n",
 " name=\"polygon\"),\n",
- " Tool( # Segmentation mask tool given the name \"mask\"\n",
- " tool=Tool.Type.SEGMENTATION, \n",
+ " lb.Tool( # Segmentation mask tool given the name \"mask\"\n",
+ " tool=lb.Tool.Type.SEGMENTATION,\n",
 " name=\"mask\"),\n",
- " \t Tool( # Point tool given the name \"point\"\n",
- " tool=Tool.Type.POINT, \n",
+ " \t lb.Tool( # Point tool given the name \"point\"\n",
+ " tool=lb.Tool.Type.POINT,\n",
 " name=\"point\"), \n",
- " Tool( # Polyline tool given the name \"line\"\n",
- " tool=Tool.Type.LINE, \n",
- " name=\"polyline\"),]\n",
+ " lb.Tool( # Polyline tool given the name \"polyline\"\n",
+ " tool=lb.Tool.Type.LINE,\n",
+ " name=\"polyline\")]\n",
 ")\n",
 "\n",
- "ontology = client.create_ontology(\"Ontology Image Annotations\", ontology_builder.asdict())\n"
+ "ontology = client.create_ontology(\"Image Prediction Import Demo\", ontology_builder.asdict(), media_type=lb.MediaType.Image)"
 ],
 "metadata": {
 "id": "ojonQWaO3Rfv"
 },
 "id": "ojonQWaO3Rfv",
- "execution_count": 46,
+ "execution_count": 59,
 "outputs": []
 },
 {
@@ -700,8 +736,8 @@
 "# create a project and configure the ontology \n",
 "project = client.create_project(\n",
 " name=\"annotations_import_project_demo\",\n",
- " media_type=MediaType.Image,\n",
- " queue_mode=QueueMode.Batch)\n",
+ " media_type=lb.MediaType.Image,\n",
+ " queue_mode=lb_queue_mode.QueueMode.Batch)\n",
 "\n",
 "project.setup_editor(ontology) # Connect your ontology and editor to your MAL project"
 ],
 "metadata": {
 "id": "w1G8Mmb_z9Zx"
 },
 "id": "w1G8Mmb_z9Zx",
- "execution_count": 47,
+ "execution_count": 60,
 "outputs": []
 },
 {
@@ -737,10 +773,10 @@
 "base_uri": "https://localhost:8080/"
 },
 "id": "eP8N9JOX6QZ_",
- "outputId": "b1b66425-a276-4599-fd73-4c168588efe3"
+ "outputId": "5f89475d-6bd3-4e49-fb4a-8594d6dbd61e"
 },
 "id": "eP8N9JOX6QZ_",
- "execution_count": 48,
+ "execution_count": 61,
 "outputs": [
 {
 "output_type": "stream",
 "text": [
 "Batch \n"
 ]
 }
@@ -791,8 +827,8 @@
 "\n",
 "label = []\n",
 "for data_row in dataset.export_data_rows():\n",
- " label.append(Label(\n",
- " data=ImageData(\n",
+ " label.append(lb_types.Label(\n",
+ " data=lb_types.ImageData(\n",
 " uid=data_row.uid),\n",
 " annotations = [\n",
 " checklist_annotation, \n",
@@ -809,13 +845,13 @@
 "\n",
 "\n",
 "# Convert our label from a Labelbox class object to the underlying NDJSON format required for upload \n",
- "label_ndjson = list(NDJsonConverter.serialize(label))"
+ "label_ndjson = list(lb_serializers.NDJsonConverter.serialize(label))"
 ],
 "metadata": {
 "id": "gwExeqRHz9eW"
 },
 "id": "gwExeqRHz9eW",
- "execution_count": 49,
+ "execution_count": 62,
 "outputs": []
 },
 {
@@ -842,7 +878,10 @@
 " mask_annotation_ndjson, \n",
 " point_annotation_ndjson, \n",
 " polyline_annotation_ndjson,\n",
- " nested_radio_annotation_ndjson]:\n",
+ " nested_radio_annotation_ndjson,\n",
+ " nested_checklist_annotation_ndjson \n",
+ "\n",
+ " ]:\n",
 " annotation.update({\n",
 " 'uuid': str(uuid.uuid4()),\n",
 " 'dataRow': {'id': data_row.uid},\n",
 "\n",
 "metadata": {
 "id": "QPBF3n0Cehsq"
 },
 "id": "QPBF3n0Cehsq",
- "execution_count": 50,
+ "execution_count": 63,
 "outputs": []
 },
 {
@@ -881,7 +920,7 @@
 "cell_type": "code",
 "source": [
 "# Upload MAL label for this data row in project\n",
- "upload_job = MALPredictionImport.create_from_objects(\n",
+ "upload_job = lb.MALPredictionImport.create_from_objects(\n",
 " client = client, \n",
 " project_id = project.uid, \n",
 " name=\"mal_job\"+str(uuid.uuid4()), \n",
@@ -895,10 +934,10 @@
 "colab": {
 "base_uri": "https://localhost:8080/"
 },
- "outputId": "0672edf2-c841-4f80-cfd9-02100259f694"
+ "outputId": "f8338323-00cf-415a-9558-4fde9586879e"
 },
 "id": "wPflwCr3_03e",
- "execution_count": 51,
+ "execution_count": 64,
 "outputs": [
 {
 "output_type": "stream",
@@ -924,7 +963,7 @@
 "cell_type": "code",
 "source": [
 "# Upload label for this data row in project\n",
- "upload_job = LabelImport.create_from_objects(\n",
+ "upload_job = lb.LabelImport.create_from_objects(\n",
 " client = client, \n",
 " project_id = project.uid, \n",
 " name=\"label_import_job\"+str(uuid.uuid4()), \n",
@@ -938,10 +977,10 @@
 "base_uri": "https://localhost:8080/"
 },
 "id": "l2I_dRyXz9i2",
- "outputId": "0507f043-e037-4ef5-ae2c-490739a01267"
+ "outputId": "b1c0b385-73c4-472b-a1ca-a9f3f1a8bdf1"
 },
 "id": "l2I_dRyXz9i2",
- "execution_count": 54,
+ "execution_count": 65,
 "outputs": [
 {
 "output_type": "stream",
@@ -963,7 +1002,7 @@
 "id": "y2Z76TrVDSp6"
 },
 "id": "y2Z76TrVDSp6",
- "execution_count": null,
+ "execution_count": 66,
 "outputs": []
 }
 ],