diff --git a/examples/prediction_upload/image_predictions.ipynb b/examples/prediction_upload/image_predictions.ipynb index 25c1beba9..72e4dcaae 100644 --- a/examples/prediction_upload/image_predictions.ipynb +++ b/examples/prediction_upload/image_predictions.ipynb @@ -96,30 +96,22 @@ "metadata": { "id": "cm8xMaLbGb7v" }, - "execution_count": 36, + "execution_count": null, "outputs": [] }, { "cell_type": "code", "source": [ - "from labelbox.schema.ontology import OntologyBuilder, Tool, Classification, Option\n", - "from labelbox import Client, MALPredictionImport, LabelImport\n", - "from labelbox.data.serialization import NDJsonConverter\n", - "from labelbox.schema.media_type import MediaType\n", - "from labelbox.data.annotation_types import (\n", - " Label, ImageData, ObjectAnnotation, MaskData,\n", - " Rectangle, Point, Line, Mask, Polygon,\n", - " Radio, Checklist, Text,\n", - " ClassificationAnnotation, ClassificationAnswer\n", - ")\n", + "import labelbox as lb\n", + "import labelbox.data.annotation_types as lb_types\n", + "import labelbox.data.serialization as lb_serializers\n", "import uuid\n", - "import numpy as np\n", - "from labelbox.schema.queue_mode import QueueMode" + "import numpy as np" ], "metadata": { "id": "NIq-6M9kHKSs" }, - "execution_count": 37, + "execution_count": null, "outputs": [] }, { @@ -136,12 +128,12 @@ "cell_type": "code", "source": [ "API_KEY = None\n", - "client = Client(API_KEY)" + "client = lb.Client(API_KEY)" ], "metadata": { "id": "z7ZLKLYLHP__" }, - "execution_count": 38, + "execution_count": null, "outputs": [] }, { @@ -159,9 +151,9 @@ "########### Radio Classification ###########\n", "\n", "# Python annotation\n", - "radio_prediction = ClassificationAnnotation(\n", + "radio_prediction = lb_types.ClassificationAnnotation(\n", " name=\"radio_question\", \n", - " value=Radio(answer = ClassificationAnswer(name = \"second_radio_answer\", confidence=0.5))\n", + " value=lb_types.Radio(answer = lb_types.ClassificationAnswer(name = \"second_radio_answer\", confidence=0.5))\n", ")\n", "\n", "# NDJSON\n", @@ -173,7 +165,7 @@ "metadata": { "id": "v5wL6oojz9Ge" }, - "execution_count": 39, + "execution_count": null, "outputs": [] }, { @@ -213,7 +205,7 @@ "metadata": { "id": "I75K-wx7_sDs" }, - "execution_count": 40, + "execution_count": null, "outputs": [] }, { @@ -222,15 +214,15 @@ "############ Checklist ############\n", "\n", "# Python Annotations\n", - "checklist_prediction = ClassificationAnnotation(\n", + "checklist_prediction = lb_types.ClassificationAnnotation(\n", " name=\"checklist_question\", # must match your ontology feature's name\n", - " value=Checklist(\n", + " value=lb_types.Checklist(\n", " answer = [\n", - " ClassificationAnswer(\n", + " lb_types.ClassificationAnswer(\n", " name = \"first_checklist_answer\", \n", " confidence=0.5\n", " ), \n", - " ClassificationAnswer(\n", + " lb_types.ClassificationAnswer(\n", " name = \"second_checklist_answer\", \n", " confidence=0.5\n", " )\n", @@ -250,7 +242,7 @@ "metadata": { "id": "b2UjSoYez9I1" }, - "execution_count": 41, + "execution_count": null, "outputs": [] }, { @@ -260,12 +252,12 @@ "\n", "\n", "# Python Annotation \n", - "bbox_prediction = ObjectAnnotation(\n", + "bbox_prediction = lb_types.ObjectAnnotation(\n", " name = \"bounding_box\", # must match your ontology feature's name\n", " confidence=0.5, \n", - " value=Rectangle(\n", - " start=Point(x=977, y=1690), # Top left\n", - " end=Point(x=330, y=225), # Bottom right\n", + " value=lb_types.Rectangle(\n", + " start=lb_types.Point(x=977, y=1690), # 
Top left\n", + " end=lb_types.Point(x=330, y=225), # Bottom right\n", " ),\n", " \n", ")\n", @@ -285,24 +277,24 @@ "metadata": { "id": "xCU4JRP0z9Nh" }, - "execution_count": 42, + "execution_count": null, "outputs": [] }, { "cell_type": "code", "source": [ "####### Bounding box with nested classification #######\n", - "bbox_with_radio_subclass_prediction = ObjectAnnotation(\n", + "bbox_with_radio_subclass_prediction = lb_types.ObjectAnnotation(\n", " name=\"bbox_with_radio_subclass\",\n", " confidence=0.5, # must match your ontology feature's name\n", - " value=Rectangle(\n", - " start=Point(x=933, y=541), # Top left\n", - " end=Point(x=191, y=330), # Bottom right\n", + " value=lb_types.Rectangle(\n", + " start=lb_types.Point(x=933, y=541), # Top left\n", + " end=lb_types.Point(x=191, y=330), # Bottom right\n", " ),\n", " classifications=[\n", - " \tClassificationAnnotation(\n", + " \tlb_types.ClassificationAnnotation(\n", " \tname=\"sub_radio_question\",\n", - " \t\tvalue=Radio(answer=ClassificationAnswer(name=\"first_sub_radio_answer\", confidence=0.5))\n", + " \t\tvalue=lb_types.Radio(answer=lb_types.ClassificationAnswer(name=\"first_sub_radio_answer\", confidence=0.5))\n", " )\n", " ]\n", ")\n", @@ -330,7 +322,7 @@ "metadata": { "id": "gAIzsxEjLmhv" }, - "execution_count": 43, + "execution_count": null, "outputs": [] }, { @@ -338,14 +330,14 @@ "source": [ "########## Polygon ##########\n", "# Python Anotation \n", - "polygon_prediction = ObjectAnnotation(\n", + "polygon_prediction = lb_types.ObjectAnnotation(\n", " name = \"polygon\", # must match your ontology feature's name \n", " confidence = 0.5, \n", - " value=Polygon( # Coordinates for the verticies of your polygon\n", - " points=[Point(x=1489.581,y=183.934),Point(x=2278.306,y=256.885),Point(x=2428.197,y=200.437),Point(x=2560.0,y=335.419),\n", - " Point(x=2557.386,y=503.165),Point(x=2320.596,y=503.103),Point(x=2156.083, y=628.943),Point(x=2161.111,y=785.519),\n", - " Point(x=2002.115, y=894.647),Point(x=1838.456,y=877.874),Point(x=1436.53,y=874.636),Point(x=1411.403,y=758.579),\n", - " Point(x=1353.853,y=751.74),Point(x=1345.264, y=453.461),Point(x=1426.011,y=421.129)]\n", + " value=lb_types.Polygon( # Coordinates for the verticies of your polygon\n", + " points=[lb_types.Point(x=1489.581,y=183.934), lb_types.Point(x=2278.306,y=256.885), lb_types.Point(x=2428.197,y=200.437), lb_types.Point(x=2560.0,y=335.419),\n", + " lb_types.Point(x=2557.386,y=503.165), lb_types.Point(x=2320.596,y=503.103), lb_types.Point(x=2156.083, y=628.943), lb_types.Point(x=2161.111,y=785.519),\n", + " lb_types.Point(x=2002.115, y=894.647), lb_types.Point(x=1838.456,y=877.874), lb_types.Point(x=1436.53,y=874.636), lb_types.Point(x=1411.403,y=758.579),\n", + " lb_types.Point(x=1353.853,y=751.74), lb_types.Point(x=1345.264, y=453.461), lb_types.Point(x=1426.011,y=421.129)]\n", " ),\n", ")\n", "\n", @@ -378,7 +370,7 @@ "metadata": { "id": "jRwfE4MFz9Ph" }, - "execution_count": 44, + "execution_count": null, "outputs": [] }, { @@ -387,9 +379,9 @@ "####### Free text #######\n", "# Confidence is not supported for text prediction\n", "# Python annotation\n", - "text_annotation = ClassificationAnnotation(\n", + "text_annotation = lb_types.ClassificationAnnotation(\n", " name=\"free_text\", # must match your ontology feature's name\n", - " value=Text(answer=\"sample text\")\n", + " value=lb_types.Text(answer=\"sample text\")\n", ")\n", "\n", "# NDJSON\n", @@ -401,7 +393,7 @@ "metadata": { "id": "PBB37YpWTiVR" }, - "execution_count": 45, + "execution_count": null, 
"outputs": [] }, { @@ -415,20 +407,20 @@ "\n", "# convert a polygon to mask\n", "im_height, im_width = 100,100 #need to provide the height and width of image.\n", - "mask_data = MaskData(arr=\n", + "mask_data = lb_types.MaskData(arr=\n", " polygon_prediction.value.draw(height=im_height,width=im_width,color=color))\n", "\n", "# convert a 2D array to 3D array\n", "arr_2d = np.zeros((100,100), dtype='uint8')\n", - "mask_data = MaskData.from_2D_arr(arr_2d)\n", + "mask_data = lb_types.MaskData.from_2D_arr(arr_2d)\n", "\n", "# a 3D array where 3rd axis is RGB values.\n", - "mask_data = MaskData(arr= np.zeros([400,450,3],dtype='uint8'))\n", + "mask_data = lb_types.MaskData(arr= np.zeros([400,450,3],dtype='uint8'))\n", "\n", - "mask_prediction = ObjectAnnotation(\n", + "mask_prediction = lb_types.ObjectAnnotation(\n", " name = \"mask\", # must match your ontology feature's name\n", " confidence=0.5,\n", - " value=Mask(mask=mask_data, color=color),\n", + " value=lb_types.Mask(mask=mask_data, color=color),\n", ")\n", "\n", "\n", @@ -445,7 +437,7 @@ "metadata": { "id": "39vz-tYsz9Ry" }, - "execution_count": 46, + "execution_count": null, "outputs": [] }, { @@ -454,10 +446,10 @@ "######## Point ########\n", "\n", "# Python Annotation\n", - "point_prediction = ObjectAnnotation(\n", + "point_prediction = lb_types.ObjectAnnotation(\n", " name = \"point\", # must match your ontology feature's name\n", " confidence=0.5,\n", - " value = Point(x=1166.606, y=1441.768),\n", + " value = lb_types.Point(x=1166.606, y=1441.768),\n", ")\n", "\n", "\n", @@ -472,7 +464,7 @@ "metadata": { "id": "UelSiWN2z9Tg" }, - "execution_count": 47, + "execution_count": null, "outputs": [] }, { @@ -483,15 +475,15 @@ "\n", "# Python Annotation \n", "\n", - "polyline_prediction = ObjectAnnotation(\n", + "polyline_prediction = lb_types.ObjectAnnotation(\n", " name = \"polyline\", # must match your ontology feature's name\n", " confidence=0.5, ## Not supported for python annotation tools\n", - " value=Line( # Coordinates for the keypoints in your polyline\n", - " points=[Point(x=2534.353, y=249.471),Point(x=2429.492, y=182.092),Point(x=2294.322, y=221.962),Point(x=2224.491, y=180.463),Point(x=2136.123, y=204.716),\n", - " Point(x=1712.247, y=173.949),Point(x=1703.838, y=84.438),Point(x=1579.772, y=82.61),Point(x=1583.442, y=167.552),\n", - " Point(x=1478.869, y=164.903),Point(x=1418.941, y=318.149),Point(x=1243.128, y=400.815),Point(x=1022.067, y=319.007),\n", - " Point(x=892.367, y=379.216),Point(x=670.273, y=364.408),Point(x=613.114, y=288.16),Point(x=377.559, y=238.251),\n", - " Point(x=368.087, y=185.064),Point(x=246.557, y=167.286),Point(x=236.648, y=285.61),Point(x=90.929, y=326.412)]\n", + " value=lb_types.Line( # Coordinates for the keypoints in your polyline\n", + " points=[lb_types.Point(x=2534.353, y=249.471), lb_types.Point(x=2429.492, y=182.092), lb_types.Point(x=2294.322, y=221.962), lb_types.Point(x=2224.491, y=180.463), lb_types.Point(x=2136.123, y=204.716),\n", + " lb_types.Point(x=1712.247, y=173.949), lb_types.Point(x=1703.838, y=84.438), lb_types.Point(x=1579.772, y=82.61), lb_types.Point(x=1583.442, y=167.552),\n", + " lb_types.Point(x=1478.869, y=164.903), lb_types.Point(x=1418.941, y=318.149), lb_types.Point(x=1243.128, y=400.815), lb_types.Point(x=1022.067, y=319.007),\n", + " lb_types.Point(x=892.367, y=379.216), lb_types.Point(x=670.273, y=364.408), lb_types.Point(x=613.114, y=288.16), lb_types.Point(x=377.559, y=238.251),\n", + " lb_types.Point(x=368.087, y=185.064), lb_types.Point(x=246.557, y=167.286), 
lb_types.Point(x=236.648, y=285.61), lb_types.Point(x=90.929, y=326.412)]\n", " ),\n", ")\n", "\n", @@ -528,7 +520,7 @@ "metadata": { "id": "mrjb8qY3z9VY" }, - "execution_count": 48, + "execution_count": null, "outputs": [] }, { @@ -557,24 +549,24 @@ "colab": { "base_uri": "https://localhost:8080/" }, - "outputId": "800b120e-c41e-4f6f-a509-8d089f3e20bc" + "outputId": "9f4d0970-d26e-438b-da20-8d09d8b3b665" }, - "execution_count": 49, + "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "\n" ] } @@ -593,53 +585,53 @@ { "cell_type": "code", "source": [ - "ontology_builder = OntologyBuilder(\n", + "ontology_builder = lb.OntologyBuilder(\n", " classifications=[ # List of Classification objects\n", - " Classification( # Radio classification given the name \"text\" with two options: \"first_radio_answer\" and \"second_radio_answer\"\n", - " class_type=Classification.Type.RADIO, \n", + " lb.Classification( # Radio classification given the name \"text\" with two options: \"first_radio_answer\" and \"second_radio_answer\"\n", + " class_type=lb.Classification.Type.RADIO,\n", " instructions=\"radio_question\", \n", " options=[\n", - " Option(value=\"first_radio_answer\"),\n", - " Option(value=\"second_radio_answer\")\n", + " lb.Option(value=\"first_radio_answer\"),\n", + " lb.Option(value=\"second_radio_answer\")\n", " ]\n", " ),\n", - " Classification( # Checklist classification given the name \"text\" with two options: \"first_checklist_answer\" and \"second_checklist_answer\"\n", - " class_type=Classification.Type.CHECKLIST, \n", + " lb.Classification( # Checklist classification given the name \"text\" with two options: \"first_checklist_answer\" and \"second_checklist_answer\"\n", + " class_type=lb.Classification.Type.CHECKLIST,\n", " instructions=\"checklist_question\", \n", " options=[\n", - " Option(value=\"first_checklist_answer\"),\n", - " Option(value=\"second_checklist_answer\") \n", + " lb.Option(value=\"first_checklist_answer\"),\n", + " lb.Option(value=\"second_checklist_answer\")\n", " ]\n", " ), \n", - " Classification( # Text classification given the name \"text\"\n", - " class_type=Classification.Type.TEXT,\n", + " lb.Classification( # Text classification given the name \"text\"\n", + " class_type=lb.Classification.Type.TEXT,\n", " instructions=\"free_text\"\n", " ),\n", - " Classification(\n", - " class_type=Classification.Type.RADIO, \n", + " lb.Classification(\n", + " class_type=lb.Classification.Type.RADIO,\n", " instructions=\"nested_radio_question\",\n", " options=[\n", - " Option(\"first_radio_answer\",\n", + " lb.Option(\"first_radio_answer\",\n", " options=[\n", - " Classification(\n", - " class_type=Classification.Type.RADIO,\n", + " lb.Classification(\n", + " class_type=lb.Classification.Type.RADIO,\n", " instructions=\"sub_radio_question\",\n", - " options=[Option(\"first_sub_radio_answer\")]\n", + " options=[lb.Option(\"first_sub_radio_answer\")]\n", " )\n", " ]\n", " )\n", " ] \n", " ),\n", - " Classification(\n", - " class_type=Classification.Type.CHECKLIST, \n", + " lb.Classification(\n", + " class_type=lb.Classification.Type.CHECKLIST,\n", " instructions=\"nested_checklist_question\",\n", " options=[\n", - " Option(\"first_checklist_answer\",\n", + " lb.Option(\"first_checklist_answer\",\n", " options=[\n", - " Classification(\n", - " class_type=Classification.Type.CHECKLIST, \n", + " lb.Classification(\n", + " class_type=lb.Classification.Type.CHECKLIST,\n", " instructions=\"sub_checklist_question\", \n", - " 
"                      options=[Option(\"first_sub_checklist_answer\")]\n", + "
options=[Option(\"first_sub_checklist_answer\")]\n", + " options=[lb.Option(\"first_sub_checklist_answer\")]\n", " )\n", " ]\n", " )\n", @@ -647,42 +639,42 @@ " ), \n", " ],\n", " tools=[ # List of Tool objects\n", - " Tool( # Bounding Box tool given the name \"box\"\n", - " tool=Tool.Type.BBOX, \n", + " lb.Tool( # Bounding Box tool given the name \"box\"\n", + " tool=lb.Tool.Type.BBOX,\n", " name=\"bounding_box\"), \n", - " Tool( # Bounding Box tool given the name \"box\"\n", - " tool=Tool.Type.BBOX, \n", + " lb.Tool( # Bounding Box tool given the name \"box\"\n", + " tool=lb.Tool.Type.BBOX,\n", " name=\"bbox_with_radio_subclass\",\n", " classifications=[\n", - " Classification(\n", - " class_type=Classification.Type.RADIO,\n", + " lb.Classification(\n", + " class_type=lb.Classification.Type.RADIO,\n", " instructions=\"sub_radio_question\",\n", " options=[\n", - " Option(value=\"first_sub_radio_answer\")\n", + " lb.Option(value=\"first_sub_radio_answer\")\n", " ]\n", " ),\n", " ]\n", " ), \n", - " Tool( # Polygon tool given the name \"polygon\"\n", - " tool=Tool.Type.POLYGON, \n", + " lb.Tool( # Polygon tool given the name \"polygon\"\n", + " tool=lb.Tool.Type.POLYGON,\n", " name=\"polygon\"),\n", - " Tool( # Segmentation mask tool given the name \"mask\"\n", - " tool=Tool.Type.SEGMENTATION, \n", + " lb.Tool( # Segmentation mask tool given the name \"mask\"\n", + " tool=lb.Tool.Type.SEGMENTATION,\n", " name=\"mask\"),\n", - " \t Tool( # Point tool given the name \"point\"\n", - " tool=Tool.Type.POINT, \n", + " \t lb.Tool( # Point tool given the name \"point\"\n", + " tool=lb.Tool.Type.POINT,\n", " name=\"point\"), \n", - " Tool( # Polyline tool given the name \"line\"\n", - " tool=Tool.Type.LINE, \n", + " lb.Tool( # Polyline tool given the name \"line\"\n", + " tool=lb.Tool.Type.LINE,\n", " name=\"polyline\")]\n", ")\n", "\n", - "ontology = client.create_ontology(\"Image Prediction Import Demo\", ontology_builder.asdict(), media_type=MediaType.Image)" + "ontology = client.create_ontology(\"Image Prediction Import Demo\", ontology_builder.asdict(), media_type=lb.MediaType.Image)" ], "metadata": { "id": "Kt4XWWqgIiWk" }, - "execution_count": 50, + "execution_count": null, "outputs": [] }, { @@ -706,7 +698,7 @@ "metadata": { "id": "8n-AvzdiOR6d" }, - "execution_count": 51, + "execution_count": null, "outputs": [] }, { @@ -728,9 +720,9 @@ "colab": { "base_uri": "https://localhost:8080/" }, - "outputId": "4eb2e619-1139-49c5-a9e7-d29838336c05" + "outputId": "e0b4c92c-1415-4557-e134-60323b167442" }, - "execution_count": 52, + "execution_count": null, "outputs": [ { "output_type": "execute_result", @@ -740,7 +732,7 @@ ] }, "metadata": {}, - "execution_count": 52 + "execution_count": 84 } ] }, @@ -761,13 +753,14 @@ "cell_type": "code", "source": [ "# Create a Label for predictions\n", - "label_prediction = Label(\n", - " data=ImageData(uid=data_row.uid),\n", + "label_prediction = lb_types.Label(\n", + " data=lb_types.ImageData(uid=data_row.uid),\n", " annotations = [\n", " radio_prediction,\n", " checklist_prediction, \n", " bbox_prediction, \n", " bbox_with_radio_subclass_prediction, \n", + " polyline_prediction,\n", " polygon_prediction, \n", " mask_prediction, \n", " point_prediction,\n", @@ -779,18 +772,18 @@ "label_list_prediction = [label_prediction]\n", "\n", "# Convert the prediction label from a Labelbox class object to the underlying NDJSON format required for upload - uploads can be directly built in this syntax as well\n", - "ndjson_prediction = 
list(NDJsonConverter.serialize(label_list_prediction))" + "ndjson_prediction = list(lb_serializers.NDJsonConverter.serialize(label_list_prediction))" ], "metadata": { "id": "zv2OLTXKSGWv" }, - "execution_count": 53, + "execution_count": null, "outputs": [] }, { "cell_type": "markdown", "source": [ - "If using NDJSON" + "If using NDJSON:" ], "metadata": { "id": "HaIjOzZggv56" @@ -799,8 +792,8 @@ { "cell_type": "code", "source": [ - "\n", "ndjson_prediction_method2 = []\n", + "\n", "for annot in [\n", " radio_prediction_ndjson,\n", " checklist_prediction_ndjson, \n", @@ -813,8 +806,6 @@ " text_annotation_ndjson, \n", " nested_radio_prediction_ndjson,\n", " nested_checklist_prediction_ndjson\n", - " \n", - " \n", "]:\n", " annot.update({\n", " 'uuid': str(uuid.uuid4()),\n", @@ -825,7 +816,7 @@ "metadata": { "id": "F-Y7sSyAV3tn" }, - "execution_count": 60, + "execution_count": null, "outputs": [] }, { @@ -853,9 +844,9 @@ "colab": { "base_uri": "https://localhost:8080/" }, - "outputId": "c6ab8349-c327-49df-e3f6-00e06370c7e3" + "outputId": "cd1c04aa-31a8-4891-b1db-ef28d1469539" }, - "execution_count": 61, + "execution_count": null, "outputs": [ { "output_type": "stream", @@ -890,19 +881,30 @@ "cell_type": "code", "source": [ "# Create a Labelbox project\n", - "project = client.create_project(name=\"image_prediction_demo\", \n", - " queue_mode=QueueMode.Batch,\n", + "project = client.create_project(name=\"image_prediction_demo\",\n", " # Quality Settings setup \n", " auto_audit_percentage=1,\n", " auto_audit_number_of_labels=1,\n", - " media_type=MediaType.Image)\n", + " media_type=lb.MediaType.Image)\n", "project.setup_editor(ontology)" ], "metadata": { - "id": "jEtoDiDrPFvI" + "id": "jEtoDiDrPFvI", + "outputId": "48a0f1d3-2c79-41bf-d739-a7e84e74749a", + "colab": { + "base_uri": "https://localhost:8080/" + } }, - "execution_count": 62, - "outputs": [] + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stderr", + "text": [ + "WARNING:labelbox.client:Default createProject behavior will soon be adjusted to prefer batch projects. 
Pass in `queue_mode` parameter explicitly to opt-out for the time being.\n" + ] + } + ] }, { "cell_type": "markdown", @@ -927,19 +929,19 @@ "colab": { "base_uri": "https://localhost:8080/" }, - "outputId": "262e1be8-3e43-42dd-ac99-e47378f9a705" + "outputId": "9ab1b671-b3a3-4056-bf45-7bda1d6e20a0" }, - "execution_count": 63, + "execution_count": null, "outputs": [ { "output_type": "execute_result", "data": { "text/plain": [ - "" + "" ] }, "metadata": {}, - "execution_count": 63 + "execution_count": 89 } ] }, @@ -1094,7 +1096,7 @@ "metadata": { "id": "A8_HVvu9Uvfl" }, - "execution_count": 64, + "execution_count": null, "outputs": [] }, { @@ -1134,7 +1136,7 @@ "metadata": { "id": "9gD_alThQA3G" }, - "execution_count": 65, + "execution_count": null, "outputs": [] }, { @@ -1149,7 +1151,7 @@ { "cell_type": "code", "source": [ - "upload_job_annotation = LabelImport.create_from_objects(\n", + "upload_job_annotation = lb.LabelImport.create_from_objects(\n", " client = client,\n", " project_id = project.uid,\n", " name=\"annotation_import_\" + str(uuid.uuid4()),\n", @@ -1164,9 +1166,9 @@ "colab": { "base_uri": "https://localhost:8080/" }, - "outputId": "eba0209a-bcde-4816-b386-d69f97899678" + "outputId": "02ca8f6b-a7ce-4626-9751-0f39ab0adf01" }, - "execution_count": 66, + "execution_count": null, "outputs": [ { "output_type": "stream", @@ -1198,9 +1200,9 @@ "colab": { "base_uri": "https://localhost:8080/" }, - "outputId": "b2d68ab6-6d1f-4ce2-d633-8048e8209af3" + "outputId": "0184b7df-067d-4e7a-d14d-21b718d4e857" }, - "execution_count": 67, + "execution_count": null, "outputs": [ { "output_type": "execute_result", @@ -1210,7 +1212,7 @@ ] }, "metadata": {}, - "execution_count": 67 + "execution_count": 93 } ] }, @@ -1232,7 +1234,7 @@ "metadata": { "id": "aAhkyvJlWK1p" }, - "execution_count": 68, + "execution_count": null, "outputs": [] } ]
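For quick reference, the namespaced imports this diff introduces (`lb`, `lb_types`, `lb_serializers`) come together roughly as in the minimal sketch below. It covers only one radio prediction and the MAL upload path; `API_KEY`, `PROJECT_ID`, and `DATA_ROW_ID` are placeholders, and the sketch assumes the project has already been created with the demo ontology and has the data row attached via a batch, as the notebook does.

import uuid

import labelbox as lb
import labelbox.data.annotation_types as lb_types
import labelbox.data.serialization as lb_serializers

API_KEY = None       # placeholder: a valid Labelbox API key
PROJECT_ID = ""      # placeholder: an existing project configured with the demo ontology
DATA_ROW_ID = ""     # placeholder: a data row already attached to the project via a batch

client = lb.Client(API_KEY)

# Build a single radio prediction with the namespaced annotation types
radio_prediction = lb_types.ClassificationAnnotation(
    name="radio_question",  # must match the ontology feature's name
    value=lb_types.Radio(
        answer=lb_types.ClassificationAnswer(name="second_radio_answer", confidence=0.5)
    ),
)

# Group the predictions for one data row into a Label
label = lb_types.Label(
    data=lb_types.ImageData(uid=DATA_ROW_ID),
    annotations=[radio_prediction],
)

# Serialize to NDJSON and upload as model-assisted labeling (MAL) predictions
ndjson_predictions = list(lb_serializers.NDJsonConverter.serialize([label]))
upload_job = lb.MALPredictionImport.create_from_objects(
    client=client,
    project_id=PROJECT_ID,
    name="mal_import_" + str(uuid.uuid4()),
    predictions=ndjson_predictions,
)
upload_job.wait_until_done()
print("Errors:", upload_job.errors)

The same serialized list can instead be passed to lb.LabelImport.create_from_objects, as the diff's ground-truth upload cell does, when the predictions should be imported as submitted labels rather than as MAL pre-labels.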