Skip to content

Commit

Permalink
Update model names (#434)
Browse files Browse the repository at this point in the history
  • Loading branch information
logankilpatrick committed May 24, 2024
1 parent 1827b09 commit 695b0fd
Showing 1 changed file with 13 additions and 22 deletions.
35 changes: 13 additions & 22 deletions site/en/gemini-api/docs/get-started/python.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -231,8 +231,8 @@
"\n",
"Now you're ready to call the Gemini API. Use `list_models` to see the available Gemini models:\n",
"\n",
"* `gemini-pro`: optimized for text-only prompts.\n",
"* `gemini-pro-vision`: optimized for text-and-images prompts."
"* `gemini-1.5-pro`: optimized for high-intelligence tasks; the most powerful Gemini model.\n",
"* `gemini-1.5-flash`: optimized for multimodal use cases where speed and cost are important."
]
},
{
Expand Down Expand Up @@ -278,7 +278,7 @@
},
"outputs": [],
"source": [
"model = genai.GenerativeModel('gemini-pro')"
"model = genai.GenerativeModel('gemini-1.5-flash')"
]
},
{
Expand Down Expand Up @@ -712,7 +712,7 @@
"id": "7r99TN2R8EUD"
},
"source": [
"Use the `gemini-pro-vision` model and pass the image to the model with `generate_content`."
"Use the `gemini-1.5-flash` model and pass the image to the model with `generate_content`."
]
},
{
Expand All @@ -723,7 +723,7 @@
},
"outputs": [],
"source": [
"model = genai.GenerativeModel('gemini-pro-vision')"
"model = genai.GenerativeModel('gemini-1.5-flash')"
]
},
{
Expand Down Expand Up @@ -839,20 +839,11 @@
}
],
"source": [
"model = genai.GenerativeModel('gemini-pro')\n",
"model = genai.GenerativeModel('gemini-1.5-flash')\n",
"chat = model.start_chat(history=[])\n",
"chat"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "88Il02N-km9j"
},
"source": [
"Note: The vision model `gemini-pro-vision` is not optimized for multi-turn chat."
]
},
{
"cell_type": "markdown",
"metadata": {
Expand Down Expand Up @@ -1139,7 +1130,7 @@
],
"source": [
"result = genai.embed_content(\n",
" model=\"models/embedding-001\",\n",
" model=\"models/text-embedding-004\",\n",
" content=\"What is the meaning of life?\",\n",
" task_type=\"retrieval_document\",\n",
" title=\"Embedding of single string\")\n",
Expand Down Expand Up @@ -1178,7 +1169,7 @@
],
"source": [
"result = genai.embed_content(\n",
" model=\"models/embedding-001\",\n",
" model=\"models/text-embedding-004\",\n",
" content=[\n",
" 'What is the meaning of life?',\n",
" 'How much wood would a woodchuck chuck?',\n",
Expand Down Expand Up @@ -1244,7 +1235,7 @@
],
"source": [
"result = genai.embed_content(\n",
" model = 'models/embedding-001',\n",
" model = 'models/text-embedding-004',\n",
" content = response.candidates[0].content)\n",
"\n",
"# 1 input > 1 vector output\n",
Expand Down Expand Up @@ -1317,7 +1308,7 @@
],
"source": [
"result = genai.embed_content(\n",
" model = 'models/embedding-001',\n",
" model = 'models/text-embedding-004',\n",
" content = chat.history)\n",
"\n",
"# 1 input > 1 vector output\n",
Expand Down Expand Up @@ -1537,7 +1528,7 @@
},
"outputs": [],
"source": [
"model = genai.GenerativeModel('gemini-pro-vision')\n",
"model = genai.GenerativeModel('gemini-1.5-flash')\n",
"response = model.generate_content(\n",
" glm.Content(\n",
" parts = [\n",
Expand Down Expand Up @@ -1631,7 +1622,7 @@
}
],
"source": [
"model = genai.GenerativeModel('gemini-pro')\n",
"model = genai.GenerativeModel('gemini-1.5-flash')\n",
"\n",
"messages = [\n",
" {'role':'user',\n",
Expand Down Expand Up @@ -1754,7 +1745,7 @@
},
"outputs": [],
"source": [
"model = genai.GenerativeModel('gemini-pro')\n",
"model = genai.GenerativeModel('gemini-1.5-flash')\n",
"response = model.generate_content(\n",
" 'Tell me a story about a magic backpack.',\n",
" generation_config=genai.types.GenerationConfig(\n",
Expand Down

0 comments on commit 695b0fd

Please sign in to comment.