From f2f41ff70d33cc36a2cf92c3187d8126369ad9e7 Mon Sep 17 00:00:00 2001
From: Thibault Genaitay
Date: Thu, 5 Dec 2024 17:52:21 +0100
Subject: [PATCH] fix(ai): update matching OpenAI specs

---
 ai-data/generative-apis/api-cli/using-chat-api.mdx       | 1 +
 ai-data/generative-apis/how-to/query-language-models.mdx | 5 +++--
 ai-data/generative-apis/how-to/query-vision-models.mdx   | 5 +++--
 3 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/ai-data/generative-apis/api-cli/using-chat-api.mdx b/ai-data/generative-apis/api-cli/using-chat-api.mdx
index c95706a383..90247422e2 100644
--- a/ai-data/generative-apis/api-cli/using-chat-api.mdx
+++ b/ai-data/generative-apis/api-cli/using-chat-api.mdx
@@ -67,6 +67,7 @@ Our chat API is OpenAI compatible. Use OpenAI’s [API reference](https://platfo
 - top_p
 - max_tokens
 - stream
+- stream_options
 - presence_penalty
 - [response_format](/ai-data/generative-apis/how-to/use-structured-outputs)
 - logprobs
diff --git a/ai-data/generative-apis/how-to/query-language-models.mdx b/ai-data/generative-apis/how-to/query-language-models.mdx
index 6791561cdd..ab40d37398 100644
--- a/ai-data/generative-apis/how-to/query-language-models.mdx
+++ b/ai-data/generative-apis/how-to/query-language-models.mdx
@@ -139,7 +139,7 @@ response = client.chat.completions.create(
 )

 for chunk in response:
-    if chunk.choices[0].delta.content:
+    if chunk.choices and chunk.choices[0].delta.content:
         print(chunk.choices[0].delta.content, end="")
 ```

@@ -167,7 +167,8 @@ async def main():
         stream=True,
     )
     async for chunk in stream:
-        print(chunk.choices[0].delta.content, end="")
+        if chunk.choices and chunk.choices[0].delta.content:
+            print(chunk.choices[0].delta.content, end="")

 asyncio.run(main())
 ```
diff --git a/ai-data/generative-apis/how-to/query-vision-models.mdx b/ai-data/generative-apis/how-to/query-vision-models.mdx
index 8b18cad665..9d2fd5419d 100644
--- a/ai-data/generative-apis/how-to/query-vision-models.mdx
+++ b/ai-data/generative-apis/how-to/query-vision-models.mdx
@@ -201,7 +201,7 @@ response = client.chat.completions.create(
 )

 for chunk in response:
-    if chunk.choices[0].delta.content:
+    if chunk.choices and chunk.choices[0].delta.content:
         print(chunk.choices[0].delta.content, end="")
 ```

@@ -232,7 +232,8 @@ async def main():
         stream=True,
     )
     async for chunk in stream:
-        print(chunk.choices[0].delta.content, end="")
+        if chunk.choices and chunk.choices[0].delta.content:
+            print(chunk.choices[0].delta.content, end="")

 asyncio.run(main())
 ```
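For reference, here is a minimal, self-contained sketch of the guarded streaming pattern the hunks above converge on. It assumes the OpenAI Python client; the endpoint URL, model name, and `SCW_SECRET_KEY` environment variable are illustrative placeholders rather than values taken from this patch. Per the OpenAI specification, when `stream_options={"include_usage": true}` is set, the final streamed chunk reports token usage only and its `choices` array is empty, which is why the loops now check `chunk.choices` before indexing into it.

```python
# Sketch only: endpoint, model name, and env var below are placeholders.
import os

from openai import OpenAI

client = OpenAI(
    base_url="https://api.scaleway.ai/v1",  # placeholder endpoint
    api_key=os.environ["SCW_SECRET_KEY"],   # placeholder secret key variable
)

response = client.chat.completions.create(
    model="llama-3.1-8b-instruct",  # placeholder model
    messages=[{"role": "user", "content": "Sing me a song"}],
    stream=True,
    # With include_usage, the last chunk carries usage data and has no choices.
    stream_options={"include_usage": True},
)

for chunk in response:
    # Guard against chunks whose choices list is empty (e.g. the usage-only chunk).
    if chunk.choices and chunk.choices[0].delta.content:
        print(chunk.choices[0].delta.content, end="")
    if chunk.usage:
        print(f"\n[total tokens: {chunk.usage.total_tokens}]")
```

The same guard applies unchanged to the asynchronous examples, so both the sync and async snippets tolerate a trailing usage-only chunk instead of raising an IndexError.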