From dfe1c8da2a7ab0680722811bc019a551eef1a03e Mon Sep 17 00:00:00 2001
From: Sahand Sojoodi
Date: Sat, 9 Dec 2023 15:16:52 -0500
Subject: [PATCH] docs: small Improvement in the async chat response code (#959)
---
 README.md | 19 +++++++++++--------
 1 file changed, 11 insertions(+), 8 deletions(-)

diff --git a/README.md b/README.md
index 471fd88ab..b7f278fe5 100644
--- a/README.md
+++ b/README.md
@@ -108,14 +108,17 @@ from openai import AsyncOpenAI
 
 client = AsyncOpenAI()
 
-stream = await client.chat.completions.create(
-    model="gpt-4",
-    messages=[{"role": "user", "content": "Say this is a test"}],
-    stream=True,
-)
-async for chunk in stream:
-    if chunk.choices[0].delta.content is not None:
-        print(chunk.choices[0].delta.content)
+async def main():
+    stream = await client.chat.completions.create(
+        model="gpt-4",
+        messages=[{"role": "user", "content": "Say this is a test"}],
+        stream=True,
+    )
+    async for chunk in stream:
+        if chunk.choices[0].delta.content is not None:
+            print(chunk.choices[0].delta.content)
+
+asyncio.run(main())
 ```
 
 ## Module-level client
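
For reference, here is a minimal sketch of how the README snippet reads once this patch is applied. It assumes `import asyncio` is present at the top of the example (the hunk context does not show the import, but `asyncio.run()` requires it) and that `AsyncOpenAI()` can pick up an API key from the environment:

```python
import asyncio

from openai import AsyncOpenAI

# Assumes OPENAI_API_KEY is set in the environment.
client = AsyncOpenAI()


async def main():
    # Request a streaming chat completion; stream=True yields chunks
    # as they arrive instead of a single response object.
    stream = await client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": "Say this is a test"}],
        stream=True,
    )
    # Print each text delta as it arrives.
    async for chunk in stream:
        if chunk.choices[0].delta.content is not None:
            print(chunk.choices[0].delta.content)


# Top-level `await` is only valid inside an already-running event loop
# (e.g. a notebook or async REPL); a plain script needs asyncio.run()
# to drive the coroutine, which is the point of this docs change.
asyncio.run(main())
```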