Skip to content

Commit

Permalink
Enhance Assistant and OpenAIChat classes for better response handling
Browse files Browse the repository at this point in the history
- Import 'time' module in 'assistant.py' for timestamp generation.
- Modify 'stream_response' method in 'assistant.py' to generate unique chunk IDs, timestamps, and finish reasons for each streamed chunk.
- Convert chunk dictionary to JSON before appending it to the prompt in 'stream_response' method.
- Update 'OpenAIChat' class in 'openai_chat.py' to handle 'gpt-' models using OpenAI API.
- Use the existing 'completion' function for non-'gpt-' models in the 'OpenAIChat' class.
  • Loading branch information
yangbobo2021 committed Sep 12, 2023
1 parent 617b2ee commit 64f4ba4
Show file tree
Hide file tree
Showing 2 changed files with 31 additions and 8 deletions.
21 changes: 15 additions & 6 deletions devchat/assistant.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import json
import time
from typing import Optional, List, Iterator
from devchat.message import Message
from devchat.chat import Chat
Expand Down Expand Up @@ -91,14 +92,22 @@ def iterate_response(self) -> Iterator[str]:
"""
if self._chat.config.stream:
first_chunk = True
for chunk in self._chat.stream_response(self._prompt):
chunk_str = str(chunk)
created_time = int(time.time())
config_params = self._chat.config.dict(exclude_unset=True)
chunks = list(self._chat.stream_response(self._prompt))
for index, chunk in enumerate(chunks):
if "index" not in chunk["choices"][0]:
chunk["choices"][0]["index"] = 0
chunk["choices"][0]["finish_reason"] = "stop"
chunk_str = json.dumps(chunk)
chunk["id"] = "chatcmpl-7vdfQI02x-" + str(created_time)
chunk["object"] = "chat.completion.chunk"
chunk["created"] = created_time
chunk["model"] = config_params["model"]
chunk["choices"][0]["index"] = 0
stop_reason = "null"
if index + 1 == len(chunks):
stop_reason = "stop"
chunk["choices"][0]["finish_reason"] = stop_reason

delta = self._prompt.append_response(chunk_str)
delta = self._prompt.append_response(json.dumps(chunk))
if first_chunk:
first_chunk = False
yield self._prompt.formatted_header()
Expand Down
18 changes: 16 additions & 2 deletions devchat/openai/openai_chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,14 @@ def complete_response(self, prompt: OpenAIPrompt) -> str:
**config_params
)
else:
response = completion(messages=prompt.messages, **config_params, api_key=api_key)
if config_params["model"].startswith("gpt-"):
# call gpt- model by openai api and openai api key
response = openai.ChatCompletion.create(
messages=prompt.messages,
**config_params
)
else:
response = completion(messages=prompt.messages, **config_params, api_key=api_key)

return str(response)

Expand All @@ -97,6 +104,13 @@ def stream_response(self, prompt: OpenAIPrompt) -> Iterator:
**config_params
)
else:
response = completion(**config_params, messages=prompt.messages, api_key=api_key)
if config_params["model"].startswith("gpt-"):
# call gpt- model by openai api and openai api key
response = openai.ChatCompletion.create(
messages=prompt.messages,
**config_params
)
else:
response = completion(**config_params, messages=prompt.messages, api_key=api_key)

return response

0 comments on commit 64f4ba4

Please sign in to comment.