Skip to content

Commit

Permalink
Fix format issues
Browse files Browse the repository at this point in the history
  • Loading branch information
basicthinker committed Jul 18, 2023
1 parent 9deb481 commit 8450c2a
Show file tree
Hide file tree
Showing 6 changed files with 48 additions and 48 deletions.
1 change: 1 addition & 0 deletions .flake8
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
[flake8]
max-line-length = 100
ignore = E712,W503
2 changes: 1 addition & 1 deletion devchat/assistant.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def _check_limit(self):
def make_prompt(self, request: str,
instruct_contents: Optional[List[str]], context_contents: Optional[List[str]],
functions: Optional[List[dict]],
parent: Optional[str] = None, references: Optional[List[str]] = None,
parent: Optional[str] = None, references: Optional[List[str]] = None,
function_name: Optional[str] = None):
"""
Make a prompt for the chat API.
Expand Down
1 change: 0 additions & 1 deletion devchat/openai/openai_message.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,6 @@ def function_call_to_json(self):
pass
return '\n```command\n' + json.dumps(function_call_copy) + '\n```\n'


def stream_from_dict(self, message_data: dict) -> str:
"""Append to the message from a dictionary returned from a streaming chat API."""
delta = message_data.get('content', '')
Expand Down
4 changes: 2 additions & 2 deletions devchat/openai/openai_prompt.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,7 @@ def prepend_history(self, prompt: 'OpenAIPrompt', token_limit: int = math.inf) -
def set_request(self, content: str, function_name: Optional[str] = None) -> int:
if not content.strip():
raise ValueError("The request cannot be empty.")
message = OpenAIMessage(content, role = ('user' if not function_name else 'function'),
message = OpenAIMessage(content, role=('user' if not function_name else 'function'),
name=function_name)
self._new_messages['request'] = message
self._request_tokens += message_tokens(message.to_dict(), self.model)
Expand All @@ -165,7 +165,7 @@ def set_response(self, response_str: str):
if index >= len(self.response):
self.response.extend([None] * (index - len(self.response) + 1))
self.response[index] = OpenAIMessage(**choice['message'],
finish_reason = choice['finish_reason'])
finish_reason=choice['finish_reason'])
self.set_hash()

def append_response(self, delta_str: str) -> str:
Expand Down
4 changes: 2 additions & 2 deletions devchat/prompt.py
Original file line number Diff line number Diff line change
Expand Up @@ -218,8 +218,8 @@ def shortlog(self) -> List[dict]:
"date": self._timestamp,
"context": [msg.to_dict() for msg in self.new_context],
"request": self.request.content,
"response": (message.content if message.content else "") + \
message.function_call_to_json(),
"response": ((message.content if message.content else "")
+ message.function_call_to_json()),
"hash": self.hash,
"parent": self.parent
}
Expand Down
84 changes: 42 additions & 42 deletions tests/test_cli_prompt.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,33 +83,35 @@ def fixture_temp_files(tmpdir):
context.write("It is summer.")
return str(instruct0), str(instruct1), str(instruct2), str(context)


@pytest.fixture(name="functions_file")
def fixture_functions_file(tmpdir):
functions_file = tmpdir.join('functions.json')
functions_file.write("""
[
{
"name": "get_current_weather",
"description": "Get the current weather in a given location",
"parameters": {
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": ["celsius", "fahrenheit"]
}
},
"required": ["location"]
}
}
]
""")
[
{
"name": "get_current_weather",
"description": "Get the current weather in a given location",
"parameters": {
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": ["celsius", "fahrenheit"]
}
},
"required": ["location"]
}
}
]
""")
return str(functions_file)


def test_prompt_with_instruct(git_repo, temp_files): # pylint: disable=W0613
result = runner.invoke(main, ['prompt', '-m', 'gpt-4',
'-i', temp_files[0], '-i', temp_files[1],
Expand All @@ -129,19 +131,18 @@ def test_prompt_with_instruct_and_context(git_repo, temp_files): # pylint: disa

def test_prompt_with_functions(git_repo, functions_file): # pylint: disable=W0613
# call with -f option
result = runner.invoke(main, ['prompt', '-m', 'gpt-4',
'-f', functions_file,
"What is the weather like in Boston?"])
result = runner.invoke(main, ['prompt', '-m', 'gpt-4', '-f', functions_file,
"What is the weather like in Boston?"])

core_content = _get_core_content(result.output)
assert result.exit_code == 0
assert core_content.find("finish_reason: function_call") >= 0
assert core_content.find('"name": "get_current_weather"') >= 0
assert core_content.find('command') > 0

# compare with no -f options
# compare with no -f options
result = runner.invoke(main, ['prompt', '-m', 'gpt-4',
"What is the weather like in Boston?"])
'What is the weather like in Boston?'])

core_content = _get_core_content(result.output)
assert result.exit_code == 0
Expand All @@ -151,9 +152,8 @@ def test_prompt_with_functions(git_repo, functions_file): # pylint: disable=W06

def test_prompt_log_with_functions(git_repo, functions_file): # pylint: disable=W0613
# call with -f option
result = runner.invoke(main, ['prompt', '-m', 'gpt-4',
'-f', functions_file,
"What is the weather like in Boston?"])
result = runner.invoke(main, ['prompt', '-m', 'gpt-4', '-f', functions_file,
'What is the weather like in Boston?'])

prompt_hash = _get_prompt_hash(result.output)
result = runner.invoke(main, ['log', '-t', prompt_hash])
Expand All @@ -166,17 +166,17 @@ def test_prompt_log_with_functions(git_repo, functions_file): # pylint: disable


def test_prompt_log_compatibility():
# import test!!
# Historical Record Compatibility Test
# create git repo folder
# install old devchat
# run prompt, create old version records
# run topic -l, expect topic list
# uninstall old devchat
# install new devchat
# run topic -l, expect topic list
    # run prompt -f ./.chat/functions.json "list files in project", expect function call return
# run topic -l, expect function call in topic list
# import test!!
# Historical Record Compatibility Test
# create git repo folder
# install old devchat
# run prompt, create old version records
# run topic -l, expect topic list
# uninstall old devchat
# install new devchat
# run topic -l, expect topic list
    # run prompt -f ./.chat/functions.json "list files in project", expect function call return
# run topic -l, expect function call in topic list
assert True


Expand All @@ -185,7 +185,7 @@ def test_prompt_with_function_replay(git_repo, functions_file): # pylint: disab
result = runner.invoke(main, ['prompt', '-m', 'gpt-4',
'-f', functions_file,
'-n', 'get_current_weather',
'{"temperature": "22", "unit": "celsius", "description": "Sunny"}'])
'{"temperature": "22", "unit": "celsius", "weather": "Sunny"}'])

core_content = _get_core_content(result.output)
assert result.exit_code == 0
Expand All @@ -195,7 +195,7 @@ def test_prompt_with_function_replay(git_repo, functions_file): # pylint: disab
prompt_hash = _get_prompt_hash(result.output)
result = runner.invoke(main, ['prompt', '-m', 'gpt-4',
'-p', prompt_hash,
'what is the GPT function name?'])
'what is the GPT function name?'])

core_content = _get_core_content(result.output)
assert result.exit_code == 0
Expand Down

0 comments on commit 8450c2a

Please sign in to comment.