Add topic option to log command and update tests
- Add '--topic' option to log command in _cli.py
- Update log command to filter prompts by topic if provided
- Rename test_cli.py to test_cli_prompt.py
- Update test names to reflect prompt command changes
basicthinker committed Jun 6, 2023
1 parent edf3b4d · commit 8299aef
Showing 2 changed files with 10 additions and 11 deletions.
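As the commit message notes, the log command can now be restricted to a single topic. A minimal way to exercise the new flag from Python, using Click's test runner the same way the repository's tests below do (the topic hash is a placeholder, not a real value):

from click.testing import CliRunner

from devchat._cli import main

runner = CliRunner()

# Show up to 20 prompts that belong to one topic.
# '<root-prompt-hash>' stands in for the hash of the topic's root prompt.
result = runner.invoke(main, ['log', '-n', '20', '--topic', '<root-prompt-hash>'])
print(result.output)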
devchat/_cli.py (7 changes: 3 additions & 4 deletions)
@@ -172,25 +172,24 @@ def prompt(content: Optional[str], parent: Optional[str], reference: Optional[Li
sys.exit(os.EX_DATAERR)


@main.command()
@click.option('--skip', default=0, help='Skip number prompts before showing the prompt history.')
@click.option('-n', '--max-count', default=100, help='Limit the number of commits to output.')
def log(skip, max_count):
@click.option('-t', '--topic', default=None, help='Hash of the root prompt of the topic to select prompts from.')
def log(skip, max_count, topic):
"""
Show the prompt history.
"""
config, chat_dir = init_dir()
provider = config.get('provider')
recent_prompts = []
if provider == 'OpenAI':
openai_config = OpenAIChatConfig(model=config['model'], **config['OpenAI'])
chat = OpenAIChat(openai_config)
store = Store(chat_dir, chat)
recent_prompts = store.select_prompts(skip, skip + max_count)
else:
click.echo(f"Error: Invalid LLM in configuration '{provider}'", err=True)
sys.exit(os.EX_DATAERR)

recent_prompts = store.select_prompts(skip, skip + max_count, topic)
logs = []
for record in recent_prompts:
try:
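The command passes topic straight through to Store.select_prompts, whose implementation is not part of this commit (the real Store is constructed from a chat directory and a chat object, as shown above). The following is a rough sketch of how such a filter could work; the Prompt fields, the newest-first ordering, and the attribute names are illustrative assumptions, not the actual devchat internals.

from dataclasses import dataclass
from typing import List, Optional


@dataclass
class Prompt:
    """Minimal stand-in for a stored prompt (assumed fields, for illustration only)."""
    prompt_hash: str
    topic_root_hash: str
    content: str


class Store:
    """Illustrative stand-in for devchat's Store; only the query path is sketched."""

    def __init__(self, prompts: List[Prompt]):
        # Assumed: prompts are already ordered newest first.
        self._prompts = prompts

    def select_prompts(self, start: int, end: int,
                       topic: Optional[str] = None) -> List[Prompt]:
        # Keep only prompts whose topic is rooted at the given hash, if one is given.
        prompts = self._prompts
        if topic is not None:
            prompts = [p for p in prompts if p.topic_root_hash == topic]
        # Slice the (possibly filtered) history for pagination.
        return prompts[start:end]

With that shape, log without --topic slices the whole history, while --topic limits the slice to one topic's chain before skip and max-count are applied.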
tests/test_cli.py → tests/test_cli_prompt.py (14 changes: 7 additions & 7 deletions)
@@ -37,7 +37,7 @@ def cleanup():
return repo_dir


def test_main_no_args(git_repo): # pylint: disable=W0613
def test_prompt_no_args(git_repo): # pylint: disable=W0613
result = runner.invoke(main, ['prompt'])
assert result.exit_code == 0

@@ -57,15 +57,15 @@ def _get_core_content(output) -> str:
return core_content


def test_main_with_content(git_repo): # pylint: disable=W0613
def test_prompt_with_content(git_repo): # pylint: disable=W0613
content = "What is the capital of France?"
result = runner.invoke(main, ['prompt', content])
assert result.exit_code == 0
assert _check_output_format(result.output)
assert "Paris" in result.output


def test_main_with_temp_config_file(git_repo):
def test_prompt_with_temp_config_file(git_repo):
config_data = {
'model': 'gpt-3.5-turbo-0301',
'provider': 'OpenAI',
@@ -104,15 +104,15 @@ def fixture_temp_files(tmpdir):
return str(instruct0), str(instruct1), str(instruct2), str(context)


def test_main_with_instruct(git_repo, temp_files): # pylint: disable=W0613
def test_prompt_with_instruct(git_repo, temp_files): # pylint: disable=W0613
result = runner.invoke(main, ['prompt', '-m', 'gpt-4',
'-i', temp_files[0], '-i', temp_files[1],
"It is really scorching."])
assert result.exit_code == 0
assert _get_core_content(result.output) == "hot\n"


def test_main_with_instruct_and_context(git_repo, temp_files): # pylint: disable=W0613
def test_prompt_with_instruct_and_context(git_repo, temp_files): # pylint: disable=W0613
result = runner.invoke(main, ['prompt', '-m', 'gpt-4',
'-i', temp_files[0], '-i', temp_files[2],
'--context', temp_files[3],
@@ -121,7 +121,7 @@ def test_main_with_instruct_and_context(git_repo, temp_files): # pylint: disabl
assert _get_core_content(result.output) == "hot summer\n"


def test_main_response_tokens_exceed_config(git_repo): # pylint: disable=W0613
def test_prompt_response_tokens_exceed_config(git_repo): # pylint: disable=W0613
config_data = {
'model': 'gpt-3.5-turbo',
'provider': 'OpenAI',
@@ -147,7 +147,7 @@ def test_main_response_tokens_exceed_config(git_repo): # pylint: disable=W0613
assert "beyond limit" in result.output


def test_main_response_tokens_exceed_config_with_file(git_repo, tmpdir): # pylint: disable=W0613
def test_prompt_response_tokens_exceed_config_with_file(git_repo, tmpdir): # pylint: disable=W0613
config_data = {
'model': 'gpt-3.5-turbo',
'provider': 'OpenAI',
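Since test_cli_prompt.py now holds only prompt-command tests, coverage for the new flag would belong in a separate module (for example a test_cli_log.py). A hypothetical sketch, assuming the same imports, runner, and git_repo fixture as the file above plus a helper to pull the prompt hash out of the CLI output:

def test_log_with_topic(git_repo):  # pylint: disable=W0613
    # Create a prompt so the store has at least one topic.
    result = runner.invoke(main, ['prompt', 'What is the capital of France?'])
    assert result.exit_code == 0
    root_hash = _extract_prompt_hash(result.output)  # hypothetical helper

    # Logging that topic should list the prompt we just created.
    result = runner.invoke(main, ['log', '--topic', root_hash])
    assert result.exit_code == 0
    assert root_hash in result.output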
