docs/cli_reference.md (4 changes: 2 additions & 2 deletions)
@@ -26,9 +26,9 @@ Done! You can now use the Llama Stack Client CLI with endpoint http://localhost:
```


#### `llama-stack-client providers`
#### `llama-stack-client providers list`
```bash
$ llama-stack-client providers
$ llama-stack-client providers list
```
```
+-----------+----------------+-----------------+
src/llama_stack_client/lib/cli/configure.py (108 changes: 39 additions & 69 deletions)
@@ -4,13 +4,12 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import argparse
import os

import click
import yaml

from llama_stack_client.lib.cli.constants import get_config_file_path, LLAMA_STACK_CLIENT_CONFIG_DIR
from llama_stack_client.lib.cli.subcommand import Subcommand
from prompt_toolkit import prompt
from prompt_toolkit.validation import Validator


def get_config():
@@ -21,74 +20,45 @@ def get_config():
return None


class ConfigureParser(Subcommand):
@click.command()
@click.option("--host", type=str, help="Llama Stack distribution host")
@click.option("--port", type=str, help="Llama Stack distribution port number")
@click.option("--endpoint", type=str, help="Llama Stack distribution endpoint")
def configure(host: str | None, port: str | None, endpoint: str | None):
"""Configure Llama Stack Client CLI"""
os.makedirs(LLAMA_STACK_CLIENT_CONFIG_DIR, exist_ok=True)
config_path = get_config_file_path()

def __init__(self, subparsers: argparse._SubParsersAction):
super().__init__()
self.parser = subparsers.add_parser(
"configure",
prog="llama-stack-client configure",
description="Configure Llama Stack Client CLI",
formatter_class=argparse.RawTextHelpFormatter,
)
self._add_arguments()
self.parser.set_defaults(func=self._run_configure_cmd)

def _add_arguments(self):
self.parser.add_argument(
"--host",
type=str,
help="Llama Stack distribution host",
)
self.parser.add_argument(
"--port",
type=str,
help="Llama Stack distribution port number",
)
self.parser.add_argument(
"--endpoint",
type=str,
help="Llama Stack distribution endpoint",
)

def _run_configure_cmd(self, args: argparse.Namespace):
from prompt_toolkit import prompt
from prompt_toolkit.validation import Validator

os.makedirs(LLAMA_STACK_CLIENT_CONFIG_DIR, exist_ok=True)
config_path = get_config_file_path()

if args.endpoint:
endpoint = args.endpoint
if endpoint:
final_endpoint = endpoint
else:
if host and port:
final_endpoint = f"http://{host}:{port}"
else:
if args.host and args.port:
endpoint = f"http://{args.host}:{args.port}"
else:
host = prompt(
"> Enter the host name of the Llama Stack distribution server: ",
validator=Validator.from_callable(
lambda x: len(x) > 0,
error_message="Host cannot be empty, please enter a valid host",
),
)
port = prompt(
"> Enter the port number of the Llama Stack distribution server: ",
validator=Validator.from_callable(
lambda x: x.isdigit(),
error_message="Please enter a valid port number",
),
)
endpoint = f"http://{host}:{port}"
host = prompt(
"> Enter the host name of the Llama Stack distribution server: ",
validator=Validator.from_callable(
lambda x: len(x) > 0,
error_message="Host cannot be empty, please enter a valid host",
),
)
port = prompt(
"> Enter the port number of the Llama Stack distribution server: ",
validator=Validator.from_callable(
lambda x: x.isdigit(),
error_message="Please enter a valid port number",
),
)
final_endpoint = f"http://{host}:{port}"

with open(config_path, "w") as f:
f.write(
yaml.dump(
{
"endpoint": endpoint,
},
sort_keys=True,
)
with open(config_path, "w") as f:
f.write(
yaml.dump(
{
"endpoint": final_endpoint,
},
sort_keys=True,
)
)

print(f"Done! You can now use the Llama Stack Client CLI with endpoint {endpoint}")
print(f"Done! You can now use the Llama Stack Client CLI with endpoint {final_endpoint}")
src/llama_stack_client/lib/cli/datasets/__init__.py (4 changes: 2 additions & 2 deletions)
@@ -4,6 +4,6 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from .datasets import DatasetsParser
from .datasets import datasets

__all__ = ["DatasetsParser"]
__all__ = ["datasets"]
src/llama_stack_client/lib/cli/datasets/datasets.py (24 changes: 8 additions & 16 deletions)
@@ -4,24 +4,16 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import argparse
import click

from llama_stack_client.lib.cli.subcommand import Subcommand
from .list import DatasetsList
from .list import list_datasets


class DatasetsParser(Subcommand):
"""Parser for datasets commands"""
@click.group()
def datasets():
"""Query details about available datasets on Llama Stack distribution."""
pass

@classmethod
def create(cls, subparsers: argparse._SubParsersAction):
parser = subparsers.add_parser(
"datasets",
help="Manage datasets",
formatter_class=argparse.RawTextHelpFormatter,
)
parser.set_defaults(func=lambda _: parser.print_help())

# Create subcommands
datasets_subparsers = parser.add_subparsers(title="subcommands")
DatasetsList(datasets_subparsers)
# Register subcommands
datasets.add_command(list_datasets)
src/llama_stack_client/lib/cli/datasets/list.py (43 changes: 10 additions & 33 deletions)
@@ -4,42 +4,19 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import argparse
import click

from llama_stack_client import LlamaStackClient
from llama_stack_client.lib.cli.common.utils import print_table_from_response
from llama_stack_client.lib.cli.configure import get_config
from llama_stack_client.lib.cli.subcommand import Subcommand


class DatasetsList(Subcommand):
def __init__(self, subparsers: argparse._SubParsersAction):
super().__init__()
self.parser = subparsers.add_parser(
"list",
prog="llama-stack-client datasets list",
description="Show available datasets on distribution endpoint",
formatter_class=argparse.RawTextHelpFormatter,
)
self._add_arguments()
self.parser.set_defaults(func=self._run_datasets_list_cmd)
@click.command("list")
@click.pass_context
def list_datasets(ctx):
"""Show available datasets on distribution endpoint"""
client = ctx.obj["client"]

def _add_arguments(self):
self.parser.add_argument(
"--endpoint",
type=str,
help="Llama Stack distribution endpoint",
)
headers = ["identifier", "provider_id", "metadata", "type"]

def _run_datasets_list_cmd(self, args: argparse.Namespace):
args.endpoint = get_config().get("endpoint") or args.endpoint

client = LlamaStackClient(
base_url=args.endpoint,
)

headers = ["identifier", "provider_id", "metadata", "type"]

datasets_list_response = client.datasets.list()
if datasets_list_response:
print_table_from_response(datasets_list_response, headers)
datasets_list_response = client.datasets.list()
if datasets_list_response:
print_table_from_response(datasets_list_response, headers)
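Because `list_datasets` touches nothing but `ctx.obj["client"]`, it can be exercised without a running distribution by seeding `ctx.obj` through click's test runner. A minimal sketch, assuming hypothetical `FakeClient`/`FakeDatasets` stubs that are not part of the library; the empty response keeps the stub independent of whatever item shape `print_table_from_response` expects.

```python
# Hypothetical smoke test for the new click command; the Fake* stubs are illustrative.
from click.testing import CliRunner

from llama_stack_client.lib.cli.datasets.list import list_datasets


class FakeDatasets:
    def list(self):
        # An empty response makes the command skip print_table_from_response,
        # so the stub does not need to match the real response item shape.
        return []


class FakeClient:
    datasets = FakeDatasets()


def test_list_datasets_uses_client_from_ctx_obj():
    runner = CliRunner()
    # `obj` seeds ctx.obj, the same slot a real root group would populate.
    result = runner.invoke(list_datasets, obj={"client": FakeClient()})
    assert result.exit_code == 0
```

The same pattern applies to `list_eval_tasks` further down, since it consumes the client the same way.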
src/llama_stack_client/lib/cli/eval_tasks/__init__.py (2 changes: 1 addition & 1 deletion)
@@ -4,4 +4,4 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from .eval_tasks import EvalTasksParser # noqa
from .eval_tasks import eval_tasks
src/llama_stack_client/lib/cli/eval_tasks/eval_tasks.py (24 changes: 8 additions & 16 deletions)
@@ -5,24 +5,16 @@
# the root directory of this source tree.


import argparse
import click

from llama_stack_client.lib.cli.eval_tasks.list import EvalTasksList
from .list import list_eval_tasks

from llama_stack_client.lib.cli.subcommand import Subcommand

@click.group()
def eval_tasks():
"""Query details about available eval tasks type on distribution."""
pass

class EvalTasksParser(Subcommand):
"""List details about available eval banks type on distribution."""

def __init__(self, subparsers: argparse._SubParsersAction):
super().__init__()
self.parser = subparsers.add_parser(
"eval_tasks",
prog="llama-stack-client eval_tasks",
description="Query details about available eval tasks type on distribution.",
formatter_class=argparse.RawTextHelpFormatter,
)

subparsers = self.parser.add_subparsers(title="eval_tasks_subcommands")
EvalTasksList.create(subparsers)
# Register subcommands
eval_tasks.add_command(list_eval_tasks)
src/llama_stack_client/lib/cli/eval_tasks/list.py (40 changes: 10 additions & 30 deletions)
@@ -4,40 +4,20 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import argparse
import click

from llama_stack_client import LlamaStackClient
from llama_stack_client.lib.cli.common.utils import print_table_from_response
from llama_stack_client.lib.cli.configure import get_config
from llama_stack_client.lib.cli.subcommand import Subcommand


class EvalTasksList(Subcommand):
def __init__(self, subparsers: argparse._SubParsersAction):
super().__init__()
self.parser = subparsers.add_parser(
"list",
prog="llama-stack-client eval_tasks list",
description="Show available evaluation tasks on distribution endpoint",
formatter_class=argparse.RawTextHelpFormatter,
)
self._add_arguments()
self.parser.set_defaults(func=self._run_eval_tasks_list_cmd)
@click.command("list")
@click.pass_context
def list_eval_tasks(ctx):
"""Show available eval tasks on distribution endpoint"""

def _add_arguments(self):
self.parser.add_argument(
"--endpoint",
type=str,
help="Llama Stack distribution endpoint",
)
client = ctx.obj["client"]

def _run_eval_tasks_list_cmd(self, args: argparse.Namespace):
args.endpoint = get_config().get("endpoint") or args.endpoint
headers = ["identifier", "provider_id", "description", "type"]

client = LlamaStackClient(
base_url=args.endpoint,
)

eval_tasks_list_response = client.eval_tasks.list()
if eval_tasks_list_response:
print_table_from_response(eval_tasks_list_response)
eval_tasks_list_response = client.eval_tasks.list()
if eval_tasks_list_response:
print_table_from_response(eval_tasks_list_response, headers)