1 change: 1 addition & 0 deletions .github/CODEOWNERS
@@ -0,0 +1 @@
+@ehhuang @ashwinb @raghotham @reluctantfuturist
6 changes: 6 additions & 0 deletions src/llama_stack_client/__init__.py
@@ -39,6 +39,12 @@
 from ._base_client import DefaultHttpxClient, DefaultAioHttpClient, DefaultAsyncHttpxClient
 from ._utils._logs import setup_logging as _setup_logging
 
+from .lib.agents.agent import Agent
+from .lib.agents.event_logger import EventLogger as AgentEventLogger
+from .lib.inference.event_logger import EventLogger as InferenceEventLogger
+from .types.agents.turn_create_params import Document
+from .types.shared_params.document import Document as RAGDocument
+
 __all__ = [
     "types",
     "__version__",
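For reviewers: these re-exports make the high-level helpers importable from the package root instead of from lib submodules. A minimal usage sketch — the constructor arguments, model ID, and URL below are illustrative assumptions, not taken from this diff:

    from llama_stack_client import Agent, AgentEventLogger, LlamaStackClient

    client = LlamaStackClient(base_url="http://localhost:8321")  # illustrative URL

    # Assumed constructor shape for the high-level Agent helper.
    agent = Agent(client, model="meta-llama/Llama-3.1-8B-Instruct", instructions="Be concise.")
    session_id = agent.create_session("demo-session")

    turn = agent.create_turn(
        messages=[{"role": "user", "content": "Hello!"}],
        session_id=session_id,
        stream=True,
    )
    # AgentEventLogger pretty-prints the streamed turn events.
    for log in AgentEventLogger().log(turn):
        log.print()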
17 changes: 15 additions & 2 deletions src/llama_stack_client/_client.py
@@ -1,6 +1,7 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
 from __future__ import annotations
+import json
 
 import os
 from typing import Any, Union, Mapping
@@ -126,6 +127,7 @@ def __init__(
         # outlining your use-case to help us decide if it should be
         # part of our public interface in the future.
         _strict_response_validation: bool = False,
+        provider_data: Mapping[str, Any] | None = None,
     ) -> None:
         """Construct a new synchronous LlamaStackClient client instance.
 
@@ -140,13 +142,18 @@
         if base_url is None:
             base_url = f"http://any-hosted-llama-stack.com"
 
+        custom_headers = default_headers or {}
+        custom_headers["X-LlamaStack-Client-Version"] = __version__
+        if provider_data is not None:
+            custom_headers["X-LlamaStack-Provider-Data"] = json.dumps(provider_data)
+
         super().__init__(
             version=__version__,
             base_url=base_url,
             max_retries=max_retries,
             timeout=timeout,
             http_client=http_client,
-            custom_headers=default_headers,
+            custom_headers=custom_headers,
             custom_query=default_query,
             _strict_response_validation=_strict_response_validation,
         )
@@ -344,6 +351,7 @@ def __init__(
         # outlining your use-case to help us decide if it should be
         # part of our public interface in the future.
         _strict_response_validation: bool = False,
+        provider_data: Mapping[str, Any] | None = None,
     ) -> None:
         """Construct a new async AsyncLlamaStackClient client instance.
 
@@ -358,13 +366,18 @@
         if base_url is None:
             base_url = f"http://any-hosted-llama-stack.com"
 
+        custom_headers = default_headers or {}
+        custom_headers["X-LlamaStack-Client-Version"] = __version__
+        if provider_data is not None:
+            custom_headers["X-LlamaStack-Provider-Data"] = json.dumps(provider_data)
+
         super().__init__(
             version=__version__,
             base_url=base_url,
             max_retries=max_retries,
             timeout=timeout,
             http_client=http_client,
-            custom_headers=default_headers,
+            custom_headers=custom_headers,
             custom_query=default_query,
             _strict_response_validation=_strict_response_validation,
        )
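Net effect: both clients now always send an X-LlamaStack-Client-Version header, and per-provider credentials passed via the new provider_data argument are JSON-serialized into the X-LlamaStack-Provider-Data header on every request. A short sketch (the key name is provider-specific and illustrative):

    from llama_stack_client import LlamaStackClient

    # provider_data is run through json.dumps() and attached to every request
    # as the X-LlamaStack-Provider-Data header; the key below is illustrative.
    client = LlamaStackClient(
        base_url="http://localhost:8321",
        provider_data={"together_api_key": "sk-..."},
    )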
2 changes: 2 additions & 0 deletions src/llama_stack_client/_utils/_logs.py
@@ -1,5 +1,6 @@
 import os
 import logging
+from rich.logging import RichHandler
 
 logger: logging.Logger = logging.getLogger("llama_stack_client")
 httpx_logger: logging.Logger = logging.getLogger("httpx")
@@ -10,6 +11,7 @@ def _basic_config() -> None:
     logging.basicConfig(
         format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s",
         datefmt="%Y-%m-%d %H:%M:%S",
+        handlers=[RichHandler(rich_tracebacks=True)],
     )
 
 
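With RichHandler wired into the basic config, anything logged by the llama_stack_client logger renders through Rich, including rich tracebacks. One way to exercise it — assuming setup_logging follows the usual Stainless convention of reading a LLAMA_STACK_CLIENT_LOG environment variable (that variable name is an assumption; it is not shown in this diff):

    import os

    # Assumption: the log level follows the Stainless `<PACKAGE>_LOG` env var
    # convention; set it before importing so setup_logging sees it.
    os.environ["LLAMA_STACK_CLIENT_LOG"] = "debug"

    from llama_stack_client import LlamaStackClient

    client = LlamaStackClient(base_url="http://localhost:8321")
    # HTTP-level debug logs from the client now render through RichHandler.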
2 changes: 1 addition & 1 deletion src/llama_stack_client/_version.py
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
 __title__ = "llama_stack_client"
-__version__ = "0.1.0-alpha.2"  # x-release-please-version
+__version__ = "0.2.12"
2 changes: 1 addition & 1 deletion src/llama_stack_client/lib/.keep
@@ -1,4 +1,4 @@
 File generated from our OpenAPI spec by Stainless.
 
 This directory can be used to store custom files to expand the SDK.
-It is ignored by Stainless code generation and its content (other than this keep file) won't be touched.
\ No newline at end of file
+It is ignored by Stainless code generation and its content (other than this keep file) won't be touched.
9 changes: 9 additions & 0 deletions src/llama_stack_client/lib/__init__.py
@@ -0,0 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+from .tools.mcp_oauth import get_oauth_token_for_mcp_server
+
+__all__ = ["get_oauth_token_for_mcp_server"]
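Only the re-export is visible in this diff; the helper's parameters live in lib/tools/mcp_oauth.py, which isn't shown. A hypothetical call shape, assuming it takes the MCP server's URL and returns a token usable as a Bearer credential:

    from llama_stack_client.lib import get_oauth_token_for_mcp_server

    # Hypothetical: the exact signature is defined in lib/tools/mcp_oauth.py,
    # which this PR page does not display.
    token = get_oauth_token_for_mcp_server("https://mcp.example.com")
    headers = {"Authorization": f"Bearer {token}"}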
5 changes: 5 additions & 0 deletions src/llama_stack_client/lib/agents/__init__.py
@@ -0,0 +1,5 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.