Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 12 additions & 2 deletions docs/openapi_generator/pyopenapi/generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,15 @@
# the root directory of this source tree.

import hashlib
import inspect
import ipaddress
import types
import typing
from dataclasses import make_dataclass
from typing import Annotated, Any, Dict, get_args, get_origin, Set, Union

from fastapi import UploadFile
from pydantic import BaseModel

from llama_stack.apis.datatypes import Error
from llama_stack.strong_typing.core import JsonType
Expand Down Expand Up @@ -632,14 +634,22 @@ def _build_operation(self, op: EndpointOperation) -> Operation:
base_type = get_args(param_type)[0]
else:
base_type = param_type

# Check if the type is optional
is_optional = is_type_optional(base_type)
if is_optional:
base_type = unwrap_optional_type(base_type)

if base_type is UploadFile:
# File upload
properties[name] = {"type": "string", "format": "binary"}
else:
# Form field
# All other types - generate schema reference
# This includes enums, BaseModels, and simple types
properties[name] = self.schema_builder.classdef_to_ref(base_type)

required_fields.append(name)
if not is_optional:
required_fields.append(name)

multipart_schema = {
"type": "object",
Expand Down
692 changes: 422 additions & 270 deletions docs/static/llama-stack-spec.html

Large diffs are not rendered by default.

377 changes: 229 additions & 148 deletions docs/static/llama-stack-spec.yaml

Large diffs are not rendered by default.

7 changes: 3 additions & 4 deletions llama_stack/apis/files/files.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,20 +111,19 @@ async def openai_upload_file(
self,
file: Annotated[UploadFile, File()],
purpose: Annotated[OpenAIFilePurpose, Form()],
expires_after_anchor: Annotated[str | None, Form(alias="expires_after[anchor]")] = None,
expires_after_seconds: Annotated[int | None, Form(alias="expires_after[seconds]")] = None,
# TODO: expires_after is producing strange openapi spec, params are showing up as a required w/ oneOf being null
expires_after: Annotated[ExpiresAfter | None, Form()] = None,
) -> OpenAIFileObject:
"""
Upload a file that can be used across various endpoints.

The file upload should be a multipart form request with:
- file: The File object (not file name) to be uploaded.
- purpose: The intended purpose of the uploaded file.
- expires_after: Optional form values describing expiration for the file. Expected expires_after[anchor] = "created_at", expires_after[seconds] = {integer}. Seconds must be between 3600 and 2592000 (1 hour to 30 days).
- expires_after: Optional form values describing expiration for the file.

:param file: The uploaded file object containing content and metadata (filename, content_type, etc.).
:param purpose: The intended purpose of the uploaded file (e.g., "assistants", "fine-tune").
:param expires_after: Optional form values describing expiration for the file.
:returns: An OpenAIFileObject representing the uploaded file.
"""
...
Expand Down
6 changes: 3 additions & 3 deletions llama_stack/providers/inline/files/localfs/files.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
from llama_stack.apis.common.errors import ResourceNotFoundError
from llama_stack.apis.common.responses import Order
from llama_stack.apis.files import (
ExpiresAfter,
Files,
ListOpenAIFileResponse,
OpenAIFileDeleteResponse,
Expand Down Expand Up @@ -86,14 +87,13 @@ async def openai_upload_file(
self,
file: Annotated[UploadFile, File()],
purpose: Annotated[OpenAIFilePurpose, Form()],
expires_after_anchor: Annotated[str | None, Form(alias="expires_after[anchor]")] = None,
expires_after_seconds: Annotated[int | None, Form(alias="expires_after[seconds]")] = None,
expires_after: Annotated[ExpiresAfter | None, Form()] = None,
) -> OpenAIFileObject:
"""Upload a file that can be used across various endpoints."""
if not self.sql_store:
raise RuntimeError("Files provider not initialized")

if expires_after_anchor is not None or expires_after_seconds is not None:
if expires_after is not None:
raise NotImplementedError("File expiration is not supported by this provider")

file_id = self._generate_file_id()
Expand Down
11 changes: 1 addition & 10 deletions llama_stack/providers/remote/files/s3/files.py
Original file line number Diff line number Diff line change
Expand Up @@ -195,23 +195,14 @@ async def openai_upload_file(
self,
file: Annotated[UploadFile, File()],
purpose: Annotated[OpenAIFilePurpose, Form()],
expires_after_anchor: Annotated[str | None, Form(alias="expires_after[anchor]")] = None,
expires_after_seconds: Annotated[int | None, Form(alias="expires_after[seconds]")] = None,
expires_after: Annotated[ExpiresAfter | None, Form()] = None,
) -> OpenAIFileObject:
file_id = f"file-{uuid.uuid4().hex}"

filename = getattr(file, "filename", None) or "uploaded_file"

created_at = self._now()

expires_after = None
if expires_after_anchor is not None or expires_after_seconds is not None:
# we use ExpiresAfter to validate input
expires_after = ExpiresAfter(
anchor=expires_after_anchor, # type: ignore[arg-type]
seconds=expires_after_seconds, # type: ignore[arg-type]
)

# the default is no expiration.
# to implement no expiration we set an expiration beyond the max.
# we'll hide this fact from users when returning the file object.
Expand Down
16 changes: 16 additions & 0 deletions llama_stack/strong_typing/inspection.py
Original file line number Diff line number Diff line change
Expand Up @@ -567,6 +567,22 @@ def get_class_properties(typ: type) -> Iterable[Tuple[str, type | str]]:

if is_dataclass_type(typ):
return ((field.name, field.type) for field in dataclasses.fields(typ))
elif hasattr(typ, "model_fields"):
# Pydantic BaseModel - use model_fields to exclude ClassVar and other non-field attributes
# Reconstruct Annotated type if discriminator exists to preserve metadata
from typing import Annotated, Any, cast
from pydantic.fields import FieldInfo

def get_field_type(name: str, field: Any) -> type | str:
# If field has discriminator, wrap in Annotated to preserve it for schema generation
if field.discriminator:
field_info = FieldInfo(annotation=None, discriminator=field.discriminator)
# Annotated returns _AnnotatedAlias which isn't a type but is valid here
return Annotated[field.annotation, field_info] # type: ignore[return-value]
# field.annotation can be Union types, Annotated, etc. which aren't type but are valid
return field.annotation # type: ignore[return-value,no-any-return]

return ((name, get_field_type(name, field)) for name, field in typ.model_fields.items())
else:
resolved_hints = get_resolved_hints(typ)
return resolved_hints.items()
Expand Down
7 changes: 6 additions & 1 deletion llama_stack/strong_typing/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,12 @@ def get_class_property_docstrings(
:returns: A dictionary mapping property names to descriptions.
"""

result = {}
result: Dict[str, str] = {}
# Only try to get MRO if data_type is actually a class
# Special types like Literal, Union, etc. don't have MRO
if not inspect.isclass(data_type):
return result

for base in inspect.getmro(data_type):
docstr = docstring.parse_type(base)
for param in docstr.params.values():
Expand Down
23 changes: 13 additions & 10 deletions tests/unit/providers/files/test_s3_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -228,12 +228,13 @@ async def test_expired_file_is_unavailable(self, s3_provider, sample_text_file,

mock_now.return_value = 0

from llama_stack.apis.files import ExpiresAfter

sample_text_file.filename = "test_expired_file"
uploaded = await s3_provider.openai_upload_file(
file=sample_text_file,
purpose=OpenAIFilePurpose.ASSISTANTS,
expires_after_anchor="created_at",
expires_after_seconds=two_hours,
expires_after=ExpiresAfter(anchor="created_at", seconds=two_hours),
)

mock_now.return_value = two_hours * 2 # fast forward 4 hours
Expand All @@ -259,42 +260,44 @@ async def test_expired_file_is_unavailable(self, s3_provider, sample_text_file,

async def test_unsupported_expires_after_anchor(self, s3_provider, sample_text_file):
    """An anchor value other than 'created_at' must be rejected with ValueError."""
    from llama_stack.apis.files import ExpiresAfter

    sample_text_file.filename = "test_unsupported_expires_after_anchor"

    with pytest.raises(ValueError, match="Input should be 'created_at'"):
        # The ValueError presumably comes from ExpiresAfter's own validation
        # of the inline-constructed argument, so the construction must stay
        # inside the raises block.
        await s3_provider.openai_upload_file(
            expires_after=ExpiresAfter(anchor="now", seconds=3600),  # type: ignore
            file=sample_text_file,
            purpose=OpenAIFilePurpose.ASSISTANTS,
        )

async def test_nonint_expires_after_seconds(self, s3_provider, sample_text_file):
    """A non-integer `seconds` value in expires_after must raise ValueError."""
    from llama_stack.apis.files import ExpiresAfter

    sample_text_file.filename = "test_nonint_expires_after_seconds"

    with pytest.raises(ValueError, match="should be a valid integer"):
        # Construction of ExpiresAfter happens inside the raises block on
        # purpose: the validation error presumably fires there, before
        # openai_upload_file runs.
        await s3_provider.openai_upload_file(
            expires_after=ExpiresAfter(anchor="created_at", seconds="many"),  # type: ignore
            file=sample_text_file,
            purpose=OpenAIFilePurpose.ASSISTANTS,
        )

async def test_expires_after_seconds_out_of_bounds(self, s3_provider, sample_text_file):
    """Seconds outside the allowed [3600, 2592000] range should raise ValueError."""
    from llama_stack.apis.files import ExpiresAfter

    # One case just below the lower bound, one just above the upper bound,
    # checked in that order to mirror the expected validation messages.
    out_of_range_cases = (
        (3599, "greater than or equal to 3600"),
        (2592001, "less than or equal to 2592000"),
    )
    for seconds, message_pattern in out_of_range_cases:
        with pytest.raises(ValueError, match=message_pattern):
            await s3_provider.openai_upload_file(
                file=sample_text_file,
                purpose=OpenAIFilePurpose.ASSISTANTS,
                expires_after=ExpiresAfter(anchor="created_at", seconds=seconds),
            )
Loading