From d638c4845beec42f14840e8bc34291235b8f01c6 Mon Sep 17 00:00:00 2001
From: Pavel Tisnovsky
Date: Wed, 17 Sep 2025 09:27:43 +0200
Subject: [PATCH] LCORE-634: update Llama Stack version in documentation

---
 README.md                          | 2 +-
 docs/deployment_guide.md           | 2 +-
 docs/getting_started.md            | 2 +-
 docs/openapi.json                  | 2 +-
 docs/openapi.md                    | 4 ++--
 docs/output.md                     | 2 +-
 examples/pyproject.llamastack.toml | 2 +-
 src/models/responses.py            | 2 +-
 tests/e2e/features/info.feature    | 2 +-
 9 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/README.md b/README.md
index c7d816bb..03f629fa 100644
--- a/README.md
+++ b/README.md
@@ -245,7 +245,7 @@ version = "0.1.0"
 description = "Llama Stack runner"
 authors = []
 dependencies = [
-    "llama-stack==0.2.19",
+    "llama-stack==0.2.20",
     "fastapi>=0.115.12",
     "opentelemetry-sdk>=1.34.0",
     "opentelemetry-exporter-otlp>=1.34.0",
diff --git a/docs/deployment_guide.md b/docs/deployment_guide.md
index 5dd100a1..82574c5a 100644
--- a/docs/deployment_guide.md
+++ b/docs/deployment_guide.md
@@ -390,7 +390,7 @@ cp examples/run.yaml /tmp/llama-stack-server
 The output should be in this form:
 ```json
 {
-    "version": "0.2.19"
+    "version": "0.2.20"
 }
 ```
 
diff --git a/docs/getting_started.md b/docs/getting_started.md
index 134572c1..a9e452cc 100644
--- a/docs/getting_started.md
+++ b/docs/getting_started.md
@@ -24,7 +24,7 @@ It is possible to run Lightspeed Core Stack service with Llama Stack "embedded"
 1. Add and install all required dependencies
    ```bash
    uv add \
-       "llama-stack==0.2.19" \
+       "llama-stack==0.2.20" \
        "fastapi>=0.115.12" \
        "opentelemetry-sdk>=1.34.0" \
        "opentelemetry-exporter-otlp>=1.34.0" \
diff --git a/docs/openapi.json b/docs/openapi.json
index 19b245da..09bd0771 100644
--- a/docs/openapi.json
+++ b/docs/openapi.json
@@ -1716,7 +1716,7 @@
         "llama_stack_version"
       ],
       "title": "InfoResponse",
-      "description": "Model representing a response to an info request.\n\nAttributes:\n    name: Service name.\n    service_version: Service version.\n    llama_stack_version: Llama Stack version.\n\nExample:\n    ```python\n    info_response = InfoResponse(\n        name=\"Lightspeed Stack\",\n        service_version=\"1.0.0\",\n        llama_stack_version=\"0.2.19\",\n    )\n    ```",
+      "description": "Model representing a response to an info request.\n\nAttributes:\n    name: Service name.\n    service_version: Service version.\n    llama_stack_version: Llama Stack version.\n\nExample:\n    ```python\n    info_response = InfoResponse(\n        name=\"Lightspeed Stack\",\n        service_version=\"1.0.0\",\n        llama_stack_version=\"0.2.20\",\n    )\n    ```",
       "examples": [
         {
           "llama_stack_version": "1.0.0",
diff --git a/docs/openapi.md b/docs/openapi.md
index 08157a4f..12295667 100644
--- a/docs/openapi.md
+++ b/docs/openapi.md
@@ -779,7 +779,7 @@ Example:
         llm_response="You need to use Docker and Kubernetes for everything.",
         user_feedback="This response is too general and doesn't provide specific steps.",
         sentiment=-1,
-        categories=["incomplete", "not_relevant"]
+        categories=[FeedbackCategory.INCORRECT, FeedbackCategory.INCOMPLETE]
     )
     ```
 
@@ -907,7 +907,7 @@ Example:
     info_response = InfoResponse(
         name="Lightspeed Stack",
         service_version="1.0.0",
-        llama_stack_version="0.2.19",
+        llama_stack_version="0.2.20",
     )
     ```
 
diff --git a/docs/output.md b/docs/output.md
index fb7fca7f..7ae0d544 100644
--- a/docs/output.md
+++ b/docs/output.md
@@ -898,7 +898,7 @@ Example:
     info_response = InfoResponse(
         name="Lightspeed Stack",
         service_version="1.0.0",
-        llama_stack_version="0.2.19",
+        llama_stack_version="0.2.20",
     )
     ```
 
diff --git a/examples/pyproject.llamastack.toml b/examples/pyproject.llamastack.toml
index 9f295f15..11c3e873 100644
--- a/examples/pyproject.llamastack.toml
+++ b/examples/pyproject.llamastack.toml
@@ -4,7 +4,7 @@ version = "0.1.0"
 description = "Default template for PDM package"
 authors = []
 dependencies = [
-    "llama-stack==0.2.19",
+    "llama-stack==0.2.20",
     "fastapi>=0.115.12",
     "opentelemetry-sdk>=1.34.0",
     "opentelemetry-exporter-otlp>=1.34.0",
diff --git a/src/models/responses.py b/src/models/responses.py
index 6684a0d3..489f532b 100644
--- a/src/models/responses.py
+++ b/src/models/responses.py
@@ -92,7 +92,7 @@ class InfoResponse(BaseModel):
         info_response = InfoResponse(
             name="Lightspeed Stack",
             service_version="1.0.0",
-            llama_stack_version="0.2.19",
+            llama_stack_version="0.2.20",
         )
         ```
     """
diff --git a/tests/e2e/features/info.feature b/tests/e2e/features/info.feature
index 30075a78..6865388f 100644
--- a/tests/e2e/features/info.feature
+++ b/tests/e2e/features/info.feature
@@ -18,7 +18,7 @@ Feature: Info tests
     When I access REST API endpoint "info" using HTTP GET method
     Then The status code of the response is 200
     And The body of the response has proper name Lightspeed Core Service (LCS) and version 0.2.0
-    And The body of the response has llama-stack version 0.2.19
+    And The body of the response has llama-stack version 0.2.20
 
   Scenario: Check if info endpoint reports error when llama-stack connection is not working
     Given The system is in default state