-
Notifications
You must be signed in to change notification settings - Fork 55
LCORE-437: check Llama Stack version on startup #422
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||||
|---|---|---|---|---|---|---|
|
|
@@ -36,6 +36,7 @@ dependencies = [ | |||||
| "email-validator>=2.2.0", | ||||||
| "openai==1.99.9", | ||||||
| "sqlalchemy>=2.0.42", | ||||||
| "semver<4.0.0", | ||||||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Pin semver to >=3,<4 to avoid a runtime ImportError from the imports in utils/llama_stack_version.py. Apply this diff: - "semver<4.0.0",
+ "semver>=3,<4.0.0",📝 Committable suggestion
Suggested change
🤖 Prompt for AI Agents |
||||||
| ] | ||||||
|
|
||||||
|
|
||||||
|
|
@@ -91,6 +92,7 @@ dev = [ | |||||
| "build>=1.2.2.post1", | ||||||
| "twine>=6.1.0", | ||||||
| "openapi-to-md>=0.1.0b2", | ||||||
| "pytest-subtests>=0.14.2", | ||||||
| ] | ||||||
| llslibdev = [ | ||||||
| # To check llama-stack API provider dependencies: | ||||||
|
|
||||||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,51 @@ | ||
| """Check if the Llama Stack version is supported by the LCS.""" | ||
|
|
||
| import logging | ||
|
|
||
| from semver import Version | ||
|
|
||
| from llama_stack_client._client import AsyncLlamaStackClient | ||
|
|
||
|
|
||
| from constants import ( | ||
| MINIMAL_SUPPORTED_LLAMA_STACK_VERSION, | ||
| MAXIMAL_SUPPORTED_LLAMA_STACK_VERSION, | ||
| ) | ||
|
|
||
| logger = logging.getLogger("utils.llama_stack_version") | ||
|
|
||
|
|
||
| class InvalidLlamaStackVersionException(Exception): | ||
| """Llama Stack version is not valid.""" | ||
|
|
||
|
|
||
| async def check_llama_stack_version( | ||
| client: AsyncLlamaStackClient, | ||
| ) -> None: | ||
| """Check if the Llama Stack version is supported by the LCS.""" | ||
| version_info = await client.inspect.version() | ||
| compare_versions( | ||
| version_info.version, | ||
| MINIMAL_SUPPORTED_LLAMA_STACK_VERSION, | ||
| MAXIMAL_SUPPORTED_LLAMA_STACK_VERSION, | ||
| ) | ||
|
|
||
|
|
||
| def compare_versions(version_info: str, minimal: str, maximal: str) -> None: | ||
| """Compare current Llama Stack version with minimal and maximal allowed versions.""" | ||
| current_version = Version.parse(version_info) | ||
| minimal_version = Version.parse(minimal) | ||
| maximal_version = Version.parse(maximal) | ||
| logger.debug("Current version: %s", current_version) | ||
| logger.debug("Minimal version: %s", minimal_version) | ||
| logger.debug("Maximal version: %s", maximal_version) | ||
|
|
||
| if current_version < minimal_version: | ||
| raise InvalidLlamaStackVersionException( | ||
| f"Llama Stack version >= {minimal_version} is required, but {current_version} is used" | ||
| ) | ||
| if current_version > maximal_version: | ||
| raise InvalidLlamaStackVersionException( | ||
| f"Llama Stack version <= {maximal_version} is required, but {current_version} is used" | ||
| ) | ||
| logger.info("Correct Llama Stack version : %s", current_version) |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,101 @@ | ||
| """Unit tests for utility function to check Llama Stack version.""" | ||
|
|
||
| import pytest | ||
| from semver import Version | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. 💡 Verification agent 🧩 Analysis chain. semver API mismatch risk: `Version` is only guaranteed in semver>=3 when using `from semver import Version`. Two fix options (pick one):
-from semver import Version
+try:
+ # semver >= 3
+ from semver import Version
+except ImportError: # semver < 3
+ from semver import VersionInfo as Version
# pyproject.toml
semver = ">=3.0.0,<4.0.0"Run this to confirm the declared semver constraint and catch any 2.x resolution: 🏁 Script executed: #!/bin/bash
set -euo pipefail
echo "Semver dependency line(s):"
rg -n -C1 -i '^\s*semver\s*=' pyproject.toml || trueLength of output: 136 🏁 Script executed: #!/bin/bash
set -euo pipefail
echo "1) All occurrences of 'semver' in the repo:"
rg -n "semver" || true
echo
echo "2) 'semver =' in TOML files:"
rg -nP "semver\s*=" --glob "*.toml" || true
echo
echo "3) 'semver' in setup.py / setup.cfg:"
grep -R "semver" setup.py setup.cfg || true
echo
echo "4) 'semver' in Pipfile / requirements*.txt:"
grep -R "semver" Pipfile requirements*.txt || trueLength of output: 1648 Tighten semver lower bound to ≥3.0.0 The project currently pins only Please update your dependency declaration: pyproject.toml (inside - "semver<4.0.0",
+ "semver>=3.0.0,<4.0.0",If you really need to support semver 2.x, apply a fallback import in both code and tests: -from semver import Version
+try:
+ from semver import Version
+except ImportError:
+ from semver import VersionInfo as Version🤖 Prompt for AI Agents |
||
|
|
||
| from llama_stack_client.types import VersionInfo | ||
|
|
||
| from utils.llama_stack_version import ( | ||
| check_llama_stack_version, | ||
| InvalidLlamaStackVersionException, | ||
| ) | ||
|
|
||
| from constants import ( | ||
| MINIMAL_SUPPORTED_LLAMA_STACK_VERSION, | ||
| MAXIMAL_SUPPORTED_LLAMA_STACK_VERSION, | ||
| ) | ||
|
|
||
|
|
||
| @pytest.mark.asyncio | ||
| async def test_check_llama_stack_version_minimal_supported_version(mocker): | ||
| """Test the check_llama_stack_version function.""" | ||
|
|
||
| # mock the Llama Stack client | ||
| mock_client = mocker.AsyncMock() | ||
| mock_client.inspect.version.return_value = VersionInfo( | ||
| version=MINIMAL_SUPPORTED_LLAMA_STACK_VERSION | ||
| ) | ||
|
|
||
| # test if the version is checked | ||
| await check_llama_stack_version(mock_client) | ||
|
|
||
|
|
||
| @pytest.mark.asyncio | ||
| async def test_check_llama_stack_version_maximal_supported_version(mocker): | ||
| """Test the check_llama_stack_version function.""" | ||
|
|
||
| # mock the Llama Stack client | ||
| mock_client = mocker.AsyncMock() | ||
| mock_client.inspect.version.return_value = VersionInfo( | ||
| version=MAXIMAL_SUPPORTED_LLAMA_STACK_VERSION | ||
| ) | ||
|
|
||
| # test if the version is checked | ||
| await check_llama_stack_version(mock_client) | ||
|
|
||
|
|
||
| @pytest.mark.asyncio | ||
| async def test_check_llama_stack_version_too_small_version(mocker): | ||
| """Test the check_llama_stack_version function.""" | ||
|
|
||
| # mock the Llama Stack client | ||
| mock_client = mocker.AsyncMock() | ||
|
|
||
| # that is surely out of range | ||
| mock_client.inspect.version.return_value = VersionInfo(version="0.0.0") | ||
|
|
||
| expected_exception_msg = ( | ||
| f"Llama Stack version >= {MINIMAL_SUPPORTED_LLAMA_STACK_VERSION} " | ||
| + "is required, but 0.0.0 is used" | ||
| ) | ||
| # test if the version is checked | ||
| with pytest.raises(InvalidLlamaStackVersionException, match=expected_exception_msg): | ||
| await check_llama_stack_version(mock_client) | ||
|
|
||
|
|
||
| async def _check_version_must_fail(mock_client, bigger_version): | ||
| mock_client.inspect.version.return_value = VersionInfo(version=str(bigger_version)) | ||
|
|
||
| expected_exception_msg = ( | ||
| f"Llama Stack version <= {MAXIMAL_SUPPORTED_LLAMA_STACK_VERSION} is required, " | ||
| + f"but {bigger_version} is used" | ||
| ) | ||
| # test if the version is checked | ||
| with pytest.raises(InvalidLlamaStackVersionException, match=expected_exception_msg): | ||
| await check_llama_stack_version(mock_client) | ||
|
|
||
|
|
||
| @pytest.mark.asyncio | ||
| async def test_check_llama_stack_version_too_big_version(mocker, subtests): | ||
| """Test the check_llama_stack_version function.""" | ||
|
|
||
| # mock the Llama Stack client | ||
| mock_client = mocker.AsyncMock() | ||
|
|
||
| max_version = Version.parse(MAXIMAL_SUPPORTED_LLAMA_STACK_VERSION) | ||
|
|
||
| with subtests.test(msg="Increased patch number"): | ||
| bigger_version = max_version.bump_patch() | ||
| await _check_version_must_fail(mock_client, bigger_version) | ||
|
|
||
| with subtests.test(msg="Increased minor number"): | ||
| bigger_version = max_version.bump_minor() | ||
| await _check_version_must_fail(mock_client, bigger_version) | ||
|
|
||
| with subtests.test(msg="Increased major number"): | ||
| bigger_version = max_version.bump_major() | ||
| await _check_version_must_fail(mock_client, bigger_version) | ||
|
|
||
| with subtests.test(msg="Increased all numbers"): | ||
| bigger_version = max_version.bump_major().bump_minor().bump_patch() | ||
| await _check_version_must_fail(mock_client, bigger_version) | ||
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Documented version check is good; update earlier example to avoid guaranteed startup failure
The new section clearly explains the startup guard. However, the earlier “Llama Stack project and configuration” example pins
llama-stack==0.2.14. With MIN/MAX set to 0.2.17, following that example will make lightspeed-core refuse to start. Please update the example to 0.2.17 (or expand the allowed range in constants if you intend broader compatibility). Suggested change to the example block (shown here for clarity):
🤖 Prompt for AI Agents