From caf23d922195e2d228aa05e08ce302a20f677347 Mon Sep 17 00:00:00 2001 From: Radovan Fuchs Date: Tue, 16 Sep 2025 10:31:39 +0200 Subject: [PATCH] E2E tests for Info, Models and Metrics endpoints --- .github/workflows/e2e_tests.yaml | 2 +- tests/e2e/features/info.feature | 101 +++++++++++++++++-------------- tests/e2e/features/steps/info.py | 67 +++++++++++++++----- 3 files changed, 107 insertions(+), 63 deletions(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index e5713a4d..ad86c563 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -65,7 +65,7 @@ jobs: isAbsolutePath: false file: 'lightspeed-stack.yaml' content: | - name: foo bar baz + name: Lightspeed Core Service (LCS) service: host: 0.0.0.0 port: 8080 diff --git a/tests/e2e/features/info.feature b/tests/e2e/features/info.feature index 8bad3538..30075a78 100644 --- a/tests/e2e/features/info.feature +++ b/tests/e2e/features/info.feature @@ -1,46 +1,55 @@ -# Feature: Info endpoint API tests -#TODO: fix test - -# Background: -# Given The service is started locally -# And REST API service hostname is localhost -# And REST API service port is 8080 -# And REST API service prefix is /v1 - -# Scenario: Check if the OpenAPI endpoint works as expected -# Given The system is in default state -# When I access endpoint "openapi.json" using HTTP GET method -# Then The status code of the response is 200 -# And The body of the response contains OpenAPI - -# Scenario: Check if info endpoint is working -# Given The system is in default state -# When I access REST API endpoint "info" using HTTP GET method -# Then The status code of the response is 200 -# And The body of the response has proper name "lightspeed_stack" and version "0.2.0" - -# Scenario: Check if models endpoint is working -# Given The system is in default state -# When I access REST API endpoint "models" using HTTP GET method -# Then The status code of the response is 200 -# And The body of 
the response contains gpt - - -# Scenario: Check if models endpoint is working -# Given The system is in default state -# And The llama-stack connection is disrupted -# When I access REST API endpoint "models" using HTTP GET method -# Then The status code of the response is 503 - -# Scenario: Check if metrics endpoint is working -# Given The system is in default state -# When I access REST API endpoint "metrics" using HTTP GET method -# Then The status code of the response is 200 -# And The body of the response has proper metrics - -# Scenario: Check if metrics endpoint is working -# Given The system is in default state -# And The llama-stack connection is disrupted -# When I access REST API endpoint "metrics" using HTTP GET method -# Then The status code of the response is 500 - +Feature: Info tests + + + Background: + Given The service is started locally + And REST API service hostname is localhost + And REST API service port is 8080 + And REST API service prefix is /v1 + + Scenario: Check if the OpenAPI endpoint works as expected + Given The system is in default state + When I access endpoint "openapi.json" using HTTP GET method + Then The status code of the response is 200 + And The body of the response contains OpenAPI + + Scenario: Check if info endpoint is working + Given The system is in default state + When I access REST API endpoint "info" using HTTP GET method + Then The status code of the response is 200 + And The body of the response has proper name Lightspeed Core Service (LCS) and version 0.2.0 + And The body of the response has llama-stack version 0.2.19 + + Scenario: Check if info endpoint reports error when llama-stack connection is not working + Given The system is in default state + And The llama-stack connection is disrupted + When I access REST API endpoint "info" using HTTP GET method + Then The status code of the response is 500 + And The body of the response is the following + """ + {"detail": {"response": "Unable to connect to Llama 
Stack", "cause": "Connection error."}} + """ + + Scenario: Check if models endpoint is working + Given The system is in default state + When I access REST API endpoint "models" using HTTP GET method + Then The status code of the response is 200 + And The body of the response for model gpt-4o-mini has proper structure + + + Scenario: Check if models endpoint reports error when llama-stack connection is not working + Given The system is in default state + And The llama-stack connection is disrupted + When I access REST API endpoint "models" using HTTP GET method + Then The status code of the response is 500 + And The body of the response is the following + """ + {"detail": {"response": "Unable to connect to Llama Stack", "cause": "Connection error."}} + """ + + + Scenario: Check if metrics endpoint is working + Given The system is in default state + When I access endpoint "metrics" using HTTP GET method + Then The status code of the response is 200 + And The body of the response contains ls_provider_model_configuration diff --git a/tests/e2e/features/steps/info.py b/tests/e2e/features/steps/info.py index c6244f12..d9777e26 100644 --- a/tests/e2e/features/steps/info.py +++ b/tests/e2e/features/steps/info.py @@ -4,19 +4,54 @@ from behave.runner import Context -@then( - "The body of the response has proper name {system_prompt:w} and version {version:w}" -) -def check_name_version(context: Context, system_prompt: str, version: str) -> None: - """Check proper name and version number.""" - context.system_prompt = system_prompt - context.version = version - # TODO: add step implementation - assert context is not None - - -@then("The body of the response has proper metrics") -def check_metrics(context: Context) -> None: - """Check proper metrics.""" - # TODO: add step implementation - assert context is not None +@then("The body of the response has proper name {service_name} and version {version}") +def check_name_version(context: Context, service_name: str, version: str) -> None: + """Check proper service name and 
version number.""" + response_json = context.response.json() + assert response_json is not None, "Response is not valid JSON" + + assert response_json["name"] == service_name, f"name is {response_json['name']}" + assert ( + response_json["service_version"] == version + ), f"version is {response_json['service_version']}" + + +@then("The body of the response has llama-stack version {llama_version}") +def check_llama_version(context: Context, llama_version: str) -> None: + """Check proper llama-stack version number.""" + response_json = context.response.json() + assert response_json is not None, "Response is not valid JSON" + + assert ( + response_json["llama_stack_version"] == llama_version + ), f"llama-stack version is {response_json['llama_stack_version']}" + + +@then("The body of the response for model {model} has proper structure") +def check_model_structure(context: Context, model: str) -> None: + """Check that the given model has the correct structure and required fields.""" + response_json = context.response.json() + assert response_json is not None, "Response is not valid JSON" + + assert "models" in response_json, "Response missing 'models' field" + models = response_json["models"] + assert len(models) > 0, "Models list should not be empty" + + matched_model = None + for model_item in models: + if model in model_item.get("identifier", ""): + matched_model = model_item + break + + assert matched_model is not None, f"model {model} not found in models list" + + assert matched_model["type"] == "model", "type should be 'model'" + assert matched_model["api_model_type"] == "llm", "api_model_type should be 'llm'" + assert matched_model["model_type"] == "llm", "model_type should be 'llm'" + assert matched_model["provider_id"] == "openai", "provider_id should be 'openai'" + assert ( + matched_model["provider_resource_id"] == model + ), f"provider_resource_id should be '{model}'" + assert ( + matched_model["identifier"] == f"openai/{model}" + ), f"identifier should be 'openai/{model}'"