Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 23 additions & 0 deletions .github/workflows/outdated_dependencies.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Workflow that lists outdated Python dependencies on every push and PR.
name: List outdated dependencies

on:
  - push
  - pull_request

jobs:
  list_outdated_dependencies:
    runs-on: ubuntu-latest
    permissions:
      contents: read        # needed by actions/checkout to clone the repository
      pull-requests: read
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: '3.11'
      # Log the interpreter version so CI output shows what actually ran.
      - name: Check Python version
        run: python --version
      - name: PDM installation
        run: pip install --user pdm
      # pdm reads the project lockfile and reports packages with newer releases.
      - name: List outdated dependencies
        run: pdm outdated
23 changes: 23 additions & 0 deletions .github/workflows/pydocstyle.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Workflow that checks docstring conventions (PEP 257) via pydocstyle.
name: Pydocstyle

on:
  - push
  - pull_request

jobs:
  pydocstyle:
    runs-on: ubuntu-latest
    permissions:
      contents: read        # needed by actions/checkout to clone the repository
      pull-requests: read
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: '3.11'
      # Log the interpreter version so CI output shows what actually ran.
      - name: Python version
        run: python --version
      - name: Pydocstyle install
        run: pip install --user pydocstyle
      # -v prints every file checked; '.' scans the whole repository.
      - name: Python docstring checks
        run: pydocstyle -v .
20 changes: 20 additions & 0 deletions .github/workflows/radon.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# Workflow that measures code complexity/maintainability with radon.
name: Radon

on:
  - push
  - pull_request

jobs:
  radon:
    runs-on: ubuntu-latest
    permissions:
      contents: read        # needed by actions/checkout to clone the repository
      pull-requests: read
    name: "radon"
    steps:
      # checkout@v4 for consistency with the other workflows in this repo
      # (v3 runs on a deprecated Node runtime).
      - uses: actions/checkout@v4
      - uses: davidslusser/actions_python_radon@v1.0.0
        with:
          src: "src"        # directory to analyze
          min: "A"          # minimum rank to display
          grade: "B"        # fail the job if any block grades worse than B
16 changes: 16 additions & 0 deletions .github/workflows/shellcheck.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# Workflow that lints all shell scripts via the Makefile's shellcheck target.
name: Shell check

on:
  - push
  - pull_request

jobs:
  shellcheck:
    runs-on: ubuntu-latest
    permissions:
      contents: read        # needed by actions/checkout to clone the repository
      pull-requests: read
    steps:
      - uses: actions/checkout@v4
      # Delegates to `make shellcheck` so CI and local runs behave the same.
      - name: Shell check
        run: make shellcheck
5 changes: 5 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -38,3 +38,8 @@ docs/config.png: docs/config.puml ## Generate an image with configuration graph
mv classes.png config.png && \
popd

shellcheck: ## Run shellcheck
	# Download and unpack the prebuilt shellcheck release. The trailing
	# backslash in the previous version made `shellcheck --version` part of
	# the tar command line, so the linter never actually ran.
	wget -qO- "https://github.com/koalaman/shellcheck/releases/download/stable/shellcheck-stable.linux.x86_64.tar.xz" | tar -xJv
	# The tarball extracts into ./shellcheck-stable/, which is not on PATH,
	# so invoke the binary by its extracted path.
	./shellcheck-stable/shellcheck --version
	./shellcheck-stable/shellcheck -- */*.sh

53 changes: 53 additions & 0 deletions scripts/codecov.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
#!/usr/bin/env bash

# Upload a coverage profile to codecov.io. When run under prow (the
# OpenShift CI system) the git refs and job link are derived from prow's
# environment variables; when run locally the script only reports where
# the coverage profile can be found.
#
# Usage: codecov.sh [COVER_PROFILE]
#   COVER_PROFILE may also be supplied via the environment.

set -o nounset
set -o pipefail
set -x

CI_SERVER_URL=https://prow.svc.ci.openshift.org/view/gcs/origin-ci-test
# Coverage profile path: $COVER_PROFILE if set, otherwise the first argument.
COVER_PROFILE=${COVER_PROFILE:-"$1"}
JOB_TYPE=${JOB_TYPE:-"local"}

# Configure the git refs and job link based on how the job was triggered via prow.
# REF_FLAGS is an array so each flag and its value reach the codecov uploader
# as separate words; the previous scalar expanded inside quotes became one
# single argument ("-P 123 -C deadbeef") which the uploader cannot parse.
if [[ "${JOB_TYPE}" == "presubmit" ]]; then
    echo "detected PR code coverage job for #${PULL_NUMBER}"
    REF_FLAGS=(-P "${PULL_NUMBER}" -C "${PULL_PULL_SHA}")
    JOB_LINK="${CI_SERVER_URL}/pr-logs/pull/${REPO_OWNER}_${REPO_NAME}/${PULL_NUMBER}/${JOB_NAME}/${BUILD_ID}"
elif [[ "${JOB_TYPE}" == "batch" ]] || [[ "${JOB_TYPE}" == "postsubmit" ]]; then
    echo "detected branch code coverage job for ${PULL_BASE_REF}"
    REF_FLAGS=(-B "${PULL_BASE_REF}" -C "${PULL_BASE_SHA}")
    JOB_LINK="${CI_SERVER_URL}/logs/${JOB_NAME}/${BUILD_ID}"
elif [[ "${JOB_TYPE}" == "local" ]]; then
    echo "coverage report available at ${COVER_PROFILE}"
    exit 0
else
    echo "${JOB_TYPE} jobs not supported" >&2
    exit 1
fi

# Configure certain internal codecov variables with values from prow.
export CI_BUILD_URL="${JOB_LINK}"
export CI_BUILD_ID="${JOB_NAME}"
export CI_JOB_ID="${BUILD_ID}"

if [[ "${JOB_TYPE}" != "local" ]]; then
    if [[ -z "${ARTIFACT_DIR:-}" ]] || [[ ! -d "${ARTIFACT_DIR}" ]] || [[ ! -w "${ARTIFACT_DIR}" ]]; then
        # shellcheck disable=SC2016
        echo '${ARTIFACT_DIR} must be set for non-local jobs, and must point to a writable directory' >&2
        exit 1
    fi
    # Save the uploader into the job artifacts so the exact script used for
    # the upload can be inspected later.
    curl -sS https://codecov.io/bash -o "${ARTIFACT_DIR}/codecov.sh"
    bash <(cat "${ARTIFACT_DIR}/codecov.sh") -Z -K -f "${COVER_PROFILE}" -r "${REPO_OWNER}/${REPO_NAME}" "${REF_FLAGS[@]}"
    # shellcheck disable=SC2181
    if [ $? -ne 0 ]; then
        # Upload failures must not fail the CI job itself.
        echo "Failed uploading coverage report from a non local environment. Exiting gracefully with status code 0."
        exit 0
    fi
else
    # NOTE(review): this branch is currently unreachable — JOB_TYPE=local
    # exits earlier above. Kept for parity with the original behavior.
    bash <(curl -s https://codecov.io/bash) -Z -K -f "${COVER_PROFILE}" -r "${REPO_OWNER}/${REPO_NAME}" "${REF_FLAGS[@]}"
    # shellcheck disable=SC2181
    if [ $? -ne 0 ]; then
        echo "Failed uploading coverage report from local environment. Exiting gracefully with status code 0."
        exit 0
    fi
fi
1 change: 1 addition & 0 deletions src/app/endpoints/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,4 +22,5 @@

@router.get("/config", responses=get_config_responses)
def config_endpoint_handler(request: Request) -> Configuration:
    """Handle requests to the /config endpoint.

    Returns the service configuration that was loaded at startup. The
    request object is unused but required by the FastAPI handler signature.
    """
    loaded_configuration = configuration.configuration
    return loaded_configuration
1 change: 1 addition & 0 deletions src/app/endpoints/info.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,4 +22,5 @@

@router.get("/info", responses=get_into_responses)
def info_endpoint_handler(request: Request) -> InfoResponse:
    """Handle request to the /info endpoint.

    Returns the service name and version. The request object is unused but
    required by the FastAPI handler signature.
    """
    # NOTE(review): the name is hard-coded to "foo" — presumably a placeholder.
    response = InfoResponse(name="foo", version=__version__)
    return response
1 change: 1 addition & 0 deletions src/app/endpoints/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@

@router.get("/models", responses=models_responses)
def models_endpoint_handler(request: Request) -> ModelsResponse:
"""Handle requests to the /models endpoint."""
llama_stack_config = configuration.llama_stack_configuration
logger.info("LLama stack config: %s", llama_stack_config)

Expand Down
4 changes: 3 additions & 1 deletion src/app/endpoints/query.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
def query_endpoint_handler(
request: Request, query_request: QueryRequest
) -> QueryResponse:
"""Handle request to the /query endpoint."""
llama_stack_config = configuration.llama_stack_configuration
logger.info("LLama stack config: %s", llama_stack_config)
client = get_llama_stack_client(llama_stack_config)
Expand Down Expand Up @@ -84,7 +85,7 @@ def select_model_id(client: LlamaStackClient, query_request: QueryRequest) -> st
def retrieve_response(
client: LlamaStackClient, model_id: str, query_request: QueryRequest
) -> str:

"""Retrieve response from LLMs and agents."""
available_shields = [shield.identifier for shield in client.shields.list()]
if not available_shields:
logger.info("No available shields. Disabling safety")
Expand Down Expand Up @@ -124,6 +125,7 @@ def retrieve_response(

def validate_attachments_metadata(attachments: list[Attachment]) -> None:
"""Validate the attachments metadata provided in the request.

Raises HTTPException if any attachment has an improper type or content type.
"""
for attachment in attachments:
Expand Down
1 change: 1 addition & 0 deletions src/app/endpoints/root.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,5 +27,6 @@

@router.get("/", response_class=HTMLResponse)
def root_endpoint_handler(request: Request) -> HTMLResponse:
    """Handle request to the / endpoint.

    Serves the pre-rendered index page. The request object is unused but
    required by the FastAPI handler signature.
    """
    logger.info("Serving index page")
    index_response = HTMLResponse(index_page)
    return index_response
3 changes: 3 additions & 0 deletions src/app/main.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
"""Definition of FastAPI based web service."""

from fastapi import FastAPI
from app import routers
import version
Expand Down Expand Up @@ -25,4 +27,5 @@

@app.on_event("startup")
async def startup_event() -> None:
    """Perform logger setup on service startup."""
    # Configure the endpoint handlers' logger (installs the Rich handler,
    # see src/log.py) before the first request is served; the returned
    # logger object is intentionally discarded.
    get_logger("app.endpoints.handlers")
1 change: 1 addition & 0 deletions src/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
def get_llama_stack_client(
llama_stack_config: LLamaStackConfiguration,
) -> LlamaStackClient:
"""Retrieve Llama stack client according to configuration."""
if llama_stack_config.use_as_library_client is True:
if llama_stack_config.library_client_config_path is not None:
logger.info("Using Llama stack as library client")
Expand Down
7 changes: 7 additions & 0 deletions src/configuration.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
"""Configuration loader."""

import yaml

import logging
Expand All @@ -8,6 +10,8 @@


class AppConfig:
"""Singleton class to load and store the configuration."""

_instance = None

def __new__(cls, *args: Any, **kwargs: Any) -> "AppConfig":
Expand All @@ -28,17 +32,20 @@ def load_configuration(self, filename: str) -> None:
self.init_from_dict(config_dict)

    def init_from_dict(self, config_dict: dict[Any, Any]) -> None:
        """Initialize configuration from a dictionary.

        Constructing the Configuration model validates the dictionary's
        contents; an invalid dictionary raises at this point rather than
        later, when the configuration is first read.
        """
        self._configuration = Configuration(**config_dict)

@property
def configuration(self) -> Configuration:
"""Return the whole configuration."""
assert (
self._configuration is not None
), "logic error: configuration is not loaded"
return self._configuration

@property
def llama_stack_configuration(self) -> LLamaStackConfiguration:
"""Return Llama stack configuration."""
assert (
self._configuration is not None
), "logic error: configuration is not loaded"
Expand Down
2 changes: 2 additions & 0 deletions src/constants.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
"""Constants used in business logic."""

UNABLE_TO_PROCESS_RESPONSE = "Unable to process this request"

# Supported attachment types
Expand Down
2 changes: 2 additions & 0 deletions src/lightspeed-stack.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@


def create_argument_parser() -> ArgumentParser:
"""Create and configure argument parser object."""
parser = ArgumentParser()
parser.add_argument(
"-v",
Expand Down Expand Up @@ -45,6 +46,7 @@ def dump_configuration(configuration: Configuration) -> None:


def main() -> None:
"""Entry point to the web service."""
logger.info("Lightspeed stack startup")
parser = create_argument_parser()
args = parser.parse_args()
Expand Down
3 changes: 3 additions & 0 deletions src/log.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,11 @@
"""Log utilities."""

import logging
from rich.logging import RichHandler


def get_logger(name: str) -> logging.Logger:
"""Retrieve logger with the provided name."""
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG)
logger.handlers = [RichHandler()]
Expand Down
4 changes: 4 additions & 0 deletions src/models/config.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
"""Model with service configuration."""

from pydantic import BaseModel, model_validator

from typing import Optional
Expand All @@ -16,6 +18,7 @@ class ServiceConfiguration(BaseModel):

@model_validator(mode="after")
def check_service_configuration(self) -> Self:
"""Check service configuration."""
if self.port <= 0:
raise ValueError("Port value should not be negative")
if self.port > 65535:
Expand All @@ -35,6 +38,7 @@ class LLamaStackConfiguration(BaseModel):

@model_validator(mode="after")
def check_llama_stack_model(self) -> Self:
"""Check Llama stack configuration."""
if self.url is None:
if self.use_as_library_client is None:
raise ValueError(
Expand Down
4 changes: 3 additions & 1 deletion src/models/requests.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
"""Model for service requests."""

from pydantic import BaseModel, model_validator
from llama_stack_client.types.agents.turn_create_params import Document
from typing import Optional, Self
Expand Down Expand Up @@ -110,7 +112,7 @@ class QueryRequest(BaseModel):
}

def get_documents(self) -> list[Document]:
"""Returns the list of documents from the attachments."""
"""Return the list of documents from the attachments."""
if not self.attachments:
return []
return [
Expand Down
2 changes: 2 additions & 0 deletions src/models/responses.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
"""Models for service responses."""

from pydantic import BaseModel
from typing import Any, Optional

Expand Down