Skip to content

Commit

Permalink
chore: refactor project structure to handle multiple pyprojects in mo…
Browse files Browse the repository at this point in the history
…norepo (#361)

* break out pyproject into each package

* minor fixes: package build dests, docs, remove unused deploy values

* minor config and docs changes

* move sdk out of leapfrogai_api

* fully separate api and sdk

* replace components packages reference to sdk

* Makefile and README clarifications

* update e2e workflow

* add whisper dev dependencies

* update make clean target to remove src and package builds

* Update pyproject descriptions and documentation

* remove sdk package versions from pyprojects

* use pinned dev dependencies in whisper build

* simplify docker wheel builds

---------

Co-authored-by: Jon Perry <yrrepnoj@gmail.com>
  • Loading branch information
jalling97 and YrrepNoj authored Apr 18, 2024
1 parent fdc0ef9 commit d25bfd7
Show file tree
Hide file tree
Showing 70 changed files with 1,402 additions and 915 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/e2e.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ jobs:
python-version-file: 'pyproject.toml'

- name: Install Python Deps
run: python -m pip install ".[dev,e2e-test]"
run: python -m pip install "."

- name: Setup UDS Environment
uses: defenseunicorns/uds-common/.github/actions/setup@05f42bb3117b66ebef8c72ae050b34bce19385f5
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/lint.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -20,4 +20,4 @@ jobs:
uses: chartboost/ruff-action@e18ae971ccee1b2d7bbef113930f00c670b78da4 # v1.0.0
with:
version: 0.3.4
args: format --check
args: format --check
2 changes: 1 addition & 1 deletion .github/workflows/pytest.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ jobs:
run: docker run -p 50051:50051 -d --name=repeater ghcr.io/defenseunicorns/leapfrogai/repeater:0.3.3

- name: Install Python Deps
run: pip install ".[dev]"
run: pip install "." "src/leapfrogai_api" "src/leapfrogai_sdk"

- name: Run Pytest
run: python -m pytest tests/pytest -v
Expand Down
35 changes: 19 additions & 16 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ KEY ?= ""

VERSION ?= $(shell git describe --abbrev=0 --tags)
LOCAL_VERSION ?= $(shell git rev-parse --short HEAD)
SDK_DEST ?= src/leapfrogai_sdk/build
######################################################################################

.PHONY: help
Expand All @@ -14,29 +15,31 @@ help: ## Display this help information

clean: ## Clean up all the things (packages, build dirs, compiled .whl files, python eggs)
-rm zarf-package-*.tar.zst
-rm packages/**/zarf-package-*.tar.zst
-rm -rf build/*
-rm -rf src/**/build/*
-rm -rf packages/**/build/*
find . -name '*.whl' -delete
find . -name '*.egg-info' -type d -exec rm -rf {} +


build-wheel: ## Build the wheel for the leapfrogai_api module
python -m pip wheel . -w build


gen-python: ## Generate the protobufs for the OpenAI typing within the leapfrogai_api module
python3 -m grpc_tools.protoc -I src/leapfrogai_api/types/proto \
python3 -m grpc_tools.protoc -I src/leapfrogai_sdk/proto \
--pyi_out=src/. \
--python_out=src/. \
--grpc_python_out=src/. \
src/leapfrogai_api/types/proto/leapfrogai_api/types/**/*.proto
src/leapfrogai_sdk/proto/leapfrogai_sdk/**/*.proto

local-registry: ## Start up a local container registry. Errors in this target are ignored.
-docker run -d -p 5000:5000 --restart=always --name registry registry:2

sdk-wheel: ## build wheels for the leapfrogai_sdk package as a dependency for other lfai components
-rm ${SDK_DEST}/*.whl
python -m pip wheel src/leapfrogai_sdk -w ${SDK_DEST}

setup-api-deps: ## Download the wheels for the leapfrogai_api dependencies
setup-api-deps: sdk-wheel ## Download the wheels for the leapfrogai_api dependencies
-rm packages/api/build/*.whl
python -m pip wheel . -w packages/api/build
python -m pip wheel src/leapfrogai_api -w packages/api/build --find-links=${SDK_DEST}

build-api: local-registry setup-api-deps ## Build the leapfrogai_api container and Zarf package
## Build the image (and tag it for the local registry)
Expand All @@ -50,9 +53,9 @@ build-api: local-registry setup-api-deps ## Build the leapfrogai_api container a
uds zarf package create packages/api -o packages/api --registry-override=ghcr.io=localhost:5000 --insecure --set LEAPFROGAI_IMAGE_VERSION=${LOCAL_VERSION} --confirm


setup-llama-cpp-python-deps: ## Download the wheels for the optional 'llama-cpp-python' dependencies
setup-llama-cpp-python-deps: sdk-wheel ## Download the wheels for the optional 'llama-cpp-python' dependencies
-rm packages/llama-cpp-python/build/*.whl
python -m pip wheel ".[llama-cpp-python]" -w packages/llama-cpp-python/build
python -m pip wheel packages/llama-cpp-python -w packages/llama-cpp-python/build --find-links=${SDK_DEST}

build-llama-cpp-python: local-registry setup-llama-cpp-python-deps ## Build the llama-cpp-python (cpu) container and Zarf package
## Build the image (and tag it for the local registry)
Expand All @@ -66,9 +69,9 @@ build-llama-cpp-python: local-registry setup-llama-cpp-python-deps ## Build the
uds zarf package create packages/llama-cpp-python -o packages/llama-cpp-python --registry-override=ghcr.io=localhost:5000 --insecure --set IMAGE_VERSION=${LOCAL_VERSION} --confirm


setup-vllm-deps: ## Download the wheels for the optional 'vllm' dependencies
setup-vllm-deps: sdk-wheel ## Download the wheels for the optional 'vllm' dependencies
-rm packages/vllm/build/*.whl
python -m pip wheel ".[vllm]" -w packages/vllm/build
python -m pip wheel packages/vllm -w packages/vllm/build --find-links=${SDK_DEST}

build-vllm: local-registry setup-vllm-deps ## Build the vllm container and Zarf package
## Build the image (and tag it for the local registry)
Expand All @@ -82,9 +85,9 @@ build-vllm: local-registry setup-vllm-deps ## Build the vllm container and Zarf
uds zarf package create packages/vllm -o packages/vllm --registry-override=ghcr.io=localhost:5000 --insecure --set IMAGE_VERSION=${LOCAL_VERSION} --confirm


setup-text-embeddings-deps: ## Download the wheels for the optional 'text-embeddings' dependencies
setup-text-embeddings-deps: sdk-wheel ## Download the wheels for the optional 'text-embeddings' dependencies
-rm packages/text-embeddings/build/*.whl
python -m pip wheel ".[text-embeddings]" -w packages/text-embeddings/build
python -m pip wheel packages/text-embeddings -w packages/text-embeddings/build --find-links=${SDK_DEST}

build-text-embeddings: local-registry setup-text-embeddings-deps ## Build the text-embeddings container and Zarf package
## Build the image (and tag it for the local registry)
Expand All @@ -98,9 +101,9 @@ build-text-embeddings: local-registry setup-text-embeddings-deps ## Build the te
uds zarf package create packages/text-embeddings -o packages/text-embeddings --registry-override=ghcr.io=localhost:5000 --insecure --set IMAGE_VERSION=${LOCAL_VERSION} --confirm


setup-whisper-deps: ## Download the wheels for the optional 'whisper' dependencies
setup-whisper-deps: sdk-wheel ## Download the wheels for the optional 'whisper' dependencies
-rm packages/whisper/build/*.whl
python -m pip wheel ".[whisper]" -w packages/whisper/build
python -m pip wheel "packages/whisper[dev]" -w packages/whisper/build --find-links=${SDK_DEST}

build-whisper: local-registry setup-whisper-deps ## Build the whisper container and zarf package
## Build the image (and tag it for the local registry)
Expand Down
52 changes: 30 additions & 22 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -43,9 +43,10 @@ The LeapfrogAI repository follows a monorepo structure based around an [API](#ap
```
leapfrogai/
├── src/
│ └── leapfrogai_api/
│ ├── main.py
│ └── ...
│ ├── leapfrogai_api/
│ │ ├── main.py
│ │ └── ...
│ └── leapfrogai_sdk/
├── packages/
│ ├── api/
│ ├── llama-cpp-python/
Expand Down Expand Up @@ -94,7 +95,7 @@ LeapfrogAI leverages Chainguard's [apko](https://github.com/chainguard-dev/apko)

### SDK

The LeapfrogAI SDK provides a standard set of protobuff and python utilities for implementing backends and gRPC.
The LeapfrogAI [SDK](src/leapfrogai_sdk/) provides a standard set of protobuff and python utilities for implementing backends and gRPC.

### User Interface

Expand Down Expand Up @@ -122,22 +123,24 @@ python -m venv .venv
source .venv/bin/activate
```

Each component is built into its own Zarf package. This can be done easily using the provided `Make` targets:
Each component is built into its own Zarf package. You can build all of the packages you need at once with the following `Make` targets:

```
make build-api
make build-vllm # if you have GPUs
make build-llama-cpp-python # if you have CPU only
make build-text-embeddings
make build-whisper
make build-cpu # api, llama-cpp-python, text-embeddings, whisper
make build-gpu # api, vllm, text-embeddings, whisper
make build-all # all of the backends
```

**OR**

You can build all of the packages you need at once with the following make targets:
You can build components individually using the following `Make` targets:

```
make build-cpu # api, llama-cpp-python, text-embeddings, whisper
make build-gpu # api, vllm, text-embeddings, whisper
make build-all # all of the backends
make build-api
make build-vllm # if you have GPUs
make build-llama-cpp-python # if you have CPU only
make build-text-embeddings
make build-whisper
```

Once the packages are created, you can deploy either a CPU or GPU-enabled deployment via one of the UDS bundles:
Expand Down Expand Up @@ -174,8 +177,9 @@ source .venv/bin/activate
To run the LeapfrogAI API locally (starting from the root directory of the repository):

```
python -m pip install ".[dev]"
cd src
python -m pip install src/leapfrogai_sdk
cd src/leapfrogai_api
python -m pip install .
uvicorn leapfrogai_api.main:app --port 3000 --reload
```

Expand All @@ -184,19 +188,21 @@ uvicorn leapfrogai_api.main:app --port 3000 --reload
To run the llama-cpp-python backend locally (starting from the root directory of the repository):

```
python -m pip install ".[llama-cpp-python,dev]"
python -m pip install src/leapfrogai_sdk
cd packages/llama-cpp-python
python -m pip install .
python scripts/model_download.py
mv .model/*.gguf .model/model.gguf
python -m leapfrogai_api.types.cli --app-dir=. main:Model
lfai-cli --app-dir=. main:Model
```

#### Backend: text-embeddings
To run the text-embeddings backend locally (starting from the root directory of the repository):

```
python -m pip install ".[text-embeddings,dev]"
python -m pip install src/leapfrogai_sdk
cd packages/text-embeddings
python -m pip install .
python scripts/model_download.py
python -u main.py
```
Expand All @@ -205,19 +211,21 @@ python -u main.py
To run the vllm backend locally (starting from the root directory of the repository):

```
python -m pip install ".[vllm,dev]"
python -m pip install src/leapfrogai_sdk
cd packages/vllm
python -m pip install .
python scripts/model_download.py
export QUANTIZATION=awq
python -m leapfrogai_api.types.cli --app-dir=. main:Model
lfai-cli --app-dir=. main:Model
```

#### Backend: whisper
To run the whisper backend locally (starting from the root directory of the repository):

```
python -m pip install ".[whisper,dev]"
python -m pip install src/leapfrogai_sdk
cd packages/whisper
python -m pip install ".[dev]"
ct2-transformers-converter --model openai/whisper-base --output_dir .model --copy_files tokenizer.json --quantization float32
python -u main.py
```
Expand Down
4 changes: 2 additions & 2 deletions packages/api/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@ WORKDIR /leapfrogai
# copy the api dependencies over
# NOTE: We are copying to this filename because installing 'optional extras' from a wheel requires the absolute path to the wheel file (instead of a wildcard whl)
COPY build/*.whl build/
COPY build/leapfrogai_api*.whl leapfrogai_api-100.100.100-py3-none-any.whl
RUN pip install leapfrogai_api-100.100.100-py3-none-any.whl --no-index --find-links=build/
COPY build/leapfrogai_api*.whl .
RUN pip install leapfrogai_api*.whl --no-index --find-links=build/

FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11-${ARCH}

Expand Down
6 changes: 3 additions & 3 deletions packages/llama-cpp-python/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@ ENV PATH="/leapfrogai/.venv/bin:$PATH"
# copy the llama-cpp-python build dependencies over
# NOTE: We are copying to this filename because installing 'optional extras' from a wheel requires the absolute path to the wheel file (instead of a wildcard whl)
COPY build/*.whl build/
COPY build/leapfrogai_api*.whl leapfrogai_api-100.100.100-py3-none-any.whl
RUN pip install leapfrogai_api-100.100.100-py3-none-any.whl[llama-cpp-python] --no-index --find-links=build/
COPY build/lfai_llama_cpp_python*.whl .
RUN pip install lfai_llama_cpp_python*.whl --no-index --find-links=build/

# hardened and slim python image
FROM --platform=$BUILDPLATFORM ghcr.io/defenseunicorns/leapfrogai/python:3.11
Expand All @@ -43,4 +43,4 @@ COPY config.yaml .

EXPOSE 50051:50051

ENTRYPOINT ["python", "-m", "leapfrogai_api.types.cli", "--app-dir=.", "main:Model"]
ENTRYPOINT ["python", "-m", "leapfrogai_sdk.cli", "--app-dir=.", "main:Model"]
4 changes: 2 additions & 2 deletions packages/llama-cpp-python/main.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import os
from typing import Any, Generator

from leapfrogai_api.types import BackendConfig
from leapfrogai_api.types.llm import LLM, GenerationConfig
from leapfrogai_sdk import BackendConfig
from leapfrogai_sdk.llm import LLM, GenerationConfig
from llama_cpp import Llama

GPU_ENABLED = (
Expand Down
28 changes: 28 additions & 0 deletions packages/llama-cpp-python/pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
[project]
name = "lfai-llama-cpp-python"
version = "0.6.1"
description = "A LeapfrogAI API-compatible llama-cpp-python wrapper for quantized and un-quantized model inferencing on CPU infrastructures."

dependencies = [
"llama-cpp-python == 0.2.28",
"leapfrogai-sdk",
]
requires-python = "~=3.11"
readme = "README.md"

[tool.pip-tools]
generate-hashes = true

[tool.setuptools]
packages = ["leapfrogai_sdk"]
package-dir = {"" = "../../src"}

[tool.pytest.ini_options]
addopts = ["--import-mode=importlib"]

[tool.ruff]
target-version = "py311"

[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
6 changes: 3 additions & 3 deletions packages/text-embeddings/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,11 @@ RUN python3.11 -m venv .venv
ENV PATH="/leapfrogai/.venv/bin:$PATH"

# copy and install all python dependencies
# NOTE: We are copying the leapfrog whl to this filename because installing 'optional extras' from
# NOTE: We are copying the leapfrog whl to this filename because installing 'optional extras' from
# a wheel requires the absolute path to the wheel file (instead of a wildcard whl)
COPY build/*.whl build/
COPY build/leapfrogai_api*.whl leapfrogai_api-100.100.100-py3-none-any.whl
RUN pip install "leapfrogai_api-100.100.100-py3-none-any.whl[text-embeddings]" --no-index --find-links=build/
COPY build/lfai_text_embeddings*.whl .
RUN pip install lfai_text_embeddings*.whl --no-index --find-links=build/


# download model
Expand Down
2 changes: 1 addition & 1 deletion packages/text-embeddings/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import logging

from InstructorEmbedding import INSTRUCTOR
from leapfrogai_api.types import (
from leapfrogai_sdk import (
Embedding,
EmbeddingRequest,
EmbeddingResponse,
Expand Down
33 changes: 33 additions & 0 deletions packages/text-embeddings/pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
[project]
name = "lfai-text-embeddings"
version = "0.6.1"
description = "A LeapfrogAI API-compatible embeddings library wrapper for text-based embedding generation."

dependencies = [
"InstructorEmbedding >= 1.0.1",
"torch == 2.1.2",
"numpy == 1.26.3",
"tqdm == 4.66.1",
"sentence-transformers == 2.2.2",
"transformers == 4.36.0",
"leapfrogai-sdk",
]
requires-python = "~=3.11"
readme = "README.md"

[tool.pip-tools]
generate-hashes = true

[tool.setuptools]
packages = ["leapfrogai_sdk"]
package-dir = {"" = "../../src"}

[tool.pytest.ini_options]
addopts = ["--import-mode=importlib"]

[tool.ruff]
target-version = "py311"

[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
6 changes: 3 additions & 3 deletions packages/vllm/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,8 @@ ENV PATH="/home/leapfrogai/.venv/bin:$PATH"
# # create virtual environment for light-weight portability and minimal libraries
# # NOTE: We are copying to this filename because installing 'optional extras' from a wheel requires the absolute path to the wheel file (instead of a wildcard whl)
COPY build/*.whl build/
COPY build/leapfrogai_api*.whl leapfrogai_api-100.100.100-py3-none-any.whl
RUN pip install "leapfrogai_api-100.100.100-py3-none-any.whl[vllm]" --no-index --find-links=build/
COPY build/lfai_vllm*.whl .
RUN pip install lfai_vllm*.whl --no-index --find-links=build/

# download model
ARG REPO_ID=TheBloke/Synthia-7B-v2.0-GPTQ
Expand All @@ -50,4 +50,4 @@ COPY config.yaml .

EXPOSE 50051:50051

ENTRYPOINT ["python3.11", "-m", "leapfrogai_api.types.cli", "--app-dir=.", "main:Model"]
ENTRYPOINT ["python3.11", "-m", "leapfrogai_sdk.cli", "--app-dir=.", "main:Model"]
4 changes: 2 additions & 2 deletions packages/vllm/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@
from typing import Any, Dict, Generator

from dotenv import load_dotenv
from leapfrogai_api.types import BackendConfig
from leapfrogai_api.types.llm import LLM, GenerationConfig
from leapfrogai_sdk import BackendConfig
from leapfrogai_sdk.llm import LLM, GenerationConfig
from vllm import SamplingParams
from vllm.engine.arg_utils import AsyncEngineArgs
from vllm.engine.async_llm_engine import AsyncLLMEngine
Expand Down
Loading

0 comments on commit d25bfd7

Please sign in to comment.