refactor: rename llama-stable to llama-ggml (#1287)
* refactor: rename llama-stable to llama-ggml

* Makefile: get sources in sources/

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* fixup path

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* fixup sources

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* fixups sd

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* update SD

* fixup

* fixup: create piper libdir also when not built

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

* fix make target on linux test

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>

---------

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
mudler committed Nov 18, 2023
1 parent 2f65671 commit 3c9544b
Showing 8 changed files with 125 additions and 138 deletions.
7 changes: 3 additions & 4 deletions .github/workflows/test.yml
@@ -81,10 +81,9 @@ jobs:
PATH=$PATH:/opt/conda/bin make -C backend/python/huggingface
# Pre-build piper before we start tests in order to have shared libraries in place
-make go-piper && \
-GO_TAGS="tts" make -C go-piper piper.o && \
-sudo cp -rfv go-piper/piper/build/pi/lib/. /usr/lib/ && \
+make sources/go-piper && \
+GO_TAGS="tts" make -C sources/go-piper piper.o && \
+sudo cp -rfv sources/go-piper/piper/build/pi/lib/. /usr/lib/ && \
# Pre-build stable diffusion before we install a newer version of abseil (not compatible with stablediffusion-ncn)
GO_TAGS="stablediffusion tts" GRPC_BACKENDS=backend-assets/grpc/stablediffusion make build
9 changes: 1 addition & 8 deletions .gitignore
@@ -1,12 +1,5 @@
# go-llama build artifacts
-go-llama
-go-llama-stable
-/gpt4all
-go-stable-diffusion
-go-piper
-/go-bert
-go-ggllm
-/piper
+/sources/
__pycache__/
*.a
get-sources
7 changes: 6 additions & 1 deletion Dockerfile
@@ -108,6 +108,11 @@ RUN if [ "${BUILD_GRPC}" = "true" ]; then \
# Rebuild with defaults backends
RUN make build

+RUN if [ ! -d "/build/sources/go-piper/piper/build/pi/lib/" ]; then \
+    mkdir -p /build/sources/go-piper/piper/build/pi/lib/ \
+    touch /build/sources/go-piper/piper/build/pi/lib/keep \
+    ; fi
+
###################################
###################################

@@ -145,7 +150,7 @@ RUN make prepare-sources
COPY --from=builder /build/local-ai ./

# Copy shared libraries for piper
-COPY --from=builder /build/go-piper/piper/build/pi/lib/* /usr/lib/
+COPY --from=builder /build/sources/go-piper/piper/build/pi/lib/* /usr/lib/

# do not let stablediffusion rebuild (requires an older version of absl)
COPY --from=builder /build/backend-assets/grpc/stablediffusion ./backend-assets/grpc/stablediffusion
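
Taken together, the added RUN step and the updated COPY path make the image build tolerate a missing piper build: the builder stage guarantees that sources/go-piper/piper/build/pi/lib/ exists and contains a placeholder keep entry, presumably so the later COPY of its contents into /usr/lib/ always finds something to match. A minimal Go sketch of that intent, for illustration only (the paths come from this diff; the helper name and the Go form are assumptions, not part of the commit):

package main

import (
	"os"
	"path/filepath"
)

// ensurePiperLibDir mirrors the added Dockerfile step: when piper was never
// built, create the lib directory and drop a "keep" placeholder so a later
// copy of its contents does not fail on a missing or empty path.
func ensurePiperLibDir(buildRoot string) error {
	libDir := filepath.Join(buildRoot, "sources", "go-piper", "piper", "build", "pi", "lib")
	if _, err := os.Stat(libDir); err == nil {
		return nil // piper was built, nothing to do
	}
	if err := os.MkdirAll(libDir, 0o755); err != nil {
		return err
	}
	return os.WriteFile(filepath.Join(libDir, "keep"), nil, 0o644)
}

func main() {
	if err := ensurePiperLibDir("/build"); err != nil {
		panic(err)
	}
}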
230 changes: 110 additions & 120 deletions Makefile

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion api/api_test.go
@@ -301,7 +301,7 @@ var _ = Describe("API test", func() {
response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
URL: "github:go-skynet/model-gallery/openllama_3b.yaml",
Name: "openllama_3b",
Overrides: map[string]interface{}{"backend": "llama-stable", "mmap": true, "f16": true, "context_size": 128},
Overrides: map[string]interface{}{"backend": "llama-ggml", "mmap": true, "f16": true, "context_size": 128},
})

Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))
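Since the backend identifier itself changes, any stored model definition or request override that still says "llama-stable" needs to be updated to "llama-ggml" to keep working against the renamed backend. A small illustrative Go helper (hypothetical, not shipped by this commit) that applies the rename to an override map shaped like the one in the test above:

package main

import "fmt"

// migrateBackendName rewrites the old backend identifier to the new one and
// leaves every other override untouched. Hypothetical helper for illustration.
func migrateBackendName(overrides map[string]interface{}) {
	if overrides["backend"] == "llama-stable" {
		overrides["backend"] = "llama-ggml"
	}
}

func main() {
	overrides := map[string]interface{}{
		"backend": "llama-stable", "mmap": true, "f16": true, "context_size": 128,
	}
	migrateBackendName(overrides)
	fmt.Println(overrides) // backend is now "llama-ggml"
}
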
File renamed without changes.
File renamed without changes.
8 changes: 4 additions & 4 deletions pkg/model/initializers.go
@@ -15,8 +15,8 @@ import (
)

const (
LlamaBackend = "llama"
LlamaStableBackend = "llama-stable"
GoLlamaBackend = "llama"
LlamaGGML = "llama-ggml"
LLamaCPP = "llama-cpp"
StarcoderBackend = "starcoder"
GPTJBackend = "gptj"
@@ -41,8 +41,8 @@ const (

var AutoLoadBackends []string = []string{
LLamaCPP,
-LlamaStableBackend,
-LlamaBackend,
+LlamaGGML,
+GoLlamaBackend,
Gpt4All,
GPTNeoXBackend,
BertEmbeddingsBackend,
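The renamed constants feed AutoLoadBackends, the ordered list the model loader appears to fall back to when a configuration does not pin a backend, so llama-ggml now takes the slot that llama-stable used to occupy. A minimal sketch of such a greedy fallback, assuming a hypothetical tryLoad stand-in for the real backend startup; only the identifier strings visible in this diff are used:

package main

import "fmt"

// Ordering taken from the updated AutoLoadBackends in pkg/model/initializers.go;
// entries whose string values are not visible in this hunk are omitted.
var autoLoadBackends = []string{
	"llama-cpp",
	"llama-ggml", // previously "llama-stable"
	"llama",
}

// tryLoad is a hypothetical stand-in for starting a backend and asking it to
// load the model; it is not LocalAI's real API.
func tryLoad(backend, modelPath string) error {
	return fmt.Errorf("%s could not load %s", backend, modelPath)
}

// greedyLoad tries each backend in order and keeps the first one that works,
// which is why the position of llama-ggml in the list matters.
func greedyLoad(modelPath string) (string, error) {
	for _, b := range autoLoadBackends {
		if err := tryLoad(b, modelPath); err == nil {
			return b, nil
		}
	}
	return "", fmt.Errorf("no autoload backend could load %s", modelPath)
}

func main() {
	backend, err := greedyLoad("ggml-model.bin")
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("loaded with", backend)
}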
