nixos/nextjs-ollama-llm-ui: init module
NixOS already has good support for the Ollama
backend service. Now we can benefit from
having a convenient web frontend as well for it.
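
For illustration, a minimal sketch of how the existing backend and this new frontend could be enabled together (option names as introduced by this module; the addresses and the Ollama default port are assumptions):

```nix
{ ... }:
{
  # Pre-existing backend module; Ollama is assumed to listen on 127.0.0.1:11434.
  services.ollama.enable = true;

  # Frontend module added by this commit.
  services.nextjs-ollama-llm-ui = {
    enable = true;
    port = 3000;
    # Must point at the Ollama backend the web app should talk to.
    ollamaUrl = "http://127.0.0.1:11434";
  };
}
```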
malteneuss committed May 22, 2024
1 parent fa67238 commit ff6d95a
Showing 2 changed files with 91 additions and 0 deletions.
2 changes: 2 additions & 0 deletions nixos/doc/manual/release-notes/rl-2405.section.md
@@ -137,6 +137,8 @@ The pre-existing [services.ankisyncd](#opt-services.ankisyncd.enable) has been m

- [ollama](https://ollama.ai), server for running large language models locally.

- [nextjs-ollama-llm-ui](https://github.com/jakobhoeg/nextjs-ollama-llm-ui), lightweight frontend server for chatting with Ollama models through a web app.

- [ownCloud Infinite Scale Stack](https://owncloud.com/infinite-scale-4-0/), a modern and scalable rewrite of ownCloud.

- [PhotonVision](https://photonvision.org/), a free, fast, and easy-to-use computer vision solution for the FIRST® Robotics Competition.
89 changes: 89 additions & 0 deletions nixos/modules/services/web-apps/nextjs-ollama-llm-ui.nix
@@ -0,0 +1,89 @@
{
config,
pkgs,
lib,
...
}:
let
cfg = config.services.nextjs-ollama-llm-ui;
# We have to override the Ollama URL here, because it gets baked into the web app.
nextjs-ollama-llm-ui = cfg.package.override { ollamaUrl = cfg.ollamaUrl; };
in
{
options = {
services.nextjs-ollama-llm-ui = {
enable = lib.mkEnableOption (
lib.mdDoc ''
Simple Ollama web UI service; an easy-to-use web frontend for an Ollama backend service.
Run state-of-the-art large language models (LLMs), similar to ChatGPT, locally and privately
on your personal computer.
This service is stateless and doesn't store any data on the server; all data is kept
locally in your web browser.
See <https://github.com/jakobhoeg/nextjs-ollama-llm-ui>.
Required: You need a running Ollama backend service, and
"services.nextjs-ollama-llm-ui.ollamaUrl" must point to its URL.
You can host such a backend service with NixOS through "services.ollama".
''
);
package = lib.mkPackageOption pkgs "nextjs-ollama-llm-ui" { };

hostname = lib.mkOption {
type = lib.types.str;
default = "127.0.0.1";
example = "ui.example.org";
description = ''
The hostname under which the Ollama UI interface should be accessible.
By default it uses localhost/127.0.0.1, so it is only accessible from the local machine.
Change it to "0.0.0.0" to make it directly accessible from the local network.
Note: You should keep it at 127.0.0.1 and expose the service to the local
network or the internet only via a reverse proxy that provides TLS encryption.
See <https://wiki.nixos.org/wiki/Nginx> for instructions on how to set up such a reverse proxy.
'';
};

port = lib.mkOption {
type = lib.types.port;
default = 3000;
example = 3000;
description = ''
The port under which the Ollama UI interface should be accessible.
'';
};

ollamaUrl = lib.mkOption {
type = lib.types.str;
default = "127.0.0.1:11434";
example = "https://ollama.example.org";
description = ''
The address (including protocol, host and port) under which the Ollama backend server can be reached.
Note that if the UI service is served under a domain such as "https://ui.example.org",
the Ollama backend service must allow CORS requests from that domain, e.g. by adding
"services.ollama.environment.OLLAMA_ORIGINS = [ ... "https://ui.example.org" ];".
'';
};
};
};

config = lib.mkIf cfg.enable {
systemd.services = {

nextjs-ollama-llm-ui = {
wantedBy = [ "multi-user.target" ];
description = "Nextjs Ollama LLM Ui.";
after = [ "network.target" ];
environment = {
HOSTNAME = cfg.hostname;
PORT = toString cfg.port;
NEXT_PUBLIC_OLLAMA_URL = cfg.ollamaUrl;
};
serviceConfig = {
ExecStart = "${lib.getExe nextjs-ollama-llm-ui}";
DynamicUser = true;
};
};
};
};
meta.maintainers = with lib.maintainers; [ malteneuss ];
}
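
The hostname and ollamaUrl descriptions above recommend keeping the UI bound to 127.0.0.1, exposing it only through a TLS-terminating reverse proxy, and allowing CORS requests from the UI's domain. A hedged sketch of such a setup with the NixOS nginx module follows; the domain names are illustrative, and OLLAMA_ORIGINS is set on the systemd unit directly rather than through a dedicated services.ollama option:

```nix
{ ... }:
{
  services.ollama.enable = true;

  services.nextjs-ollama-llm-ui = {
    enable = true;
    hostname = "127.0.0.1"; # keep the UI itself bound to localhost
    port = 3000;
    # The browser talks to Ollama directly, so the baked-in URL must be
    # the publicly reachable one.
    ollamaUrl = "https://ollama.example.org";
  };

  # Allow cross-origin requests from the UI's domain; OLLAMA_ORIGINS is
  # understood by Ollama itself as a list of allowed origins.
  systemd.services.ollama.environment.OLLAMA_ORIGINS = "https://ui.example.org";

  # TLS-terminating reverse proxies for both the UI and the Ollama API.
  services.nginx = {
    enable = true;
    virtualHosts."ui.example.org" = {
      enableACME = true;
      forceSSL = true;
      locations."/".proxyPass = "http://127.0.0.1:3000";
    };
    virtualHosts."ollama.example.org" = {
      enableACME = true;
      forceSSL = true;
      locations."/".proxyPass = "http://127.0.0.1:11434";
    };
  };

  security.acme.acceptTerms = true;
  security.acme.defaults.email = "admin@example.org"; # illustrative
}
```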
