diff --git a/options/custom/containers/open-webui.nix b/options/custom/containers/open-webui.nix
new file mode 100644
index 0000000..79d4074
--- /dev/null
+++ b/options/custom/containers/open-webui.nix
@@ -0,0 +1,38 @@
+{
+  config,
+  lib,
+  ...
+}:
+with lib; let
+  cfg = config.custom.containers.open-webui;
+in {
+  options.custom.containers.open-webui = {
+    enable = mkOption {default = false;};
+  };
+
+  config = mkIf cfg.enable {
+    #?? arion-open-webui pull
+    environment.shellAliases.arion-open-webui = "sudo arion --prebuilt-file ${config.virtualisation.arion.projects.open-webui.settings.out.dockerComposeYaml}";
+
+    virtualisation.arion.projects.open-webui.settings.services = {
+      # https://github.com/open-webui/open-webui
+      # https://docs.openwebui.com/getting-started/quick-start/
+      open-webui.service = {
+        container_name = "open-webui";
+        dns = ["100.100.100.100"]; # Tailscale resolver
+        image = "ghcr.io/open-webui/open-webui:v0.5.20";
+        network_mode = "host";
+        restart = "unless-stopped";
+        volumes = ["${config.custom.containers.directory}/open-webui/data:/app/backend/data"];
+
+        environment = {
+          PORT = 3033; # 8080/tcp
+
+          # HACK: Offline Ollama endpoints result in an unusable interface, so cap timeout
+          # https://github.com/open-webui/open-webui/issues/11228
+          AIOHTTP_CLIENT_TIMEOUT = 5; # 300 seconds by default
+        };
+      };
+    };
+  };
+}
diff --git a/profiles/server/default.nix b/profiles/server/default.nix
index e0957af..be71cf0 100644
--- a/profiles/server/default.nix
+++ b/profiles/server/default.nix
@@ -22,6 +22,7 @@
     miniflux.enable = true;
     netbox.enable = true;
     #// nextcloud.enable = true;
+    open-webui.enable = true;
     oryx.enable = true;
     #// owncast.enable = true;
     redlib.enable = true;
diff --git a/secrets/server/caddy/Caddyfile b/secrets/server/caddy/Caddyfile
index 889b9bd..d050cdc 100644
Binary files a/secrets/server/caddy/Caddyfile and b/secrets/server/caddy/Caddyfile differ
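
Usage sketch: the arion-open-webui alias above bakes in the prebuilt compose file, and arion forwards the remaining arguments to docker compose, so the subcommands below are assumptions based on standard docker compose usage rather than anything defined in this diff.

    arion-open-webui pull     # fetch the pinned ghcr.io/open-webui/open-webui:v0.5.20 image
    arion-open-webui up -d    # (re)create and start the open-webui container in the background
    arion-open-webui logs -f  # follow container logs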