From 9d0a266bdc9d3c36a3d4a0006234544202790e91 Mon Sep 17 00:00:00 2001
From: Ivan Charapanau
Date: Mon, 29 Jul 2024 18:03:13 +0200
Subject: [PATCH] feat: v0.0.3 - basic eject functionality - ollama commands

---
 README.md              | 34 ++++++++++++++++++++++++++++++++++--
 compose.llamacpp.yml   |  2 +-
 harbor.sh              | 48 ++++++++++++++++++++++++++++++++++++++++++++++--
 open-webui/config.json |  5 +++--
 4 files changed, 82 insertions(+), 7 deletions(-)

diff --git a/README.md b/README.md
index c751d04..9d0d974 100644
--- a/README.md
+++ b/README.md
@@ -89,13 +89,13 @@ graph LR
     class SearXNG optional
 ```
 
-This project is a script around a pre-configured Docker Compose setup that connects various LLM-related projects together. It simplifies the initial configuration and can serve as a base for your own customized setup.
+This project is a CLI and a pre-configured Docker Compose setup that connects various LLM-related projects together. It simplifies the initial configuration and can serve as a base for your own customized setup.
 
 - Services are pre-configured to work together
 - Reused local cache - huggingface, ollama, etc.
 - All configuration in one place
 - Access required CLIs via Docker without installing them
-
+- Eject from Harbor at any time
 
 ## Harbor CLI Reference
 
@@ -193,6 +193,36 @@
 harbor hf --help
 harbor hf scan-cache
 ```
 
+### `harbor ollama <command>`
+
+Runs the Ollama CLI in the container, against Harbor's configuration.
+
+```bash
+# All Ollama commands are available
+harbor ollama --version
+
+# Show currently cached models
+harbor ollama list
+
+# See the help for more commands
+harbor ollama --help
+```
+
+### `harbor eject`
+
+Renders Harbor's Docker Compose configuration into a standalone config that can be moved and used elsewhere. Accepts the same options as `harbor up`.
+
+```bash
+# Eject with the default services
+harbor eject
+
+# Eject with additional services
+harbor eject searxng
+
+# You will likely want to save the output to a file
+harbor eject searxng llamacpp > docker-compose.harbor.yml
+```
+
 ## Services Overview
 
 | Service | Handle / Local URL | Description |
diff --git a/compose.llamacpp.yml b/compose.llamacpp.yml
index 5367dd2..8c1b3da 100644
--- a/compose.llamacpp.yml
+++ b/compose.llamacpp.yml
@@ -8,7 +8,7 @@ services:
       - 33831:8080
     command: >
       --server
-      --model ${HARBOR_LLAMACPP_MODEL}
+      --model $(./scripts/hf.sh https://huggingface.co/bartowski/Qwen2-7B-Instruct-GGUF/blob/main/Qwen2-7B-Instruct-IQ2_S.gguf)
       --port 8080
       --host 0.0.0.0
     networks:
diff --git a/harbor.sh b/harbor.sh
index a4c02ef..00d67eb 100755
--- a/harbor.sh
+++ b/harbor.sh
@@ -88,14 +88,17 @@ show_help() {
     echo "  logs      - View the logs of the containers"
     echo "  help      - Show this help message"
     echo
-    echo "Setup Manageent Commands:"
-    echo "  hf        - Run the Hugging Face CLI"
+    echo "Setup Management Commands:"
+    echo "  hf        - Run Harbor's Hugging Face CLI"
+    echo "  ollama    - Run Harbor's Ollama CLI. The Ollama service should be running"
+    echo "  smi       - Show NVIDIA GPU information"
     echo
     echo "CLI Commands:"
     echo "  open      - Open a service in the default browser"
     echo "  ln        - Create a symbolic link to the CLI"
     echo "  defaults  - Show the default services"
     echo "  version   - Show the CLI version"
+    echo "  eject     - Eject the Compose configuration; accepts the same options as 'up'"
     echo
     echo "Options:"
     echo "  Additional options to pass to the compose_with_options function"
@@ -185,6 +188,35 @@ open_service() {
     echo "Opened $url in your default browser."
 }
 
+smi() {
+    if command -v nvidia-smi &> /dev/null; then
+        nvidia-smi
+    else
+        echo "nvidia-smi not found."
+    fi
+}
+
+eject() {
+    $(compose_with_options "$@") config
+}
+
+run_in_service() {
+    local service_name="$1"
+    shift
+    local command_to_run=("$@")
+
+    if docker compose ps --services --filter "status=running" | grep -q "^${service_name}$"; then
+        echo "Service ${service_name} is running. Executing command..."
+        docker compose exec "${service_name}" "${command_to_run[@]}"
+    else
+        echo "Harbor ${service_name} is not running. Please start it with 'harbor up ${service_name}' first."
+    fi
+}
+
+exec_ollama() {
+    run_in_service ollama ollama "$@"
+}
+
 cd $harbor_home
 
 # Main script logic
@@ -236,6 +268,18 @@ case "$1" in
         shift
         show_version
         ;;
+    smi)
+        shift
+        smi
+        ;;
+    eject)
+        shift
+        eject "$@"
+        ;;
+    ollama)
+        shift
+        exec_ollama "$@"
+        ;;
     *)
         echo "Unknown command: $1"
         show_help
diff --git a/open-webui/config.json b/open-webui/config.json
index c6567db..f2f6adf 100644
--- a/open-webui/config.json
+++ b/open-webui/config.json
@@ -25,8 +25,9 @@
             "http://llamacpp:8080/v1"
         ],
         "api_keys": [
-            ""
-        ]
+            "123"
+        ],
+        "enabled": true
     },
     "image_generation": {
         "engine": "comfyui",
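
A minimal usage sketch for the two new commands, assuming Docker Compose v2 is installed; `llama3` is only an example model name, and `searxng`/`llamacpp` are the optional services already referenced in the README:

```bash
# Start the Ollama service, then drive it through Harbor's CLI wrapper
harbor up ollama
harbor ollama pull llama3
harbor ollama list

# Render the merged configuration and run it without the Harbor CLI
harbor eject searxng llamacpp > docker-compose.harbor.yml
docker compose -f docker-compose.harbor.yml up -d
```

Since `eject` simply appends `config` to the `docker compose -f ...` command line built by `compose_with_options`, the rendered file has Harbor's file merging and environment interpolation already baked in, which is what makes it usable standalone.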