Files
2025-11-11 11:47:15 +01:00

54 lines
1.2 KiB
Bash

#!/usr/bin/env bash
set -euo pipefail

# Install an Ollama server as a Docker Compose service, listening on the
# first free TCP port at or above 11434.
#
# Relies on helpers defined elsewhere in this installer collection:
#   ask_to_install, ensure_root, detect_pkg_manager, install_docker, log,
# and on the variables SUDO (may be empty), YELLOW, NC.
if ask_to_install "Ollama Server"; then
  echo "=== OLLAMA SERVER INSTALLATION ==="
  ensure_root
  detect_pkg_manager
  install_docker

  $SUDO mkdir -p /srv/docker/ollama
  cd /srv/docker/ollama || { echo "cd /srv/docker/ollama failed" >&2; exit 1; }

  #######################################
  # Find the first free TCP port >= 11434.
  # Outputs: the port number on stdout.
  #######################################
  find_free_port() {
    local port=11434
    # ss column 4 is the local address:port of each listening socket.
    while ss -lnt | awk '{print $4}' | grep -q ":${port}\$"; do
      port=$((port + 1))
    done
    echo "$port"
  }

  FREE_PORT=$(find_free_port)
  echo "✅ Freier Port gefunden: $FREE_PORT"

  # Unquoted EOF on purpose: $FREE_PORT must expand inside the heredoc.
  # YAML is written flush-left so the compose file nesting is exact.
  $SUDO tee docker-compose.yml >/dev/null <<EOF
services:
  ollama:
    image: ollama/ollama:latest
    container_name: ollama-$FREE_PORT
    restart: unless-stopped
    ports:
      - "$FREE_PORT:11434"
    volumes:
      - ollama_data:/root/.ollama
volumes:
  ollama_data:
EOF

  $SUDO docker compose up -d
  echo "Ollama Server läuft auf Port $FREE_PORT"

  # -r: keep backslashes literal. '|| MODEL=""' keeps EOF (non-interactive
  # stdin) from aborting the script under set -e.
  read -r -p "Modell jetzt herunterladen? (z.B. llama3 / Enter = nein): " MODEL || MODEL=""
  if [[ -n "$MODEL" ]]; then
    # Best effort: a failed model pull must not abort the installer
    # (hence '|| true'). NOTE(review): $MODEL is interpolated into the
    # JSON body verbatim — acceptable for interactive use, but quotes in
    # the input would break the payload.
    $SUDO curl -N -X POST "http://127.0.0.1:${FREE_PORT}/api/pull" \
      -H "Content-Type: application/json" \
      -d "{\"name\":\"$MODEL\"}" || true
  fi
  echo "✅ Fertig! URL: http://<server-ip>:$FREE_PORT"
else
  log "${YELLOW}⏭ Ollama Server übersprungen.${NC}"
fi