Files
GoClaw/docker/docker-compose.yml
322cebf475 fix(prod): production startup fixes — health endpoint, serveStatic path, entrypoint, docker config
- Add /api/health endpoint for Docker healthchecks
- Fix serveStatic path: dist/public instead of ../public
- Fix entrypoint.sh: DB wait check, npx drizzle-kit migrate, add netcat
- Fix Dockerfile: add bash/netcat, fix COPY order, add tsconfig.node.json
- Fix docker-compose.yml: add OLLAMA/LLM env vars for Node.js fallback
- Fix docker-stack.yml: remove template vars, use env vars instead of secrets
- Fix drizzle.config.ts: add migrations prefix
- Update .env.example with full LLM provider documentation
2026-04-08 23:09:28 +01:00

179 lines
6.6 KiB
YAML

##############################################################################
# GoClaw Control Center — Docker Compose (Local Development)
#
# Services:
# control-center — React + Node.js tRPC frontend/backend (:3000)
# gateway — Go Orchestrator + Tool Executor (:18789)
# db — MySQL 8 (:3306)
#
# LLM Provider (set in .env or environment):
# Cloud (default): LLM_BASE_URL=https://ollama.com/v1 + LLM_API_KEY=<key>
# OpenAI-compat: LLM_BASE_URL=https://api.openai.com/v1 + LLM_API_KEY=<key>
# Local GPU node: LLM_BASE_URL=http://<gpu-host>:11434 (no key needed)
#
# Local Ollama (GPU only):
# The ollama service below is commented out by default.
# Uncomment it only on machines with a compatible GPU.
# Then set: LLM_BASE_URL=http://ollama:11434
#
# Usage:
# cp docker/.env.example docker/.env # fill in LLM_API_KEY etc.
# docker compose -f docker/docker-compose.yml up -d
# docker compose -f docker/docker-compose.yml logs -f gateway
# docker compose -f docker/docker-compose.yml down -v
##############################################################################
name: goclaw

networks:
  goclaw-net:
    driver: bridge

volumes:
  mysql-data:
  # ollama-data: # Uncomment when using local Ollama service below

services:
  # ── MySQL 8 ──────────────────────────────────────────────────────────────
  db:
    image: mysql:8.0
    container_name: goclaw-db
    restart: unless-stopped
    environment:
      MYSQL_ROOT_PASSWORD: "${MYSQL_ROOT_PASSWORD:-goClawRoot123}"
      MYSQL_DATABASE: "${MYSQL_DATABASE:-goclaw}"
      MYSQL_USER: "${MYSQL_USER:-goclaw}"
      MYSQL_PASSWORD: "${MYSQL_PASSWORD:-goClawPass123}"
    ports:
      - "3306:3306"
    volumes:
      - mysql-data:/var/lib/mysql
    networks:
      - goclaw-net
    healthcheck:
      # NOTE(review): the root password is visible in the container process
      # list and in `docker inspect` — acceptable for local dev only.
      test:
        [
          "CMD",
          "mysqladmin",
          "ping",
          "-h",
          "localhost",
          "-u",
          "root",
          "-p${MYSQL_ROOT_PASSWORD:-goClawRoot123}",
        ]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 30s

  # ── Local Ollama LLM Server (GPU ONLY — disabled by default) ─────────────
  # Uncomment this entire block only on machines with a compatible NVIDIA or
  # Apple Silicon GPU. Then set LLM_BASE_URL=http://ollama:11434 in the
  # gateway service below (or in your .env file).
  #
  # ollama:
  #   image: ollama/ollama:latest
  #   container_name: goclaw-ollama
  #   restart: unless-stopped
  #   ports:
  #     - "11434:11434"
  #   volumes:
  #     - ollama-data:/root/.ollama
  #   networks:
  #     - goclaw-net
  #   environment:
  #     - OLLAMA_NUM_PARALLEL=2
  #     - OLLAMA_MAX_LOADED_MODELS=2
  #   # NVIDIA GPU support — uncomment if available:
  #   # deploy:
  #   #   resources:
  #   #     reservations:
  #   #       devices:
  #   #         - driver: nvidia
  #   #           count: all
  #   #           capabilities: [gpu]

  # ── Go Gateway (Orchestrator + Tool Executor) ─────────────────────────────
  gateway:
    build:
      context: ..
      dockerfile: docker/Dockerfile.gateway
    container_name: goclaw-gateway
    restart: unless-stopped
    ports:
      - "18789:18789"
    environment:
      PORT: "18789"
      # ── LLM Provider ─────────────────────────────────────────────────────
      # Cloud default (Ollama Cloud, OpenAI-compatible):
      LLM_BASE_URL: "${LLM_BASE_URL:-https://ollama.com/v1}"
      LLM_API_KEY: "${LLM_API_KEY:-${OLLAMA_API_KEY:-}}"
      # Legacy alias (still supported):
      OLLAMA_API_KEY: "${OLLAMA_API_KEY:-${LLM_API_KEY:-}}"
      # ── To use local Ollama on GPU node, set in .env: ─────────────────────
      # LLM_BASE_URL=http://ollama:11434 (if ollama service above is enabled)
      # LLM_BASE_URL=http://<gpu-host-ip>:11434 (external GPU machine)
      # ─────────────────────────────────────────────────────────────────────
      DEFAULT_MODEL: "${DEFAULT_MODEL:-qwen2.5:7b}"
      DATABASE_URL: "${MYSQL_USER:-goclaw}:${MYSQL_PASSWORD:-goClawPass123}@tcp(db:3306)/${MYSQL_DATABASE:-goclaw}?parseTime=true"
      PROJECT_ROOT: "/app"
      GATEWAY_REQUEST_TIMEOUT_SECS: "120"
      GATEWAY_MAX_TOOL_ITERATIONS: "10"
      LOG_LEVEL: "info"
    depends_on:
      db:
        condition: service_healthy
      # ollama: # Uncomment if using local Ollama service above
      #   condition: service_started
    networks:
      - goclaw-net
    volumes:
      # Mount project root for file tools (read-only)
      - ..:/app:ro
      # Mount Docker socket for docker_exec tool.
      # NOTE(review): socket access is root-equivalent on the host —
      # fine for local dev, do not ship to production as-is.
      - /var/run/docker.sock:/var/run/docker.sock
    healthcheck:
      test: ["CMD", "wget", "-qO-", "http://localhost:18789/health"]
      interval: 15s
      timeout: 5s
      retries: 3
      start_period: 10s

  # ── Control Center (React + Node.js) ─────────────────────────────────────
  control-center:
    build:
      context: ..
      dockerfile: docker/Dockerfile.control-center
    container_name: goclaw-control-center
    restart: unless-stopped
    ports:
      - "3000:3000"
    environment:
      NODE_ENV: "production"
      DATABASE_URL: "mysql://${MYSQL_USER:-goclaw}:${MYSQL_PASSWORD:-goClawPass123}@db:3306/${MYSQL_DATABASE:-goclaw}"
      GATEWAY_URL: "http://gateway:18789"
      JWT_SECRET: "${JWT_SECRET:-change-me-in-production}"
      # Same provider resolution as the gateway: prefer LLM_*, fall back to
      # the legacy OLLAMA_API_KEY alias so either variable configures both
      # services identically.
      OLLAMA_BASE_URL: "${LLM_BASE_URL:-https://ollama.com/v1}"
      OLLAMA_API_KEY: "${LLM_API_KEY:-${OLLAMA_API_KEY:-}}"
      VITE_APP_ID: "${VITE_APP_ID:-}"
      OAUTH_SERVER_URL: "${OAUTH_SERVER_URL:-}"
      VITE_OAUTH_PORTAL_URL: "${VITE_OAUTH_PORTAL_URL:-}"
      BUILT_IN_FORGE_API_URL: "${BUILT_IN_FORGE_API_URL:-}"
      BUILT_IN_FORGE_API_KEY: "${BUILT_IN_FORGE_API_KEY:-}"
      VITE_FRONTEND_FORGE_API_KEY: "${VITE_FRONTEND_FORGE_API_KEY:-}"
      VITE_FRONTEND_FORGE_API_URL: "${VITE_FRONTEND_FORGE_API_URL:-}"
    depends_on:
      db:
        condition: service_healthy
      gateway:
        condition: service_healthy
    networks:
      - goclaw-net
    healthcheck:
      test: ["CMD", "wget", "-qO-", "http://localhost:3000/api/health"]
      interval: 15s
      timeout: 5s
      retries: 3
      start_period: 20s