From a6d81b1b0ed95a60eb9196606946ca2db8d548a3 Mon Sep 17 00:00:00 2001
From: Cole Medin
Date: Tue, 5 Nov 2024 09:42:49 -0600
Subject: [PATCH] Making Ollama work within the Docker container, very
 important fix

---
 app/lib/.server/llm/api-key.ts |  6 +++++-
 app/utils/constants.ts         | 18 +++++++++++++++++-
 docker-compose.yaml            |  6 ++++++
 3 files changed, 28 insertions(+), 2 deletions(-)

diff --git a/app/lib/.server/llm/api-key.ts b/app/lib/.server/llm/api-key.ts
index e2764c1..327cfb3 100644
--- a/app/lib/.server/llm/api-key.ts
+++ b/app/lib/.server/llm/api-key.ts
@@ -35,7 +35,11 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
     case 'OpenAILike':
       return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
     case 'Ollama':
-      return env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434";
+      let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434";
+      if (env.RUNNING_IN_DOCKER === 'true') {
+        baseUrl = baseUrl.replace("localhost", "host.docker.internal");
+      }
+      return baseUrl;
     default:
       return "";
   }
diff --git a/app/utils/constants.ts b/app/utils/constants.ts
index b48cb34..d003df4 100644
--- a/app/utils/constants.ts
+++ b/app/utils/constants.ts
@@ -47,9 +47,25 @@ const staticModels: ModelInfo[] = [
 
 export let MODEL_LIST: ModelInfo[] = [...staticModels];
 
+const getOllamaBaseUrl = () => {
+  const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
+  // Check if we're in the browser
+  if (typeof window !== 'undefined') {
+    // Frontend always uses localhost
+    return defaultBaseUrl;
+  }
+
+  // Backend: Check if we're running in Docker
+  const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
+
+  return isDocker
+    ? defaultBaseUrl.replace("localhost", "host.docker.internal")
+    : defaultBaseUrl;
+};
+
 async function getOllamaModels(): Promise<ModelInfo[]> {
   try {
-    const base_url = import.meta.env.OLLAMA_API_BASE_URL || "http://localhost:11434";
+    const base_url = getOllamaBaseUrl();
     const response = await fetch(`${base_url}/api/tags`);
     const data = await response.json() as OllamaApiResponse;
 
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 8c46136..c391dd7 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -20,6 +20,9 @@ services:
       - GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
       - OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
       - VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
+      - RUNNING_IN_DOCKER=true
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
     command: pnpm run dockerstart
     profiles:
       - production # This service only runs in the production profile
@@ -43,6 +46,9 @@ services:
       - GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
       - OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
       - VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
+      - RUNNING_IN_DOCKER=true
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
     volumes:
       - type: bind
         source: .