Making Ollama work within the Docker container (very important fix)

Cole Medin 2024-11-05 09:42:49 -06:00
parent e7ce25758c
commit a6d81b1b0e
3 changed files with 28 additions and 2 deletions


@@ -35,7 +35,11 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
     case 'OpenAILike':
       return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
     case 'Ollama':
-      return env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434";
+      let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434";
+      if (env.RUNNING_IN_DOCKER === 'true') {
+        baseUrl = baseUrl.replace("localhost", "host.docker.internal");
+      }
+      return baseUrl;
     default:
       return "";
   }
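In effect, the new 'Ollama' branch is a localhost-to-host.docker.internal substitution that only kicks in when the app knows it is running inside a container. A minimal standalone sketch of that behavior (the resolveOllamaUrl name and the plain env-record parameter are illustrative, not part of the repo):

function resolveOllamaUrl(env: Record<string, string | undefined>): string {
  // Default to the standard Ollama port on the local machine.
  let baseUrl = env.OLLAMA_API_BASE_URL || "http://localhost:11434";
  // Inside a container, "localhost" is the container itself, not the host,
  // so swap in Docker's host alias.
  if (env.RUNNING_IN_DOCKER === 'true') {
    baseUrl = baseUrl.replace("localhost", "host.docker.internal");
  }
  return baseUrl;
}

// resolveOllamaUrl({ RUNNING_IN_DOCKER: 'true' }) => "http://host.docker.internal:11434"
// resolveOllamaUrl({})                            => "http://localhost:11434"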


@@ -47,9 +47,25 @@ const staticModels: ModelInfo[] = [
 export let MODEL_LIST: ModelInfo[] = [...staticModels];
 
+const getOllamaBaseUrl = () => {
+  const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
+
+  // Check if we're in the browser
+  if (typeof window !== 'undefined') {
+    // Frontend always uses localhost
+    return defaultBaseUrl;
+  }
+
+  // Backend: Check if we're running in Docker
+  const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
+
+  return isDocker
+    ? defaultBaseUrl.replace("localhost", "host.docker.internal")
+    : defaultBaseUrl;
+};
+
 async function getOllamaModels(): Promise<ModelInfo[]> {
   try {
-    const base_url = import.meta.env.OLLAMA_API_BASE_URL || "http://localhost:11434";
+    const base_url = getOllamaBaseUrl();
     const response = await fetch(`${base_url}/api/tags`);
     const data = await response.json() as OllamaApiResponse;
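The getOllamaBaseUrl helper splits on typeof window: in the browser the default localhost URL is kept, since the user's browser talks to an Ollama instance on their own machine, while server-side code reads process.env.RUNNING_IN_DOCKER and rewrites the host. Ollama's GET /api/tags endpoint lists the locally installed models; a rough sketch of how such a response can be consumed (the ListTagsResponse shape below is a trimmed assumption, not the repo's OllamaApiResponse type):

// Sketch: fetch the installed model names from a resolved Ollama base URL.
interface ListTagsResponse {
  models: { name: string }[]; // assumed minimal shape of the /api/tags payload
}

async function listOllamaModelNames(baseUrl: string): Promise<string[]> {
  const response = await fetch(`${baseUrl}/api/tags`);
  const data = (await response.json()) as ListTagsResponse;
  return data.models.map((model) => model.name);
}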


@@ -20,6 +20,9 @@ services:
       - GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
       - OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
       - VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
+      - RUNNING_IN_DOCKER=true
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
     command: pnpm run dockerstart
     profiles:
       - production # This service only runs in the production profile
@@ -43,6 +46,9 @@ services:
       - GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
       - OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
       - VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
+      - RUNNING_IN_DOCKER=true
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
     volumes:
       - type: bind
         source: .
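
The compose changes do two things: RUNNING_IN_DOCKER=true tells the app to apply the URL rewrite above, and the extra_hosts entry maps host.docker.internal to the host's gateway address (host-gateway), which matters on Linux where that alias is not created automatically. A quick way to confirm the container can actually reach Ollama is to hit /api/tags through the rewritten URL from inside it; a hedged sketch (the check-ollama.ts name and the default port are assumptions):

// check-ollama.ts -- run inside the container to verify Ollama is reachable.
const base = process.env.RUNNING_IN_DOCKER === 'true'
  ? 'http://host.docker.internal:11434' // assumed default Ollama port
  : process.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';

fetch(`${base}/api/tags`)
  .then((res) => {
    if (!res.ok) throw new Error(`HTTP ${res.status}`);
    console.log(`Ollama reachable at ${base}`);
  })
  .catch((err) => console.error(`Cannot reach Ollama at ${base}:`, err));

If the check fails even though Ollama runs on the host, the server may be bound only to 127.0.0.1; setting OLLAMA_HOST=0.0.0.0 on the host is a common fix, though that detail is outside this commit.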