Mirror of https://github.com/coleam00/bolt.new-any-llm
Synced 2024-12-28 06:42:56 +00:00
Making Ollama work within the Docker container, very important fix
Some checks failed
Build and Push Container / build-and-push (push) Has been cancelled
parent e7ce25758c
commit a6d81b1b0e
@@ -35,7 +35,11 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
     case 'OpenAILike':
       return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
     case 'Ollama':
-      return env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434";
+      let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434";
+      if (env.RUNNING_IN_DOCKER === 'true') {
+        baseUrl = baseUrl.replace("localhost", "host.docker.internal");
+      }
+      return baseUrl;
     default:
       return "";
   }
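Note: the new branch only rewrites the host name, because inside a container "localhost" points at the container itself rather than the machine running Ollama. A minimal stand-alone sketch of the same logic; the helper name resolveOllamaBaseUrl is hypothetical and not part of this commit:

// Hypothetical helper mirroring the new 'Ollama' branch above: when running
// inside Docker, swap "localhost" for "host.docker.internal" so the request
// reaches the Ollama server on the host machine.
function resolveOllamaBaseUrl(configuredUrl: string | undefined, runningInDocker: boolean): string {
  let baseUrl = configuredUrl || "http://localhost:11434";
  if (runningInDocker) {
    baseUrl = baseUrl.replace("localhost", "host.docker.internal");
  }
  return baseUrl;
}

// Inside the container the default URL is rewritten; outside it is unchanged.
console.log(resolveOllamaBaseUrl(undefined, true));  // http://host.docker.internal:11434
console.log(resolveOllamaBaseUrl(undefined, false)); // http://localhost:11434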
@@ -47,9 +47,25 @@ const staticModels: ModelInfo[] = [

 export let MODEL_LIST: ModelInfo[] = [...staticModels];

+const getOllamaBaseUrl = () => {
+  const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
+  // Check if we're in the browser
+  if (typeof window !== 'undefined') {
+    // Frontend always uses localhost
+    return defaultBaseUrl;
+  }
+
+  // Backend: Check if we're running in Docker
+  const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
+
+  return isDocker
+    ? defaultBaseUrl.replace("localhost", "host.docker.internal")
+    : defaultBaseUrl;
+};
+
 async function getOllamaModels(): Promise<ModelInfo[]> {
   try {
-    const base_url = import.meta.env.OLLAMA_API_BASE_URL || "http://localhost:11434";
+    const base_url = getOllamaBaseUrl();
     const response = await fetch(`${base_url}/api/tags`);
     const data = await response.json() as OllamaApiResponse;

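For reference, a self-contained sketch of how the resolved base URL is consumed: Ollama's /api/tags endpoint is queried for the locally installed models, which is what getOllamaModels does in the hunk above. The OllamaTagsResponse and listOllamaModels names here are illustrative, not part of the commit:

// Illustrative sketch (hypothetical names): list model names from an Ollama
// server by querying its /api/tags endpoint at the resolved base URL.
interface OllamaTag {
  name: string;
}

interface OllamaTagsResponse {
  models: OllamaTag[];
}

async function listOllamaModels(baseUrl: string): Promise<string[]> {
  const response = await fetch(`${baseUrl}/api/tags`);
  if (!response.ok) {
    // Ollama unreachable (e.g. wrong host inside Docker) -> no dynamic models.
    return [];
  }
  const data = (await response.json()) as OllamaTagsResponse;
  return data.models.map((model) => model.name);
}

// e.g. listOllamaModels("http://host.docker.internal:11434").then(console.log);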
@@ -20,6 +20,9 @@ services:
       - GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
       - OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
       - VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
+      - RUNNING_IN_DOCKER=true
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
     command: pnpm run dockerstart
     profiles:
       - production # This service only runs in the production profile
@@ -43,6 +46,9 @@ services:
       - GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
       - OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
      - VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
+      - RUNNING_IN_DOCKER=true
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
     volumes:
       - type: bind
         source: .
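The compose changes tie the pieces together: RUNNING_IN_DOCKER=true switches on the localhost -> host.docker.internal rewrite in the code, and extra_hosts maps host.docker.internal to the Docker host gateway so the rewritten URL actually resolves inside the container. A hypothetical smoke test, not part of this commit, that could be run inside the container to confirm the host's Ollama server is reachable:

// Hypothetical connectivity check: ping Ollama's /api/tags endpoint from
// inside the container using the same environment variable the services set.
const OLLAMA_URL = process.env.OLLAMA_API_BASE_URL || "http://host.docker.internal:11434";

async function checkOllama(): Promise<void> {
  try {
    const response = await fetch(`${OLLAMA_URL}/api/tags`);
    console.log(`Ollama reachable at ${OLLAMA_URL}: HTTP ${response.status}`);
  } catch (error) {
    console.error(`Ollama NOT reachable at ${OLLAMA_URL}:`, error);
  }
}

checkOllama();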