From e588b00ab5002d00f982504fa2cb22ac44d162c8 Mon Sep 17 00:00:00 2001
From: "Timothy J. Baek"
Date: Fri, 17 Nov 2023 10:38:29 -0800
Subject: [PATCH] doc: docker compose updated

---
 README.md    |  6 +++---
 compose.yaml | 11 ++++++++---
 2 files changed, 11 insertions(+), 6 deletions(-)

diff --git a/README.md b/README.md
index fced28e27..622ee29eb 100644
--- a/README.md
+++ b/README.md
@@ -33,7 +33,7 @@ ChatGPT-Style Web Interface for Ollama 🦙
 
 - 🤖 **Multiple Model Support**: Seamlessly switch between different chat models for diverse interactions.
 
-- ⚙️ **Many Models Conversations**: : Effortlessly engage with various models simultaneously, harnessing their unique strengths for optimal responses. Enhance your experience by leveraging a diverse set of models in parallel.
+- ⚙️ **Many Models Conversations**: Effortlessly engage with various models simultaneously, harnessing their unique strengths for optimal responses. Enhance your experience by leveraging a diverse set of models in parallel.
 
 - 🤝 **OpenAI Model Integration**: Seamlessly utilize OpenAI models alongside Ollama models for a versatile conversational experience.
 
@@ -62,10 +62,10 @@ ChatGPT-Style Web Interface for Ollama 🦙
 If you don't have Ollama installed yet, you can use the provided Docker Compose file for a hassle-free installation. Simply run the following command:
 
 ```bash
-docker compose up --build
+docker compose up -d --build
 ```
 
-This command will install both Ollama and Ollama Web UI on your system. Ensure to modify the `compose.yaml` file for GPU support if needed.
+This command will install both Ollama and Ollama Web UI on your system. Be sure to modify the `compose.yaml` file if you need GPU support or want to expose the Ollama API outside the container stack.
 
 ### Installing Ollama Web UI Only
 
diff --git a/compose.yaml b/compose.yaml
index 0a77d64f8..b50363542 100644
--- a/compose.yaml
+++ b/compose.yaml
@@ -13,8 +13,9 @@ services:
     #             - gpu
     volumes:
       - ollama:/root/.ollama
-    ports:
-      - 11434:11434
+    # Uncomment below to expose the Ollama API outside the container stack
+    # ports:
+    #   - 11434:11434
     container_name: ollama
     pull_policy: always
     tty: true
@@ -29,10 +30,14 @@ services:
       dockerfile: Dockerfile
     image: ollama-webui:latest
     container_name: ollama-webui
+    depends_on:
+      - ollama
     ports:
       - 3000:8080
+    environment:
+      - "OLLAMA_API_BASE_URL=http://ollama:11434/api"
     extra_hosts:
-        - host.docker.internal:host-gateway
+      - host.docker.internal:host-gateway
     restart: unless-stopped
 
 volumes:
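
For reference, the first `compose.yaml` hunk leaves the GPU block commented out. A minimal sketch of what the `ollama` service looks like with that block enabled is below; it assumes an NVIDIA GPU plus the NVIDIA Container Toolkit on the host, and `count: 1` is illustrative (adjust or use `all` as needed).

```yaml
# Sketch: compose.yaml GPU reservation for the ollama service, uncommented.
# Assumes the NVIDIA Container Toolkit is installed on the host;
# `count: 1` is illustrative and can be raised or set to `all`.
services:
  ollama:
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities:
                - gpu
```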
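
To sanity-check the new wiring after `docker compose up -d --build`: the web UI now reaches Ollama over the compose network via `OLLAMA_API_BASE_URL`, so from the host the Ollama API only answers once the `ports:` mapping is uncommented. A quick smoke test, using the ports defined in this `compose.yaml` (the `/api/tags` endpoint lists locally available models):

```bash
# The web UI is published on port 3000:
curl -I http://localhost:3000

# The Ollama API responds on 11434 only if the ports mapping
# in compose.yaml has been uncommented:
curl http://localhost:11434/api/tags
```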