fix: docker container volume mount location
This commit is contained in:
parent 21c7f50790
commit 7bdef56192
@@ -112,14 +112,14 @@ After installing Ollama, verify that Ollama is running by accessing the following
If Ollama is hosted on your local machine and accessible at [http://127.0.0.1:11434/](http://127.0.0.1:11434/), run the following command:
```bash
-docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend --name ollama-webui --restart always ghcr.io/ollama-webui/ollama-webui:main
+docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend/data --name ollama-webui --restart always ghcr.io/ollama-webui/ollama-webui:main
```
Alternatively, if you prefer to build the container yourself, use the following command:
```bash
docker build -t ollama-webui .
-docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend --name ollama-webui --restart always ollama-webui
+docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend/data --name ollama-webui --restart always ollama-webui
```
Your Ollama Web UI should now be hosted at [http://localhost:3000](http://localhost:3000) and accessible over LAN (or Network). Enjoy! 😄
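To confirm the new mount location once the container is up, a quick check (a sketch, using the container and volume names from the command above):

```bash
# Show where the ollama-webui volume is mounted inside the running container
docker inspect ollama-webui --format '{{ json .Mounts }}'

# Show the volume itself (driver, mountpoint on the host)
docker volume inspect ollama-webui
```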
@@ -129,14 +129,14 @@ Your Ollama Web UI should now be hosted at [http://localhost:3000](http://localhost:3000)
Change `OLLAMA_API_BASE_URL` environment variable to match the external Ollama Server url:
```bash
-docker run -d -p 3000:8080 -e OLLAMA_API_BASE_URL=https://example.com/api -v ollama-webui:/app/backend --name ollama-webui --restart always ghcr.io/ollama-webui/ollama-webui:main
+docker run -d -p 3000:8080 -e OLLAMA_API_BASE_URL=https://example.com/api -v ollama-webui:/app/backend/data --name ollama-webui --restart always ghcr.io/ollama-webui/ollama-webui:main
```
Alternatively, if you prefer to build the container yourself, use the following command:
```bash
docker build -t ollama-webui .
-docker run -d -p 3000:8080 -e OLLAMA_API_BASE_URL=https://example.com/api -v ollama-webui:/app/backend --name ollama-webui --restart always ollama-webui
+docker run -d -p 3000:8080 -e OLLAMA_API_BASE_URL=https://example.com/api -v ollama-webui:/app/backend/data --name ollama-webui --restart always ollama-webui
```
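If the external Ollama server address is in doubt, it can be checked before starting the container (a sketch; `https://example.com/api` is the placeholder from the commands above, and `/api/tags` is Ollama's model-listing endpoint):

```bash
# A 200 response with a JSON model list means the Web UI will be able to reach it
curl -fsS https://example.com/api/tags
```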
## How to Install Without Docker
@@ -1,4 +1,4 @@
from peewee import *
DB = SqliteDatabase("./ollama.db")
|
||||
DB = SqliteDatabase("./data/ollama.db")
DB.connect()
backend/data/readme.txt (new file, 1 line)
@@ -0,0 +1 @@
+dir for backend files (db, documents, etc.)
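This placeholder keeps `backend/data/` checked into the repository, which matters because SQLite creates the database file but not its parent directory: `./data` has to exist before `DB.connect()` can open `./data/ollama.db`. A minimal sketch of the same precaution when running the backend directly (path assumed relative to the backend working directory):

```bash
# Make sure the data directory exists before the backend opens ./data/ollama.db
mkdir -p data
```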
@@ -19,7 +19,7 @@ services:
image: ollama-webui:latest
container_name: ollama-webui
volumes:
-  - ollama-webui:/app/backend
+  - ollama-webui:/app/backend/data
depends_on:
- ollama
ports:
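Worth noting for existing installs: the named volume is still `ollama-webui`, and the database file sits at the volume root both before (`/app/backend/ollama.db`) and after (`/app/backend/data/ollama.db`) this change, so existing data should carry over, assuming the backend runs with `/app/backend` as its working directory. A quick way to peek at what the volume currently holds (a sketch, using a throwaway `alpine` container):

```bash
# List the contents of the ollama-webui volume without touching the Web UI container
docker run --rm -v ollama-webui:/vol alpine ls -la /vol
```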
run.sh (2 changes)
@@ -1,5 +1,5 @@
docker stop ollama-webui || true
docker rm ollama-webui || true
docker build -t ollama-webui .
-docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend --name ollama-webui --restart always ollama-webui
+docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend/data --name ollama-webui --restart always ollama-webui
docker image prune -f