open-webui/run-ollama-docker.sh

#!/bin/bash
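
# Ask whether the container should be started with GPU access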
read -r -p "Do you want ollama in Docker with GPU support? (y/n): " use_gpu
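
# Remove any existing ollama container and pull the latest image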
docker rm -f ollama || true
docker pull ollama/ollama:latest

# Build the docker run options as a bash array so each flag is passed as a separate argument
docker_args=(-d -v ollama:/root/.ollama -p 11434:11434 --name ollama)
if [ "$use_gpu" == "y" ]; then
    docker_args+=(--gpus=all)
fi

docker run "${docker_args[@]}" ollama/ollama
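
# Clean up dangling images left behind by the pull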
docker image prune -f