#!/bin/bash
# Run ollama in Docker, optionally with GPU support.
# Replaces any existing 'ollama' container, pulls the latest image,
# starts it detached with a persistent volume, then prunes dangling images.
set -euo pipefail

host_port=11434
container_port=11434

read -r -p "Do you want ollama in Docker with GPU support? (y/n): " use_gpu

# Remove a previous container if present; '|| true' keeps this best-effort.
docker rm -f ollama || true
docker pull ollama/ollama:latest

# Build the argument list as an ARRAY so each flag stays a separate word.
# (Passing a single string as "$docker_args" hands docker one giant argument
# and the run fails; unquoted expansion would also glob/word-split.)
docker_args=(-d -v ollama:/root/.ollama -p "$host_port:$container_port" --name ollama)

if [ "$use_gpu" == "y" ]; then
  # --gpus is a docker option: it must come BEFORE the image name,
  # otherwise docker treats it as the container's command.
  docker_args+=(--gpus=all)
fi

# Image name goes last, after all docker options.
docker_args+=(ollama/ollama)

docker run "${docker_args[@]}"
docker image prune -f