diff --git a/Dockerfile b/Dockerfile
index 3019c2eaa..b70c82437 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -4,12 +4,16 @@ FROM node:alpine as build
 
 WORKDIR /app
 
+# wget embedding model weight from alpine (does not exist from slim-buster)
+RUN wget "https://chroma-onnx-models.s3.amazonaws.com/all-MiniLM-L6-v2/onnx.tar.gz"
+
 COPY package.json package-lock.json ./
 RUN npm ci
 
 COPY . .
 RUN npm run build
 
+
 FROM python:3.11-slim-bookworm as base
 
 ENV ENV=prod
@@ -22,6 +26,14 @@ ENV OPENAI_API_KEY ""
 ENV WEBUI_JWT_SECRET_KEY "SECRET_KEY"
 
 WORKDIR /app
+
+# copy embedding weight from build
+COPY --from=build onnx.tar.gz /root/.cache/chroma/onnx_models/all-MiniLM-L6-v2
+
+RUN cd /root/.cache/chroma/onnx_models/all-MiniLM-L6-v2 &&\
+    tar -xzf onnx.tar.gz
+
+# copy built frontend files
 COPY --from=build /app/build /app/build
 
 WORKDIR /app/backend
@@ -29,13 +41,6 @@ WORKDIR /app/backend
 COPY ./backend/requirements.txt ./requirements.txt
 RUN pip3 install -r requirements.txt
 
-RUN MODEL_DIR="/root/.cache/chroma/onnx_models/all-MiniLM-L6-v2" &&\
-    ARCHIVE_NAME="onnx.tar.gz" &&\
-    mkdir -p $MODEL_DIR &&\
-    cd $MODEL_DIR &&\
-    wget -O $ARCHIVE_NAME "https://chroma-onnx-models.s3.amazonaws.com/all-MiniLM-L6-v2/$ARCHIVE_NAME" &&\
-    tar -xzf $ARCHIVE_NAME
-
 # RUN python -c "from sentence_transformers import SentenceTransformer; model = SentenceTransformer('all-MiniLM-L6-v2')"
 
 COPY ./backend .
diff --git a/run.sh b/run.sh
index 0ada65d1d..6e2dc6112 100644
--- a/run.sh
+++ b/run.sh
@@ -1,5 +1,5 @@
+docker build -t ollama-webui .
 docker stop ollama-webui || true
 docker rm ollama-webui || true
-docker build -t ollama-webui .
 docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend/data --name ollama-webui --restart always ollama-webui
 docker image prune -f
\ No newline at end of file