diff --git a/Dockerfile b/Dockerfile
index 8078bf0ea..f1ea4e064 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -38,6 +38,7 @@ ARG USE_OLLAMA
 ARG USE_CUDA_VER
 ARG USE_EMBEDDING_MODEL
 ARG USE_RERANKING_MODEL
+ARG EXTRA_MODULES
 ARG UID
 ARG GID
 
@@ -49,7 +50,8 @@ ENV ENV=prod \
     USE_CUDA_DOCKER=${USE_CUDA} \
     USE_CUDA_DOCKER_VER=${USE_CUDA_VER} \
     USE_EMBEDDING_MODEL_DOCKER=${USE_EMBEDDING_MODEL} \
-    USE_RERANKING_MODEL_DOCKER=${USE_RERANKING_MODEL}
+    USE_RERANKING_MODEL_DOCKER=${USE_RERANKING_MODEL} \
+    EXTRA_MODULES_DOCKER=${EXTRA_MODULES}
 
 ## Basis URL Config ##
 ENV OLLAMA_BASE_URL="/ollama" \
diff --git a/backend/start.sh b/backend/start.sh
index 16a004e45..b499736b8 100755
--- a/backend/start.sh
+++ b/backend/start.sh
@@ -30,6 +30,10 @@ if [[ "${USE_CUDA_DOCKER,,}" == "true" ]]; then
   export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python3.11/site-packages/torch/lib:/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib"
 fi
 
+if [ -n "$EXTRA_MODULES_DOCKER" ]; then
+  echo "Loading extra modules: $EXTRA_MODULES_DOCKER"
+  uv pip install --system $EXTRA_MODULES_DOCKER --no-cache-dir
+fi
 # Check if SPACE_ID is set, if so, configure for space
 if [ -n "$SPACE_ID" ]; then