diff --git a/Dockerfile b/Dockerfile
index f1ea4e064..8078bf0ea 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -38,7 +38,6 @@ ARG USE_OLLAMA
 ARG USE_CUDA_VER
 ARG USE_EMBEDDING_MODEL
 ARG USE_RERANKING_MODEL
-ARG EXTRA_MODULES
 ARG UID
 ARG GID
 
@@ -50,8 +49,7 @@ ENV ENV=prod \
     USE_CUDA_DOCKER=${USE_CUDA} \
     USE_CUDA_DOCKER_VER=${USE_CUDA_VER} \
     USE_EMBEDDING_MODEL_DOCKER=${USE_EMBEDDING_MODEL} \
-    USE_RERANKING_MODEL_DOCKER=${USE_RERANKING_MODEL} \
-    EXTRA_MODULES_DOCKER=${EXTRA_MODULES}
+    USE_RERANKING_MODEL_DOCKER=${USE_RERANKING_MODEL}
 
 ## Basis URL Config ##
 ENV OLLAMA_BASE_URL="/ollama" \
diff --git a/backend/start.sh b/backend/start.sh
index b499736b8..0a5c48e8c 100755
--- a/backend/start.sh
+++ b/backend/start.sh
@@ -30,11 +30,6 @@ if [[ "${USE_CUDA_DOCKER,,}" == "true" ]]; then
   export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python3.11/site-packages/torch/lib:/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib"
 fi
 
-if [ -n "$EXTRA_MODULES_DOCKER" ]; then
-  echo "Loading extra modules: $EXTRA_MODULES_DOCKER"
-  uv pip install --system $EXTRA_MODULES_DOCKER --no-cache-dir
-fi
-
 # Check if SPACE_ID is set, if so, configure for space
 if [ -n "$SPACE_ID" ]; then
   echo "Configuring for HuggingFace Space deployment"