From 71d88fe35d68dc8ba5907d63fb787ea33bed072e Mon Sep 17 00:00:00 2001
From: Jan-Timo Hesse
Date: Thu, 8 Aug 2024 09:45:52 +0200
Subject: [PATCH] revert

---
 Dockerfile       | 4 +---
 backend/start.sh | 5 -----
 2 files changed, 1 insertion(+), 8 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index f1ea4e064..8078bf0ea 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -38,7 +38,6 @@ ARG USE_OLLAMA
 ARG USE_CUDA_VER
 ARG USE_EMBEDDING_MODEL
 ARG USE_RERANKING_MODEL
-ARG EXTRA_MODULES
 ARG UID
 ARG GID
 
@@ -50,8 +49,7 @@ ENV ENV=prod \
     USE_CUDA_DOCKER=${USE_CUDA} \
     USE_CUDA_DOCKER_VER=${USE_CUDA_VER} \
     USE_EMBEDDING_MODEL_DOCKER=${USE_EMBEDDING_MODEL} \
-    USE_RERANKING_MODEL_DOCKER=${USE_RERANKING_MODEL} \
-    EXTRA_MODULES_DOCKER=${EXTRA_MODULES}
+    USE_RERANKING_MODEL_DOCKER=${USE_RERANKING_MODEL}
 
 ## Basis URL Config ##
 ENV OLLAMA_BASE_URL="/ollama" \
diff --git a/backend/start.sh b/backend/start.sh
index b499736b8..0a5c48e8c 100755
--- a/backend/start.sh
+++ b/backend/start.sh
@@ -30,11 +30,6 @@ if [[ "${USE_CUDA_DOCKER,,}" == "true" ]]; then
     export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python3.11/site-packages/torch/lib:/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib"
 fi
 
-if [ -n "$EXTRA_MODULES_DOCKER" ]; then
-    echo "Loading extra modules: $EXTRA_MODULES_DOCKER"
-    uv pip install --system $EXTRA_MODULES_DOCKER --no-cache-dir
-fi
-
 # Check if SPACE_ID is set, if so, configure for space
 if [ -n "$SPACE_ID" ]; then
     echo "Configuring for HuggingFace Space deployment"