Jan-Timo Hesse committed 8 months ago
parent commit 71d88fe35d
2 changed files with 1 addition and 8 deletions
  1. +1 -3 Dockerfile
  2. +0 -5 backend/start.sh

+1 -3 Dockerfile

@@ -38,7 +38,6 @@ ARG USE_OLLAMA
 ARG USE_CUDA_VER
 ARG USE_EMBEDDING_MODEL
 ARG USE_RERANKING_MODEL
-ARG EXTRA_MODULES
 ARG UID
 ARG GID
 
@@ -50,8 +49,7 @@ ENV ENV=prod \
     USE_CUDA_DOCKER=${USE_CUDA} \
     USE_CUDA_DOCKER_VER=${USE_CUDA_VER} \
     USE_EMBEDDING_MODEL_DOCKER=${USE_EMBEDDING_MODEL} \
-    USE_RERANKING_MODEL_DOCKER=${USE_RERANKING_MODEL} \
-    EXTRA_MODULES_DOCKER=${EXTRA_MODULES}
+    USE_RERANKING_MODEL_DOCKER=${USE_RERANKING_MODEL}
 
 ## Basis URL Config ##
 ENV OLLAMA_BASE_URL="/ollama" \
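
For context, the removed ARG EXTRA_MODULES (copied into the EXTRA_MODULES_DOCKER environment variable above) let a caller name extra Python packages at image build time, which backend/start.sh then installed when the container started. A minimal sketch of how it would have been passed before this commit; the image tag and package names are placeholders, not taken from the diff:

# Hypothetical pre-change build: EXTRA_MODULES flows into EXTRA_MODULES_DOCKER
# and is pip-installed by backend/start.sh on container start.
docker build \
  --build-arg EXTRA_MODULES="example-package another-package" \
  -t open-webui:custom .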

+0 -5 backend/start.sh

@@ -30,11 +30,6 @@ if [[ "${USE_CUDA_DOCKER,,}" == "true" ]]; then
   export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python3.11/site-packages/torch/lib:/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib"
 fi
 
-if [ -n "$EXTRA_MODULES_DOCKER" ]; then
-  echo "Loading extra modules: $EXTRA_MODULES_DOCKER"
-  uv pip install --system $EXTRA_MODULES_DOCKER --no-cache-dir
-fi
-
 # Check if SPACE_ID is set, if so, configure for space
 if [ -n "$SPACE_ID" ]; then
   echo "Configuring for HuggingFace Space deployment"