Przeglądaj źródła

Set cudnn LD_LIBRARY_PATH to fix whisper inference

Self Denial 1 rok temu
rodzic
commit
0f90332e61
1 zmienionych plików z 1 dodań i 1 usunięć
  1. 1 1
      backend/start.sh

+ 1 - 1
backend/start.sh

@@ -26,7 +26,7 @@ fi
 
 
 if [ "$USE_CUDA_DOCKER" = "true" ]; then
   echo "CUDA is enabled, appending LD_LIBRARY_PATH to include torch/cudnn & cublas libraries."
-  export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python3.11/site-packages/torch/lib:/usr/local/lib/python3.11/site-packages/nvidia/cublas/lib"
+  export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python3.11/site-packages/torch/lib:/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib"
 fi
 
 
 WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" exec uvicorn main:app --host 0.0.0.0 --port "$PORT" --forwarded-allow-ips '*'