@@ -80,12 +80,12 @@ usage() {
     echo "  -h, --help    Show this help message."
     echo ""
     echo "Examples:"
-    echo "  ./$0 --drop"
-    echo "  ./$0 --enable-gpu[count=1]"
-    echo "  ./$0 --enable-api[port=11435]"
-    echo "  ./$0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000]"
-    echo "  ./$0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000] --data[folder=./ollama-data]"
-    echo "  ./$0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000] --data[folder=./ollama-data] --build"
+    echo "  $0 --drop"
+    echo "  $0 --enable-gpu[count=1]"
+    echo "  $0 --enable-api[port=11435]"
+    echo "  $0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000]"
+    echo "  $0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000] --data[folder=./ollama-data]"
+    echo "  $0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000] --data[folder=./ollama-data] --build"
     echo ""
     echo "This script configures and runs a docker-compose setup with optional GPU support, API exposure, and web UI configuration."
     echo "About the gpu to use, the script automatically detects it using the "lspci" command."
@@ -234,4 +234,4 @@ else
     echo "Aborted."
 fi

-echo
+echo