run-compose.sh

#!/bin/bash

# Define color and formatting codes
BOLD='\033[1m'
GREEN='\033[1;32m'
WHITE='\033[1;37m'
RED='\033[0;31m'
NC='\033[0m' # No Color

# Unicode character for tick mark
TICK='\u2713'

# Detect GPU driver
get_gpu_driver() {
    # Detect NVIDIA GPUs using lspci or nvidia-smi
    if lspci | grep -i nvidia >/dev/null || nvidia-smi >/dev/null 2>&1; then
        echo "nvidia"
        return
    fi

    # Detect AMD GPUs (including GCN architecture check for amdgpu vs radeon)
    if lspci | grep -i amd >/dev/null; then
        # List of known GCN and later architecture cards
        # This is a simplified list; in a real-world scenario you'd want a more comprehensive one
        local gcn_and_later=("Radeon HD 7000" "Radeon HD 8000" "Radeon R5" "Radeon R7" "Radeon R9" "Radeon RX")

        # Get GPU information
        local gpu_info=$(lspci | grep -i 'vga.*amd')

        for model in "${gcn_and_later[@]}"; do
            if echo "$gpu_info" | grep -iq "$model"; then
                echo "amdgpu"
                return
            fi
        done

        # Default to radeon if no GCN or later architecture is detected
        echo "radeon"
        return
    fi

    # Detect Intel GPUs
    if lspci | grep -i intel >/dev/null; then
        echo "i915"
        return
    fi

    # If no known GPU is detected
    echo "Unknown or unsupported GPU driver"
    exit 1
}
# Function for rolling animation
show_loading() {
    local spin='-\|/'
    local i=0

    printf " "

    while kill -0 $1 2>/dev/null; do
        i=$(( (i+1) % 4 ))
        printf "\b${spin:$i:1}"
        sleep .1
    done

    # Replace the spinner with a tick
    printf "\b${GREEN}${TICK}${NC}"
}
# Usage information
usage() {
    echo "Usage: $0 [OPTIONS]"
    echo "Options:"
    echo "  --enable-gpu[count=COUNT]  Enable GPU support with the specified count."
    echo "  --enable-api[port=PORT]    Enable API and expose it on the specified port."
    echo "  --webui[port=PORT]         Set the port for the web user interface."
    echo "  --data[folder=PATH]        Bind mount for ollama data folder (by default will create the 'ollama' volume)."
    echo "  --build                    Build the docker image before running the compose project."
    echo "  --drop                     Drop the compose project."
    echo "  -q, --quiet                Run script in headless mode."
    echo "  -h, --help                 Show this help message."
    echo ""
    echo "Examples:"
    echo "  $0 --drop"
    echo "  $0 --enable-gpu[count=1]"
    echo "  $0 --enable-gpu[count=all]"
    echo "  $0 --enable-api[port=11435]"
    echo "  $0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000]"
    echo "  $0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000] --data[folder=./ollama-data]"
    echo "  $0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000] --data[folder=./ollama-data] --build"
    echo ""
    echo "This script configures and runs a docker-compose setup with optional GPU support, API exposure, and web UI configuration."
  79. echo "About the gpu to use, the script automatically detects it using the "lspci" command."
  80. echo "In this case the gpu detected is: $(get_gpu_driver)"
  81. }
# Default values
gpu_count=1
api_port=11435
webui_port=3000
headless=false
build_image=false
kill_compose=false

# Function to extract value from the parameter
extract_value() {
    echo "$1" | sed -E 's/.*\[.*=(.*)\].*/\1/; t; s/.*//'
}
# Parse arguments
while [[ $# -gt 0 ]]; do
    key="$1"

    case $key in
        --enable-gpu*)
            enable_gpu=true
            value=$(extract_value "$key")
            gpu_count=${value:-1}
            ;;
        --enable-api*)
            enable_api=true
            value=$(extract_value "$key")
            api_port=${value:-11435}
            ;;
        --webui*)
            value=$(extract_value "$key")
            webui_port=${value:-3000}
            ;;
        --data*)
            value=$(extract_value "$key")
            data_dir=${value:-"./ollama-data"}
            ;;
        --drop)
            kill_compose=true
            ;;
        --build)
            build_image=true
            ;;
        -q|--quiet)
            headless=true
            ;;
        -h|--help)
            usage
            exit
            ;;
        *)
            # Unknown option
            echo "Unknown option: $key"
            usage
            exit 1
            ;;
    esac

    shift # past argument or value
done
if [[ $kill_compose == true ]]; then
    docker compose down --remove-orphans
    echo -e "${GREEN}${BOLD}Compose project dropped successfully.${NC}"
    exit
else
    # Assemble the docker compose command from the selected options
    DEFAULT_COMPOSE_COMMAND="docker compose -f docker-compose.yaml"

    if [[ $enable_gpu == true ]]; then
        # Validate and process command-line arguments
        if [[ -n $gpu_count ]]; then
            if ! [[ $gpu_count =~ ^([0-9]+|all)$ ]]; then
                echo "Invalid GPU count: $gpu_count"
                exit 1
            fi
            echo "Enabling GPU with $gpu_count GPUs"
            # Add your GPU allocation logic here
            export OLLAMA_GPU_DRIVER=$(get_gpu_driver)
            export OLLAMA_GPU_COUNT=$gpu_count # Set OLLAMA_GPU_COUNT environment variable
        fi
        DEFAULT_COMPOSE_COMMAND+=" -f docker-compose.gpu.yaml"
    fi

    if [[ $enable_api == true ]]; then
        DEFAULT_COMPOSE_COMMAND+=" -f docker-compose.api.yaml"
        if [[ -n $api_port ]]; then
            export OLLAMA_WEBAPI_PORT=$api_port # Set OLLAMA_WEBAPI_PORT environment variable
        fi
    fi

    if [[ -n $data_dir ]]; then
        DEFAULT_COMPOSE_COMMAND+=" -f docker-compose.data.yaml"
        export OLLAMA_DATA_DIR=$data_dir # Set OLLAMA_DATA_DIR environment variable
    fi

    if [[ -n $webui_port ]]; then
        export OPEN_WEBUI_PORT=$webui_port # Set OPEN_WEBUI_PORT environment variable
    fi

    DEFAULT_COMPOSE_COMMAND+=" up -d"
    DEFAULT_COMPOSE_COMMAND+=" --remove-orphans"
    DEFAULT_COMPOSE_COMMAND+=" --force-recreate"

    if [[ $build_image == true ]]; then
        DEFAULT_COMPOSE_COMMAND+=" --build"
    fi
fi
# Recap of environment variables
echo
echo -e "${WHITE}${BOLD}Current Setup:${NC}"
echo -e "   ${GREEN}${BOLD}GPU Driver:${NC} ${OLLAMA_GPU_DRIVER:-Not Enabled}"
echo -e "   ${GREEN}${BOLD}GPU Count:${NC} ${OLLAMA_GPU_COUNT:-Not Enabled}"
echo -e "   ${GREEN}${BOLD}WebAPI Port:${NC} ${OLLAMA_WEBAPI_PORT:-Not Enabled}"
echo -e "   ${GREEN}${BOLD}Data Folder:${NC} ${data_dir:-Using ollama volume}"
echo -e "   ${GREEN}${BOLD}WebUI Port:${NC} $webui_port"
echo
if [[ $headless == true ]]; then
    echo -ne "${WHITE}${BOLD}Running in headless mode... ${NC}"
    choice="y"
else
    # Ask for user acceptance
    echo -ne "${WHITE}${BOLD}Do you want to proceed with current setup? (Y/n): ${NC}"
    read -n1 -s choice
fi

echo

if [[ $choice == "" || $choice == "y" || $choice == "Y" ]]; then
    # Execute the command with the current user
    eval "$DEFAULT_COMPOSE_COMMAND" &

    # Capture the background process PID
    PID=$!

    # Display the loading animation
    #show_loading $PID

    # Wait for the command to finish and capture its exit status
    # before the following echo overwrites $?
    wait $PID
    status=$?

    echo

    # Check exit status
    if [ $status -eq 0 ]; then
        echo -e "${GREEN}${BOLD}Compose project started successfully.${NC}"
    else
        echo -e "${RED}${BOLD}There was an error starting the compose project.${NC}"
    fi
else
    echo "Aborted."
fi

echo