Przeglądaj źródła

feat: deploy to hf spaces

Timothy J. Baek 11 miesięcy temu
rodzic
commit
c1a97278a8

+ 46 - 0
.github/workflows/deploy-to-hf-spaces.yml

@@ -0,0 +1,46 @@
+name: Deploy to HuggingFace Spaces
+
+on:
+  push:
+    branches:
+      - dev
+      - main
+  workflow_dispatch: # allow manual deploys from the Actions tab
+
+jobs:
+  check-secret:
+    runs-on: ubuntu-latest
+    outputs:
+      token-set: ${{ steps.check-key.outputs.defined }}
+    steps:
+      - id: check-key
+        env:
+          HF_TOKEN: ${{ secrets.HF_TOKEN }}
+        if: "${{ env.HF_TOKEN != '' }}" # secrets aren't readable in job-level if; expose presence via an output
+        run: echo "defined=true" >> $GITHUB_OUTPUT
+
+  deploy:
+    runs-on: ubuntu-latest
+    needs: [check-secret]
+    if: needs.check-secret.outputs.token-set == 'true'
+    env:
+      HF_TOKEN: ${{ secrets.HF_TOKEN }}
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Remove git history
+        run: rm -rf .git
+
+      - name: Configure git
+        run: |
+          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
+          git config --global user.name "github-actions[bot]"
+      - name: Set up Git and push to Space
+        run: |
+          git init --initial-branch=main
+          git lfs track "*.ttf"
+          rm -f demo.gif # -f: don't fail the whole deploy if the demo asset is ever absent
+          git add .
+          git commit -m "GitHub deploy: ${{ github.sha }}"
+          git push --force https://open-webui:${HF_TOKEN}@huggingface.co/spaces/open-webui/open-webui main

+ 9 - 0
README.md

@@ -1,3 +1,12 @@
+---
+title: Open WebUI
+emoji: 🐳
+colorFrom: purple
+colorTo: gray
+sdk: docker # build and run the Space from the repository Dockerfile
+app_port: 8080 # port the Space proxy forwards to; must match the port the app binds inside the container
+---
+
 # Open WebUI (Formerly Ollama WebUI) 👋
 
 ![GitHub stars](https://img.shields.io/github/stars/open-webui/open-webui?style=social)

+ 43 - 0
backend/space/litellm_config.yaml

@@ -0,0 +1,43 @@
+litellm_settings:
+  drop_params: true # drop request params a provider doesn't support instead of raising an error
+model_list:
+  - model_name: 'HuggingFace: Mistral: Mistral 7B Instruct v0.1'
+    litellm_params:
+      model: huggingface/mistralai/Mistral-7B-Instruct-v0.1
+      api_key: os.environ/HF_TOKEN # resolved from the HF_TOKEN env var at runtime
+      max_tokens: 1024
+  - model_name: 'HuggingFace: Mistral: Mistral 7B Instruct v0.2'
+    litellm_params:
+      model: huggingface/mistralai/Mistral-7B-Instruct-v0.2
+      api_key: os.environ/HF_TOKEN
+      max_tokens: 1024
+  - model_name: 'HuggingFace: Meta: Llama 3 8B Instruct'
+    litellm_params:
+      model: huggingface/meta-llama/Meta-Llama-3-8B-Instruct
+      api_key: os.environ/HF_TOKEN
+      max_tokens: 2047 # NOTE(review): 2047 (not 2048) looks deliberate headroom — confirm
+  - model_name: 'HuggingFace: Mistral: Mixtral 8x7B Instruct v0.1'
+    litellm_params:
+      model: huggingface/mistralai/Mixtral-8x7B-Instruct-v0.1
+      api_key: os.environ/HF_TOKEN
+      max_tokens: 8192
+  - model_name: 'HuggingFace: Microsoft: Phi-3 Mini-4K-Instruct'
+    litellm_params:
+      model: huggingface/microsoft/Phi-3-mini-4k-instruct
+      api_key: os.environ/HF_TOKEN
+      max_tokens: 1024
+  - model_name: 'HuggingFace: Google: Gemma 7B 1.1'
+    litellm_params:
+      model: huggingface/google/gemma-1.1-7b-it
+      api_key: os.environ/HF_TOKEN
+      max_tokens: 1024
+  - model_name: 'HuggingFace: Yi-1.5 34B Chat'
+    litellm_params:
+      model: huggingface/01-ai/Yi-1.5-34B-Chat
+      api_key: os.environ/HF_TOKEN
+      max_tokens: 1024
+  - model_name: 'HuggingFace: Nous Research: Nous Hermes 2 Mixtral 8x7B DPO'
+    litellm_params:
+      model: huggingface/NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO
+      api_key: os.environ/HF_TOKEN
+      max_tokens: 2048

+ 28 - 0
backend/start.sh

@@ -30,4 +30,32 @@ if [ "$USE_CUDA_DOCKER" = "true" ]; then
   export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python3.11/site-packages/torch/lib:/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib"
 fi
 
+
+# HFSPACE:START
+# Check if SPACE_ID is set, if so, configure for space
+if [ -n "$SPACE_ID" ]; then
+  echo "Configuring for HuggingFace Space deployment"
+
+  # Copy litellm_config.yaml with specified ownership
+  echo "Copying litellm_config.yaml to the desired location with specified ownership..."
+  cp ./backend/space/litellm_config.yaml ./data/litellm/config.yaml
+
+  WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" uvicorn main:app --host "$HOST" --port "$PORT" --forwarded-allow-ips '*' &
+  webui_pid=$!
+  echo "Waiting for webui to start..."
+  while ! curl -s "http://localhost:${PORT}/health" > /dev/null; do
+    sleep 1 # NOTE(review): no startup timeout — spins forever if uvicorn dies; consider a retry cap
+  done
+  echo "Creating admin user..."
+  curl \
+    -X POST "http://localhost:${PORT}/api/v1/auths/signup" \
+    -H "accept: application/json" \
+    -H "Content-Type: application/json" \
+    -d "{ \"email\": \"${ADMIN_USER_EMAIL}\", \"password\": \"${ADMIN_USER_PASSWORD}\", \"name\": \"Admin\" }"
+  echo "Shutting down webui..."
+  kill $webui_pid
+  export WEBUI_URL="${SPACE_HOST}"
+fi
+# HFSPACE:END
+
 WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" exec uvicorn main:app --host "$HOST" --port "$PORT" --forwarded-allow-ips '*'