Mirror of https://github.com/open-webui/open-webui
Commit 19bb6e36e1

.github/workflows/deploy-to-hf-spaces.yml (vendored, new file, 59 lines added)
@@ -0,0 +1,59 @@
name: Deploy to HuggingFace Spaces

on:
  push:
    branches:
      - dev
      - main
  workflow_dispatch:

jobs:
  check-secret:
    runs-on: ubuntu-latest
    outputs:
      token-set: ${{ steps.check-key.outputs.defined }}
    steps:
      - id: check-key
        env:
          HF_TOKEN: ${{ secrets.HF_TOKEN }}
        if: "${{ env.HF_TOKEN != '' }}"
        run: echo "defined=true" >> $GITHUB_OUTPUT

  deploy:
    runs-on: ubuntu-latest
    needs: [check-secret]
    if: needs.check-secret.outputs.token-set == 'true'
    env:
      HF_TOKEN: ${{ secrets.HF_TOKEN }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Remove git history
        run: rm -rf .git

      - name: Prepend YAML front matter to README.md
        run: |
          echo "---" > temp_readme.md
          echo "title: Open WebUI" >> temp_readme.md
          echo "emoji: 🐳" >> temp_readme.md
          echo "colorFrom: purple" >> temp_readme.md
          echo "colorTo: gray" >> temp_readme.md
          echo "sdk: docker" >> temp_readme.md
          echo "app_port: 8080" >> temp_readme.md
          echo "---" >> temp_readme.md
          cat README.md >> temp_readme.md
          mv temp_readme.md README.md

      - name: Configure git
        run: |
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git config --global user.name "github-actions[bot]"
      - name: Set up Git and push to Space
        run: |
          git init --initial-branch=main
          git lfs track "*.ttf"
          rm demo.gif
          git add .
          git commit -m "GitHub deploy: ${{ github.sha }}"
          git push --force https://open-webui:${HF_TOKEN}@huggingface.co/spaces/open-webui/open-webui main
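
Since the workflow also declares a workflow_dispatch trigger, a deploy can be started by hand. A minimal sketch using the GitHub CLI (assuming gh is installed and authenticated for this repository; the check-secret job still requires the HF_TOKEN secret to be set):

    # manually dispatch the deploy workflow against the main branch
    gh workflow run deploy-to-hf-spaces.yml --ref main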

backend/space/litellm_config.yaml (new file, 43 lines added)
@@ -0,0 +1,43 @@
litellm_settings:
  drop_params: true
model_list:
  - model_name: 'HuggingFace: Mistral: Mistral 7B Instruct v0.1'
    litellm_params:
      model: huggingface/mistralai/Mistral-7B-Instruct-v0.1
      api_key: os.environ/HF_TOKEN
      max_tokens: 1024
  - model_name: 'HuggingFace: Mistral: Mistral 7B Instruct v0.2'
    litellm_params:
      model: huggingface/mistralai/Mistral-7B-Instruct-v0.2
      api_key: os.environ/HF_TOKEN
      max_tokens: 1024
  - model_name: 'HuggingFace: Meta: Llama 3 8B Instruct'
    litellm_params:
      model: huggingface/meta-llama/Meta-Llama-3-8B-Instruct
      api_key: os.environ/HF_TOKEN
      max_tokens: 2047
  - model_name: 'HuggingFace: Mistral: Mixtral 8x7B Instruct v0.1'
    litellm_params:
      model: huggingface/mistralai/Mixtral-8x7B-Instruct-v0.1
      api_key: os.environ/HF_TOKEN
      max_tokens: 8192
  - model_name: 'HuggingFace: Microsoft: Phi-3 Mini-4K-Instruct'
    litellm_params:
      model: huggingface/microsoft/Phi-3-mini-4k-instruct
      api_key: os.environ/HF_TOKEN
      max_tokens: 1024
  - model_name: 'HuggingFace: Google: Gemma 7B 1.1'
    litellm_params:
      model: huggingface/google/gemma-1.1-7b-it
      api_key: os.environ/HF_TOKEN
      max_tokens: 1024
  - model_name: 'HuggingFace: Yi-1.5 34B Chat'
    litellm_params:
      model: huggingface/01-ai/Yi-1.5-34B-Chat
      api_key: os.environ/HF_TOKEN
      max_tokens: 1024
  - model_name: 'HuggingFace: Nous Research: Nous Hermes 2 Mixtral 8x7B DPO'
    litellm_params:
      model: huggingface/NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO
      api_key: os.environ/HF_TOKEN
      max_tokens: 2048
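
A minimal sketch of how one of these entries could be queried if this config were served by a standalone LiteLLM proxy (assuming litellm --config litellm_config.yaml on its default port 4000; the "model" field must match a model_name string from the list above):

    # call the proxied Mistral 7B Instruct model through the OpenAI-compatible endpoint
    curl http://localhost:4000/v1/chat/completions \
      -H "Content-Type: application/json" \
      -d '{
            "model": "HuggingFace: Mistral: Mistral 7B Instruct v0.1",
            "messages": [{"role": "user", "content": "Hello!"}]
          }'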

@@ -30,4 +30,32 @@ if [ "$USE_CUDA_DOCKER" = "true" ]; then
    export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python3.11/site-packages/torch/lib:/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib"
fi


# HFSPACE:START
# Check if SPACE_ID is set, if so, configure for space
if [ -n "$SPACE_ID" ]; then
    echo "Configuring for HuggingFace Space deployment"

    # Copy litellm_config.yaml with specified ownership
    echo "Copying litellm_config.yaml to the desired location with specified ownership..."
    cp ./backend/space/litellm_config.yaml ./data/litellm/config.yaml

    WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" uvicorn main:app --host "$HOST" --port "$PORT" --forwarded-allow-ips '*' &
    webui_pid=$!
    echo "Waiting for webui to start..."
    while ! curl -s http://localhost:8080/health > /dev/null; do
        sleep 1
    done
    echo "Creating admin user..."
    curl \
        -X POST "http://localhost:8080/api/v1/auths/signup" \
        -H "accept: application/json" \
        -H "Content-Type: application/json" \
        -d "{ \"email\": \"${ADMIN_USER_EMAIL}\", \"password\": \"${ADMIN_USER_PASSWORD}\", \"name\": \"Admin\" }"
    echo "Shutting down webui..."
    kill $webui_pid
    export WEBUI_URL=${SPACE_HOST}
fi
# HFSPACE:END

WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" exec uvicorn main:app --host "$HOST" --port "$PORT" --forwarded-allow-ips '*'
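
For context, a minimal sketch of the environment this Space-specific block of the startup script relies on (illustrative placeholder values only; on HuggingFace Spaces, SPACE_ID and SPACE_HOST are provided by the platform, and the admin credentials would presumably be configured as Space secrets):

    # illustrative values, not real credentials
    export SPACE_ID="open-webui/open-webui"
    export SPACE_HOST="open-webui-open-webui.hf.space"
    export ADMIN_USER_EMAIL="admin@example.com"
    export ADMIN_USER_PASSWORD="change-me"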