mirror of
https://github.com/open-webui/open-webui
synced 2025-06-26 18:26:48 +00:00
feat: deploy to hf spaces
This commit is contained in:
43
backend/space/litellm_config.yaml
Normal file
43
backend/space/litellm_config.yaml
Normal file
@@ -0,0 +1,43 @@
---
# LiteLLM proxy configuration for the HuggingFace Space deployment.
# Each model_list entry maps a display name (shown in Open WebUI) to a
# HuggingFace Inference API model. The API key is resolved at runtime
# from the HF_TOKEN environment variable via LiteLLM's os.environ/ syntax.
litellm_settings:
  # Silently drop request parameters the target provider does not
  # support instead of failing the whole request.
  drop_params: true
model_list:
  - model_name: 'HuggingFace: Mistral: Mistral 7B Instruct v0.1'
    litellm_params:
      model: huggingface/mistralai/Mistral-7B-Instruct-v0.1
      api_key: os.environ/HF_TOKEN
      max_tokens: 1024
  - model_name: 'HuggingFace: Mistral: Mistral 7B Instruct v0.2'
    litellm_params:
      model: huggingface/mistralai/Mistral-7B-Instruct-v0.2
      api_key: os.environ/HF_TOKEN
      max_tokens: 1024
  - model_name: 'HuggingFace: Meta: Llama 3 8B Instruct'
    litellm_params:
      model: huggingface/meta-llama/Meta-Llama-3-8B-Instruct
      api_key: os.environ/HF_TOKEN
      # NOTE(review): 2047 (not 2048) — presumably to stay under the
      # endpoint's input+output budget; confirm before changing.
      max_tokens: 2047
  - model_name: 'HuggingFace: Mistral: Mixtral 8x7B Instruct v0.1'
    litellm_params:
      model: huggingface/mistralai/Mixtral-8x7B-Instruct-v0.1
      api_key: os.environ/HF_TOKEN
      max_tokens: 8192
  - model_name: 'HuggingFace: Microsoft: Phi-3 Mini-4K-Instruct'
    litellm_params:
      model: huggingface/microsoft/Phi-3-mini-4k-instruct
      api_key: os.environ/HF_TOKEN
      max_tokens: 1024
  - model_name: 'HuggingFace: Google: Gemma 7B 1.1'
    litellm_params:
      model: huggingface/google/gemma-1.1-7b-it
      api_key: os.environ/HF_TOKEN
      max_tokens: 1024
  - model_name: 'HuggingFace: Yi-1.5 34B Chat'
    litellm_params:
      model: huggingface/01-ai/Yi-1.5-34B-Chat
      api_key: os.environ/HF_TOKEN
      max_tokens: 1024
  - model_name: 'HuggingFace: Nous Research: Nous Hermes 2 Mixtral 8x7B DPO'
    litellm_params:
      model: huggingface/NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO
      api_key: os.environ/HF_TOKEN
      max_tokens: 2048
if [ "$USE_CUDA_DOCKER" = "true" ]; then
  # Make the bundled CUDA/cuDNN shared libraries visible to the runtime.
  export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python3.11/site-packages/torch/lib:/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib"
fi

# HFSPACE:START
# SPACE_ID is injected by HuggingFace Spaces; when present, perform
# one-time Space setup (LiteLLM config + admin account) before serving.
if [ -n "$SPACE_ID" ]; then
  echo "Configuring for HuggingFace Space deployment"

  # Install the Space-specific LiteLLM config where the app expects it.
  echo "Copying litellm_config.yaml to the desired location with specified ownership..."
  cp ./backend/space/litellm_config.yaml ./data/litellm/config.yaml

  # Boot the webui temporarily in the background so the signup API is
  # reachable, then create the admin user through it.
  WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" uvicorn main:app --host "$HOST" --port "$PORT" --forwarded-allow-ips '*' &
  webui_pid=$!
  echo "Waiting for webui to start..."
  # Fix: poll the port the server was actually started on, not a
  # hardcoded 8080 (breaks whenever PORT != 8080).
  while ! curl -s "http://localhost:${PORT}/health" > /dev/null; do
    sleep 1
  done
  echo "Creating admin user..."
  curl \
    -X POST "http://localhost:${PORT}/api/v1/auths/signup" \
    -H "accept: application/json" \
    -H "Content-Type: application/json" \
    -d "{ \"email\": \"${ADMIN_USER_EMAIL}\", \"password\": \"${ADMIN_USER_PASSWORD}\", \"name\": \"Admin\" }"
  echo "Shutting down webui..."
  kill "$webui_pid"
  # Wait for the temporary server to exit so the port is free before
  # the final foreground server binds it.
  wait "$webui_pid" 2>/dev/null
  export WEBUI_URL="${SPACE_HOST}"
fi
# HFSPACE:END

# Hand the process over to uvicorn (exec: no lingering shell parent).
WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" exec uvicorn main:app --host "$HOST" --port "$PORT" --forwarded-allow-ips '*'
Reference in New Issue
Block a user