2024-05-09 04:00:03 +00:00
from contextlib import asynccontextmanager
2024-02-23 08:30:26 +00:00
from bs4 import BeautifulSoup
import json
import markdown
2024-01-07 10:48:21 +00:00
import time
2024-02-24 08:21:53 +00:00
import os
import sys
2024-03-20 23:11:36 +00:00
import logging
2024-04-10 06:03:05 +00:00
import aiohttp
2024-02-25 19:26:58 +00:00
import requests
2024-05-21 22:04:00 +00:00
import mimetypes
2024-06-05 20:57:48 +00:00
import shutil
import os
2024-06-18 18:36:55 +00:00
import uuid
2024-06-11 17:19:59 +00:00
import inspect
2024-06-05 20:57:48 +00:00
import asyncio
2024-02-23 08:30:26 +00:00
2024-06-20 11:38:59 +00:00
from fastapi . concurrency import run_in_threadpool
2024-06-05 20:57:48 +00:00
from fastapi import FastAPI , Request , Depends , status , UploadFile , File , Form
2023-11-15 00:28:51 +00:00
from fastapi . staticfiles import StaticFiles
2024-05-28 16:50:17 +00:00
from fastapi . responses import JSONResponse
2023-11-15 00:28:51 +00:00
from fastapi import HTTPException
from fastapi . middleware . wsgi import WSGIMiddleware
from fastapi . middleware . cors import CORSMiddleware
2023-11-19 00:47:12 +00:00
from starlette . exceptions import HTTPException as StarletteHTTPException
2024-03-09 06:34:47 +00:00
from starlette . middleware . base import BaseHTTPMiddleware
2024-05-07 00:29:16 +00:00
from starlette . responses import StreamingResponse , Response
2024-01-07 06:07:20 +00:00
2024-06-04 06:39:52 +00:00
from apps . socket . main import app as socket_app
2024-06-09 20:17:44 +00:00
from apps . ollama . main import (
app as ollama_app ,
OpenAIChatCompletionForm ,
get_all_models as get_ollama_models ,
generate_openai_chat_completion as generate_ollama_chat_completion ,
)
from apps . openai . main import (
app as openai_app ,
get_all_models as get_openai_models ,
generate_chat_completion as generate_openai_chat_completion ,
)
2024-04-10 06:03:05 +00:00
2024-02-11 08:17:50 +00:00
from apps . audio . main import app as audio_app
2024-02-22 02:12:01 +00:00
from apps . images . main import app as images_app
from apps . rag . main import app as rag_app
2024-06-20 11:21:55 +00:00
from apps . webui . main import app as webui_app , get_pipe_models
2024-01-07 06:07:20 +00:00
2024-06-05 20:57:48 +00:00
2024-03-10 05:19:20 +00:00
from pydantic import BaseModel
2024-06-20 11:38:59 +00:00
from typing import List , Optional , Iterator , Generator , Union
2024-02-24 06:44:56 +00:00
2024-05-26 08:15:48 +00:00
from apps . webui . models . models import Models , ModelModel
2024-06-11 06:40:27 +00:00
from apps . webui . models . tools import Tools
2024-06-20 09:30:00 +00:00
from apps . webui . models . functions import Functions
from apps . webui . utils import load_toolkit_module_by_id , load_function_module_by_id
2024-06-11 06:40:27 +00:00
2024-05-28 02:16:07 +00:00
from utils . utils import (
get_admin_user ,
get_verified_user ,
get_current_user ,
get_http_authorization_cred ,
)
2024-06-11 06:40:27 +00:00
from utils . task import (
title_generation_template ,
search_query_generation_template ,
tools_function_calling_generation_template ,
)
2024-06-20 11:38:59 +00:00
from utils . misc import (
get_last_user_message ,
add_or_update_system_message ,
stream_message_template ,
)
2024-06-09 21:25:31 +00:00
2024-06-11 08:10:24 +00:00
from apps . rag . utils import get_rag_context , rag_template
2024-03-09 06:34:47 +00:00
2024-03-10 05:47:01 +00:00
from config import (
2024-03-24 03:16:18 +00:00
CONFIG_DATA ,
2024-03-10 05:47:01 +00:00
WEBUI_NAME ,
2024-05-07 00:29:16 +00:00
WEBUI_URL ,
2024-05-08 15:40:18 +00:00
WEBUI_AUTH ,
2024-03-10 05:47:01 +00:00
ENV ,
VERSION ,
CHANGELOG ,
FRONTEND_BUILD_DIR ,
2024-06-18 18:36:55 +00:00
UPLOAD_DIR ,
2024-04-09 10:32:28 +00:00
CACHE_DIR ,
STATIC_DIR ,
2024-05-24 08:40:48 +00:00
ENABLE_OPENAI_API ,
ENABLE_OLLAMA_API ,
2024-04-26 21:17:18 +00:00
ENABLE_MODEL_FILTER ,
2024-03-10 05:47:01 +00:00
MODEL_FILTER_LIST ,
2024-03-20 23:11:36 +00:00
GLOBAL_LOG_LEVEL ,
SRC_LOG_LEVELS ,
2024-03-21 01:35:02 +00:00
WEBHOOK_URL ,
2024-04-22 18:55:46 +00:00
ENABLE_ADMIN_EXPORT ,
2024-05-26 07:49:30 +00:00
WEBUI_BUILD_HASH ,
2024-06-09 21:53:10 +00:00
TASK_MODEL ,
TASK_MODEL_EXTERNAL ,
2024-06-09 21:25:31 +00:00
TITLE_GENERATION_PROMPT_TEMPLATE ,
2024-06-09 21:53:10 +00:00
SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE ,
2024-06-09 22:19:36 +00:00
SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD ,
2024-06-11 06:40:27 +00:00
TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE ,
2024-06-09 21:25:31 +00:00
AppConfig ,
2024-03-10 05:47:01 +00:00
)
2024-02-25 19:26:58 +00:00
from constants import ERROR_MESSAGES
2024-03-20 23:11:36 +00:00
logging . basicConfig ( stream = sys . stdout , level = GLOBAL_LOG_LEVEL )
log = logging . getLogger ( __name__ )
log . setLevel ( SRC_LOG_LEVELS [ " MAIN " ] )
2023-11-15 00:28:51 +00:00
2024-03-28 09:45:56 +00:00
2023-11-15 00:28:51 +00:00
class SPAStaticFiles(StaticFiles):
    """Static file server with single-page-app fallback.

    Any path that would 404 is answered with ``index.html`` instead, so
    client-side routes resolve to the SPA shell.
    """

    async def get_response(self, path: str, scope):
        try:
            return await super().get_response(path, scope)
        except (HTTPException, StarletteHTTPException) as ex:
            # Only a missing file triggers the SPA fallback; every other
            # HTTP error propagates unchanged.
            if ex.status_code != 404:
                raise
            return await super().get_response("index.html", scope)
2024-04-02 10:03:55 +00:00
# Startup banner printed once at import time, including version and, for
# non-dev builds, the commit hash.
print(
    rf"""
  ___                    __        __   _     _   _ ___
 / _ \ _ __   ___ _ __   \ \      / /__| |__ | | | |_ _|
| | | | '_ \ / _ \ '_ \   \ \ /\ / / _ \ '_ \| | | || |
| |_| | |_) |  __/ | | |   \ V  V /  __/ |_) | |_| || |
 \___/| .__/ \___|_| |_|    \_/\_/ \___|_.__/ \___/|___|
      |_|


v{VERSION} - building the best open-source AI user interface.
{f"Commit: {WEBUI_BUILD_HASH}" if WEBUI_BUILD_HASH != "dev-build" else ""}
https://github.com/open-webui/open-webui
"""
)
2023-11-15 00:28:51 +00:00
2024-05-09 04:00:03 +00:00
@asynccontextmanager
async def lifespan(app: FastAPI):
    # No startup/shutdown work is performed yet; the context manager exists so
    # lifecycle hooks can be added without changing app construction.
    yield


# API docs are only exposed in the dev environment.
app = FastAPI(
    docs_url="/docs" if ENV == "dev" else None, redoc_url=None, lifespan=lifespan
)
2023-11-15 00:28:51 +00:00
2024-05-10 07:03:24 +00:00
# Application-wide configuration object, seeded from the config module.
app.state.config = AppConfig()

app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API
app.state.config.ENABLE_OLLAMA_API = ENABLE_OLLAMA_API

app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST

app.state.config.WEBHOOK_URL = WEBHOOK_URL

# Models and prompt templates used for internal "task" generations
# (titles, search queries, tool function calling). TASK_MODEL applies when the
# chat model is an ollama model, TASK_MODEL_EXTERNAL otherwise.
app.state.config.TASK_MODEL = TASK_MODEL
app.state.config.TASK_MODEL_EXTERNAL = TASK_MODEL_EXTERNAL

app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE = TITLE_GENERATION_PROMPT_TEMPLATE
app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE = (
    SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE
)
app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD = (
    SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD
)
app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE = (
    TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE
)

# Cache of every known model keyed by id; populated lazily by get_all_models().
app.state.MODELS = {}

# CORS: all origins are allowed.
origins = ["*"]
2024-05-17 03:49:28 +00:00
2024-06-20 08:51:39 +00:00
##################################
#
# ChatCompletion Middleware
#
##################################
2024-06-18 23:08:42 +00:00
async def get_function_call_response(
    messages, files, tool_id, template, task_model_id, user
):
    """Ask the task model which tool function to call, then execute it.

    The task model is prompted with the tool's JSON specs plus a short
    transcript of the conversation; its JSON answer (``{"name": ...,
    "parameters": ...}``) selects and parameterizes a function from the
    toolkit module.

    Returns a ``(function_result, citation, file_handler)`` tuple, or
    ``(None, None, False)`` when no function was executed or any step failed.
    """
    tool = Tools.get_tool_by_id(tool_id)
    tools_specs = json.dumps(tool.specs, indent=2)
    content = tools_function_calling_generation_template(template, tools_specs)

    user_message = get_last_user_message(messages)
    # Transcript of the last four messages (newest first) plus the query.
    prompt = (
        "History:\n"
        + "\n".join(
            [
                f"{message['role'].upper()}: \"\"\"{message['content']}\"\"\""
                for message in messages[::-1][:4]
            ]
        )
        + f"\nQuery: {user_message}"
    )

    print(prompt)

    payload = {
        "model": task_model_id,
        "messages": [
            {"role": "system", "content": content},
            {"role": "user", "content": f"Query: {prompt}"},
        ],
        "stream": False,
    }

    try:
        payload = filter_pipeline(payload, user)
    except Exception as e:
        raise e

    model = app.state.MODELS[task_model_id]

    response = None
    try:
        # Route the completion to whichever backend owns the task model.
        if model["owned_by"] == "ollama":
            response = await generate_ollama_chat_completion(payload, user=user)
        else:
            response = await generate_openai_chat_completion(payload, user=user)

        content = None

        if hasattr(response, "body_iterator"):
            # Streaming response: drain it; the last chunk wins.
            async for chunk in response.body_iterator:
                data = json.loads(chunk.decode("utf-8"))
                content = data["choices"][0]["message"]["content"]

            # Cleanup any remaining background tasks if necessary
            if response.background is not None:
                await response.background()
        else:
            content = response["choices"][0]["message"]["content"]

        # Parse the function response
        if content is not None:
            print(f"content: {content}")
            result = json.loads(content)
            print(result)

            citation = None
            # Call the function
            if "name" in result:
                # Toolkit modules are loaded once and cached on webui_app.
                if tool_id in webui_app.state.TOOLS:
                    toolkit_module = webui_app.state.TOOLS[tool_id]
                else:
                    toolkit_module = load_toolkit_module_by_id(tool_id)
                    webui_app.state.TOOLS[tool_id] = toolkit_module

                file_handler = False
                # check if toolkit_module has file_handler self variable
                if hasattr(toolkit_module, "file_handler"):
                    file_handler = True
                    print("file_handler: ", file_handler)

                function = getattr(toolkit_module, result["name"])
                function_result = None
                try:
                    # Get the signature of the function
                    sig = inspect.signature(function)
                    params = result["parameters"]

                    # Inject each reserved double-underscore parameter the
                    # function declares in its signature.
                    if "__user__" in sig.parameters:
                        # Call the function with the '__user__' parameter included
                        params = {
                            **params,
                            "__user__": {
                                "id": user.id,
                                "email": user.email,
                                "name": user.name,
                                "role": user.role,
                            },
                        }

                    if "__messages__" in sig.parameters:
                        # Call the function with the '__messages__' parameter included
                        params = {
                            **params,
                            "__messages__": messages,
                        }

                    if "__files__" in sig.parameters:
                        # Call the function with the '__files__' parameter included
                        params = {
                            **params,
                            "__files__": files,
                        }

                    if "__model__" in sig.parameters:
                        # Call the function with the '__model__' parameter included
                        params = {
                            **params,
                            "__model__": model,
                        }

                    if "__id__" in sig.parameters:
                        # Call the function with the '__id__' parameter included
                        params = {
                            **params,
                            "__id__": tool_id,
                        }

                    if inspect.iscoroutinefunction(function):
                        function_result = await function(**params)
                    else:
                        function_result = function(**params)

                    # Tools may opt in to citations via a truthy module-level
                    # `citation` attribute.
                    if hasattr(toolkit_module, "citation") and toolkit_module.citation:
                        citation = {
                            "source": {"name": f"TOOL:{tool.name}/{result['name']}"},
                            "document": [function_result],
                            "metadata": [{"source": result["name"]}],
                        }
                except Exception as e:
                    print(e)

                # Add the function result to the system prompt
                if function_result is not None:
                    return function_result, citation, file_handler
    except Exception as e:
        print(f"Error: {e}")

    return None, None, False
2024-06-11 06:40:27 +00:00
class ChatCompletionMiddleware(BaseHTTPMiddleware):
    """Pre-processes chat-completion POSTs before they reach the model apps.

    For matching requests it: runs the model's filter functions (``inlet``),
    executes any requested tool functions, injects RAG context into the system
    message, rewrites the request body, and prepends queued data items
    (e.g. citations) to streaming responses.
    """

    async def dispatch(self, request: Request, call_next):
        # Items (e.g. citations) to emit ahead of a streaming response body.
        data_items = []

        show_citations = False
        citations = []

        if request.method == "POST" and any(
            endpoint in request.url.path
            for endpoint in ["/ollama/api/chat", "/chat/completions"]
        ):
            log.debug(f"request.url.path: {request.url.path}")

            # Read the original request body
            body = await request.body()
            body_str = body.decode("utf-8")
            data = json.loads(body_str) if body_str else {}

            user = get_current_user(
                request,
                get_http_authorization_cred(request.headers.get("Authorization")),
            )

            # Flag to skip RAG completions if file_handler is present in tools/functions
            skip_files = False

            # The client-only "citations" flag is consumed here and must not
            # be forwarded to the model backends.
            if data.get("citations"):
                show_citations = True
                del data["citations"]

            model_id = data["model"]
            if model_id not in app.state.MODELS:
                raise HTTPException(
                    status_code=status.HTTP_404_NOT_FOUND,
                    detail="Model not found",
                )

            model = app.state.MODELS[model_id]

            # Check if the model has any filters
            if "info" in model and "meta" in model["info"]:
                for filter_id in model["info"]["meta"].get("filterIds", []):
                    filter = Functions.get_function_by_id(filter_id)
                    if filter:
                        # Filter function modules are loaded once and cached.
                        if filter_id in webui_app.state.FUNCTIONS:
                            function_module = webui_app.state.FUNCTIONS[filter_id]
                        else:
                            function_module, function_type = load_function_module_by_id(
                                filter_id
                            )
                            webui_app.state.FUNCTIONS[filter_id] = function_module

                        # Check if the function has a file_handler variable
                        if hasattr(function_module, "file_handler"):
                            skip_files = function_module.file_handler

                        try:
                            if hasattr(function_module, "inlet"):
                                inlet = function_module.inlet

                                # Get the signature of the function
                                sig = inspect.signature(inlet)
                                param = {"body": data}

                                if "__user__" in sig.parameters:
                                    param = {
                                        **param,
                                        "__user__": {
                                            "id": user.id,
                                            "email": user.email,
                                            "name": user.name,
                                            "role": user.role,
                                        },
                                    }

                                if "__id__" in sig.parameters:
                                    param = {
                                        **param,
                                        "__id__": filter_id,
                                    }

                                if inspect.iscoroutinefunction(inlet):
                                    data = await inlet(**param)
                                else:
                                    data = inlet(**param)

                        except Exception as e:
                            # A failing inlet rejects the whole request.
                            print(f"Error: {e}")
                            return JSONResponse(
                                status_code=status.HTTP_400_BAD_REQUEST,
                                content={"detail": str(e)},
                            )

            # Set the task model
            task_model_id = data["model"]
            # Check if the user has a custom task model and use that model
            if app.state.MODELS[task_model_id]["owned_by"] == "ollama":
                if (
                    app.state.config.TASK_MODEL
                    and app.state.config.TASK_MODEL in app.state.MODELS
                ):
                    task_model_id = app.state.config.TASK_MODEL
            else:
                if (
                    app.state.config.TASK_MODEL_EXTERNAL
                    and app.state.config.TASK_MODEL_EXTERNAL in app.state.MODELS
                ):
                    task_model_id = app.state.config.TASK_MODEL_EXTERNAL

            prompt = get_last_user_message(data["messages"])
            context = ""

            # If tool_ids field is present, call the functions
            if "tool_ids" in data:
                print(data["tool_ids"])
                for tool_id in data["tool_ids"]:
                    print(tool_id)
                    try:
                        response, citation, file_handler = (
                            await get_function_call_response(
                                messages=data["messages"],
                                files=data.get("files", []),
                                tool_id=tool_id,
                                template=app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE,
                                task_model_id=task_model_id,
                                user=user,
                            )
                        )

                        print(file_handler)

                        if isinstance(response, str):
                            context += ("\n" if context != "" else "") + response

                        if citation:
                            citations.append(citation)
                            show_citations = True

                        if file_handler:
                            skip_files = True

                    except Exception as e:
                        # Tool failures are best-effort: log and continue.
                        print(f"Error: {e}")

                del data["tool_ids"]

                print(f"tool_context: {context}")

            # If files field is present, generate RAG completions
            # If skip_files is True, skip the RAG completions
            if "files" in data:
                if not skip_files:
                    data = {**data}
                    rag_context, rag_citations = get_rag_context(
                        files=data["files"],
                        messages=data["messages"],
                        embedding_function=rag_app.state.EMBEDDING_FUNCTION,
                        k=rag_app.state.config.TOP_K,
                        reranking_function=rag_app.state.sentence_transformer_rf,
                        r=rag_app.state.config.RELEVANCE_THRESHOLD,
                        hybrid_search=rag_app.state.config.ENABLE_RAG_HYBRID_SEARCH,
                    )
                    if rag_context:
                        context += ("\n" if context != "" else "") + rag_context

                    log.debug(f"rag_context: {rag_context}, citations: {citations}")

                    if rag_citations:
                        citations.extend(rag_citations)

                del data["files"]

            if show_citations and len(citations) > 0:
                data_items.append({"citations": citations})

            if context != "":
                system_prompt = rag_template(
                    rag_app.state.config.RAG_TEMPLATE, context, prompt
                )
                print(system_prompt)
                data["messages"] = add_or_update_system_message(
                    system_prompt, data["messages"]
                )

            modified_body_bytes = json.dumps(data).encode("utf-8")
            # Replace the request body with the modified one
            request._body = modified_body_bytes
            # Set custom header to ensure content-length matches new body length
            request.headers.__dict__["_list"] = [
                (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
                *[
                    (k, v)
                    for k, v in request.headers.raw
                    if k.lower() != b"content-length"
                ],
            ]

            response = await call_next(request)
            if isinstance(response, StreamingResponse):
                # If it's a streaming response, inject it as SSE event or NDJSON line
                content_type = response.headers.get("Content-Type")
                if "text/event-stream" in content_type:
                    return StreamingResponse(
                        self.openai_stream_wrapper(response.body_iterator, data_items),
                    )
                if "application/x-ndjson" in content_type:
                    return StreamingResponse(
                        self.ollama_stream_wrapper(response.body_iterator, data_items),
                    )
                # NOTE(review): a StreamingResponse with any other content
                # type falls through to the second call_next below — verify
                # this double dispatch is intentional.
            else:
                return response

        # If it's not a chat completion request, just pass it through
        response = await call_next(request)
        return response

    async def _receive(self, body: bytes):
        # Replacement `receive` callable returning the (already rewritten) body.
        return {"type": "http.request", "body": body, "more_body": False}

    async def openai_stream_wrapper(self, original_generator, data_items):
        # Emit queued items as SSE events before the model stream.
        for item in data_items:
            yield f"data: {json.dumps(item)}\n\n"

        async for data in original_generator:
            yield data

    async def ollama_stream_wrapper(self, original_generator, data_items):
        # Emit queued items as NDJSON lines before the model stream.
        for item in data_items:
            yield f"{json.dumps(item)}\n"

        async for data in original_generator:
            yield data
2024-03-09 06:34:47 +00:00
2024-06-11 06:40:27 +00:00
# Every request passes through the chat-completion pre-processor.
app.add_middleware(ChatCompletionMiddleware)


##################################
#
# Pipeline Middleware
#
##################################
2024-03-09 06:34:47 +00:00
2024-06-09 21:25:31 +00:00
def filter_pipeline(payload, user):
    """Run `payload` through every applicable pipeline "filter" inlet.

    Each filter pipeline targeting this model (or all models via ``["*"]``),
    plus the target model itself when it is a pipeline, receives the payload
    at its ``/filter/inlet`` endpoint in priority order and may rewrite it.

    Raises:
        Exception: with args ``(status_code, detail)`` when a filter rejects
            the request with a JSON ``detail`` message.

    Returns:
        The (possibly rewritten) payload; UI-only keys (``chat_id``,
        ``title``, ``task``) are stripped for non-pipeline models.
    """
    user = {"id": user.id, "email": user.email, "name": user.name, "role": user.role}
    model_id = payload["model"]

    # Filter pipelines that target this model, or every model via "*".
    filters = [
        model
        for model in app.state.MODELS.values()
        if "pipeline" in model
        and "type" in model["pipeline"]
        and model["pipeline"]["type"] == "filter"
        and (
            model["pipeline"]["pipelines"] == ["*"]
            or any(
                model_id == target_model_id
                for target_model_id in model["pipeline"]["pipelines"]
            )
        )
    ]
    sorted_filters = sorted(filters, key=lambda x: x["pipeline"]["priority"])

    model = app.state.MODELS[model_id]
    if "pipeline" in model:
        # The target model itself runs last when it is a pipeline.
        sorted_filters.append(model)

    for filter in sorted_filters:
        r = None
        try:
            urlIdx = filter["urlIdx"]

            url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
            key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

            # Only endpoints with a configured API key are called.
            if key != "":
                headers = {"Authorization": f"Bearer {key}"}
                r = requests.post(
                    f"{url}/{filter['id']}/filter/inlet",
                    headers=headers,
                    json={
                        "user": user,
                        "body": payload,
                    },
                )

                r.raise_for_status()
                payload = r.json()
        except Exception as e:
            # Handle connection error here
            print(f"Connection error: {e}")

            if r is not None:
                # BUGFIX: `res` was previously left unbound when r.json()
                # failed, turning the error path into a NameError.
                res = None
                try:
                    res = r.json()
                except Exception:
                    pass

                if res is not None and "detail" in res:
                    # Propagate the filter's rejection as (status, detail).
                    raise Exception(r.status_code, res["detail"])

    if "pipeline" not in app.state.MODELS[model_id]:
        # Strip UI-only fields that plain model backends must not see.
        if "chat_id" in payload:
            del payload["chat_id"]

        if "title" in payload:
            del payload["title"]

        if "task" in payload:
            del payload["task"]

    return payload
2024-05-28 02:03:26 +00:00
class PipelineMiddleware(BaseHTTPMiddleware):
    """Applies pipeline filter inlets to chat-completion request bodies and
    rewrites the request with the filtered payload."""

    async def dispatch(self, request: Request, call_next):
        if request.method == "POST" and (
            "/ollama/api/chat" in request.url.path
            or "/chat/completions" in request.url.path
        ):
            log.debug(f"request.url.path: {request.url.path}")

            # Read the original request body
            body = await request.body()
            # Decode body to string
            body_str = body.decode("utf-8")
            # Parse string to JSON
            data = json.loads(body_str) if body_str else {}

            user = get_current_user(
                request,
                get_http_authorization_cred(request.headers.get("Authorization")),
            )

            try:
                data = filter_pipeline(data, user)
            except Exception as e:
                # NOTE(review): assumes e.args is the (status_code, detail)
                # pair raised by filter_pipeline — any other exception shape
                # would fail here; confirm no other errors can propagate.
                return JSONResponse(
                    status_code=e.args[0],
                    content={"detail": e.args[1]},
                )

            modified_body_bytes = json.dumps(data).encode("utf-8")
            # Replace the request body with the modified one
            request._body = modified_body_bytes
            # Set custom header to ensure content-length matches new body length
            request.headers.__dict__["_list"] = [
                (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
                *[
                    (k, v)
                    for k, v in request.headers.raw
                    if k.lower() != b"content-length"
                ],
            ]

        response = await call_next(request)
        return response

    async def _receive(self, body: bytes):
        # Replacement `receive` callable returning the rewritten body.
        return {"type": "http.request", "body": body, "more_body": False}
app.add_middleware(PipelineMiddleware)

# CORS is wide open (origins == ["*"]) with credentials allowed.
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
2023-11-15 00:28:51 +00:00
@app.middleware("http")
async def check_url(request: Request, call_next):
    """Lazily populate the model cache, then time the request.

    Adds an ``X-Process-Time`` response header with the whole-second duration
    of the downstream handling.
    """
    # First request after startup triggers model discovery.
    if not app.state.MODELS:
        await get_all_models()

    start_time = int(time.time())
    response = await call_next(request)
    elapsed = int(time.time()) - start_time
    response.headers["X-Process-Time"] = str(elapsed)

    return response
2024-05-19 15:00:07 +00:00
@app.middleware("http")
async def update_embedding_function(request: Request, call_next):
    # After the RAG embedding config is updated, re-share the (new) embedding
    # function with the webui sub-app so both stay in sync.
    response = await call_next(request)
    if "/embedding/update" in request.url.path:
        webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
    return response
2024-02-22 11:22:23 +00:00
2024-05-19 15:00:07 +00:00
2024-06-04 06:39:52 +00:00
# Sub-application mounts: websocket server, model backends, and feature APIs.
app.mount("/ws", socket_app)

app.mount("/ollama", ollama_app)
app.mount("/openai", openai_app)

app.mount("/images/api/v1", images_app)
app.mount("/audio/api/v1", audio_app)
app.mount("/rag/api/v1", rag_app)

app.mount("/api/v1", webui_app)

# Share the RAG embedding function with the webui sub-app.
webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
2024-03-31 20:59:39 +00:00
2024-05-25 01:26:36 +00:00
async def get_all_models():
    """Aggregate pipe, OpenAI, and Ollama models and refresh the model cache.

    Custom models from the database either override an existing base model
    (name/info) or are appended as "preset" entries inheriting the base
    model's owner. Updates ``app.state.MODELS`` (and the webui sub-app's
    reference) keyed by model id, and returns the combined list.
    """
    openai_models = []
    ollama_models = []

    pipe_models = await get_pipe_models()

    if app.state.config.ENABLE_OPENAI_API:
        openai_models = await get_openai_models()
        openai_models = openai_models["data"]

    if app.state.config.ENABLE_OLLAMA_API:
        ollama_models = await get_ollama_models()
        # Normalize ollama's records to the OpenAI model-object shape.
        ollama_models = [
            {
                "id": model["model"],
                "name": model["name"],
                "object": "model",
                "created": int(time.time()),
                "owned_by": "ollama",
                "ollama": model,
            }
            for model in ollama_models["models"]
        ]

    models = pipe_models + openai_models + ollama_models

    custom_models = Models.get_all_models()
    for custom_model in custom_models:
        if custom_model.base_model_id is None:
            # Override: patch the matching live model (with or without tag).
            for model in models:
                if (
                    custom_model.id == model["id"]
                    or custom_model.id == model["id"].split(":")[0]
                ):
                    model["name"] = custom_model.name
                    model["info"] = custom_model.model_dump()
        else:
            # Preset: new entry inheriting the base model's owner.
            owned_by = "openai"
            for model in models:
                if (
                    custom_model.base_model_id == model["id"]
                    or custom_model.base_model_id == model["id"].split(":")[0]
                ):
                    owned_by = model["owned_by"]
                    break

            models.append(
                {
                    "id": custom_model.id,
                    "name": custom_model.name,
                    "object": "model",
                    "created": custom_model.created_at,
                    "owned_by": owned_by,
                    "info": custom_model.model_dump(),
                    "preset": True,
                }
            )

    app.state.MODELS = {model["id"]: model for model in models}
    webui_app.state.MODELS = app.state.MODELS

    return models
@app.get("/api/models")
async def get_models(user=Depends(get_verified_user)):
    """List the models available to the requesting user.

    Filter pipelines are hidden from everyone; when the model whitelist is
    enabled, non-admin users only see whitelisted model ids.
    """
    models = await get_all_models()

    # Filter out filter pipelines
    models = [
        model
        for model in models
        if "pipeline" not in model or model["pipeline"].get("type", None) != "filter"
    ]

    # Collapsed a redundant duplicated `return {"data": models}` that sat
    # inside the whitelist branch; behavior is unchanged.
    if app.state.config.ENABLE_MODEL_FILTER and user.role == "user":
        models = [
            model
            for model in models
            if model["id"] in app.state.config.MODEL_FILTER_LIST
        ]

    return {"data": models}
2024-06-20 08:51:39 +00:00
@app.post("/api/chat/completions")
async def generate_chat_completions(form_data: dict, user=Depends(get_verified_user)):
    """Route a chat-completion request to a function pipe, Ollama, or OpenAI backend.

    If the resolved model carries a "pipe" entry, the request is handled by a
    locally-loaded function module; otherwise it is proxied to the backend
    indicated by the model's "owned_by" field.
    """
    model_id = form_data["model"]
    if model_id not in app.state.MODELS:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Model not found",
        )
    model = app.state.MODELS[model_id]
    print(model)

    pipe = model.get("pipe")
    if pipe:
        # Pipe functions receive the requesting user inline in the body.
        form_data["user"] = {
            "id": user.id,
            "email": user.email,
            "name": user.name,
            "role": user.role,
        }

        async def job():
            # "pipe_id.sub_pipe_id" addresses a sub-pipe of a manifold function;
            # only the prefix selects the loaded function module.
            pipe_id = form_data["model"]
            if "." in pipe_id:
                pipe_id, sub_pipe_id = pipe_id.split(".", 1)
            print(pipe_id)
            pipe = webui_app.state.FUNCTIONS[pipe_id].pipe

            # Get the signature of the function
            sig = inspect.signature(pipe)
            param = {"body": form_data}
            # Only pass __user__ when the pipe declares it.
            if "__user__" in sig.parameters:
                param = {
                    **param,
                    "__user__": {
                        "id": user.id,
                        "email": user.email,
                        "name": user.name,
                        "role": user.role,
                    },
                }
            # NOTE(review): form_data["stream"] raises KeyError when the client
            # omits "stream" — confirm callers always send it.
            if form_data["stream"]:

                async def stream_content():
                    # Pipes may be sync or async; await only coroutines.
                    if inspect.iscoroutinefunction(pipe):
                        res = await pipe(**param)
                    else:
                        res = pipe(**param)
                    # A plain string becomes a single SSE chunk.
                    if isinstance(res, str):
                        message = stream_message_template(form_data["model"], res)
                        yield f"data: {json.dumps(message)}\n\n"
                    # An iterator is relayed line by line, normalizing each item
                    # to an SSE "data:" frame.
                    if isinstance(res, Iterator):
                        for line in res:
                            if isinstance(line, BaseModel):
                                line = line.model_dump_json()
                                line = f"data: {line}"
                            try:
                                line = line.decode("utf-8")
                            except:
                                pass
                            if line.startswith("data: "):
                                yield f"{line}\n\n"
                            else:
                                line = stream_message_template(form_data["model"], line)
                                yield f"data: {json.dumps(line)}\n\n"
                    # NOTE(review): finish chunk is emitted for str/Generator but
                    # not for non-generator Iterators — confirm intended.
                    if isinstance(res, str) or isinstance(res, Generator):
                        finish_message = {
                            "id": f"{form_data['model']}-{str(uuid.uuid4())}",
                            "object": "chat.completion.chunk",
                            "created": int(time.time()),
                            "model": form_data["model"],
                            "choices": [
                                {
                                    "index": 0,
                                    "delta": {},
                                    "logprobs": None,
                                    "finish_reason": "stop",
                                }
                            ],
                        }
                        yield f"data: {json.dumps(finish_message)}\n\n"
                        yield f"data: [DONE]"

                return StreamingResponse(
                    stream_content(), media_type="text/event-stream"
                )
            else:
                if inspect.iscoroutinefunction(pipe):
                    res = await pipe(**param)
                else:
                    res = pipe(**param)
                # Dicts/BaseModels are assumed to already be completion-shaped.
                if isinstance(res, dict):
                    return res
                elif isinstance(res, BaseModel):
                    return res.model_dump()
                else:
                    # Collapse strings/generators into one completion message.
                    message = ""
                    if isinstance(res, str):
                        message = res
                    if isinstance(res, Generator):
                        for stream in res:
                            message = f"{message}{stream}"
                    return {
                        "id": f"{form_data['model']}-{str(uuid.uuid4())}",
                        "object": "chat.completion",
                        "created": int(time.time()),
                        "model": form_data["model"],
                        "choices": [
                            {
                                "index": 0,
                                "message": {
                                    "role": "assistant",
                                    "content": message,
                                },
                                "logprobs": None,
                                "finish_reason": "stop",
                            }
                        ],
                    }

        return await job()

    if model["owned_by"] == "ollama":
        return await generate_ollama_chat_completion(form_data, user=user)
    else:
        return await generate_openai_chat_completion(form_data, user=user)
@app.post("/api/chat/completed")
async def chat_completed(form_data: dict, user=Depends(get_verified_user)):
    """Run a completed chat payload through every applicable "outlet" filter.

    Two filter stages are applied in order: remote pipeline filters (proxied
    over HTTP) and locally-loaded function-module filters attached to the
    model's metadata. Each stage may rewrite `data`.
    """
    data = form_data
    model_id = data["model"]
    if model_id not in app.state.MODELS:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Model not found",
        )
    model = app.state.MODELS[model_id]

    # Pipeline filters that target all models ("*") or this model explicitly.
    filters = [
        model
        for model in app.state.MODELS.values()
        if "pipeline" in model
        and "type" in model["pipeline"]
        and model["pipeline"]["type"] == "filter"
        and (
            model["pipeline"]["pipelines"] == ["*"]
            or any(
                model_id == target_model_id
                for target_model_id in model["pipeline"]["pipelines"]
            )
        )
    ]

    sorted_filters = sorted(filters, key=lambda x: x["pipeline"]["priority"])
    # If the selected model is itself a pipeline, its own outlet runs first.
    if "pipeline" in model:
        sorted_filters = [model] + sorted_filters

    for filter in sorted_filters:
        r = None
        try:
            urlIdx = filter["urlIdx"]

            url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
            key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

            # Filters on unauthenticated backends (empty key) are skipped.
            if key != "":
                headers = {"Authorization": f"Bearer {key}"}
                r = requests.post(
                    f"{url}/{filter['id']}/filter/outlet",
                    headers=headers,
                    json={
                        "user": {"id": user.id, "name": user.name, "role": user.role},
                        "body": data,
                    },
                )

                r.raise_for_status()
                data = r.json()
        except Exception as e:
            # Handle connection error here
            print(f"Connection error: {e}")

            # A structured error from the filter aborts the chain and is
            # surfaced to the client; transport errors are swallowed.
            if r is not None:
                try:
                    res = r.json()
                    if "detail" in res:
                        return JSONResponse(
                            status_code=r.status_code,
                            content=res,
                        )
                except:
                    pass

            else:
                pass

    # Check if the model has any filters
    if "info" in model and "meta" in model["info"]:
        for filter_id in model["info"]["meta"].get("filterIds", []):
            filter = Functions.get_function_by_id(filter_id)
            if filter:
                # Load the function module lazily and cache it on app state.
                if filter_id in webui_app.state.FUNCTIONS:
                    function_module = webui_app.state.FUNCTIONS[filter_id]
                else:
                    function_module, function_type = load_function_module_by_id(
                        filter_id
                    )
                    webui_app.state.FUNCTIONS[filter_id] = function_module

                try:
                    if hasattr(function_module, "outlet"):
                        outlet = function_module.outlet

                        # Get the signature of the function
                        sig = inspect.signature(outlet)
                        param = {"body": data}

                        # Optional injected kwargs, passed only when declared.
                        if "__user__" in sig.parameters:
                            param = {
                                **param,
                                "__user__": {
                                    "id": user.id,
                                    "email": user.email,
                                    "name": user.name,
                                    "role": user.role,
                                },
                            }

                        if "__id__" in sig.parameters:
                            param = {
                                **param,
                                "__id__": filter_id,
                            }

                        if inspect.iscoroutinefunction(outlet):
                            data = await outlet(**param)
                        else:
                            data = outlet(**param)

                except Exception as e:
                    print(f"Error: {e}")
                    return JSONResponse(
                        status_code=status.HTTP_400_BAD_REQUEST,
                        content={"detail": str(e)},
                    )

    return data
##################################
#
# Task Endpoints
#
##################################
# TODO: Refactor task API endpoints below into a separate file
2024-06-09 21:53:10 +00:00
@app.get("/api/task/config")
async def get_task_config(user=Depends(get_verified_user)):
    """Expose the current task-model settings and prompt templates."""
    config = app.state.config
    return {
        "TASK_MODEL": config.TASK_MODEL,
        "TASK_MODEL_EXTERNAL": config.TASK_MODEL_EXTERNAL,
        "TITLE_GENERATION_PROMPT_TEMPLATE": config.TITLE_GENERATION_PROMPT_TEMPLATE,
        "SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE": config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE,
        "SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD": config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD,
        "TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE": config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE,
    }
class TaskConfigForm(BaseModel):
    """Request body for POST /api/task/config/update."""

    # None clears the override; ids are only applied if present in app.state.MODELS.
    TASK_MODEL: Optional[str]
    TASK_MODEL_EXTERNAL: Optional[str]
    TITLE_GENERATION_PROMPT_TEMPLATE: str
    SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE: str
    # Prompts shorter than this skip search-query generation entirely.
    SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD: int
    TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE: str
2024-06-09 21:53:10 +00:00
@app.post("/api/task/config/update")
async def update_task_config(form_data: TaskConfigForm, user=Depends(get_admin_user)):
    """Store new task-model settings and echo back the persisted values."""
    config = app.state.config

    config.TASK_MODEL = form_data.TASK_MODEL
    config.TASK_MODEL_EXTERNAL = form_data.TASK_MODEL_EXTERNAL
    config.TITLE_GENERATION_PROMPT_TEMPLATE = (
        form_data.TITLE_GENERATION_PROMPT_TEMPLATE
    )
    config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE = (
        form_data.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE
    )
    config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD = (
        form_data.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD
    )
    config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE = (
        form_data.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE
    )

    return {
        "TASK_MODEL": config.TASK_MODEL,
        "TASK_MODEL_EXTERNAL": config.TASK_MODEL_EXTERNAL,
        "TITLE_GENERATION_PROMPT_TEMPLATE": config.TITLE_GENERATION_PROMPT_TEMPLATE,
        "SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE": config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE,
        "SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD": config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD,
        "TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE": config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE,
    }
2024-06-09 21:26:49 +00:00
@app.post("/api/task/title/completions")
async def generate_title(form_data: dict, user=Depends(get_verified_user)):
    """Ask a (possibly overridden) task model for a short chat title."""
    print("generate_title")

    model_id = form_data["model"]
    if model_id not in app.state.MODELS:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Model not found",
        )

    # Check if the user has a custom task model
    # If the user has a custom task model, use that model
    if app.state.MODELS[model_id]["owned_by"] == "ollama":
        override = app.state.config.TASK_MODEL
    else:
        override = app.state.config.TASK_MODEL_EXTERNAL
    if override and override in app.state.MODELS:
        model_id = override

    print(model_id)
    model = app.state.MODELS[model_id]

    template = app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE
    content = title_generation_template(
        template,
        form_data["prompt"],
        {
            "name": user.name,
            "location": user.info.get("location") if user.info else None,
        },
    )

    payload = {
        "model": model_id,
        "messages": [{"role": "user", "content": content}],
        "stream": False,
        "max_tokens": 50,
        "chat_id": form_data.get("chat_id", None),
        "title": True,
    }

    log.debug(payload)

    try:
        payload = filter_pipeline(payload, user)
    except Exception as e:
        # filter_pipeline raises (status_code, detail) tuples.
        return JSONResponse(
            status_code=e.args[0],
            content={"detail": e.args[1]},
        )

    if model["owned_by"] == "ollama":
        return await generate_ollama_chat_completion(payload, user=user)
    else:
        return await generate_openai_chat_completion(payload, user=user)
2024-06-09 21:53:10 +00:00
@app.post("/api/task/query/completions")
async def generate_search_query(form_data: dict, user=Depends(get_verified_user)):
    """Ask a (possibly overridden) task model for a web-search query."""
    print("generate_search_query")

    # Very short prompts are not worth a query-generation round trip.
    if len(form_data["prompt"]) < app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Skip search query generation for short prompts (< {app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD} characters)",
        )

    model_id = form_data["model"]
    if model_id not in app.state.MODELS:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Model not found",
        )

    # Check if the user has a custom task model
    # If the user has a custom task model, use that model
    if app.state.MODELS[model_id]["owned_by"] == "ollama":
        override = app.state.config.TASK_MODEL
    else:
        override = app.state.config.TASK_MODEL_EXTERNAL
    if override and override in app.state.MODELS:
        model_id = override

    print(model_id)
    model = app.state.MODELS[model_id]

    template = app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE
    content = search_query_generation_template(
        template, form_data["prompt"], {"name": user.name}
    )

    payload = {
        "model": model_id,
        "messages": [{"role": "user", "content": content}],
        "stream": False,
        "max_tokens": 30,
        "task": True,
    }

    print(payload)

    try:
        payload = filter_pipeline(payload, user)
    except Exception as e:
        # filter_pipeline raises (status_code, detail) tuples.
        return JSONResponse(
            status_code=e.args[0],
            content={"detail": e.args[1]},
        )

    if model["owned_by"] == "ollama":
        return await generate_ollama_chat_completion(payload, user=user)
    else:
        return await generate_openai_chat_completion(payload, user=user)
@app.post("/api/task/emoji/completions")
async def generate_emoji(form_data: dict, user=Depends(get_verified_user)):
    """Ask a (possibly overridden) task model for one emoji matching the prompt's mood."""
    print("generate_emoji")

    model_id = form_data["model"]
    if model_id not in app.state.MODELS:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Model not found",
        )

    # Check if the user has a custom task model
    # If the user has a custom task model, use that model
    if app.state.MODELS[model_id]["owned_by"] == "ollama":
        if app.state.config.TASK_MODEL:
            task_model_id = app.state.config.TASK_MODEL
            if task_model_id in app.state.MODELS:
                model_id = task_model_id
    else:
        if app.state.config.TASK_MODEL_EXTERNAL:
            task_model_id = app.state.config.TASK_MODEL_EXTERNAL
            if task_model_id in app.state.MODELS:
                model_id = task_model_id

    print(model_id)
    model = app.state.MODELS[model_id]

    template = '''
Your task is to reflect the speaker's likely facial expression through a fitting emoji. Interpret emotions from the message and reflect their facial expression using fitting, diverse emojis (e.g., 😊, 😢, 😡, 😱).

Message: """{{prompt}}"""
'''

    # NOTE(review): reuses title_generation_template for {{prompt}} substitution
    # — confirm this is the intended templating helper.
    content = title_generation_template(
        template,
        form_data["prompt"],
        {
            "name": user.name,
            "location": user.info.get("location") if user.info else None,
        },
    )

    # max_tokens is tiny on purpose: a single emoji is expected back.
    payload = {
        "model": model_id,
        "messages": [{"role": "user", "content": content}],
        "stream": False,
        "max_tokens": 4,
        "chat_id": form_data.get("chat_id", None),
        "task": True,
    }

    log.debug(payload)

    try:
        payload = filter_pipeline(payload, user)
    except Exception as e:
        # filter_pipeline raises (status_code, detail) tuples.
        return JSONResponse(
            status_code=e.args[0],
            content={"detail": e.args[1]},
        )

    if model["owned_by"] == "ollama":
        return await generate_ollama_chat_completion(payload, user=user)
    else:
        return await generate_openai_chat_completion(payload, user=user)
2024-06-11 06:40:27 +00:00
@app.post("/api/task/tools/completions")
async def get_tools_function_calling(form_data: dict, user=Depends(get_verified_user)):
    """Resolve a tool call via the (possibly overridden) task model and return its context."""
    print("get_tools_function_calling")

    model_id = form_data["model"]
    if model_id not in app.state.MODELS:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Model not found",
        )

    # Check if the user has a custom task model
    # If the user has a custom task model, use that model
    if app.state.MODELS[model_id]["owned_by"] == "ollama":
        override = app.state.config.TASK_MODEL
    else:
        override = app.state.config.TASK_MODEL_EXTERNAL
    if override and override in app.state.MODELS:
        model_id = override

    print(model_id)
    template = app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE

    try:
        context, citation, file_handler = await get_function_call_response(
            form_data["messages"],
            form_data.get("files", []),
            form_data["tool_id"],
            template,
            model_id,
            user,
        )
        return context
    except Exception as e:
        # get_function_call_response raises (status_code, detail) tuples.
        return JSONResponse(
            status_code=e.args[0],
            content={"detail": e.args[1]},
        )
2024-06-11 06:40:27 +00:00
2024-06-20 08:51:39 +00:00
##################################
#
# Pipelines Endpoints
#
##################################
2024-05-30 09:04:29 +00:00
2024-06-20 08:51:39 +00:00
# TODO: Refactor pipelines API endpoints below into a separate file
2024-05-30 09:04:29 +00:00
2024-05-30 04:26:57 +00:00
@app.get("/api/pipelines/list")
async def get_pipelines_list(user=Depends(get_admin_user)):
    """List the OpenAI-compatible backends that advertise a pipelines server.

    Returns one `{url, idx}` entry per backend whose raw /models response
    contains a "pipelines" key.
    """
    responses = await get_openai_models(raw=True)
    print(responses)

    urlIdxs = [
        idx
        for idx, response in enumerate(responses)
        # Fix: compare to None with `is not` (PEP 8 E711); `!= None` relies on
        # the object's __eq__ and reads as a bug.
        if response is not None and "pipelines" in response
    ]

    return {
        "data": [
            {
                "url": openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx],
                "idx": urlIdx,
            }
            for urlIdx in urlIdxs
        ]
    }
2024-06-05 20:57:48 +00:00
@app.post("/api/pipelines/upload")
async def upload_pipeline(
    urlIdx: int = Form(...), file: UploadFile = File(...), user=Depends(get_admin_user)
):
    """Accept a .py pipeline upload and forward it to the selected pipelines backend.

    The file is staged under CACHE_DIR/pipelines, POSTed to the backend, and
    always deleted afterwards (success or failure).
    """
    print("upload_pipeline", urlIdx, file.filename)

    # Check if the uploaded file is a python file
    if not file.filename.endswith(".py"):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Only Python (.py) files are allowed.",
        )

    upload_folder = f"{CACHE_DIR}/pipelines"
    os.makedirs(upload_folder, exist_ok=True)
    file_path = os.path.join(upload_folder, file.filename)

    # Fix: initialize r before the try block. Previously, any failure before
    # the POST (e.g. while saving the upload) made the except branch reference
    # an unbound `r`, raising UnboundLocalError instead of the HTTPException.
    r = None
    try:
        # Save the uploaded file
        with open(file_path, "wb") as buffer:
            shutil.copyfileobj(file.file, buffer)

        url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        headers = {"Authorization": f"Bearer {key}"}

        with open(file_path, "rb") as f:
            files = {"file": f}
            r = requests.post(f"{url}/pipelines/upload", headers=headers, files=files)

        r.raise_for_status()
        data = r.json()

        return {**data}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except:
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )
    finally:
        # Ensure the file is deleted after the upload is completed or on failure
        if os.path.exists(file_path):
            os.remove(file_path)
2024-05-30 05:03:22 +00:00
class AddPipelineForm(BaseModel):
    """Request body for POST /api/pipelines/add."""

    # Source URL of the pipeline to install.
    url: str
    # Index into OPENAI_API_BASE_URLS selecting the target pipelines backend.
    urlIdx: int
@app.post("/api/pipelines/add")
async def add_pipeline(form_data: AddPipelineForm, user=Depends(get_admin_user)):
    """Ask the selected pipelines backend to install a pipeline from a URL."""
    r = None
    try:
        urlIdx = form_data.urlIdx
        base_url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        api_key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        r = requests.post(
            f"{base_url}/pipelines/add",
            headers={"Authorization": f"Bearer {api_key}"},
            json={"url": form_data.url},
        )
        r.raise_for_status()
        return {**r.json()}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        # Prefer the backend's own error detail when a response was received.
        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except:
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )
class DeletePipelineForm(BaseModel):
    """Request body for DELETE /api/pipelines/delete."""

    # Id of the pipeline to remove on the backend.
    id: str
    # Index into OPENAI_API_BASE_URLS selecting the target pipelines backend.
    urlIdx: int
@app.delete("/api/pipelines/delete")
async def delete_pipeline(form_data: DeletePipelineForm, user=Depends(get_admin_user)):
    """Ask the selected pipelines backend to uninstall a pipeline by id."""
    r = None
    try:
        urlIdx = form_data.urlIdx
        base_url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        api_key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        r = requests.delete(
            f"{base_url}/pipelines/delete",
            headers={"Authorization": f"Bearer {api_key}"},
            json={"id": form_data.id},
        )
        r.raise_for_status()
        return {**r.json()}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        # Prefer the backend's own error detail when a response was received.
        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except:
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )
2024-05-28 19:04:19 +00:00
@app.get("/api/pipelines")
async def get_pipelines(urlIdx: Optional[int] = None, user=Depends(get_admin_user)):
    """Proxy the pipelines listing from the selected backend.

    NOTE(review): a missing urlIdx makes the index lookup raise, which is
    caught and surfaced as 404 "Pipeline not found" — confirm callers always
    pass urlIdx.
    """
    r = None
    try:
        # Fix: removed a bare `urlIdx` expression statement left over here —
        # it evaluated the name and discarded the result (dead code).
        url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        headers = {"Authorization": f"Bearer {key}"}
        r = requests.get(f"{url}/pipelines", headers=headers)

        r.raise_for_status()
        data = r.json()

        return {**data}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except:
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )
2024-05-28 19:32:49 +00:00
2024-05-30 05:18:27 +00:00
@app.get("/api/pipelines/{pipeline_id}/valves")
async def get_pipeline_valves(
    urlIdx: Optional[int], pipeline_id: str, user=Depends(get_admin_user)
):
    """Fetch the current valve values of one pipeline from its backend."""
    # Refresh the model registry before proxying (result intentionally unused).
    models = await get_all_models()

    r = None
    try:
        base_url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        api_key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        r = requests.get(
            f"{base_url}/{pipeline_id}/valves",
            headers={"Authorization": f"Bearer {api_key}"},
        )
        r.raise_for_status()
        return {**r.json()}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        # Prefer the backend's own error detail when a response was received.
        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except:
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )
@app.get("/api/pipelines/{pipeline_id}/valves/spec")
async def get_pipeline_valves_spec(
    urlIdx: Optional[int], pipeline_id: str, user=Depends(get_admin_user)
):
    """Fetch the valve schema (spec) of one pipeline from its backend."""
    # Refresh the model registry before proxying (result intentionally unused).
    models = await get_all_models()

    r = None
    try:
        base_url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        api_key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        r = requests.get(
            f"{base_url}/{pipeline_id}/valves/spec",
            headers={"Authorization": f"Bearer {api_key}"},
        )
        r.raise_for_status()
        return {**r.json()}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        # Prefer the backend's own error detail when a response was received.
        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except:
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )
@app.post("/api/pipelines/{pipeline_id}/valves/update")
async def update_pipeline_valves(
    urlIdx: Optional[int],
    pipeline_id: str,
    form_data: dict,
    user=Depends(get_admin_user),
):
    """Push new valve values for one pipeline to its backend."""
    # Refresh the model registry before proxying (result intentionally unused).
    models = await get_all_models()

    r = None
    try:
        base_url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        api_key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        r = requests.post(
            f"{base_url}/{pipeline_id}/valves/update",
            headers={"Authorization": f"Bearer {api_key}"},
            json={**form_data},
        )
        r.raise_for_status()
        return {**r.json()}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        # Prefer the backend's own error detail when a response was received.
        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except:
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )
2024-06-20 08:51:39 +00:00
##################################
#
# Config Endpoints
#
##################################
2024-02-22 02:12:01 +00:00
@app.get("/api/config")
async def get_app_config():
    """Public (unauthenticated) frontend configuration for the web UI."""
    # Checking and Handling the Absence of 'ui' in CONFIG_DATA
    default_locale = "en-US"
    if "ui" in CONFIG_DATA:
        default_locale = CONFIG_DATA["ui"].get("default_locale", "en-US")

    # The Rest of the Function Now Uses the Variables Defined Above
    return {
        "status": True,
        "name": WEBUI_NAME,
        "version": VERSION,
        "default_locale": default_locale,
        "default_models": webui_app.state.config.DEFAULT_MODELS,
        "default_prompt_suggestions": webui_app.state.config.DEFAULT_PROMPT_SUGGESTIONS,
        "features": {
            "auth": WEBUI_AUTH,
            "auth_trusted_header": bool(webui_app.state.AUTH_TRUSTED_EMAIL_HEADER),
            "enable_signup": webui_app.state.config.ENABLE_SIGNUP,
            "enable_web_search": rag_app.state.config.ENABLE_RAG_WEB_SEARCH,
            "enable_image_generation": images_app.state.config.ENABLED,
            "enable_community_sharing": webui_app.state.config.ENABLE_COMMUNITY_SHARING,
            "enable_admin_export": ENABLE_ADMIN_EXPORT,
        },
        "audio": {
            "tts": {
                "engine": audio_app.state.config.TTS_ENGINE,
                "voice": audio_app.state.config.TTS_VOICE,
            },
            "stt": {
                "engine": audio_app.state.config.STT_ENGINE,
            },
        },
    }
2024-03-10 05:19:20 +00:00
@app.get("/api/config/model/filter")
async def get_model_filter_config(user=Depends(get_admin_user)):
    """Return the model-whitelist toggle and list (admin only)."""
    config = app.state.config
    return {
        "enabled": config.ENABLE_MODEL_FILTER,
        "models": config.MODEL_FILTER_LIST,
    }
2024-03-10 05:19:20 +00:00
class ModelFilterConfigForm(BaseModel):
    """Request body for POST /api/config/model/filter."""

    # Whether the whitelist is enforced for non-admin users.
    enabled: bool
    # Model ids visible to non-admin users when enabled.
    models: List[str]
@app.post("/api/config/model/filter")
async def update_model_filter_config(
    form_data: ModelFilterConfigForm, user=Depends(get_admin_user)
):
    """Persist the model-whitelist settings and return the stored values."""
    config = app.state.config
    config.ENABLE_MODEL_FILTER = form_data.enabled
    config.MODEL_FILTER_LIST = form_data.models

    return {
        "enabled": config.ENABLE_MODEL_FILTER,
        "models": config.MODEL_FILTER_LIST,
    }
2024-03-10 05:19:20 +00:00
2024-06-20 08:51:39 +00:00
# TODO: webhook endpoint should be under config endpoints
2024-03-21 01:35:02 +00:00
@app.get("/api/webhook")
async def get_webhook_url(user=Depends(get_admin_user)):
    """Return the configured notification webhook URL (admin only)."""
    return {
        "url": app.state.config.WEBHOOK_URL,
    }
class UrlForm(BaseModel):
    """Request body for POST /api/webhook."""

    # New webhook URL to store.
    url: str
@app.post("/api/webhook")
async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)):
    """Store a new webhook URL and mirror it onto the webui sub-app's state."""
    app.state.config.WEBHOOK_URL = form_data.url
    webui_app.state.WEBHOOK_URL = app.state.config.WEBHOOK_URL
    return {"url": app.state.config.WEBHOOK_URL}
2024-05-26 16:23:24 +00:00
2024-03-05 08:59:35 +00:00
@app.get("/api/version")
async def get_app_version():
    """Return the running application version.

    Fix: this handler was named `get_app_config`, duplicating the
    `/api/config` handler's name and shadowing it at module level. FastAPI
    binds handlers at decoration time so both routes still worked, but the
    duplicate name broke any by-name reference and confused debugging.
    """
    return {
        "version": VERSION,
    }
2024-02-23 08:30:26 +00:00
@app.get("/api/changelog")
async def get_app_changelog():
    """Return the five most recent changelog entries (insertion order)."""
    recent_keys = list(CHANGELOG)[:5]
    return {key: CHANGELOG[key] for key in recent_keys}
2024-02-23 08:30:26 +00:00
2024-02-25 19:26:58 +00:00
@app.get("/api/version/updates")
async def get_app_latest_release_version():
    """Compare the running version against the latest GitHub release tag."""
    try:
        async with aiohttp.ClientSession(trust_env=True) as session:
            release_url = (
                "https://api.github.com/repos/open-webui/open-webui/releases/latest"
            )
            async with session.get(release_url) as response:
                response.raise_for_status()
                release = await response.json()
                tag = release["tag_name"]
        # Tags are prefixed with "v"; strip it for comparison with VERSION.
        return {"current": VERSION, "latest": tag[1:]}
    except aiohttp.ClientError as e:
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail=ERROR_MESSAGES.RATE_LIMIT_EXCEEDED,
        )
2024-04-10 08:27:19 +00:00
2024-04-02 18:55:00 +00:00
@app.get("/manifest.json")
async def get_manifest_json():
    """Serve the PWA web-app manifest."""
    icons = [{"src": "/static/logo.png", "type": "image/png", "sizes": "500x500"}]
    return {
        "name": WEBUI_NAME,
        "short_name": WEBUI_NAME,
        "start_url": "/",
        "display": "standalone",
        "background_color": "#343541",
        "orientation": "portrait-primary",
        "icons": icons,
    }
2024-04-10 08:27:19 +00:00
2024-05-07 00:29:16 +00:00
@app.get("/opensearch.xml")
async def get_opensearch_xml():
    """Serve an OpenSearch description so browsers can add this UI as a search engine."""
    # Raw f-string: {WEBUI_NAME}/{WEBUI_URL} interpolate now; the nested
    # {"{searchTerms}"} emits the literal {searchTerms} placeholder required
    # by the OpenSearch template syntax.
    xml_content = rf"""
<OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/" xmlns:moz="http://www.mozilla.org/2006/browser/search/">
<ShortName>{WEBUI_NAME}</ShortName>
<Description>Search {WEBUI_NAME}</Description>
<InputEncoding>UTF-8</InputEncoding>
<Image width="16" height="16" type="image/x-icon">{WEBUI_URL}/favicon.png</Image>
<Url type="text/html" method="get" template="{WEBUI_URL}/?q={"{searchTerms}"}"/>
<moz:SearchForm>{WEBUI_URL}</moz:SearchForm>
</OpenSearchDescription>
"""
    return Response(content=xml_content, media_type="application/xml")
2024-05-15 18:17:18 +00:00
@app.get("/health")
async def healthcheck():
    """Liveness probe: reports healthy whenever the server can respond."""
    return {"status": True}
2024-04-09 10:32:28 +00:00
# Serve static assets and the generated cache directory directly.
app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
app.mount("/cache", StaticFiles(directory=CACHE_DIR), name="cache")


if os.path.exists(FRONTEND_BUILD_DIR):
    # NOTE(review): presumably works around platforms that mis-report .js as
    # text/plain, which breaks ES module loading — confirm.
    mimetypes.add_type("text/javascript", ".js")

    # Mount the SPA build at the root; html=True serves index.html for
    # client-side routes.
    app.mount(
        "/",
        SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
        name="spa-static-files",
    )
else:
    log.warning(
        f"Frontend build directory not found at '{FRONTEND_BUILD_DIR}'. Serving API only."
    )