mirror of https://github.com/open-webui/open-webui
synced 2025-02-22 05:08:14 +00:00

refac

This commit is contained in:
parent c689356b31
commit de26a78a16
@@ -42,7 +42,7 @@ from apps.openai.main import (
 from apps.audio.main import app as audio_app
 from apps.images.main import app as images_app
 from apps.rag.main import app as rag_app
-from apps.webui.main import app as webui_app
+from apps.webui.main import app as webui_app, get_pipe_models


 from pydantic import BaseModel
@@ -448,10 +448,12 @@ class ChatCompletionMiddleware(BaseHTTPMiddleware):

         if citations and data.get("citations"):
             data_items.append({"citations": citations})
-            del data["citations"]

         del data["files"]

+        if data.get("citations"):
+            del data["citations"]
+
         if context != "":
             system_prompt = rag_template(
                 rag_app.state.config.RAG_TEMPLATE, context, prompt
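For context, a standalone sketch of what the relocated cleanup changes; the payload keys below ("choices", the sample citation entry, the file id) are invented for illustration and are not taken from open-webui. With the old code, "citations" was only deleted inside the branch guarded by the locally collected citations list, so a payload carrying citations survived untouched whenever that list was empty; after this commit the key is stripped whenever it is present and non-empty.

# Illustrative sketch, not the middleware itself; keys other than "citations"
# and "files" are made up.
data = {"choices": [], "citations": [{"source": "doc.pdf"}], "files": ["file-1"]}
citations = []   # the middleware collected no citations for this request
data_items = []

if citations and data.get("citations"):
    data_items.append({"citations": citations})

del data["files"]

# New unconditional cleanup introduced by this commit.
if data.get("citations"):
    del data["citations"]

print(data)        # {'choices': []}
print(data_items)  # []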
@@ -691,17 +693,18 @@ webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION


 async def get_all_models():
+    pipe_models = []
     openai_models = []
     ollama_models = []

+    pipe_models = await get_pipe_models()

     if app.state.config.ENABLE_OPENAI_API:
         openai_models = await get_openai_models()

         openai_models = openai_models["data"]

     if app.state.config.ENABLE_OLLAMA_API:
         ollama_models = await get_ollama_models()

         ollama_models = [
             {
                 "id": model["model"],
@@ -714,9 +717,9 @@ async def get_all_models():
             for model in ollama_models["models"]
         ]

-    models = openai_models + ollama_models
-    custom_models = Models.get_all_models()
+    models = pipe_models + openai_models + ollama_models

+    custom_models = Models.get_all_models()
     for custom_model in custom_models:
         if custom_model.base_model_id == None:
             for model in models:
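A toy sketch of the merged list that get_all_models() now builds. The concrete entries below (my-pipe, gpt-4o, llama3:latest) and the fields on the pipe entry are invented, and the assumption that get_pipe_models() returns dicts tagged with "pipe": True is inferred from the later model.get('pipe') check rather than shown in this diff.

# Sketch only; field values are made up and get_pipe_models()'s return shape is assumed.
pipe_models = [{"id": "my-pipe", "owned_by": "openai", "pipe": True}]
openai_models = [{"id": "gpt-4o", "owned_by": "openai"}]

ollama_response = {"models": [{"model": "llama3:latest"}]}
ollama_models = [
    {"id": model["model"], "owned_by": "ollama"}
    for model in ollama_response["models"]
]

# Pipe models now lead the merged list that the custom-model pass iterates over.
models = pipe_models + openai_models + ollama_models
print([m["id"] for m in models])  # ['my-pipe', 'gpt-4o', 'llama3:latest']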
@@ -791,6 +794,13 @@ async def generate_chat_completions(form_data: dict, user=Depends(get_verified_u
     model = app.state.MODELS[model_id]
     print(model)

+
+
+    if model.get('pipe') == True:
+        print('hi')
+
+
+
     if model["owned_by"] == "ollama":
         return await generate_ollama_chat_completion(form_data, user=user)
     else:
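Taken together with the model-list change, the routing order in generate_chat_completions now looks roughly like the sketch below. route_chat_completion, handle_ollama and handle_openai are stand-in names rather than open-webui functions, and in this commit the new pipe branch is only a placeholder print that falls through to the existing owned_by routing.

import asyncio

# Stand-in handlers; the real code calls generate_ollama_chat_completion and the
# OpenAI-compatible path, which are not reproduced here.
async def handle_ollama(form_data, user=None):
    return {"routed_to": "ollama", "model": form_data.get("model")}

async def handle_openai(form_data, user=None):
    return {"routed_to": "openai", "model": form_data.get("model")}

async def route_chat_completion(model: dict, form_data: dict, user=None):
    # Check added by this commit; still a placeholder, so execution falls
    # through to the owned_by branching below.
    if model.get("pipe") == True:
        print("hi")

    if model["owned_by"] == "ollama":
        return await handle_ollama(form_data, user=user)
    else:
        return await handle_openai(form_data, user=user)

print(asyncio.run(route_chat_completion({"owned_by": "ollama"}, {"model": "llama3:latest"})))
# {'routed_to': 'ollama', 'model': 'llama3:latest'}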