Mirror of https://github.com/open-webui/pipelines, synced 2025-05-14 09:30:46 +00:00
refac

commit 1fa58ecc1c (parent be06b61e83)

main.py: 16 lines changed
@@ -19,6 +19,8 @@ import os
 import importlib.util
 
 
+from concurrent.futures import ThreadPoolExecutor
+
 PIPELINES = {}
 
 
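The new ThreadPoolExecutor import suggests blocking pipeline calls are about to be pushed off the event loop. A minimal sketch of that pattern, assuming the executor wiring (none of it is visible in this commit) and using a stand-in get_response:

import asyncio
from concurrent.futures import ThreadPoolExecutor

executor = ThreadPoolExecutor(max_workers=4)

def get_response(user_message: str) -> str:
    # Stand-in for a pipeline's blocking entry point.
    return f"echo: {user_message}"

async def handle(user_message: str) -> str:
    loop = asyncio.get_running_loop()
    # Run the blocking call in the pool so the event loop stays free
    # to serve other requests while the pipeline works.
    return await loop.run_in_executor(executor, get_response, user_message)

print(asyncio.run(handle("hi")))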
@@ -53,10 +55,7 @@ from contextlib import asynccontextmanager
 async def lifespan(app: FastAPI):
     for pipeline in PIPELINES.values():
         if hasattr(pipeline["module"], "on_startup"):
-            info = await pipeline["module"].on_startup()
-            if info:
-                pipeline["id"] = info["id"]
-                pipeline["name"] = info["name"]
+            await pipeline["module"].on_startup()
     yield
 
     for pipeline in PIPELINES.values():
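After this change, on_startup is awaited purely for its side effects; the pipeline's id and name are no longer read back from its return value. A hypothetical pipeline module that satisfies the simplified contract (the shape is assumed, not taken from the repo):

# Hypothetical pipeline module: lifespan only awaits on_startup for its
# side effects and ignores any return value, so returning nothing is fine.
async def on_startup():
    # Open connections, load models, warm caches, etc.
    print("pipeline: started")

async def on_shutdown():
    print("pipeline: stopped")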
@@ -132,15 +131,16 @@ async def generate_openai_chat_completion(form_data: OpenAIChatCompletionForm):
     def stream_content():
         res = get_response(user_message, messages=form_data.messages)
 
-        print(res)
+        print(f"stream:true:{res}")
 
         if isinstance(res, str):
             message = stream_message_template(form_data.model, res)
+            print(f"stream_content:str:{message}")
             yield f"data: {json.dumps(message)}\n\n"
 
         elif isinstance(res, Generator):
             for message in res:
-                print(message)
+                print(f"stream_content:Generator:{message}")
                 message = stream_message_template(form_data.model, message)
                 yield f"data: {json.dumps(message)}\n\n"
 
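stream_content emits server-sent events, one data: line per chunk. For reference, a chunk in the OpenAI streaming shape that stream_message_template plausibly produces; the field layout is an assumption here, since only the SSE framing is visible in the diff:

import json
import time
import uuid

def example_chunk(model: str, content: str) -> dict:
    # Assumed OpenAI-style streaming chunk; only the `data: ...\n\n`
    # framing below actually appears in the diff.
    return {
        "id": f"{model}-{uuid.uuid4()}",
        "object": "chat.completion.chunk",
        "created": int(time.time()),
        "model": model,
        "choices": [
            {"index": 0, "delta": {"content": content}, "finish_reason": None}
        ],
    }

print(f"data: {json.dumps(example_chunk('demo-model', 'Hello'))}\n\n")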
@@ -165,6 +165,8 @@ async def generate_openai_chat_completion(form_data: OpenAIChatCompletionForm):
         return StreamingResponse(stream_content(), media_type="text/event-stream")
     else:
         res = get_response(user_message, messages=form_data.messages)
+        print(f"stream:false:{res}")
+
         message = ""
 
         if isinstance(res, str):
@@ -174,6 +176,8 @@ async def generate_openai_chat_completion(form_data: OpenAIChatCompletionForm):
             for stream in res:
                 message = f"{message}{stream}"
 
+        print(f"stream:false:{message}")
+
         return {
             "id": f"{form_data.model}-{str(uuid.uuid4())}",
             "object": "chat.completion",
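The non-streaming branch drains the generator into a single string and returns an OpenAI-style chat.completion object. The hunk cuts off after "object"; a plausible completion of that response shape, where everything below the cut follows the standard OpenAI format rather than this repo's code:

import time
import uuid

def completion_response(model: str, message: str) -> dict:
    # "id" and "object" match the hunk; the remaining fields are assumed
    # from the standard OpenAI chat.completion format.
    return {
        "id": f"{model}-{str(uuid.uuid4())}",
        "object": "chat.completion",
        "created": int(time.time()),
        "model": model,
        "choices": [
            {
                "index": 0,
                "message": {"role": "assistant", "content": message},
                "finish_reason": "stop",
            }
        ],
    }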
@@ -41,6 +41,4 @@ class Pipeline:
         query_engine = self.index.as_query_engine(streaming=True)
         response = query_engine.query(user_message)
 
-        print(response)
-
         return response.response_gen
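This Pipeline returns response_gen, the token generator LlamaIndex exposes when a query engine is built with streaming=True. A self-contained sketch of that pattern; the index construction is assumed, and the import path follows the modern llama_index.core layout, which may differ from the version this repo pinned:

from llama_index.core import SimpleDirectoryReader, VectorStoreIndex

documents = SimpleDirectoryReader("./data").load_data()
index = VectorStoreIndex.from_documents(documents)

query_engine = index.as_query_engine(streaming=True)
response = query_engine.query("What do these documents say?")

# response_gen yields the answer token by token, which is what the
# Pipeline above hands straight back to its caller.
for token in response.response_gen:
    print(token, end="", flush=True)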