Mirror of https://github.com/open-webui/pipelines
Synced 2025-05-13 17:10:45 +00:00
refac: concurrency
parent eaa4112f46
commit 6b4fba3309
main.py (22 lines changed)
@@ -1,5 +1,7 @@
 from fastapi import FastAPI, Request, Depends, status, HTTPException
 from fastapi.middleware.cors import CORSMiddleware
+
+from fastapi.concurrency import run_in_threadpool

 from starlette.responses import StreamingResponse, Response
 from pydantic import BaseModel, ConfigDict
@@ -116,12 +118,13 @@ async def generate_openai_chat_completion(form_data: OpenAIChatCompletionForm):
             detail=f"Model {form_data.model} not found",
         )

+    def job():

         get_response = PIPELINES[form_data.model]["module"].get_response

         if form_data.stream:

             def stream_content():
                 res = get_response(user_message, messages=form_data.messages)

                 if isinstance(res, str):
@@ -130,6 +133,7 @@ async def generate_openai_chat_completion(form_data: OpenAIChatCompletionForm):

                 elif isinstance(res, Generator):
                     for message in res:
+                        print(message)
                         message = stream_message_template(message)
                         yield f"data: {json.dumps(message)}\n\n"

@@ -139,7 +143,12 @@ async def generate_openai_chat_completion(form_data: OpenAIChatCompletionForm):
                 "created": int(time.time()),
                 "model": MODEL_ID,
                 "choices": [
-                    {"index": 0, "delta": {}, "logprobs": None, "finish_reason": "stop"}
+                    {
+                        "index": 0,
+                        "delta": {},
+                        "logprobs": None,
+                        "finish_reason": "stop",
+                    }
                 ],
             }

@@ -176,6 +185,15 @@ async def generate_openai_chat_completion(form_data: OpenAIChatCompletionForm):
             ],
         }

+    try:
+        return await run_in_threadpool(job)
+    except Exception as e:
+        print(e)
+        raise HTTPException(
+            status_code=500,
+            detail=f"{e}",
+        )
+

 @app.get("/")
 async def get_status():
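For context, the change follows FastAPI's standard threadpool-offload pattern: the synchronous pipeline call is wrapped in a local job() and awaited via run_in_threadpool, so blocking work runs on a worker thread instead of stalling the event loop. Below is a minimal, self-contained sketch of that pattern; the /demo endpoint and slow_pipeline helper are hypothetical stand-ins, not code from this repository.

import time

from fastapi import FastAPI, HTTPException
from fastapi.concurrency import run_in_threadpool

app = FastAPI()


def slow_pipeline(prompt: str) -> str:
    # Hypothetical stand-in for a synchronous pipeline call such as
    # get_response; time.sleep simulates blocking I/O or model inference.
    time.sleep(2)
    return f"echo: {prompt}"


@app.post("/demo")
async def demo(prompt: str):
    def job():
        # All blocking work stays inside job(), mirroring the commit.
        return slow_pipeline(prompt)

    try:
        # Awaiting run_in_threadpool yields control back to the event
        # loop while job() runs on a Starlette worker thread.
        return await run_in_threadpool(job)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

Run it with uvicorn and issue two concurrent requests: while one request sleeps inside the threadpool, the other is still served, which is the behavior the async endpoint would lose if it called slow_pipeline directly.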