mirror of
https://github.com/open-webui/pipelines
synced 2025-05-12 08:30:43 +00:00
feat: body param added
This commit is contained in:
parent
1fa58ecc1c
commit
ee4544d4f9
12
main.py
12
main.py
@ -129,7 +129,11 @@ async def generate_openai_chat_completion(form_data: OpenAIChatCompletionForm):
|
|||||||
if form_data.stream:
|
if form_data.stream:
|
||||||
|
|
||||||
def stream_content():
|
def stream_content():
|
||||||
res = get_response(user_message, messages=form_data.messages)
|
res = get_response(
|
||||||
|
user_message,
|
||||||
|
messages=form_data.messages,
|
||||||
|
body=form_data.model_dump_json(),
|
||||||
|
)
|
||||||
|
|
||||||
print(f"stream:true:{res}")
|
print(f"stream:true:{res}")
|
||||||
|
|
||||||
@ -164,7 +168,11 @@ async def generate_openai_chat_completion(form_data: OpenAIChatCompletionForm):
|
|||||||
|
|
||||||
return StreamingResponse(stream_content(), media_type="text/event-stream")
|
return StreamingResponse(stream_content(), media_type="text/event-stream")
|
||||||
else:
|
else:
|
||||||
res = get_response(user_message, messages=form_data.messages)
|
res = get_response(
|
||||||
|
user_message,
|
||||||
|
messages=form_data.messages,
|
||||||
|
body=form_data.model_dump_json(),
|
||||||
|
)
|
||||||
print(f"stream:false:{res}")
|
print(f"stream:false:{res}")
|
||||||
|
|
||||||
message = ""
|
message = ""
|
||||||
|
@ -79,7 +79,7 @@ class Pipeline:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
def get_response(
|
def get_response(
|
||||||
self, user_message: str, messages: List[OpenAIChatMessage]
|
self, user_message: str, messages: List[OpenAIChatMessage], body: dict
|
||||||
) -> Union[str, Generator]:
|
) -> Union[str, Generator]:
|
||||||
# This is where you can add your custom RAG pipeline.
|
# This is where you can add your custom RAG pipeline.
|
||||||
# Typically, you would retrieve relevant information from your knowledge base and synthesize it to generate a response.
|
# Typically, you would retrieve relevant information from your knowledge base and synthesize it to generate a response.
|
||||||
|
@ -70,7 +70,7 @@ class Pipeline:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
def get_response(
|
def get_response(
|
||||||
self, user_message: str, messages: List[OpenAIChatMessage]
|
self, user_message: str, messages: List[OpenAIChatMessage], body: dict
|
||||||
) -> Union[str, Generator]:
|
) -> Union[str, Generator]:
|
||||||
# This is where you can add your custom RAG pipeline.
|
# This is where you can add your custom RAG pipeline.
|
||||||
# Typically, you would retrieve relevant information from your knowledge base and synthesize it to generate a response.
|
# Typically, you would retrieve relevant information from your knowledge base and synthesize it to generate a response.
|
||||||
|
@ -30,7 +30,7 @@ class Pipeline:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
def get_response(
|
def get_response(
|
||||||
self, user_message: str, messages: List[OpenAIChatMessage]
|
self, user_message: str, messages: List[OpenAIChatMessage], body: dict
|
||||||
) -> Union[str, Generator]:
|
) -> Union[str, Generator]:
|
||||||
# This is where you can add your custom RAG pipeline.
|
# This is where you can add your custom RAG pipeline.
|
||||||
# Typically, you would retrieve relevant information from your knowledge base and synthesize it to generate a response.
|
# Typically, you would retrieve relevant information from your knowledge base and synthesize it to generate a response.
|
||||||
|
@ -25,7 +25,7 @@ class Pipeline:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
def get_response(
|
def get_response(
|
||||||
self, user_message: str, messages: List[OpenAIChatMessage]
|
self, user_message: str, messages: List[OpenAIChatMessage], body: dict
|
||||||
) -> Union[str, Generator]:
|
) -> Union[str, Generator]:
|
||||||
# This is where you can add your custom RAG pipeline.
|
# This is where you can add your custom RAG pipeline.
|
||||||
# Typically, you would retrieve relevant information from your knowledge base and synthesize it to generate a response.
|
# Typically, you would retrieve relevant information from your knowledge base and synthesize it to generate a response.
|
||||||
|
49
pipelines/examples/openai_pipeline.py
Normal file
49
pipelines/examples/openai_pipeline.py
Normal file
@ -0,0 +1,49 @@
|
|||||||
|
from typing import List, Union, Generator
from schemas import OpenAIChatMessage
import requests


class Pipeline:
    """Example pipeline that proxies requests to the OpenAI chat-completions API."""

    def __init__(self):
        pass

    async def on_startup(self):
        # This function is called when the server is started.
        print(f"on_startup:{__name__}")
        pass

    async def on_shutdown(self):
        # This function is called when the server is stopped.
        print(f"on_shutdown:{__name__}")
        pass

    def get_response(
        self, user_message: str, messages: List[OpenAIChatMessage], body: dict
    ) -> Union[str, Generator]:
        """Forward the chat request to OpenAI and return the response.

        Args:
            user_message: The latest user message text.
            messages: Full chat history as OpenAIChatMessage objects.
            body: The raw request body (JSON string / dict) forwarded verbatim
                to the OpenAI API, so stream/model/options are preserved.

        Returns:
            A generator of raw SSE byte chunks when the upstream response is
            an event stream, otherwise the assistant message content string.

        Raises:
            requests.HTTPError: If the OpenAI API returns an error status.
        """
        # This is where you can add your custom pipelines like RAG.'
        print(f"get_response:{__name__}")

        print(messages)
        print(user_message)
        # NOTE(review): hard-coded placeholder key — replace with your key or
        # load it from configuration/environment before use.
        OPENAI_API_KEY = "your-api-key-here"

        headers = {}
        headers["Authorization"] = f"Bearer {OPENAI_API_KEY}"
        headers["Content-Type"] = "application/json"

        r = requests.request(
            method="POST",
            # Fix: POST to the chat-completions endpoint; the bare /v1 API
            # root does not accept completion requests.
            url="https://api.openai.com/v1/chat/completions",
            data=body,
            headers=headers,
            stream=True,
        )

        r.raise_for_status()

        # Check if response is SSE
        if "text/event-stream" in r.headers.get("Content-Type", ""):
            # Streaming: hand the raw SSE chunks straight through.
            return r.iter_content(chunk_size=8192)
        else:
            response_data = r.json()
            # Fix: chat completions return the reply under
            # choices[0].message.content, not the legacy choices[0].text.
            return f"{response_data['choices'][0]['message']['content']}"
|
@ -17,12 +17,13 @@ class Pipeline:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
def get_response(
|
def get_response(
|
||||||
self, user_message: str, messages: List[OpenAIChatMessage]
|
self, user_message: str, messages: List[OpenAIChatMessage], body: dict
|
||||||
) -> Union[str, Generator]:
|
) -> Union[str, Generator]:
|
||||||
# This is where you can add your custom pipelines like RAG.'
|
# This is where you can add your custom pipelines like RAG.'
|
||||||
print(f"get_response:{__name__}")
|
print(f"get_response:{__name__}")
|
||||||
|
|
||||||
print(messages)
|
print(messages)
|
||||||
print(user_message)
|
print(user_message)
|
||||||
|
print(body)
|
||||||
|
|
||||||
return f"{__name__} response to: {user_message}"
|
return f"{__name__} response to: {user_message}"
|
||||||
|
@ -17,7 +17,7 @@ class Pipeline:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
def get_response(
|
def get_response(
|
||||||
self, user_message: str, messages: List[OpenAIChatMessage]
|
self, user_message: str, messages: List[OpenAIChatMessage], body: dict
|
||||||
) -> Union[str, Generator]:
|
) -> Union[str, Generator]:
|
||||||
# This is where you can add your custom pipelines like RAG.'
|
# This is where you can add your custom pipelines like RAG.'
|
||||||
print(f"get_response:{__name__}")
|
print(f"get_response:{__name__}")
|
||||||
|
Loading…
Reference in New Issue
Block a user