Mirror of https://github.com/open-webui/pipelines, synced 2025-05-13 00:50:44 +00:00
feat: valves

parent 1752e2af67
commit 91f58c52e5
main.py (40 changed lines)

@@ -13,7 +13,7 @@ import json
 import uuid
 
 from utils import get_last_user_message, stream_message_template
-from schemas import OpenAIChatCompletionForm
+from schemas import ValveForm, OpenAIChatCompletionForm
 
 import os
 import importlib.util

@@ -79,11 +79,12 @@ def on_startup():
         PIPELINES[loaded_module.__name__] = {
             "module": pipeline_id,
             "id": pipeline_id,
-            "name": (
-                pipeline.name
-                if hasattr(pipeline, "name")
-                else loaded_module.__name__
-            ),
+            "name": (pipeline.name if hasattr(pipeline, "name") else pipeline_id),
+            "valve": hasattr(pipeline, "valve"),
+            "pipelines": (
+                pipeline.pipelines if hasattr(pipeline, "pipelines") else []
+            ),
+            "priority": pipeline.priority if hasattr(pipeline, "priority") else 0,
         }
 
 
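For orientation, here is an illustrative sketch (not part of the commit) of what one PIPELINES registry entry looks like after this hunk, assuming the valve_pipeline example added later in this commit has been loaded:

# Illustrative only; values mirror the valve_pipeline example below.
PIPELINES["valve_pipeline"] = {
    "module": "valve_pipeline",
    "id": "valve_pipeline",
    "name": "Valve",
    "valve": True,  # hasattr(pipeline, "valve")
    "pipelines": [{"id": "llama3:latest"}],
    "priority": 0,
}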
@@ -146,23 +147,44 @@ async def get_models():
                 "object": "model",
                 "created": int(time.time()),
                 "owned_by": "openai",
-                "pipeline": True,
+                "pipeline": {
+                    "type": "pipeline" if not pipeline.get("valve") else "valve",
+                    "pipelines": pipeline.get("pipelines", []),
+                    "priority": pipeline.get("priority", 0),
+                },
             }
             for pipeline in PIPELINES.values()
         ]
     }
 
 
+@app.post("/valve")
+@app.post("/v1/valve")
+async def valve(form_data: ValveForm):
+    if form_data.model not in app.state.PIPELINES or not app.state.PIPELINES[
+        form_data.model
+    ].get("valve", False):
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"Valve {form_data.model} not found",
+        )
+
+    pipeline = PIPELINE_MODULES[form_data.model]
+    return await pipeline.control_valve(form_data.body)
+
+
 @app.post("/chat/completions")
 @app.post("/v1/chat/completions")
 async def generate_openai_chat_completion(form_data: OpenAIChatCompletionForm):
     user_message = get_last_user_message(form_data.messages)
     messages = [message.model_dump() for message in form_data.messages]
 
-    if form_data.model not in app.state.PIPELINES:
-        return HTTPException(
+    if form_data.model not in app.state.PIPELINES or app.state.PIPELINES[
+        form_data.model
+    ].get("valve", False):
+        raise HTTPException(
             status_code=status.HTTP_404_NOT_FOUND,
-            detail=f"Model {form_data.model} not found",
+            detail=f"Pipeline {form_data.model} not found",
         )
 
     def job():
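As a usage sketch (not part of the commit), the new valve route could be exercised with a plain HTTP client. The host and port are assumptions about a local deployment; the payload shape follows the ValveForm schema added below:

# Hypothetical client call against the new /valve route.
import requests

response = requests.post(
    "http://localhost:9099/valve",  # port is an assumption; match your server
    json={
        "model": "valve_pipeline",  # must be registered with valve=True
        "body": {"messages": [{"role": "user", "content": "Hello"}]},
    },
)
print(response.json())  # whatever control_valve returned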
pipelines/examples/valve_pipeline.py (new file, 37 lines)

@@ -0,0 +1,37 @@
+from typing import List, Union, Generator, Iterator
+from schemas import OpenAIChatMessage
+
+
+class Pipeline:
+    def __init__(self):
+        # Pipeline valves are only compatible with Open WebUI.
+        # You can think of a valve pipeline as middleware that edits the form data before it is sent to the OpenAI API.
+        self.valve = True
+        self.id = "valve_pipeline"
+        self.name = "Valve"
+
+        # Assign a priority level to the valve pipeline.
+        # The priority level determines the order in which valve pipelines are executed.
+        # The lower the number, the higher the priority.
+        self.priority = 0
+
+        # List the target pipelines (models) that this valve will be connected to.
+        self.pipelines = [
+            {"id": "llama3:latest"},
+        ]
+        pass
+
+    async def on_startup(self):
+        # This function is called when the server is started.
+        print(f"on_startup:{__name__}")
+        pass
+
+    async def on_shutdown(self):
+        # This function is called when the server is stopped.
+        print(f"on_shutdown:{__name__}")
+        pass
+
+    async def control_valve(self, body: dict) -> dict:
+        print(f"get_response:{__name__}")
+        print(body)
+        return body
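The example's control_valve just logs and returns the body unchanged. A valve that actually edits the request might look like the following sketch (the system-prompt injection is an illustration, not part of the commit, and the OpenAI-style "messages" shape is an assumption):

# Illustrative control_valve that prepends a system message.
async def control_valve(self, body: dict) -> dict:
    messages = body.get("messages", [])
    system = {"role": "system", "content": "Answer concisely."}
    # Avoid stacking duplicates if the valve runs more than once.
    if not messages or messages[0].get("role") != "system":
        body["messages"] = [system, *messages]
    return body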
pipelines/valve_pipeline.py (new file, 37 lines)

@@ -0,0 +1,37 @@
(identical in content to pipelines/examples/valve_pipeline.py above)
schemas.py

@@ -15,3 +15,9 @@ class OpenAIChatCompletionForm(BaseModel):
     messages: List[OpenAIChatMessage]
 
     model_config = ConfigDict(extra="allow")
+
+
+class ValveForm(BaseModel):
+    model: str
+    body: dict
+    model_config = ConfigDict(extra="allow")
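A quick sketch of how ValveForm behaves (illustrative, not part of the commit): extra="allow" means unrecognized fields are kept rather than rejected, so the valve payload can carry arbitrary extras.

from pydantic import BaseModel, ConfigDict

class ValveForm(BaseModel):
    model: str
    body: dict
    model_config = ConfigDict(extra="allow")

form = ValveForm(model="valve_pipeline", body={"temperature": 0.2}, note="kept")
print(form.model, form.body)  # "note" is retained as an extra field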