mirror of
https://github.com/open-webui/pipelines
synced 2025-05-14 01:20:48 +00:00
feat: blueprints
parent be5f596d2a
commit 313f1a7592
172 blueprints/function_calling_blueprint.py (new file)
@@ -0,0 +1,172 @@
+from typing import List, Optional
+from pydantic import BaseModel
+from schemas import OpenAIChatMessage
+import os
+import requests
+import json
+
+from utils.main import (
+    get_last_user_message,
+    add_or_update_system_message,
+    get_tools_specs,
+)
+
+
+class Pipeline:
+    class Valves(BaseModel):
+        # List target pipeline ids (models) that this filter will be connected to.
+        # If you want to connect this filter to all pipelines, you can set pipelines to ["*"]
+        pipelines: List[str] = []
+
+        # Assign a priority level to the filter pipeline.
+        # The priority level determines the order in which the filter pipelines are executed.
+        # The lower the number, the higher the priority.
+        priority: int = 0
+
+        # Valves for function calling
+        OPENAI_API_BASE_URL: str
+        OPENAI_API_KEY: str
+        TASK_MODEL: str
+        TEMPLATE: str
+
+    def __init__(self):
+        # Pipeline filters are only compatible with Open WebUI.
+        # You can think of a filter pipeline as middleware that edits the form data
+        # before it is sent to the OpenAI API.
+        self.type = "filter"
+
+        # Assign a unique identifier to the pipeline.
+        # The identifier must be unique across all pipelines.
+        # The identifier must be an alphanumeric string that can include underscores
+        # or hyphens. It cannot contain spaces, special characters, slashes, or backslashes.
+        self.id = "function_calling_blueprint"
+        self.name = "Function Calling Blueprint"
+
+        # Initialize valves
+        self.valves = self.Valves(
+            **{
+                "pipelines": ["*"],  # Connect to all pipelines
+                "OPENAI_API_BASE_URL": os.getenv(
+                    "OPENAI_API_BASE_URL", "https://api.openai.com/v1"
+                ),
+                "OPENAI_API_KEY": os.getenv("OPENAI_API_KEY", "YOUR_OPENAI_API_KEY"),
+                "TASK_MODEL": os.getenv("TASK_MODEL", "gpt-3.5-turbo"),
+                "TEMPLATE": """Use the following context as your learned knowledge, inside <context></context> XML tags.
+<context>
+    {{CONTEXT}}
+</context>
+
+When answering the user:
+- If you don't know, just say that you don't know.
+- If you are not sure, ask for clarification.
+Avoid mentioning that you obtained the information from the context.
+And answer according to the language of the user's question.""",
+            }
+        )
+
+    async def on_startup(self):
+        # This function is called when the server is started.
+        print(f"on_startup:{__name__}")
+        pass
+
+    async def on_shutdown(self):
+        # This function is called when the server is stopped.
+        print(f"on_shutdown:{__name__}")
+        pass
+
+    async def inlet(self, body: dict, user: Optional[dict] = None) -> dict:
+        # If title generation is requested, skip the function calling filter
+        if body.get("title", False):
+            return body
+
+        print(f"pipe:{__name__}")
+        print(user)
+
+        # Get the last user message
+        user_message = get_last_user_message(body["messages"])
+
+        # Get the tools specs. Subclasses of this blueprint are expected to
+        # assign a Tools instance to self.tools in their __init__.
+        tools_specs = get_tools_specs(self.tools)
+
+        # System prompt for function calling
+        fc_system_prompt = (
+            f"Tools: {json.dumps(tools_specs, indent=2)}"
+            + """
+If a function tool doesn't match the query, return an empty string. Else, pick a function tool, fill in the parameters from the function tool's schema, and return it in the format { "name": "functionName", "parameters": { "key": "value" } }. Only pick a function if the user asks. Only return the object. Do not return any other text.
+"""
+        )
+
+        r = None
+        try:
+            # Call the OpenAI API to get the function response
+            r = requests.post(
+                url=f"{self.valves.OPENAI_API_BASE_URL}/chat/completions",
+                json={
+                    "model": self.valves.TASK_MODEL,
+                    "messages": [
+                        {
+                            "role": "system",
+                            "content": fc_system_prompt,
+                        },
+                        {
+                            "role": "user",
+                            "content": "History:\n"
+                            + "\n".join(
+                                [
+                                    f"{message['role']}: {message['content']}"
+                                    for message in body["messages"][::-1][:4]
+                                ]
+                            )
+                            + f"\nQuery: {user_message}",
+                        },
+                    ],
+                    # TODO: dynamically add response_format?
+                    # "response_format": {"type": "json_object"},
+                },
+                headers={
+                    "Authorization": f"Bearer {self.valves.OPENAI_API_KEY}",
+                    "Content-Type": "application/json",
+                },
+                stream=False,
+            )
+            r.raise_for_status()
+
+            response = r.json()
+            content = response["choices"][0]["message"]["content"]
+
+            # Parse the function response
+            if content != "":
+                result = json.loads(content)
+                print(result)
+
+                # Call the function
+                if "name" in result:
+                    function = getattr(self.tools, result["name"])
+                    function_result = None
+                    try:
+                        function_result = function(**result["parameters"])
+                    except Exception as e:
+                        print(e)
+
+                    # Add the function result to the system prompt
+                    if function_result:
+                        system_prompt = self.valves.TEMPLATE.replace(
+                            "{{CONTEXT}}", function_result
+                        )
+
+                        print(system_prompt)
+                        messages = add_or_update_system_message(
+                            system_prompt, body["messages"]
+                        )
+
+                        # Return the updated messages
+                        return {**body, "messages": messages}
+
+        except Exception as e:
+            print(f"Error: {e}")
+
+            if r:
+                try:
+                    print(r.json())
+                except Exception:
+                    pass
+
+        return body
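For orientation, here is a minimal sketch of how a pipeline might build on this blueprint. The blueprint's inlet() reads self.tools but never sets it, so a subclass must assign one; the tool method, id, and name below are illustrative, not part of this commit.

from datetime import datetime, timezone

from blueprints.function_calling_blueprint import Pipeline as FunctionCallingBlueprint


class Pipeline(FunctionCallingBlueprint):
    class Tools:
        def __init__(self, pipeline) -> None:
            # Back-reference lets tool methods read the pipeline's valves.
            self.pipeline = pipeline

        def get_current_utc_time(self) -> str:
            """Returns the current UTC time as an ISO-8601 string."""
            return datetime.now(timezone.utc).isoformat()

    def __init__(self):
        super().__init__()
        self.id = "utc_time_pipeline"  # illustrative id
        self.name = "UTC Time Pipeline"  # illustrative name
        self.tools = self.Tools(self)  # required: the blueprint's inlet() reads this

Given a matching query, the task model is expected to reply with an object such as { "name": "get_current_utc_time", "parameters": {} }, which inlet() parses with json.loads and dispatches via getattr(self.tools, ...).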
@@ -1,69 +1,16 @@
-from typing import List, Optional
-from pydantic import BaseModel
-from schemas import OpenAIChatMessage
 import os
 import requests
-import json
-
-from utils.main import (
-    get_last_user_message,
-    add_or_update_system_message,
-    get_function_specs,
-)
-from typing import Literal
+from typing import Literal, List, Optional
+from blueprints.function_calling_blueprint import Pipeline as FunctionCallingBlueprint
 
 
-class Pipeline:
-    def __init__(self):
-        # Pipeline filters are only compatible with Open WebUI
-        # You can think of filter pipeline as a middleware that can be used to edit the form data before it is sent to the OpenAI API.
-        self.type = "filter"
-
-        # Assign a unique identifier to the pipeline.
-        # The identifier must be unique across all pipelines.
-        # The identifier must be an alphanumeric string that can include underscores or hyphens. It cannot contain spaces, special characters, slashes, or backslashes.
-        self.id = "function_calling_filter_pipeline"
-        self.name = "Function Calling Filter"
-
-        class Valves(BaseModel):
-            # List target pipeline ids (models) that this filter will be connected to.
-            # If you want to connect this filter to all pipelines, you can set pipelines to ["*"]
-            pipelines: List[str] = []
-
-            # Assign a priority level to the filter pipeline.
-            # The priority level determines the order in which the filter pipelines are executed.
-            # The lower the number, the higher the priority.
-            priority: int = 0
-
-            # Valves for function calling
-            OPENAI_API_BASE_URL: str
-            OPENAI_API_KEY: str
-            TASK_MODEL: str
-            TEMPLATE: str
-
-            OPENWEATHERMAP_API_KEY: str = ""
-
-        # Initialize valves
-        self.valves = Valves(
-            **{
-                "pipelines": ["*"],  # Connect to all pipelines
-                "OPENAI_API_BASE_URL": "https://api.openai.com/v1",
-                "OPENAI_API_KEY": os.getenv("OPENAI_API_KEY", "YOUR_OPENAI_API_KEY"),
-                "TASK_MODEL": "gpt-3.5-turbo",
-                "TEMPLATE": """Use the following context as your learned knowledge, inside <context></context> XML tags.
-<context>
-    {{CONTEXT}}
-</context>
-
-When answer to user:
-- If you don't know, just say that you don't know.
-- If you don't know when you are not sure, ask for clarification.
-Avoid mentioning that you obtained the information from the context.
-And answer according to the language of the user's question.""",
-            }
-        )
-
-        class Functions:
-            def __init__(self, pipeline) -> None:
-                self.pipeline = pipeline
+class Pipeline(FunctionCallingBlueprint):
+    class Valves(FunctionCallingBlueprint.Valves):
+        # Add your custom parameters here
+        OPENWEATHERMAP_API_KEY: str = ""
+        pass
+
+    class Tools:
+        def __init__(self, pipeline) -> None:
+            self.pipeline = pipeline
@@ -119,113 +66,15 @@ And answer according to the language of the user's question.""",
                 print(e)
                 return "Invalid equation"
 
-        self.functions = Functions(self)
-
-    async def on_startup(self):
-        # This function is called when the server is started.
-        print(f"on_startup:{__name__}")
-        pass
-
-    async def on_shutdown(self):
-        # This function is called when the server is stopped.
-        print(f"on_shutdown:{__name__}")
-        pass
-
-    async def inlet(self, body: dict, user: Optional[dict] = None) -> dict:
-        # If title generation is requested, skip the function calling filter
-        if body.get("title", False):
-            return body
-
-        print(f"pipe:{__name__}")
-        print(user)
-
-        # Get the last user message
-        user_message = get_last_user_message(body["messages"])
-
-        # Get the function specs
-        function_specs = get_function_specs(self.functions)
-
-        # System prompt for function calling
-        fc_system_prompt = (
-            f"Functions: {json.dumps(function_specs, indent=2)}"
-            + """
-If a function doesn't match the query, return an empty string. Else, pick a function, fill in the parameters from the function's schema, and return it in the format { "name": \"functionName\", "parameters": { "key": "value" } }. Only pick a function if the user asks. Only return the object. Do not return any other text."
-"""
-        )
-
-        r = None
-        try:
-            # Call the OpenAI API to get the function response
-            r = requests.post(
-                url=f"{self.valves.OPENAI_API_BASE_URL}/chat/completions",
-                json={
-                    "model": self.valves.TASK_MODEL,
-                    "messages": [
-                        {
-                            "role": "system",
-                            "content": fc_system_prompt,
-                        },
-                        {
-                            "role": "user",
-                            "content": "History:\n"
-                            + "\n".join(
-                                [
-                                    f"{message['role']}: {message['content']}"
-                                    for message in body["messages"][::-1][:4]
-                                ]
-                            )
-                            + f"Query: {user_message}",
-                        },
-                    ],
-                    # TODO: dynamically add response_format?
-                    # "response_format": {"type": "json_object"},
-                },
-                headers={
-                    "Authorization": f"Bearer {self.valves.OPENAI_API_KEY}",
-                    "Content-Type": "application/json",
-                },
-                stream=False,
-            )
-            r.raise_for_status()
-
-            response = r.json()
-            content = response["choices"][0]["message"]["content"]
-
-            # Parse the function response
-            if content != "":
-                result = json.loads(content)
-                print(result)
-
-                # Call the function
-                if "name" in result:
-                    function = getattr(self.functions, result["name"])
-                    function_result = None
-                    try:
-                        function_result = function(**result["parameters"])
-                    except Exception as e:
-                        print(e)
-
-                    # Add the function result to the system prompt
-                    if function_result:
-                        system_prompt = self.valves.TEMPLATE.replace(
-                            "{{CONTEXT}}", function_result
-                        )
-
-                        print(system_prompt)
-                        messages = add_or_update_system_message(
-                            system_prompt, body["messages"]
-                        )
-
-                        # Return the updated messages
-                        return {**body, "messages": messages}
-
-        except Exception as e:
-            print(f"Error: {e}")
-
-            if r:
-                try:
-                    print(r.json())
-                except:
-                    pass
-
-        return body
+    def __init__(self):
+        super().__init__()
+        self.id = "my_tools_pipeline"
+        self.name = "My Tools Pipeline"
+        self.valves = self.Valves(
+            **{
+                **self.valves.model_dump(),
+                "pipelines": ["*"],  # Connect to all pipelines
+                "OPENWEATHERMAP_API_KEY": os.getenv("OPENWEATHERMAP_API_KEY", ""),
+            }
+        )
+        self.tools = self.Tools(self)
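The **self.valves.model_dump() spread above is what lets the subclass keep the blueprint's inherited valves (OPENAI_API_BASE_URL, TASK_MODEL, and so on) while layering its own keys on top. A self-contained sketch of that merge pattern, with illustrative class names:

from pydantic import BaseModel


class BaseValves(BaseModel):
    TASK_MODEL: str = "gpt-3.5-turbo"


class WeatherValves(BaseValves):
    OPENWEATHERMAP_API_KEY: str = ""


base = BaseValves()
# Spread the inherited values first, then apply subclass-specific overrides.
merged = WeatherValves(**{**base.model_dump(), "OPENWEATHERMAP_API_KEY": "abc123"})
print(merged.TASK_MODEL)              # gpt-3.5-turbo (inherited)
print(merged.OPENWEATHERMAP_API_KEY)  # abc123 (override)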
1 main.py
@@ -203,7 +203,6 @@ async def get_models():
     Returns the available pipelines
     """
     app.state.PIPELINES = get_all_pipelines()
 
     return {
         "data": [
             {
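For reference, get_models() serves the pipeline list under a "data" key. A quick way to inspect it against a running server; the host, port, and auth header are assumptions (9099 is the project's usual default), so adjust for your deployment:

import requests

headers = {"Authorization": "Bearer YOUR_PIPELINES_API_KEY"}  # if the server enforces auth
r = requests.get("http://localhost:9099/models", headers=headers)
r.raise_for_status()
for pipeline in r.json()["data"]:
    print(pipeline["id"], pipeline.get("name"))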
utils/main.py
@@ -80,12 +80,11 @@ def doc_to_dict(docstring):
     return ret_dict
 
 
-def get_function_specs(functions) -> List[dict]:
+def get_tools_specs(tools) -> List[dict]:
     function_list = [
-        {"name": func, "function": getattr(functions, func)}
-        for func in dir(functions)
-        if callable(getattr(functions, func)) and not func.startswith("__")
+        {"name": func, "function": getattr(tools, func)}
+        for func in dir(tools)
+        if callable(getattr(tools, func)) and not func.startswith("__")
     ]
 
     specs = []
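The rename tracks the blueprint's move from a Functions container to a Tools container. As a rough, assumption-laden sketch of what such a helper computes (the repository's real version also parses each method's docstring via doc_to_dict; this condensed stand-in is not the exact implementation):

import inspect
from typing import List


def get_tools_specs_sketch(tools) -> List[dict]:
    # Enumerate public callables on the tools container and derive a crude
    # function-calling spec from each one's signature and docstring.
    specs = []
    for name in dir(tools):
        func = getattr(tools, name)
        if callable(func) and not name.startswith("__"):
            specs.append(
                {
                    "name": name,
                    "description": inspect.getdoc(func) or "",
                    "parameters": list(inspect.signature(func).parameters),
                }
            )
    return specs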