Merge branch 'open-webui:main' into main

Justin 2024-10-21 09:43:44 -04:00 committed by GitHub
commit d6c025e976
2 changed files with 56 additions and 42 deletions

View File

@@ -11,6 +11,16 @@ from utils.pipelines.main import (
     get_tools_specs,
 )
 
+# System prompt for function calling
+DEFAULT_SYSTEM_PROMPT = (
+    """Tools: {}
+If a function tool doesn't match the query, return an empty string. Else, pick a
+function tool, fill in the parameters from the function tool's schema, and
+return it in the format {{ "name": \"functionName\", "parameters": {{ "key":
+"value" }} }}. Only pick a function if the user asks. Only return the object. Do not return any other text."
+"""
+)
+
 
 class Pipeline:
     class Valves(BaseModel):
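
A minimal sketch of how the new DEFAULT_SYSTEM_PROMPT renders once str.format() fills in the tools spec; the template is abridged from the hunk above and the spec is hypothetical. The doubled braces {{ }} come out as literal braces, so the example object in the instructions survives formatting intact.

import json

# Abridged copy of the template added above; only the placeholder behaviour matters here.
DEFAULT_SYSTEM_PROMPT = """Tools: {}
Return {{ "name": "functionName", "parameters": {{ "key": "value" }} }} or an empty string."""

# Hypothetical spec, standing in for get_tools_specs(self.tools).
tools_specs = [{"name": "get_weather", "parameters": {"city": {"type": "string"}}}]

print(DEFAULT_SYSTEM_PROMPT.format(json.dumps(tools_specs, indent=2)))
# The single {} placeholder receives the JSON spec; the {{ }} pairs print as literal braces.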
@@ -29,7 +39,7 @@ class Pipeline:
         TASK_MODEL: str
         TEMPLATE: str
 
-    def __init__(self):
+    def __init__(self, prompt: str | None = None) -> None:
         # Pipeline filters are only compatible with Open WebUI
         # You can think of filter pipeline as a middleware that can be used to edit the form data before it is sent to the OpenAI API.
         self.type = "filter"
@@ -40,6 +50,8 @@ class Pipeline:
         # The identifier must be an alphanumeric string that can include underscores or hyphens. It cannot contain spaces, special characters, slashes, or backslashes.
         # self.id = "function_calling_blueprint"
         self.name = "Function Calling Blueprint"
+        self.prompt = prompt or DEFAULT_SYSTEM_PROMPT
+        self.tools: object = None
 
         # Initialize valves
         self.valves = self.Valves(
@@ -87,14 +99,45 @@ And answer according to the language of the user's question.""",
         # Get the tools specs
         tools_specs = get_tools_specs(self.tools)
 
-        # System prompt for function calling
-        fc_system_prompt = (
-            f"Tools: {json.dumps(tools_specs, indent=2)}"
-            + """
-If a function tool doesn't match the query, return an empty string. Else, pick a function tool, fill in the parameters from the function tool's schema, and return it in the format { "name": \"functionName\", "parameters": { "key": "value" } }. Only pick a function if the user asks. Only return the object. Do not return any other text."
-"""
-        )
+        prompt = self.prompt.format(json.dumps(tools_specs, indent=2))
+        content = "History:\n" + "\n".join(
+            [
+                f"{message['role']}: {message['content']}"
+                for message in body["messages"][::-1][:4]
+            ]
+        ) + f"Query: {user_message}"
+
+        result = self.run_completion(prompt, content)
+        messages = self.call_function(result, body["messages"])
+        return {**body, "messages": messages}
+
+    # Call the function
+    def call_function(self, result, messages: list[dict]) -> list[dict]:
+        if "name" not in result:
+            return messages
+
+        function = getattr(self.tools, result["name"])
+        function_result = None
+        try:
+            function_result = function(**result["parameters"])
+        except Exception as e:
+            print(e)
+
+        # Add the function result to the system prompt
+        if function_result:
+            system_prompt = self.valves.TEMPLATE.replace(
+                "{{CONTEXT}}", function_result
+            )
+            messages = add_or_update_system_message(
+                system_prompt, messages
+            )
+
+        # Return the updated messages
+        return messages
+
+    def run_completion(self, system_prompt: str, content: str) -> dict:
         r = None
         try:
             # Call the OpenAI API to get the function response
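
A standalone sketch of the dispatch the new call_function performs; the ToyTools class and the hand-written reply dict are hypothetical stand-ins for self.tools and the parsed model output.

class ToyTools:
    def get_weather(self, city: str) -> str:
        return f"Sunny in {city}"

tools = ToyTools()
result = {"name": "get_weather", "parameters": {"city": "Oslo"}}  # stand-in for run_completion()'s parsed reply

if "name" in result:
    function = getattr(tools, result["name"])            # resolve the tool method by name
    function_result = function(**result["parameters"])   # call it with the model-supplied arguments
    print(function_result)                                # Sunny in Oslo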
@@ -105,18 +148,11 @@ If a function tool doesn't match the query, return an empty string. Else, pick a
                     "messages": [
                         {
                             "role": "system",
-                            "content": fc_system_prompt,
+                            "content": system_prompt,
                         },
                         {
                             "role": "user",
-                            "content": "History:\n"
-                            + "\n".join(
-                                [
-                                    f"{message['role']}: {message['content']}"
-                                    for message in body["messages"][::-1][:4]
-                                ]
-                            )
-                            + f"Query: {user_message}",
+                            "content": content,
                         },
                     ],
                     # TODO: dynamically add response_format?
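
For reference, a small sketch of the content string the inlet now passes as the user turn; the history below is hypothetical. The [::-1][:4] slice keeps at most four messages, newest first, ahead of the query line.

body = {"messages": [
    {"role": "user", "content": "What's the weather?"},
    {"role": "assistant", "content": "Which city?"},
    {"role": "user", "content": "Oslo"},
]}
user_message = "Oslo"

content = "History:\n" + "\n".join(
    [f"{m['role']}: {m['content']}" for m in body["messages"][::-1][:4]]
) + f"Query: {user_message}"
print(content)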
@@ -137,29 +173,7 @@ If a function tool doesn't match the query, return an empty string. Else, pick a
             if content != "":
                 result = json.loads(content)
                 print(result)
+                return result
 
-                # Call the function
-                if "name" in result:
-                    function = getattr(self.tools, result["name"])
-                    function_result = None
-                    try:
-                        function_result = function(**result["parameters"])
-                    except Exception as e:
-                        print(e)
-
-                    # Add the function result to the system prompt
-                    if function_result:
-                        system_prompt = self.valves.TEMPLATE.replace(
-                            "{{CONTEXT}}", function_result
-                        )
-                        print(system_prompt)
-
-                        messages = add_or_update_system_message(
-                            system_prompt, body["messages"]
-                        )
-
-                        # Return the updated messages
-                        return {**body, "messages": messages}
 
         except Exception as e:
             print(f"Error: {e}")
@@ -170,4 +184,4 @@ If a function tool doesn't match the query, return an empty string. Else, pick a
             except:
                 pass
 
-        return body
+        return {}

View File

@@ -62,7 +62,7 @@ def pop_system_message(messages: List[dict]) -> Tuple[dict, List[dict]]:
     return get_system_message(messages), remove_system_message(messages)
 
 
-def add_or_update_system_message(content: str, messages: List[dict]):
+def add_or_update_system_message(content: str, messages: List[dict]) -> List[dict]:
     """
     Adds a new system message at the beginning of the messages list
     or updates the existing system message at the beginning.
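
A usage sketch of the behaviour the docstring describes, matching the new return annotation; it assumes the function is importable from utils.pipelines.main as in the blueprint above.

from utils.pipelines.main import add_or_update_system_message

messages = [{"role": "user", "content": "Hi"}]
messages = add_or_update_system_message("You are terse.", messages)
# Expected shape per the docstring: the system message sits at index 0.
# [{"role": "system", "content": "You are terse."}, {"role": "user", "content": "Hi"}]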