open-webui: mirror of https://github.com/open-webui/open-webui
Fix
Logging cleanup: removed extraneous print() calls (including some that revealed message content), improved debug logging, and added chat_id to the task metadata (helpful for logging/tracking in some pipe functions).
parent 9412f51c19
commit d24c21b40f
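The chat_id addition means the task payloads built below (title, tags, query generation) now carry the originating chat id inside their metadata. A minimal sketch of how a pipe function might use it for logging, assuming Open WebUI passes that metadata to pipes through a __metadata__ parameter (an assumption for illustration, not something this commit shows):

import logging

log = logging.getLogger(__name__)


class Pipe:
    def pipe(self, body: dict, __metadata__: dict = {}) -> str:
        # chat_id is None for requests that are not tied to a chat.
        chat_id = (__metadata__ or {}).get("chat_id")
        task = (__metadata__ or {}).get("task")
        log.debug(f"pipe invoked for task={task}, chat_id={chat_id}")
        return "ok"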
@@ -585,8 +585,6 @@ async def generate_chat_completion(
 # Convert the modified body back to JSON
 payload = json.dumps(payload)

-log.debug(payload)
-
 headers = {}
 headers["Authorization"] = f"Bearer {key}"
 headers["Content-Type"] = "application/json"
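The deleted log.debug(payload) dumped the full serialized request, user messages included, into the log. If some payload visibility is still wanted at debug level, a redacted dump is one option; this helper is a hypothetical sketch, not part of the commit:

import json
import logging

log = logging.getLogger(__name__)

SENSITIVE_KEYS = {"messages", "prompt"}


def log_redacted_payload(payload: str) -> None:
    # Keep model/options/stream etc., drop anything that carries user text.
    safe = {k: v for k, v in json.loads(payload).items() if k not in SENSITIVE_KEYS}
    log.debug("outgoing request (redacted): %s", json.dumps(safe, default=str))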
@@ -68,6 +68,7 @@ from open_webui.config import (
 )
 from open_webui.env import (
 ENV,
+SRC_LOG_LEVELS,
 WEBUI_AUTH_TRUSTED_EMAIL_HEADER,
 WEBUI_AUTH_TRUSTED_NAME_HEADER,
 )
@@ -94,6 +95,7 @@ app = FastAPI(
 )

 log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["MAIN"])

 app.state.config = AppConfig()

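Because the logger level now comes from SRC_LOG_LEVELS["MAIN"], the new log.debug(...) calls stay silent unless that source is configured at DEBUG. A rough illustration of the gating, with an assumed value for the mapping:

import logging

logging.basicConfig(format="%(levelname)s %(name)s %(message)s")

# Assumed value; the real mapping is imported from open_webui.env.
SRC_LOG_LEVELS = {"MAIN": logging.INFO}

log = logging.getLogger("open_webui.apps.webui.main")
log.setLevel(SRC_LOG_LEVELS["MAIN"])

log.debug("hidden unless SRC_LOG_LEVELS['MAIN'] is DEBUG or lower")
log.info("emitted at the default INFO level")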
@@ -270,7 +272,7 @@ async def get_pipe_models():
 log.exception(e)
 sub_pipes = []

-print(sub_pipes)
+log.debug(f"get_pipe_models: function '{pipe.id}' is a manifold of {sub_pipes}")

 for p in sub_pipes:
 sub_pipe_id = f'{pipe.id}.{p["id"]}'
@@ -280,6 +282,7 @@ async def get_pipe_models():
 sub_pipe_name = f"{function_module.name}{sub_pipe_name}"

 pipe_flag = {"type": pipe.type}
+
 pipe_models.append(
 {
 "id": sub_pipe_id,
@@ -293,6 +296,8 @@ async def get_pipe_models():
 else:
 pipe_flag = {"type": "pipe"}

+log.debug(f"get_pipe_models: function '{pipe.id}' is a single pipe {{ 'id': {pipe.id}, 'name': {pipe.name} }}")
+
 pipe_models.append(
 {
 "id": pipe.id,
@@ -346,7 +351,7 @@ def get_pipe_id(form_data: dict) -> str:
 pipe_id = form_data["model"]
 if "." in pipe_id:
 pipe_id, _ = pipe_id.split(".", 1)
-print(pipe_id)
 return pipe_id


@@ -453,7 +458,7 @@ async def generate_function_chat_completion(form_data, user, models: dict = {}):
 return

 except Exception as e:
-print(f"Error: {e}")
+log.error(f"Error: {e}")
 yield f"data: {json.dumps({'error': {'detail':str(e)}})}\n\n"
 return

@@ -483,7 +488,7 @@ async def generate_function_chat_completion(form_data, user, models: dict = {}):
 res = await execute_pipe(pipe, params)

 except Exception as e:
-print(f"Error: {e}")
+log.error(f"Error: {e}")
 return {"error": {"detail": str(e)}}

 if isinstance(res, StreamingResponse) or isinstance(res, dict):
@@ -5,10 +5,15 @@ import sys
 from importlib import util
 import types
 import tempfile
+import logging

+from open_webui.env import SRC_LOG_LEVELS
 from open_webui.apps.webui.models.functions import Functions
 from open_webui.apps.webui.models.tools import Tools
+
+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["MAIN"])


 def extract_frontmatter(content):
 """
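plugin.py picks up the same per-source logger setup. The SRC_LOG_LEVELS mapping itself comes from open_webui.env; a hedged reconstruction of how such an environment-driven mapping is commonly built (the variable names and source list here are assumptions, the real module may differ):

import logging
import os

GLOBAL_LOG_LEVEL = os.environ.get("GLOBAL_LOG_LEVEL", "INFO").upper()

# One "source" per subsystem; "MAIN" is the one used by main.py and plugin.py.
log_sources = ["DB", "MAIN", "MODELS", "OLLAMA", "OPENAI", "RAG"]

SRC_LOG_LEVELS = {}
for source in log_sources:
    level = os.environ.get(f"{source}_LOG_LEVEL", GLOBAL_LOG_LEVEL).upper()
    if level not in {"DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"}:
        level = GLOBAL_LOG_LEVEL
    SRC_LOG_LEVELS[source] = level  # Logger.setLevel accepts level names as strings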
@@ -95,7 +100,7 @@ def load_tools_module_by_id(toolkit_id, content=None):
 # Executing the modified content in the created module's namespace
 exec(content, module.__dict__)
 frontmatter = extract_frontmatter(content)
-print(f"Loaded module: {module.__name__}")
+log.info(f"Loaded module: {module.__name__}")

 # Create and return the object if the class 'Tools' is found in the module
 if hasattr(module, "Tools"):
@@ -103,7 +108,7 @@ def load_tools_module_by_id(toolkit_id, content=None):
 else:
 raise Exception("No Tools class found in the module")
 except Exception as e:
-print(f"Error loading module: {toolkit_id}: {e}")
+log.error(f"Error loading module: {toolkit_id}: {e}")
 del sys.modules[module_name] # Clean up
 raise e
 finally:
@@ -139,7 +144,7 @@ def load_function_module_by_id(function_id, content=None):
 # Execute the modified content in the created module's namespace
 exec(content, module.__dict__)
 frontmatter = extract_frontmatter(content)
-print(f"Loaded module: {module.__name__}")
+log.info(f"Loaded module: {module.__name__}")

 # Create appropriate object based on available class type in the module
 if hasattr(module, "Pipe"):
@@ -151,7 +156,7 @@ def load_function_module_by_id(function_id, content=None):
 else:
 raise Exception("No Function class found in the module")
 except Exception as e:
-print(f"Error loading module: {function_id}: {e}")
+log.error(f"Error loading module: {function_id}: {e}")
 del sys.modules[module_name] # Cleanup by removing the module in case of error

 Functions.update_function_by_id(function_id, {"is_active": False})
@@ -164,7 +169,7 @@ def install_frontmatter_requirements(requirements):
 if requirements:
 req_list = [req.strip() for req in requirements.split(",")]
 for req in req_list:
-print(f"Installing requirement: {req}")
+log.info(f"Installing requirement: {req}")
 subprocess.check_call([sys.executable, "-m", "pip", "install", req])
 else:
-print("No requirements found in frontmatter.")
+log.info("No requirements found in frontmatter.")
@@ -539,7 +539,6 @@ async def chat_completion_files_handler(
 if len(queries) == 0:
 queries = [get_last_user_message(body["messages"])]

-print(f"{queries=}")

 sources = get_sources_from_files(
 files=files,
@@ -970,7 +969,7 @@ app.add_middleware(SecurityHeadersMiddleware)
 @app.middleware("http")
 async def commit_session_after_request(request: Request, call_next):
 response = await call_next(request)
-log.debug("Commit session after request")
+#log.debug("Commit session after request")
 Session.commit()
 return response

@@ -1177,6 +1176,8 @@ async def get_all_models():
 model["actions"].extend(
 get_action_items_from_module(action_function, function_module)
 )
+log.debug(f"get_all_models() returned {len(models)} models")
+
 return models


@@ -1214,6 +1215,8 @@ async def get_models(user=Depends(get_verified_user)):
 filtered_models.append(model)
 models = filtered_models

+log.debug(f"/api/models returned filtered models accessible to the user: {json.dumps([model['id'] for model in models])}")
+
 return {"data": models}


@@ -1704,7 +1707,6 @@ async def update_task_config(form_data: TaskConfigForm, user=Depends(get_admin_u

 @app.post("/api/task/title/completions")
 async def generate_title(form_data: dict, user=Depends(get_verified_user)):
-print("generate_title")

 model_list = await get_all_models()
 models = {model["id"]: model for model in model_list}
@@ -1725,9 +1727,7 @@ async def generate_title(form_data: dict, user=Depends(get_verified_user)):
 models,
 )

-print(task_model_id)
+log.debug(f"generating chat title using model {task_model_id} for user {user.email} ")

-model = models[task_model_id]
-
 if app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE != "":
 template = app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE
@@ -1766,10 +1766,12 @@ Artificial Intelligence in Healthcare
 "max_completion_tokens": 50,
 }
 ),
-"chat_id": form_data.get("chat_id", None),
-"metadata": {"task": str(TASKS.TITLE_GENERATION), "task_body": form_data},
+"metadata": {
+"task": str(TASKS.TITLE_GENERATION),
+"task_body": form_data,
+"chat_id": form_data.get("chat_id", None)
+},
 }
-log.debug(payload)

 # Handle pipeline filters
 try:
@@ -1793,7 +1795,7 @@

 @app.post("/api/task/tags/completions")
 async def generate_chat_tags(form_data: dict, user=Depends(get_verified_user)):
-print("generate_chat_tags")
 if not app.state.config.ENABLE_TAGS_GENERATION:
 return JSONResponse(
 status_code=status.HTTP_200_OK,
@@ -1818,7 +1820,8 @@ async def generate_chat_tags(form_data: dict, user=Depends(get_verified_user)):
 app.state.config.TASK_MODEL_EXTERNAL,
 models,
 )
-print(task_model_id)
+
+log.debug(f"generating chat tags using model {task_model_id} for user {user.email} ")

 if app.state.config.TAGS_GENERATION_PROMPT_TEMPLATE != "":
 template = app.state.config.TAGS_GENERATION_PROMPT_TEMPLATE
@@ -1849,9 +1852,12 @@ JSON format: { "tags": ["tag1", "tag2", "tag3"] }
 "model": task_model_id,
 "messages": [{"role": "user", "content": content}],
 "stream": False,
-"metadata": {"task": str(TASKS.TAGS_GENERATION), "task_body": form_data},
+"metadata": {
+"task": str(TASKS.TAGS_GENERATION),
+"task_body": form_data,
+"chat_id": form_data.get("chat_id", None)
+}
 }
-log.debug(payload)

 # Handle pipeline filters
 try:
@@ -1875,7 +1881,7 @@ JSON format: { "tags": ["tag1", "tag2", "tag3"] }

 @app.post("/api/task/queries/completions")
 async def generate_queries(form_data: dict, user=Depends(get_verified_user)):
-print("generate_queries")
 type = form_data.get("type")
 if type == "web_search":
 if not app.state.config.ENABLE_SEARCH_QUERY_GENERATION:
@@ -1908,9 +1914,8 @@ async def generate_queries(form_data: dict, user=Depends(get_verified_user)):
 app.state.config.TASK_MODEL_EXTERNAL,
 models,
 )
-print(task_model_id)
-
-model = models[task_model_id]
+
+log.debug(f"generating {type} queries using model {task_model_id} for user {user.email}")

 if app.state.config.QUERY_GENERATION_PROMPT_TEMPLATE != "":
 template = app.state.config.QUERY_GENERATION_PROMPT_TEMPLATE
@@ -1925,9 +1930,8 @@ async def generate_queries(form_data: dict, user=Depends(get_verified_user)):
 "model": task_model_id,
 "messages": [{"role": "user", "content": content}],
 "stream": False,
-"metadata": {"task": str(TASKS.QUERY_GENERATION), "task_body": form_data},
+"metadata": {"task": str(TASKS.QUERY_GENERATION), "task_body": form_data, "chat_id": form_data.get("chat_id", None)},
 }
-log.debug(payload)

 # Handle pipeline filters
 try:
@@ -1951,7 +1955,6 @@ async def generate_queries(form_data: dict, user=Depends(get_verified_user)):

 @app.post("/api/task/emoji/completions")
 async def generate_emoji(form_data: dict, user=Depends(get_verified_user)):
-print("generate_emoji")

 model_list = await get_all_models()
 models = {model["id"]: model for model in model_list}
@@ -1971,9 +1974,8 @@ async def generate_emoji(form_data: dict, user=Depends(get_verified_user)):
 app.state.config.TASK_MODEL_EXTERNAL,
 models,
 )
-print(task_model_id)

-model = models[task_model_id]
+log.debug(f"generating emoji using model {task_model_id} for user {user.email} ")

 template = '''
 Your task is to reflect the speaker's likely facial expression through a fitting emoji. Interpret emotions from the message and reflect their facial expression using fitting, diverse emojis (e.g., 😊, 😢, 😡, 😱).
@@ -2003,7 +2005,6 @@ Message: """{{prompt}}"""
 "chat_id": form_data.get("chat_id", None),
 "metadata": {"task": str(TASKS.EMOJI_GENERATION), "task_body": form_data},
 }
-log.debug(payload)

 # Handle pipeline filters
 try:
@@ -2027,7 +2028,6 @@ Message: """{{prompt}}"""

 @app.post("/api/task/moa/completions")
 async def generate_moa_response(form_data: dict, user=Depends(get_verified_user)):
-print("generate_moa_response")

 model_list = await get_all_models()
 models = {model["id"]: model for model in model_list}
@@ -2047,9 +2047,8 @@ async def generate_moa_response(form_data: dict, user=Depends(get_verified_user)
 app.state.config.TASK_MODEL_EXTERNAL,
 models,
 )
-print(task_model_id)
-
-model = models[task_model_id]
+
+log.debug(f"generating MOA model {task_model_id} for user {user.email} ")

 template = """You have been provided with a set of responses from various models to the latest user query: "{{prompt}}"

@@ -2073,7 +2072,6 @@ Responses from models: {{responses}}"""
 "task_body": form_data,
 },
 }
-log.debug(payload)

 try:
 payload = filter_pipeline(payload, user, models)
@@ -2108,7 +2106,7 @@ Responses from models: {{responses}}"""
 async def get_pipelines_list(user=Depends(get_admin_user)):
 responses = await get_openai_models_responses()

-print(responses)
+log.debug(f"get_pipelines_list: get_openai_models_responses returned {responses}")
 urlIdxs = [
 idx
 for idx, response in enumerate(responses)