Merge branch 'open-webui:dev' into dev

This commit is contained in:
gabriel-ecegi
2024-12-17 06:03:53 +01:00
committed by GitHub
9 changed files with 85 additions and 446 deletions

View File

@@ -65,12 +65,12 @@ def get_function_module_by_id(request: Request, pipe_id: str):
     return function_module


-async def get_function_models():
+async def get_function_models(request):
     pipes = Functions.get_functions_by_type("pipe", active_only=True)
     pipe_models = []

     for pipe in pipes:
-        function_module = get_function_module_by_id(pipe.id)
+        function_module = get_function_module_by_id(request, pipe.id)

         # Check if function is a manifold
         if hasattr(function_module, "pipes"):
@@ -253,7 +253,7 @@ async def generate_function_chat_completion(
     form_data = apply_model_system_prompt_to_body(params, form_data, user)

     pipe_id = get_pipe_id(form_data)
-    function_module = get_function_module_by_id(pipe_id)
+    function_module = get_function_module_by_id(request, pipe_id)

     pipe = function_module.pipe
     params = get_function_params(function_module, form_data, user, extra_params)
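The signature change means the incoming `Request` must now be threaded through every caller of `get_function_module_by_id`, including `get_all_base_models` in the last file of this diff. A minimal sketch of the resulting call chain, with the module loading stubbed out (the `FUNCTIONS` registry below is a hypothetical stand-in, not the project's real storage):

from fastapi import FastAPI, Request

app = FastAPI()
app.state.FUNCTIONS = {}  # hypothetical cache: pipe_id -> loaded module


def get_function_module_by_id(request: Request, pipe_id: str):
    # The request gives the loader access to application state, which is
    # presumably why it is now a required first argument.
    return request.app.state.FUNCTIONS[pipe_id]


async def get_function_models(request: Request):
    # Callers forward the same request object down the chain.
    pipe_models = []
    for pipe_id in request.app.state.FUNCTIONS:
        module = get_function_module_by_id(request, pipe_id)
        if hasattr(module, "pipes"):  # manifold: one module exposes many models
            pipe_models.extend(module.pipes)
        else:
            pipe_models.append({"id": pipe_id})
    return pipe_models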

View File

@@ -1,6 +1,7 @@
 import requests
 import logging
 import ftfy
+import sys

 from langchain_community.document_loaders import (
     BSHTMLLoader,
@@ -18,8 +19,9 @@ from langchain_community.document_loaders import (
     YoutubeLoader,
 )

 from langchain_core.documents import Document
-from open_webui.env import SRC_LOG_LEVELS
+from open_webui.env import SRC_LOG_LEVELS, GLOBAL_LOG_LEVEL

+logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
 log = logging.getLogger(__name__)
 log.setLevel(SRC_LOG_LEVELS["RAG"])
@@ -106,7 +108,7 @@ class TikaLoader:
             if "Content-Type" in raw_metadata:
                 headers["Content-Type"] = raw_metadata["Content-Type"]

-            log.info("Tika extracted text: %s", text)
+            log.debug("Tika extracted text: %s", text)

            return [Document(page_content=text, metadata=headers)]
        else:
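Two related logging changes here: the root logger is now configured to write to stdout at the global level, and the dump of the full Tika-extracted text is demoted from info to debug so it only appears when the RAG subsystem is set to verbose. A minimal sketch of how the two levels interact (the two constants are stand-ins for the values imported from open_webui.env, which the real app reads from environment configuration):

import logging
import sys

# Assumed stand-ins for the open_webui.env values.
GLOBAL_LOG_LEVEL = "INFO"
SRC_LOG_LEVELS = {"RAG": "DEBUG"}

# Root logger: all output routed to stdout at the global level.
logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)

# Module logger: may be more verbose than the root, per subsystem.
log = logging.getLogger("open_webui.retrieval")
log.setLevel(SRC_LOG_LEVELS["RAG"])

log.debug("Tika extracted text: %s", "...")  # emitted only while RAG is DEBUG
log.info("loader ready")                     # always emitted at INFO and above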

View File

@@ -5,6 +5,7 @@ from pathlib import Path
 from typing import Optional

 from pydantic import BaseModel
 import mimetypes
+from urllib.parse import quote

 from open_webui.storage.provider import Storage
@@ -222,11 +223,15 @@ async def get_file_content_by_id(id: str, user=Depends(get_verified_user)):
         # Check if the file already exists in the cache
         if file_path.is_file():
             print(f"file_path: {file_path}")
+            # Handle Unicode filenames
+            filename = file.meta.get("name", file.filename)
+            encoded_filename = quote(filename)  # RFC5987 encoding
             headers = {
-                "Content-Disposition": f'attachment; filename="{file.meta.get("name", file.filename)}"'
+                "Content-Disposition": f"attachment; filename*=UTF-8''{encoded_filename}"
             }
+
             return FileResponse(file_path, headers=headers)
         else:
             raise HTTPException(
                 status_code=status.HTTP_404_NOT_FOUND,
@@ -283,16 +288,20 @@ async def get_file_content_by_id(id: str, user=Depends(get_verified_user)):
     if file and (file.user_id == user.id or user.role == "admin"):
         file_path = file.path

+        # Handle Unicode filenames
+        filename = file.meta.get("name", file.filename)
+        encoded_filename = quote(filename)  # RFC5987 encoding
+
+        headers = {
+            "Content-Disposition": f"attachment; filename*=UTF-8''{encoded_filename}"
+        }
+
        if file_path:
            file_path = Storage.get_file(file_path)
            file_path = Path(file_path)

            # Check if the file already exists in the cache
            if file_path.is_file():
                print(f"file_path: {file_path}")
-                headers = {
-                    "Content-Disposition": f'attachment; filename="{file.meta.get("name", file.filename)}"'
-                }
                return FileResponse(file_path, headers=headers)
            else:
                raise HTTPException(
@@ -311,7 +320,7 @@ async def get_file_content_by_id(id: str, user=Depends(get_verified_user)):
             return StreamingResponse(
                 generator(),
                 media_type="text/plain",
-                headers={"Content-Disposition": f"attachment; filename={file_name}"},
+                headers=headers,
             )
         else:
             raise HTTPException(
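The old plain filename="..." form breaks on non-ASCII names (HTTP header values are not UTF-8 safe) and is fragile against quotes inside the filename. The new RFC 5987 / RFC 6266 filename*=UTF-8''... form percent-encodes the UTF-8 bytes so browsers can decode them reliably. A quick demonstration (the filename is made up):

from urllib.parse import quote

filename = "résumé 2024.pdf"  # hypothetical non-ASCII filename
encoded = quote(filename)      # percent-encodes the UTF-8 bytes
print(encoded)
# r%C3%A9sum%C3%A9%202024.pdf

header = f"attachment; filename*=UTF-8''{encoded}"
print(header)
# attachment; filename*=UTF-8''r%C3%A9sum%C3%A9%202024.pdf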

View File

@@ -186,9 +186,10 @@ async def generate_title(
     try:
         return await generate_chat_completion(request, form_data=payload, user=user)
     except Exception as e:
+        log.error("Exception occurred", exc_info=True)
         return JSONResponse(
             status_code=status.HTTP_400_BAD_REQUEST,
-            content={"detail": str(e)},
+            content={"detail": "An internal error has occurred."},
         )
@@ -248,9 +249,10 @@ async def generate_chat_tags(
     try:
         return await generate_chat_completion(request, form_data=payload, user=user)
     except Exception as e:
+        log.error(f"Error generating chat completion: {e}")
         return JSONResponse(
-            status_code=status.HTTP_400_BAD_REQUEST,
-            content={"detail": str(e)},
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            content={"detail": "An internal error has occurred."},
         )
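Both hunks apply the same error-hygiene pattern: keep the exception detail (including the traceback) in the server log, and return only a generic message to the client, since str(e) can leak internals such as connection strings or upstream error bodies. A minimal sketch of the pattern, assuming a FastAPI route (the route and the raised error are made up for illustration):

import logging

from fastapi import FastAPI, status
from fastapi.responses import JSONResponse

app = FastAPI()
log = logging.getLogger(__name__)


@app.post("/demo")  # hypothetical route
async def demo():
    try:
        raise RuntimeError("db password rejected for user 'admin'")
    except Exception:
        # Full detail (traceback included) stays in the server log ...
        log.error("Exception occurred", exc_info=True)
        # ... while the client sees only a generic message.
        return JSONResponse(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            content={"detail": "An internal error has occurred."},
        )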

View File

@@ -1,5 +1,3 @@
-# TODO: move socket to webui app
-
 import asyncio
 import socketio
 import logging

View File

@@ -362,7 +362,39 @@ async def chat_completion_files_handler(
     return body, {"sources": sources}


+def apply_params_to_form_data(form_data, model):
+    params = form_data.pop("params", {})
+    if model.get("ollama"):
+        form_data["options"] = params
+
+        if "format" in params:
+            form_data["format"] = params["format"]
+
+        if "keep_alive" in params:
+            form_data["keep_alive"] = params["keep_alive"]
+    else:
+        if "seed" in params:
+            form_data["seed"] = params["seed"]
+
+        if "stop" in params:
+            form_data["stop"] = params["stop"]
+
+        if "temperature" in params:
+            form_data["temperature"] = params["temperature"]
+
+        if "top_p" in params:
+            form_data["top_p"] = params["top_p"]
+
+        if "frequency_penalty" in params:
+            form_data["frequency_penalty"] = params["frequency_penalty"]
+
+    return form_data
+
+
 async def process_chat_payload(request, form_data, user, model):
+    form_data = apply_params_to_form_data(form_data, model)
+    log.debug(f"form_data: {form_data}")
+
     metadata = {
         "chat_id": form_data.pop("chat_id", None),
         "message_id": form_data.pop("id", None),

View File

@@ -52,7 +52,7 @@ async def get_all_base_models(request: Request):
             for model in ollama_models["models"]
         ]

-    function_models = await get_function_models()
+    function_models = await get_function_models(request)
     models = function_models + openai_models + ollama_models

     return models