Merge branch 'dev' into add_reset_button_mermaidjs_renderer

This commit is contained in:
cvaz1306 2024-12-28 15:46:23 -08:00 committed by GitHub
commit 95598e5435
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 87 additions and 24 deletions

View File

@@ -311,6 +311,11 @@ RESET_CONFIG_ON_START = (
os.environ.get("RESET_CONFIG_ON_START", "False").lower() == "true"
)
ENABLE_REALTIME_CHAT_SAVE = (
os.environ.get("ENABLE_REALTIME_CHAT_SAVE", "True").lower() == "true"
)
####################################
# REDIS
####################################

View File

@@ -65,6 +65,7 @@ from open_webui.env import (
SRC_LOG_LEVELS,
GLOBAL_LOG_LEVEL,
BYPASS_MODEL_ACCESS_CONTROL,
ENABLE_REALTIME_CHAT_SAVE,
)
from open_webui.constants import TASKS
@@ -928,6 +929,10 @@ async def process_chat_response(
# Handle as a background task
async def post_response_handler(response, events):
assistant_message = get_last_assistant_message(form_data["messages"])
content = assistant_message if assistant_message else ""
try:
for event in events:
await event_emitter(
@@ -946,9 +951,6 @@ async def process_chat_response(
},
)
assistant_message = get_last_assistant_message(form_data["messages"])
content = assistant_message if assistant_message else ""
async for line in response.body_iterator:
line = line.decode("utf-8") if isinstance(line, bytes) else line
data = line
@@ -977,7 +979,6 @@ async def process_chat_response(
)
else:
value = (
data.get("choices", [])[0]
.get("delta", {})
@@ -987,6 +988,7 @@ async def process_chat_response(
if value:
content = f"{content}{value}"
if ENABLE_REALTIME_CHAT_SAVE:
# Save message in the database
Chats.upsert_message_to_chat_by_id_and_message_id(
metadata["chat_id"],
@@ -995,6 +997,10 @@ async def process_chat_response(
"content": content,
},
)
else:
data = {
"content": content,
}
except Exception as e:
done = "data: [DONE]" in line
@@ -1003,6 +1009,16 @@ async def process_chat_response(
if done:
data = {"done": True, "content": content, "title": title}
if not ENABLE_REALTIME_CHAT_SAVE:
# Save message in the database
Chats.upsert_message_to_chat_by_id_and_message_id(
metadata["chat_id"],
metadata["message_id"],
{
"content": content,
},
)
# Send a webhook notification if the user is not active
if (
get_user_id_from_session_pool(metadata["session_id"])
@@ -1036,6 +1052,16 @@ async def process_chat_response(
print("Task was cancelled!")
await event_emitter({"type": "task-cancelled"})
if not ENABLE_REALTIME_CHAT_SAVE:
# Save message in the database
Chats.upsert_message_to_chat_by_id_and_message_id(
metadata["chat_id"],
metadata["message_id"],
{
"content": content,
},
)
if response.background is not None:
await response.background()

View File

@@ -66,7 +66,7 @@
{($settings?.widescreenMode ?? null) ? 'max-w-full' : 'max-w-5xl'} mx-auto"
>
{#if channel}
-<div class="flex flex-col gap-1.5 py-5">
+<div class="flex flex-col gap-1.5 pb-5 pt-10">
<div class="text-2xl font-medium capitalize">{channel.name}</div>
<div class=" text-gray-500">

View File

@@ -1053,7 +1053,7 @@
};
const chatCompletionEventHandler = async (data, message, chatId) => {
-const { id, done, choices, sources, selected_model_id, error, usage } = data;
+const { id, done, choices, content, sources, selected_model_id, error, usage } = data;
if (error) {
await handleOpenAIError(error, message);
@@ -1105,6 +1105,38 @@
}
}
if (content) {
// REALTIME_CHAT_SAVE is disabled
message.content = content;
if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
navigator.vibrate(5);
}
// Emit chat event for TTS
const messageContentParts = getMessageContentParts(
message.content,
$config?.audio?.tts?.split_on ?? 'punctuation'
);
messageContentParts.pop();
// dispatch only last sentence and make sure it hasn't been dispatched before
if (
messageContentParts.length > 0 &&
messageContentParts[messageContentParts.length - 1] !== message.lastSentence
) {
message.lastSentence = messageContentParts[messageContentParts.length - 1];
eventTarget.dispatchEvent(
new CustomEvent('chat', {
detail: {
id: message.id,
content: messageContentParts[messageContentParts.length - 1]
}
})
);
}
}
if (selected_model_id) {
message.selectedModelId = selected_model_id;
message.arena = true;

View File

@@ -2,8 +2,8 @@
export let className = 'size-4';
</script>
-<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512" fill="currentColor" class={className}>
-<path
-d="M463.5 224l8.5 0c13.3 0 24-10.7 24-24l0-128c0-9.7-5.8-18.5-14.8-22.2s-19.3-1.7-26.2 5.2L413.4 96.6c-87.6-86.5-228.7-86.2-315.8 1c-87.5 87.5-87.5 229.3 0 316.8s229.3 87.5 316.8 0c12.5-12.5 12.5-32.8 0-45.3s-32.8-12.5-45.3 0c-62.5 62.5-163.8 62.5-226.3 0s-62.5-163.8 0-226.3c62.2-62.2 162.7-62.5 225.3-1L327 183c-6.9 6.9-8.9 17.2-5.2 26.2s12.5 14.8 22.2 14.8l119.5 0z"
-/>
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"
+fill="currentColor"
+class={className}>
+<path d="M463.5 224l8.5 0c13.3 0 24-10.7 24-24l0-128c0-9.7-5.8-18.5-14.8-22.2s-19.3-1.7-26.2 5.2L413.4 96.6c-87.6-86.5-228.7-86.2-315.8 1c-87.5 87.5-87.5 229.3 0 316.8s229.3 87.5 316.8 0c12.5-12.5 12.5-32.8 0-45.3s-32.8-12.5-45.3 0c-62.5 62.5-163.8 62.5-226.3 0s-62.5-163.8 0-226.3c62.2-62.2 162.7-62.5 225.3-1L327 183c-6.9 6.9-8.9 17.2-5.2 26.2s12.5 14.8 22.2 14.8l119.5 0z"/>
</svg>

0
update_ollama_models.sh Normal file → Executable file
View File