Timothy J. Baek 2024-06-12 21:18:53 -07:00
parent bdd2ac0015
commit d6fd2a8228
8 changed files with 371 additions and 140 deletions

View File

@@ -494,6 +494,9 @@ def filter_pipeline(payload, user):
     if "title" in payload:
         del payload["title"]

+    if "task" in payload:
+        del payload["task"]
+
     return payload
@@ -835,6 +838,71 @@ async def generate_search_query(form_data: dict, user=Depends(get_verified_user)):
         "messages": [{"role": "user", "content": content}],
         "stream": False,
         "max_tokens": 30,
+        "task": True,
+    }
+
+    print(payload)
+
+    try:
+        payload = filter_pipeline(payload, user)
+    except Exception as e:
+        return JSONResponse(
+            status_code=e.args[0],
+            content={"detail": e.args[1]},
+        )
+
+    if model["owned_by"] == "ollama":
+        return await generate_ollama_chat_completion(
+            OpenAIChatCompletionForm(**payload), user=user
+        )
+    else:
+        return await generate_openai_chat_completion(payload, user=user)
+
+
+@app.post("/api/task/emoji/completions")
+async def generate_emoji(form_data: dict, user=Depends(get_verified_user)):
+    print("generate_emoji")
+
+    model_id = form_data["model"]
+    if model_id not in app.state.MODELS:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail="Model not found",
+        )
+
+    # Check if the user has a custom task model
+    # If the user has a custom task model, use that model
+    if app.state.MODELS[model_id]["owned_by"] == "ollama":
+        if app.state.config.TASK_MODEL:
+            task_model_id = app.state.config.TASK_MODEL
+            if task_model_id in app.state.MODELS:
+                model_id = task_model_id
+    else:
+        if app.state.config.TASK_MODEL_EXTERNAL:
+            task_model_id = app.state.config.TASK_MODEL_EXTERNAL
+            if task_model_id in app.state.MODELS:
+                model_id = task_model_id
+
+    print(model_id)
+    model = app.state.MODELS[model_id]
+
+    template = '''
+You are a perceptive assistant skilled at interpreting emotions from a provided message. Your task is to reflect the speaker's likely facial expression through a fitting emoji. Prioritize using diverse facial expression emojis to convey the nuanced emotions expressed in the text. Please avoid using generic or overly ambiguous emojis like "🤔", and instead, choose ones that vividly represent the speaker's mood or reaction.
+
+Message: """{{prompt}}"""
+'''
+
+    content = title_generation_template(
+        template, form_data["prompt"], user.model_dump()
+    )
+
+    payload = {
+        "model": model_id,
+        "messages": [{"role": "user", "content": content}],
+        "stream": False,
+        "max_tokens": 4,
+        "chat_id": form_data.get("chat_id", None),
+        "task": True,
     }

     print(payload)
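The handler's model-selection fallback reduces to: prefer the configured task model that matches the requested model's backend (TASK_MODEL for Ollama models, TASK_MODEL_EXTERNAL otherwise), but only when that model actually exists; otherwise keep the requested model. A minimal sketch of that logic in TypeScript, with an illustrative helper name (resolveTaskModel is not part of this commit):

// Sketch only: condenses the task-model fallback from generate_emoji above.
function resolveTaskModel(
  requestedId: string,
  models: Record<string, { owned_by: string }>,
  taskModel: string, // mirrors app.state.config.TASK_MODEL
  taskModelExternal: string // mirrors app.state.config.TASK_MODEL_EXTERNAL
): string {
  const preferred = models[requestedId].owned_by === 'ollama' ? taskModel : taskModelExternal;
  // Switch only when the configured task model is actually known to the app.
  return preferred && preferred in models ? preferred : requestedId;
}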

View File

@@ -205,6 +205,46 @@ export const generateTitle = async (
   return res?.choices[0]?.message?.content.replace(/["']/g, '') ?? 'New Chat';
 };

+export const generateEmoji = async (
+  token: string = '',
+  model: string,
+  prompt: string,
+  chat_id?: string
+) => {
+  let error = null;
+
+  const res = await fetch(`${WEBUI_BASE_URL}/api/task/emoji/completions`, {
+    method: 'POST',
+    headers: {
+      Accept: 'application/json',
+      'Content-Type': 'application/json',
+      Authorization: `Bearer ${token}`
+    },
+    body: JSON.stringify({
+      model: model,
+      prompt: prompt,
+      ...(chat_id && { chat_id: chat_id })
+    })
+  })
+    .then(async (res) => {
+      if (!res.ok) throw await res.json();
+      return res.json();
+    })
+    .catch((err) => {
+      console.log(err);
+      if ('detail' in err) {
+        error = err.detail;
+      }
+      return null;
+    });
+
+  if (error) {
+    throw error;
+  }
+
+  return res?.choices[0]?.message?.content.replace(/["']/g, '') ?? null;
+};
+
 export const generateSearchQuery = async (
   token: string = '',
   model: string,
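A call site for the new helper might look like the following sketch; the model id and prompt are placeholders, and the helper returns the completion content with quotes stripped, or null on failure:

// Illustrative usage of generateEmoji; 'llama3' and the prompt are placeholders.
import { generateEmoji } from '$lib/apis';

const emoji = await generateEmoji(localStorage.token, 'llama3', 'I passed my exam!', chatId);
if (emoji) {
  console.log(emoji); // e.g. "🎉"
}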

View File

@@ -64,6 +64,8 @@
   export let chatIdProp = '';

   let loaded = false;
+
+  const eventTarget = new EventTarget();
   let stopResponseFlag = false;
   let autoScroll = true;
   let processing = '';
@@ -300,7 +302,7 @@
   // Chat functions
   //////////////////////////

-  const submitPrompt = async (userPrompt, _user = null) => {
+  const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
     let _responses = [];
     console.log('submitPrompt', $chatId);
@@ -344,7 +346,6 @@
       parentId: messages.length !== 0 ? messages.at(-1).id : null,
       childrenIds: [],
       role: 'user',
-      user: _user ?? undefined,
       content: userPrompt,
       files: _files.length > 0 ? _files : undefined,
       timestamp: Math.floor(Date.now() / 1000), // Unix epoch
@@ -362,15 +363,13 @@
       // Wait until history/message have been updated
       await tick();

-      // Send prompt
-      _responses = await sendPrompt(userPrompt, userMessageId);
+      _responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
     }

     return _responses;
   };

-  const sendPrompt = async (prompt, parentId, modelId = null, newChat = true) => {
+  const sendPrompt = async (prompt, parentId, { modelId = null, newChat = false } = {}) => {
     let _responses = [];

     // If modelId is provided, use it, else use selected model
@@ -490,7 +489,6 @@
       responseMessage.userContext = userContext;

       const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
-
       if (webSearchEnabled) {
         await getWebSearchResults(model.id, parentId, responseMessageId);
       }
@@ -503,8 +501,6 @@
         }

         _responses.push(_response);
-
-        console.log('chatEventEmitter', chatEventEmitter);
         if (chatEventEmitter) clearInterval(chatEventEmitter);
       } else {
         toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
@@ -513,88 +509,9 @@
     );

     await chats.set(await getChatList(localStorage.token));

     return _responses;
   };

-  const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
-    const responseMessage = history.messages[responseId];
-
-    responseMessage.statusHistory = [
-      {
-        done: false,
-        action: 'web_search',
-        description: $i18n.t('Generating search query')
-      }
-    ];
-    messages = messages;
-
-    const prompt = history.messages[parentId].content;
-    let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
-      (error) => {
-        console.log(error);
-        return prompt;
-      }
-    );
-
-    if (!searchQuery) {
-      toast.warning($i18n.t('No search query generated'));
-      responseMessage.statusHistory.push({
-        done: true,
-        error: true,
-        action: 'web_search',
-        description: 'No search query generated'
-      });
-      messages = messages;
-    }
-
-    responseMessage.statusHistory.push({
-      done: false,
-      action: 'web_search',
-      description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
-    });
-    messages = messages;
-
-    const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
-      console.log(error);
-      toast.error(error);
-      return null;
-    });
-
-    if (results) {
-      responseMessage.statusHistory.push({
-        done: true,
-        action: 'web_search',
-        description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
-        query: searchQuery,
-        urls: results.filenames
-      });
-
-      if (responseMessage?.files ?? undefined === undefined) {
-        responseMessage.files = [];
-      }
-
-      responseMessage.files.push({
-        collection_name: results.collection_name,
-        name: searchQuery,
-        type: 'web_search_results',
-        urls: results.filenames
-      });
-      messages = messages;
-    } else {
-      responseMessage.statusHistory.push({
-        done: true,
-        error: true,
-        action: 'web_search',
-        description: 'No search results found'
-      });
-      messages = messages;
-    }
-  };
-
   const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
     let _response = null;
@@ -676,6 +593,8 @@
           array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
       );

+    eventTarget.dispatchEvent(new CustomEvent('chat:start'));
+
     const [res, controller] = await generateChatCompletion(localStorage.token, {
       model: model.id,
       messages: messagesBody,
@@ -745,6 +664,9 @@
             continue;
           } else {
             responseMessage.content += data.message.content;
+            eventTarget.dispatchEvent(
+              new CustomEvent('chat', { detail: { content: data.message.content } })
+            );
             messages = messages;
           }
         } else {
@@ -771,21 +693,13 @@
       messages = messages;

       if ($settings.notificationEnabled && !document.hasFocus()) {
-        const notification = new Notification(
-          selectedModelfile
-            ? `${
-                selectedModelfile.title.charAt(0).toUpperCase() +
-                selectedModelfile.title.slice(1)
-              }`
-            : `${model.id}`,
-          {
-            body: responseMessage.content,
-            icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
-          }
-        );
+        const notification = new Notification(`${model.id}`, {
+          body: responseMessage.content,
+          icon: `${WEBUI_BASE_URL}/static/favicon.png`
+        });
       }

-      if ($settings.responseAutoCopy) {
+      if ($settings?.responseAutoCopy ?? false) {
         copyToClipboard(responseMessage.content);
       }
@@ -846,6 +760,7 @@
     stopResponseFlag = false;

     await tick();
+    eventTarget.dispatchEvent(new CustomEvent('chat:finish'));

     if (autoScroll) {
       scrollToBottom();
@@ -887,6 +802,8 @@
     scrollToBottom();

+    eventTarget.dispatchEvent(new CustomEvent('chat:start'));
+
     try {
       const [res, controller] = await generateOpenAIChatCompletion(
         localStorage.token,
@@ -1007,6 +924,7 @@
             continue;
           } else {
             responseMessage.content += value;
+            eventTarget.dispatchEvent(new CustomEvent('chat', { detail: { content: value } }));
             messages = messages;
           }
@@ -1057,6 +975,8 @@
     stopResponseFlag = false;

     await tick();
+    eventTarget.dispatchEvent(new CustomEvent('chat:finish'));
+
     if (autoScroll) {
       scrollToBottom();
     }
@@ -1123,9 +1043,12 @@
       let userPrompt = userMessage.content;

       if ((userMessage?.models ?? [...selectedModels]).length == 1) {
-        await sendPrompt(userPrompt, userMessage.id, undefined, false);
+        // If user message has only one model selected, sendPrompt automatically selects it for regeneration
+        await sendPrompt(userPrompt, userMessage.id);
       } else {
-        await sendPrompt(userPrompt, userMessage.id, message.model, false);
+        // If there are multiple models selected, use the model of the response message for regeneration
+        // e.g. many model chat
+        await sendPrompt(userPrompt, userMessage.id, { modelId: message.model });
       }
     }
   };
@@ -1191,6 +1114,84 @@
     }
   };

+  const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
+    const responseMessage = history.messages[responseId];
+
+    responseMessage.statusHistory = [
+      {
+        done: false,
+        action: 'web_search',
+        description: $i18n.t('Generating search query')
+      }
+    ];
+    messages = messages;
+
+    const prompt = history.messages[parentId].content;
+    let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
+      (error) => {
+        console.log(error);
+        return prompt;
+      }
+    );
+
+    if (!searchQuery) {
+      toast.warning($i18n.t('No search query generated'));
+      responseMessage.statusHistory.push({
+        done: true,
+        error: true,
+        action: 'web_search',
+        description: 'No search query generated'
+      });
+      messages = messages;
+    }
+
+    responseMessage.statusHistory.push({
+      done: false,
+      action: 'web_search',
+      description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
+    });
+    messages = messages;
+
+    const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
+      console.log(error);
+      toast.error(error);
+      return null;
+    });
+
+    if (results) {
+      responseMessage.statusHistory.push({
+        done: true,
+        action: 'web_search',
+        description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
+        query: searchQuery,
+        urls: results.filenames
+      });
+
+      if (responseMessage?.files ?? undefined === undefined) {
+        responseMessage.files = [];
+      }
+
+      responseMessage.files.push({
+        collection_name: results.collection_name,
+        name: searchQuery,
+        type: 'web_search_results',
+        urls: results.filenames
+      });
+      messages = messages;
+    } else {
+      responseMessage.statusHistory.push({
+        done: true,
+        error: true,
+        action: 'web_search',
+        description: 'No search results found'
+      });
+      messages = messages;
+    }
+  };
+
   const getTags = async () => {
     return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
       return [];
@@ -1206,7 +1207,13 @@
   </title>
 </svelte:head>

-<CallOverlay {submitPrompt} bind:files />
+<CallOverlay
+  {submitPrompt}
+  bind:files
+  modelId={selectedModelIds?.at(0) ?? null}
+  chatId={$chatId}
+  {eventTarget}
+/>

 {#if !chatIdProp || (loaded && chatIdProp)}
   <div
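With submitPrompt and sendPrompt now taking an options object instead of trailing positional arguments, call sites migrate as in this sketch (the model id is illustrative):

// Before this commit: await sendPrompt(prompt, parentId, 'llama3', false);
// After, options are named and default to { modelId: null, newChat: false }:
await sendPrompt(prompt, parentId, { modelId: 'llama3', newChat: false });
await sendPrompt(prompt, parentId); // defaults apply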

View File

@@ -348,7 +348,6 @@
 <Models
   bind:this={modelsElement}
   bind:prompt
-  bind:user
   bind:chatInputPlaceholder
   {messages}
   on:select={(e) => {
@@ -467,7 +466,7 @@
     document.getElementById('chat-textarea')?.focus();

     if ($settings?.speechAutoSend ?? false) {
-      submitPrompt(prompt, user);
+      submitPrompt(prompt);
     }
   }}
 />
@@ -476,7 +475,7 @@
   class="w-full flex gap-1.5"
   on:submit|preventDefault={() => {
     // check if selectedModels support image input
-    submitPrompt(prompt, user);
+    submitPrompt(prompt);
   }}
 >
   <div
@@ -718,7 +717,7 @@
       // Submit the prompt when Enter key is pressed
       if (prompt !== '' && e.key === 'Enter' && !e.shiftKey) {
-        submitPrompt(prompt, user);
+        submitPrompt(prompt);
       }
     }
   }}

View File

@@ -3,21 +3,30 @@
   import { onMount, tick, getContext } from 'svelte';

   import { blobToFile, calculateSHA256, extractSentences, findWordIndices } from '$lib/utils';
+  import { generateEmoji } from '$lib/apis';
   import { synthesizeOpenAISpeech, transcribeAudio } from '$lib/apis/audio';
   import { toast } from 'svelte-sonner';

   import Tooltip from '$lib/components/common/Tooltip.svelte';
   import VideoInputMenu from './CallOverlay/VideoInputMenu.svelte';
+  import { get } from 'svelte/store';

   const i18n = getContext('i18n');

+  export let eventTarget: EventTarget;
   export let submitPrompt: Function;
   export let files;

+  export let chatId;
+  export let modelId;
+
+  let message = '';
+
   let loading = false;
   let confirmed = false;

+  let emoji = null;
+
   let camera = false;
   let cameraStream = null;
@@ -125,9 +134,11 @@
     }

     const audioElement = document.getElementById('audioElement');
+    if (audioElement) {
       audioElement.pause();
       audioElement.currentTime = 0;
+    }

     assistantSpeaking = false;
   };
@@ -138,6 +149,7 @@
       const audioElement = document.getElementById('audioElement');
       const audio = assistantAudio[idx];

+      if (audioElement) {
         audioElement.src = audio.src; // Assume `assistantAudio` has objects with a `src` property
         audioElement.muted = true;
@@ -160,6 +172,7 @@
           res(e);
         };
+      }
       });
     } else {
       return Promise.resolve();
@@ -200,15 +213,8 @@
       console.log(res.text);

       if (res.text !== '') {
-        const _responses = await submitPrompt(res.text);
+        const _responses = await submitPrompt(res.text, { _raw: true });
         console.log(_responses);
-
-        if (_responses.at(0)) {
-          const content = _responses[0];
-          if ((content ?? '').trim() !== '') {
-            assistantSpeakingHandler(content);
-          }
-        }
       }
     }
   };
@@ -216,6 +222,23 @@
   const assistantSpeakingHandler = async (content) => {
     assistantSpeaking = true;

+    if (modelId && ($settings?.showEmojiInCall ?? false)) {
+      console.log('Generating emoji');
+      const res = await generateEmoji(localStorage.token, modelId, content, chatId).catch(
+        (error) => {
+          console.error(error);
+          return null;
+        }
+      );
+
+      if (res) {
+        console.log(res);
+        if (/\p{Extended_Pictographic}/u.test(res)) {
+          emoji = res.match(/\p{Extended_Pictographic}/gu)[0];
+        }
+      }
+    }
+
     if (($config.audio.tts.engine ?? '') == '') {
       let voices = [];
       const getVoicesLoop = setInterval(async () => {
@@ -237,6 +260,10 @@
         }

         speechSynthesis.speak(currentUtterance);
+
+        currentUtterance.onend = async () => {
+          assistantSpeaking = false;
+        };
       }
     }, 100);
   } else if ($config.audio.tts.engine === 'openai') {
@@ -280,15 +307,22 @@
           const audio = new Audio(blobUrl);
           assistantAudio[idx] = audio;
           lastPlayedAudioPromise = lastPlayedAudioPromise.then(() => playAudio(idx));
+
+          if (idx === sentences.length - 1) {
+            lastPlayedAudioPromise.then(() => {
+              assistantSpeaking = false;
+            });
+          }
         }
       }
     }
   };

-  const stopRecordingCallback = async () => {
+  const stopRecordingCallback = async (_continue = true) => {
     if ($showCallOverlay) {
       if (confirmed) {
         loading = true;
+        emoji = null;

         if (cameraStream) {
           const imageUrl = takeScreenshot();
@@ -310,7 +344,9 @@
       audioChunks = [];
       mediaRecorder = false;

+      if (_continue) {
         startRecording();
+      }
     } else {
       audioChunks = [];
       mediaRecorder = false;
@@ -443,7 +479,30 @@
     startRecording();
   } else {
     stopCamera();
+    stopAllAudio();
+    stopRecordingCallback(false);
   }

+  onMount(() => {
+    console.log(eventTarget);
+
+    eventTarget.addEventListener('chat:start', async (e) => {
+      console.log('Chat start event:', e.detail);
+      message = '';
+    });
+
+    eventTarget.addEventListener('chat', async (e) => {
+      const { content } = e.detail;
+      message += content;
+      console.log('Chat event:', message);
+    });
+
+    eventTarget.addEventListener('chat:finish', async (e) => {
+      console.log('Chat finish event:', e.detail);
+      message = '';
+    });
+  });
 </script>

 {#if $showCallOverlay}
@@ -492,6 +551,19 @@
               r="3"
             /><circle class="spinner_qM83 spinner_ZTLf" cx="20" cy="12" r="3" /></svg
           >
+        {:else if emoji}
+          <div
+            class=" transition-all rounded-full"
+            style="font-size:{rmsLevel * 100 > 4
+              ? '4.5'
+              : rmsLevel * 100 > 2
+                ? '4'
+                : rmsLevel * 100 > 1
+                  ? '3.5'
+                  : '3'}rem;width:100%;text-align:center;"
+          >
+            {emoji}
+          </div>
         {:else}
           <div
             class=" {rmsLevel * 100 > 4
@@ -546,6 +618,19 @@
               r="3"
             /><circle class="spinner_qM83 spinner_ZTLf" cx="20" cy="12" r="3" /></svg
           >
+        {:else if emoji}
+          <div
+            class=" transition-all rounded-full"
+            style="font-size:{rmsLevel * 100 > 4
+              ? '13'
+              : rmsLevel * 100 > 2
+                ? '12'
+                : rmsLevel * 100 > 1
+                  ? '11.5'
+                  : '11'}rem;width:100%;text-align:center;"
+          >
+            {emoji}
+          </div>
         {:else}
          <div
            class=" {rmsLevel * 100 > 4
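The emoji shown above is pulled out of the model response with a Unicode property escape; a standalone sketch of that extraction:

// Standalone illustration of the Extended_Pictographic check used in assistantSpeakingHandler.
const res = 'Sure thing! 😄 Happy to help.';
if (/\p{Extended_Pictographic}/u.test(res)) {
  const first = res.match(/\p{Extended_Pictographic}/gu)?.[0];
  console.log(first); // "😄"
}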

View File

@@ -79,7 +79,7 @@
     history.currentId = userMessageId;

     await tick();
-    await sendPrompt(userPrompt, userMessageId, undefined, false);
+    await sendPrompt(userPrompt, userMessageId);
   };

   const updateChatMessages = async () => {

View File

@@ -20,9 +20,12 @@
   // Interface
   let defaultModelId = '';
   let showUsername = false;
   let chatBubble = true;
   let chatDirection: 'LTR' | 'RTL' = 'LTR';

+  let showEmojiInCall = false;
+
   const toggleSplitLargeChunks = async () => {
     splitLargeChunks = !splitLargeChunks;
     saveSettings({ splitLargeChunks: splitLargeChunks });
@@ -43,6 +46,11 @@
     saveSettings({ showUsername: showUsername });
   };

+  const toggleEmojiInCall = async () => {
+    showEmojiInCall = !showEmojiInCall;
+    saveSettings({ showEmojiInCall: showEmojiInCall });
+  };
+
   const toggleTitleAutoGenerate = async () => {
     titleAutoGenerate = !titleAutoGenerate;
     saveSettings({
@@ -88,8 +96,12 @@
   onMount(async () => {
     titleAutoGenerate = $settings?.title?.auto ?? true;
     responseAutoCopy = $settings.responseAutoCopy ?? false;
     showUsername = $settings.showUsername ?? false;
+    showEmojiInCall = $settings.showEmojiInCall ?? false;
+
     chatBubble = $settings.chatBubble ?? true;
     widescreenMode = $settings.widescreenMode ?? false;
     splitLargeChunks = $settings.splitLargeChunks ?? false;
@@ -192,6 +204,26 @@
       </div>
     </div>

+    <div>
+      <div class=" py-0.5 flex w-full justify-between">
+        <div class=" self-center text-xs font-medium">{$i18n.t('Display Emoji in Call')}</div>
+
+        <button
+          class="p-1 px-3 text-xs flex rounded transition"
+          on:click={() => {
+            toggleEmojiInCall();
+          }}
+          type="button"
+        >
+          {#if showEmojiInCall === true}
+            <span class="ml-2 self-center">{$i18n.t('On')}</span>
+          {:else}
+            <span class="ml-2 self-center">{$i18n.t('Off')}</span>
+          {/if}
+        </button>
+      </div>
+    </div>
+
     {#if !$settings.chatBubble}
       <div>
         <div class=" py-0.5 flex w-full justify-between">

View File

@@ -436,7 +436,7 @@ export const removeEmojis = (str) => {

 export const extractSentences = (text) => {
   // Split the paragraph into sentences based on common punctuation marks
-  const sentences = text.split(/(?<=[.!?])/);
+  const sentences = text.split(/(?<=[.!?])\s+/);

   return sentences
     .map((sentence) => removeEmojis(sentence.trim()))
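The added \s+ means a split now requires end punctuation followed by whitespace, so decimals and inline periods no longer produce spurious sentence breaks; for example:

const text = 'Version 2.5 is out! Try it now.';
text.split(/(?<=[.!?])/); // old: ['Version 2.', '5 is out!', ' Try it now.']
text.split(/(?<=[.!?])\s+/); // new: ['Version 2.5 is out!', 'Try it now.']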