From 7f70de99d39028ae3fccb2d2749c186f61a61e17 Mon Sep 17 00:00:00 2001
From: "Timothy J. Baek"
Date: Thu, 13 Jun 2024 20:15:23 -0700
Subject: [PATCH] refac: voice call

---
 src/lib/components/chat/Chat.svelte           | 114 +++-
 .../chat/MessageInput/CallOverlay.svelte      | 633 ++++++++----------
 src/lib/utils/index.ts                        |  18 +
 3 files changed, 407 insertions(+), 358 deletions(-)

diff --git a/src/lib/components/chat/Chat.svelte b/src/lib/components/chat/Chat.svelte
index 44a221ba6..c9ee74aa2 100644
--- a/src/lib/components/chat/Chat.svelte
+++ b/src/lib/components/chat/Chat.svelte
@@ -30,6 +30,7 @@
 	import {
 		convertMessagesToHistory,
 		copyToClipboard,
+		extractSentencesForAudio,
 		promptTemplate,
 		splitStream
 	} from '$lib/utils';
@@ -593,7 +594,15 @@
 				array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
 		);
 
-		eventTarget.dispatchEvent(new CustomEvent('chat:start'));
+		eventTarget.dispatchEvent(
+			new CustomEvent('chat:start', {
+				detail: {
+					id: responseMessageId
+				}
+			})
+		);
+
+		await tick();
 
 		const [res, controller] = await generateChatCompletion(localStorage.token, {
 			model: model.id,
@@ -664,9 +673,23 @@
 							continue;
 						} else {
 							responseMessage.content += data.message.content;
-							eventTarget.dispatchEvent(
-								new CustomEvent('chat', { detail: { content: data.message.content } })
-							);
+
+							const sentences = extractSentencesForAudio(responseMessage.content);
+							sentences.pop();
+
+							// dispatch only last sentence and make sure it hasn't been dispatched before
+							if (
+								sentences.length > 0 &&
+								sentences[sentences.length - 1] !== responseMessage.lastSentence
+							) {
+								responseMessage.lastSentence = sentences[sentences.length - 1];
+								eventTarget.dispatchEvent(
+									new CustomEvent('chat', {
+										detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
+									})
+								);
+							}
+
 							messages = messages;
 						}
 					} else {
@@ -760,7 +783,23 @@
 		stopResponseFlag = false;
 		await tick();
 
-		eventTarget.dispatchEvent(new CustomEvent('chat:finish'));
+
+		let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
+		if (lastSentence) {
+			eventTarget.dispatchEvent(
+				new CustomEvent('chat', {
+					detail: { id: responseMessageId, content: lastSentence }
+				})
+			);
+		}
+		eventTarget.dispatchEvent(
+			new CustomEvent('chat:finish', {
+				detail: {
+					id: responseMessageId,
+					content: responseMessage.content
+				}
+			})
+		);
 
 		if (autoScroll) {
 			scrollToBottom();
@@ -802,7 +841,14 @@
 
 		scrollToBottom();
 
-		eventTarget.dispatchEvent(new CustomEvent('chat:start'));
+		eventTarget.dispatchEvent(
+			new CustomEvent('chat:start', {
+				detail: {
+					id: responseMessageId
+				}
+			})
+		);
+		await tick();
 
 		try {
 			const [res, controller] = await generateOpenAIChatCompletion(
@@ -924,7 +970,23 @@
 							continue;
 						} else {
 							responseMessage.content += value;
-							eventTarget.dispatchEvent(new CustomEvent('chat', { detail: { content: value } }));
+
+							const sentences = extractSentencesForAudio(responseMessage.content);
+							sentences.pop();
+
+							// dispatch only last sentence and make sure it hasn't been dispatched before
+							if (
+								sentences.length > 0 &&
+								sentences[sentences.length - 1] !== responseMessage.lastSentence
+							) {
+								responseMessage.lastSentence = sentences[sentences.length - 1];
+								eventTarget.dispatchEvent(
+									new CustomEvent('chat', {
+										detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
+									})
+								);
+							}
+
 							messages = messages;
 						}
 
@@ -975,7 +1037,23 @@
 		stopResponseFlag = false;
 		await tick();
 
-		eventTarget.dispatchEvent(new CustomEvent('chat:finish'));
+		let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
+		if (lastSentence) {
+			eventTarget.dispatchEvent(
+				new CustomEvent('chat', {
+					detail: { id: responseMessageId, content: lastSentence }
+				})
+			);
+		}
+
+		eventTarget.dispatchEvent(
+			new CustomEvent('chat:finish', {
+				detail: {
+					id: responseMessageId,
+					content: responseMessage.content
+				}
+			})
+		);
 
 		if (autoScroll) {
 			scrollToBottom();
@@ -1207,14 +1285,18 @@
-
+
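The src/lib/utils/index.ts hunk (18 insertions) that introduces extractSentencesForAudio is not included in this excerpt. Purely as an illustrative sketch, assuming the helper only needs to return the completed sentences of the streamed text, it could look roughly like the TypeScript below; this is an assumption, not the patch's actual implementation.

// Assumed sketch only, not the patch's real code: split streamed text into sentences
// after terminal punctuation (., !, ?) followed by whitespace. The handlers in the diff
// above then call sentences.pop() to drop the trailing, possibly unfinished sentence.
export const extractSentencesForAudio = (text: string): string[] => {
	return text
		.split(/(?<=[.!?])\s+/)
		.map((sentence) => sentence.trim())
		.filter((sentence) => sentence.length > 0);
};

With a helper of this shape, each streaming chunk re-extracts the finished sentences and the handlers dispatch only the newest one per 'chat' event, guarded by responseMessage.lastSentence so the same sentence is never sent to the call overlay's TTS twice.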