2024-06-07 05:30:19 +00:00
|
|
|
<script lang="ts">
|
2024-06-17 08:55:06 +00:00
|
|
|
import { config, models, settings, showCallOverlay } from '$lib/stores';
|
2024-08-22 15:27:22 +00:00
|
|
|
import { onMount, tick, getContext, onDestroy } from 'svelte';
|
2024-06-07 05:30:19 +00:00
|
|
|
|
2024-06-14 03:15:23 +00:00
|
|
|
import {
|
|
|
|
blobToFile,
|
|
|
|
calculateSHA256,
|
|
|
|
extractSentencesForAudio,
|
|
|
|
findWordIndices
|
|
|
|
} from '$lib/utils';
|
2024-06-13 04:18:53 +00:00
|
|
|
import { generateEmoji } from '$lib/apis';
|
2024-06-07 07:27:05 +00:00
|
|
|
import { synthesizeOpenAISpeech, transcribeAudio } from '$lib/apis/audio';
|
2024-06-13 04:18:53 +00:00
|
|
|
|
2024-06-07 06:29:08 +00:00
|
|
|
import { toast } from 'svelte-sonner';
|
2024-06-07 21:49:36 +00:00
|
|
|
|
2024-06-07 07:28:34 +00:00
|
|
|
import Tooltip from '$lib/components/common/Tooltip.svelte';
|
2024-06-07 21:49:36 +00:00
|
|
|
import VideoInputMenu from './CallOverlay/VideoInputMenu.svelte';
|
2024-06-07 06:29:08 +00:00
|
|
|
|
2024-06-07 05:30:19 +00:00
|
|
|
const i18n = getContext('i18n');

// Event bus from the chat page; emits 'chat:start', 'chat', 'chat:finish'.
export let eventTarget: EventTarget;
// Sends the transcribed text as a user prompt.
export let submitPrompt: Function;
// Aborts a streaming assistant response.
export let stopResponse: Function;

// Attachments for the next prompt (e.g. a camera screenshot).
export let files;

export let chatId;
export let modelId;

// Screen Wake Lock sentinel (null when not held / unsupported).
let wakeLock = null;

// Resolved model record for modelId; used for the avatar in the template.
let model = null;

let loading = false;
// Set once silence is detected; triggers transcription on recorder stop.
let confirmed = false;
let interrupted = false;
let assistantSpeaking = false;

// Emoji to display for the sentence currently being spoken (or null).
let emoji = null;

let camera = false;
let cameraStream = null;

let chatStreaming = false;

// RMS level of the mic signal; drives the pulsing visualizer.
let rmsLevel = 0;
let hasStartedSpeaking = false;
let mediaRecorder;
let audioStream = null;
let audioChunks = [];

let videoInputDevices = [];
let selectedVideoInputDeviceId = null;
|
|
|
|
|
|
|
|
const getVideoInputDevices = async () => {
|
|
|
|
const devices = await navigator.mediaDevices.enumerateDevices();
|
|
|
|
videoInputDevices = devices.filter((device) => device.kind === 'videoinput');
|
|
|
|
|
|
|
|
if (!!navigator.mediaDevices.getDisplayMedia) {
|
|
|
|
videoInputDevices = [
|
|
|
|
...videoInputDevices,
|
|
|
|
{
|
|
|
|
deviceId: 'screen',
|
|
|
|
label: 'Screen Share'
|
|
|
|
}
|
|
|
|
];
|
|
|
|
}
|
|
|
|
|
|
|
|
console.log(videoInputDevices);
|
|
|
|
if (selectedVideoInputDeviceId === null && videoInputDevices.length > 0) {
|
|
|
|
selectedVideoInputDeviceId = videoInputDevices[0].deviceId;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const startCamera = async () => {
|
|
|
|
await getVideoInputDevices();
|
|
|
|
|
|
|
|
if (cameraStream === null) {
|
|
|
|
camera = true;
|
|
|
|
await tick();
|
|
|
|
try {
|
|
|
|
await startVideoStream();
|
|
|
|
} catch (err) {
|
|
|
|
console.error('Error accessing webcam: ', err);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const startVideoStream = async () => {
|
|
|
|
const video = document.getElementById('camera-feed');
|
|
|
|
if (video) {
|
|
|
|
if (selectedVideoInputDeviceId === 'screen') {
|
|
|
|
cameraStream = await navigator.mediaDevices.getDisplayMedia({
|
|
|
|
video: {
|
|
|
|
cursor: 'always'
|
|
|
|
},
|
|
|
|
audio: false
|
|
|
|
});
|
|
|
|
} else {
|
|
|
|
cameraStream = await navigator.mediaDevices.getUserMedia({
|
|
|
|
video: {
|
|
|
|
deviceId: selectedVideoInputDeviceId ? { exact: selectedVideoInputDeviceId } : undefined
|
|
|
|
}
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
if (cameraStream) {
|
|
|
|
await getVideoInputDevices();
|
|
|
|
video.srcObject = cameraStream;
|
|
|
|
await video.play();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const stopVideoStream = async () => {
|
|
|
|
if (cameraStream) {
|
|
|
|
const tracks = cameraStream.getTracks();
|
|
|
|
tracks.forEach((track) => track.stop());
|
|
|
|
}
|
|
|
|
|
|
|
|
cameraStream = null;
|
|
|
|
};
|
|
|
|
|
|
|
|
const takeScreenshot = () => {
|
|
|
|
const video = document.getElementById('camera-feed');
|
|
|
|
const canvas = document.getElementById('camera-canvas');
|
|
|
|
|
|
|
|
if (!canvas) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
const context = canvas.getContext('2d');
|
|
|
|
|
|
|
|
// Make the canvas match the video dimensions
|
|
|
|
canvas.width = video.videoWidth;
|
|
|
|
canvas.height = video.videoHeight;
|
|
|
|
|
|
|
|
// Draw the image from the video onto the canvas
|
|
|
|
context.drawImage(video, 0, 0, video.videoWidth, video.videoHeight);
|
|
|
|
|
|
|
|
// Convert the canvas to a data base64 URL and console log it
|
|
|
|
const dataURL = canvas.toDataURL('image/png');
|
|
|
|
console.log(dataURL);
|
|
|
|
|
|
|
|
return dataURL;
|
|
|
|
};
|
|
|
|
|
|
|
|
const stopCamera = async () => {
|
|
|
|
await stopVideoStream();
|
|
|
|
camera = false;
|
|
|
|
};
|
|
|
|
|
2024-06-14 09:41:27 +00:00
|
|
|
// Noise gate for the analyser while listening: signal quieter than this
// (in dBFS) is treated as silence.
const MIN_DECIBELS = -55;
const VISUALIZER_BUFFER_LENGTH = 300;
|
|
|
|
|
2024-06-14 03:15:23 +00:00
|
|
|
const transcribeHandler = async (audioBlob) => {
|
|
|
|
// Create a blob from the audio chunks
|
|
|
|
|
|
|
|
await tick();
|
|
|
|
const file = blobToFile(audioBlob, 'recording.wav');
|
|
|
|
|
|
|
|
const res = await transcribeAudio(localStorage.token, file).catch((error) => {
|
|
|
|
toast.error(error);
|
|
|
|
return null;
|
|
|
|
});
|
|
|
|
|
|
|
|
if (res) {
|
|
|
|
console.log(res.text);
|
|
|
|
|
|
|
|
if (res.text !== '') {
|
|
|
|
const _responses = await submitPrompt(res.text, { _raw: true });
|
|
|
|
console.log(_responses);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const stopRecordingCallback = async (_continue = true) => {
|
|
|
|
if ($showCallOverlay) {
|
|
|
|
console.log('%c%s', 'color: red; font-size: 20px;', '🚨 stopRecordingCallback 🚨');
|
|
|
|
|
|
|
|
// deep copy the audioChunks array
|
|
|
|
const _audioChunks = audioChunks.slice(0);
|
|
|
|
|
|
|
|
audioChunks = [];
|
|
|
|
mediaRecorder = false;
|
|
|
|
|
|
|
|
if (_continue) {
|
|
|
|
startRecording();
|
|
|
|
}
|
|
|
|
|
|
|
|
if (confirmed) {
|
|
|
|
loading = true;
|
|
|
|
emoji = null;
|
|
|
|
|
|
|
|
if (cameraStream) {
|
|
|
|
const imageUrl = takeScreenshot();
|
|
|
|
|
|
|
|
files = [
|
|
|
|
{
|
|
|
|
type: 'image',
|
|
|
|
url: imageUrl
|
|
|
|
}
|
|
|
|
];
|
|
|
|
}
|
|
|
|
|
|
|
|
const audioBlob = new Blob(_audioChunks, { type: 'audio/wav' });
|
|
|
|
await transcribeHandler(audioBlob);
|
|
|
|
|
|
|
|
confirmed = false;
|
|
|
|
loading = false;
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
audioChunks = [];
|
|
|
|
mediaRecorder = false;
|
2024-08-22 15:27:22 +00:00
|
|
|
|
|
|
|
if (audioStream) {
|
|
|
|
const tracks = audioStream.getTracks();
|
|
|
|
tracks.forEach((track) => track.stop());
|
|
|
|
}
|
|
|
|
audioStream = null;
|
2024-06-14 03:15:23 +00:00
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const startRecording = async () => {
|
2024-08-22 15:27:22 +00:00
|
|
|
audioStream = await navigator.mediaDevices.getUserMedia({ audio: true });
|
|
|
|
mediaRecorder = new MediaRecorder(audioStream);
|
2024-06-14 07:05:01 +00:00
|
|
|
|
2024-06-14 03:15:23 +00:00
|
|
|
mediaRecorder.onstart = () => {
|
|
|
|
console.log('Recording started');
|
|
|
|
audioChunks = [];
|
2024-08-22 15:27:22 +00:00
|
|
|
analyseAudio(audioStream);
|
2024-06-14 03:15:23 +00:00
|
|
|
};
|
2024-06-14 07:05:01 +00:00
|
|
|
|
2024-06-14 03:15:23 +00:00
|
|
|
mediaRecorder.ondataavailable = (event) => {
|
|
|
|
if (hasStartedSpeaking) {
|
|
|
|
audioChunks.push(event.data);
|
|
|
|
}
|
|
|
|
};
|
2024-06-14 07:05:01 +00:00
|
|
|
|
|
|
|
mediaRecorder.onstop = (e) => {
|
|
|
|
console.log('Recording stopped', e);
|
|
|
|
stopRecordingCallback();
|
2024-06-14 03:15:23 +00:00
|
|
|
};
|
2024-06-14 07:05:01 +00:00
|
|
|
|
2024-06-14 03:15:23 +00:00
|
|
|
mediaRecorder.start();
|
|
|
|
};
|
|
|
|
|
2024-06-07 06:29:08 +00:00
|
|
|
// Function to calculate the RMS level from time domain data
|
|
|
|
const calculateRMS = (data: Uint8Array) => {
|
|
|
|
let sumSquares = 0;
|
|
|
|
for (let i = 0; i < data.length; i++) {
|
|
|
|
const normalizedValue = (data[i] - 128) / 128; // Normalize the data
|
|
|
|
sumSquares += normalizedValue * normalizedValue;
|
|
|
|
}
|
|
|
|
return Math.sqrt(sumSquares / data.length);
|
|
|
|
};
|
|
|
|
|
|
|
|
const analyseAudio = (stream) => {
|
|
|
|
const audioContext = new AudioContext();
|
|
|
|
const audioStreamSource = audioContext.createMediaStreamSource(stream);
|
|
|
|
|
|
|
|
const analyser = audioContext.createAnalyser();
|
|
|
|
analyser.minDecibels = MIN_DECIBELS;
|
|
|
|
audioStreamSource.connect(analyser);
|
|
|
|
|
|
|
|
const bufferLength = analyser.frequencyBinCount;
|
|
|
|
|
|
|
|
const domainData = new Uint8Array(bufferLength);
|
|
|
|
const timeDomainData = new Uint8Array(analyser.fftSize);
|
|
|
|
|
|
|
|
let lastSoundTime = Date.now();
|
2024-06-07 06:36:47 +00:00
|
|
|
hasStartedSpeaking = false;
|
2024-06-07 06:29:08 +00:00
|
|
|
|
2024-06-14 07:05:01 +00:00
|
|
|
console.log('🔊 Sound detection started', lastSoundTime, hasStartedSpeaking);
|
|
|
|
|
2024-06-07 06:29:08 +00:00
|
|
|
const detectSound = () => {
|
|
|
|
const processFrame = () => {
|
2024-06-07 06:42:31 +00:00
|
|
|
if (!mediaRecorder || !$showCallOverlay) {
|
|
|
|
return;
|
|
|
|
}
|
2024-06-13 08:28:15 +00:00
|
|
|
|
2024-06-22 20:21:36 +00:00
|
|
|
if (assistantSpeaking && !($settings?.voiceInterruption ?? false)) {
|
2024-06-17 00:19:18 +00:00
|
|
|
// Mute the audio if the assistant is speaking
|
2024-06-16 23:50:57 +00:00
|
|
|
analyser.maxDecibels = 0;
|
2024-06-17 00:19:18 +00:00
|
|
|
analyser.minDecibels = -1;
|
2024-06-16 23:50:57 +00:00
|
|
|
} else {
|
|
|
|
analyser.minDecibels = MIN_DECIBELS;
|
|
|
|
analyser.maxDecibels = -30;
|
|
|
|
}
|
|
|
|
|
2024-06-07 06:29:08 +00:00
|
|
|
analyser.getByteTimeDomainData(timeDomainData);
|
|
|
|
analyser.getByteFrequencyData(domainData);
|
|
|
|
|
|
|
|
// Calculate RMS level from time domain data
|
|
|
|
rmsLevel = calculateRMS(timeDomainData);
|
|
|
|
|
|
|
|
// Check if initial speech/noise has started
|
|
|
|
const hasSound = domainData.some((value) => value > 0);
|
|
|
|
if (hasSound) {
|
2024-06-13 08:28:15 +00:00
|
|
|
// BIG RED TEXT
|
|
|
|
console.log('%c%s', 'color: red; font-size: 20px;', '🔊 Sound detected');
|
2024-06-14 03:15:23 +00:00
|
|
|
|
|
|
|
if (!hasStartedSpeaking) {
|
|
|
|
hasStartedSpeaking = true;
|
|
|
|
stopAllAudio();
|
|
|
|
}
|
|
|
|
|
|
|
|
lastSoundTime = Date.now();
|
2024-06-07 06:29:08 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Start silence detection only after initial speech/noise has been detected
|
|
|
|
if (hasStartedSpeaking) {
|
|
|
|
if (Date.now() - lastSoundTime > 2000) {
|
|
|
|
confirmed = true;
|
|
|
|
|
|
|
|
if (mediaRecorder) {
|
2024-06-14 07:05:01 +00:00
|
|
|
console.log('%c%s', 'color: red; font-size: 20px;', '🔇 Silence detected');
|
2024-06-07 06:29:08 +00:00
|
|
|
mediaRecorder.stop();
|
2024-06-14 07:05:01 +00:00
|
|
|
return;
|
2024-06-07 06:29:08 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
window.requestAnimationFrame(processFrame);
|
|
|
|
};
|
|
|
|
|
|
|
|
window.requestAnimationFrame(processFrame);
|
|
|
|
};
|
2024-06-07 05:30:19 +00:00
|
|
|
|
2024-06-07 06:29:08 +00:00
|
|
|
detectSound();
|
|
|
|
};
|
|
|
|
|
2024-06-14 03:15:23 +00:00
|
|
|
let finishedMessages = {}; // message id -> true once 'chat:finish' was received
let currentMessageId = null; // id of the assistant message currently being played
let currentUtterance = null; // active SpeechSynthesisUtterance, if any
|
2024-06-07 07:27:05 +00:00
|
|
|
|
2024-06-13 08:28:15 +00:00
|
|
|
const speakSpeechSynthesisHandler = (content) => {
|
|
|
|
if ($showCallOverlay) {
|
|
|
|
return new Promise((resolve) => {
|
|
|
|
let voices = [];
|
|
|
|
const getVoicesLoop = setInterval(async () => {
|
|
|
|
voices = await speechSynthesis.getVoices();
|
|
|
|
if (voices.length > 0) {
|
|
|
|
clearInterval(getVoicesLoop);
|
|
|
|
|
|
|
|
const voice =
|
|
|
|
voices
|
|
|
|
?.filter(
|
|
|
|
(v) => v.voiceURI === ($settings?.audio?.tts?.voice ?? $config?.audio?.tts?.voice)
|
|
|
|
)
|
|
|
|
?.at(0) ?? undefined;
|
|
|
|
|
|
|
|
currentUtterance = new SpeechSynthesisUtterance(content);
|
|
|
|
|
|
|
|
if (voice) {
|
|
|
|
currentUtterance.voice = voice;
|
|
|
|
}
|
|
|
|
|
|
|
|
speechSynthesis.speak(currentUtterance);
|
|
|
|
currentUtterance.onend = async (e) => {
|
2024-06-14 09:30:36 +00:00
|
|
|
await new Promise((r) => setTimeout(r, 200));
|
2024-06-13 08:28:15 +00:00
|
|
|
resolve(e);
|
|
|
|
};
|
|
|
|
}
|
|
|
|
}, 100);
|
|
|
|
});
|
|
|
|
} else {
|
|
|
|
return Promise.resolve();
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
const playAudio = (audio) => {
|
2024-06-07 22:00:42 +00:00
|
|
|
if ($showCallOverlay) {
|
2024-06-13 08:28:15 +00:00
|
|
|
return new Promise((resolve) => {
|
2024-06-07 22:00:42 +00:00
|
|
|
const audioElement = document.getElementById('audioElement');
|
2024-06-07 07:57:53 +00:00
|
|
|
|
2024-06-13 04:18:53 +00:00
|
|
|
if (audioElement) {
|
2024-06-13 08:28:15 +00:00
|
|
|
audioElement.src = audio.src;
|
2024-06-13 04:18:53 +00:00
|
|
|
audioElement.muted = true;
|
2024-06-08 10:27:56 +00:00
|
|
|
|
2024-06-13 04:18:53 +00:00
|
|
|
audioElement
|
|
|
|
.play()
|
|
|
|
.then(() => {
|
|
|
|
audioElement.muted = false;
|
|
|
|
})
|
|
|
|
.catch((error) => {
|
2024-06-13 08:28:15 +00:00
|
|
|
console.error(error);
|
2024-06-13 04:18:53 +00:00
|
|
|
});
|
2024-06-07 07:57:53 +00:00
|
|
|
|
2024-06-13 04:18:53 +00:00
|
|
|
audioElement.onended = async (e) => {
|
2024-06-13 08:28:15 +00:00
|
|
|
await new Promise((r) => setTimeout(r, 100));
|
|
|
|
resolve(e);
|
2024-06-13 04:18:53 +00:00
|
|
|
};
|
|
|
|
}
|
2024-06-07 22:00:42 +00:00
|
|
|
});
|
|
|
|
} else {
|
|
|
|
return Promise.resolve();
|
|
|
|
}
|
2024-06-07 07:27:05 +00:00
|
|
|
};
|
|
|
|
|
2024-06-14 03:15:23 +00:00
|
|
|
const stopAllAudio = async () => {
|
2024-06-16 23:50:57 +00:00
|
|
|
assistantSpeaking = false;
|
2024-06-14 03:15:23 +00:00
|
|
|
interrupted = true;
|
2024-06-13 09:29:56 +00:00
|
|
|
|
2024-06-14 03:15:23 +00:00
|
|
|
if (chatStreaming) {
|
|
|
|
stopResponse();
|
2024-06-07 07:27:05 +00:00
|
|
|
}
|
|
|
|
|
2024-06-14 03:15:23 +00:00
|
|
|
if (currentUtterance) {
|
|
|
|
speechSynthesis.cancel();
|
|
|
|
currentUtterance = null;
|
|
|
|
}
|
2024-06-13 08:28:15 +00:00
|
|
|
|
2024-06-14 03:15:23 +00:00
|
|
|
const audioElement = document.getElementById('audioElement');
|
|
|
|
if (audioElement) {
|
2024-06-14 07:05:01 +00:00
|
|
|
audioElement.muted = true;
|
2024-06-14 03:15:23 +00:00
|
|
|
audioElement.pause();
|
|
|
|
audioElement.currentTime = 0;
|
|
|
|
}
|
|
|
|
};
|
2024-06-13 09:29:56 +00:00
|
|
|
|
2024-06-14 03:15:23 +00:00
|
|
|
// Aborts the per-message playback loop when a new message starts streaming.
let audioAbortController = new AbortController();

// Audio cache map where key is the content and value is the Audio object.
// (The value is `true` when no TTS engine is configured and browser speech
// synthesis will be used instead.)
const audioCache = new Map();
// Emoji generated per sentence, keyed by the same content string.
const emojiCache = new Map();
|
|
|
|
|
2024-06-14 03:15:23 +00:00
|
|
|
const fetchAudio = async (content) => {
|
|
|
|
if (!audioCache.has(content)) {
|
|
|
|
try {
|
2024-06-14 09:30:36 +00:00
|
|
|
// Set the emoji for the content if needed
|
|
|
|
if ($settings?.showEmojiInCall ?? false) {
|
|
|
|
const emoji = await generateEmoji(localStorage.token, modelId, content, chatId);
|
|
|
|
if (emoji) {
|
|
|
|
emojiCache.set(content, emoji);
|
|
|
|
}
|
|
|
|
}
|
2024-06-13 09:29:56 +00:00
|
|
|
|
2024-06-14 09:30:36 +00:00
|
|
|
if ($config.audio.tts.engine !== '') {
|
|
|
|
const res = await synthesizeOpenAISpeech(
|
|
|
|
localStorage.token,
|
|
|
|
$settings?.audio?.tts?.voice ?? $config?.audio?.tts?.voice,
|
|
|
|
content
|
|
|
|
).catch((error) => {
|
|
|
|
console.error(error);
|
|
|
|
return null;
|
|
|
|
});
|
|
|
|
|
|
|
|
if (res) {
|
|
|
|
const blob = await res.blob();
|
|
|
|
const blobUrl = URL.createObjectURL(blob);
|
|
|
|
audioCache.set(content, new Audio(blobUrl));
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
audioCache.set(content, true);
|
2024-06-13 09:29:56 +00:00
|
|
|
}
|
2024-06-14 03:15:23 +00:00
|
|
|
} catch (error) {
|
|
|
|
console.error('Error synthesizing speech:', error);
|
2024-06-13 04:18:53 +00:00
|
|
|
}
|
|
|
|
}
|
2024-06-14 09:30:36 +00:00
|
|
|
|
2024-06-14 03:15:23 +00:00
|
|
|
return audioCache.get(content);
|
2024-06-13 08:28:15 +00:00
|
|
|
};
|
2024-06-13 04:18:53 +00:00
|
|
|
|
2024-06-14 03:15:23 +00:00
|
|
|
let messages = {}; // message id -> queue of sentence strings awaiting playback
|
|
|
|
|
|
|
|
const monitorAndPlayAudio = async (id, signal) => {
|
|
|
|
while (!signal.aborted) {
|
|
|
|
if (messages[id] && messages[id].length > 0) {
|
|
|
|
// Retrieve the next content string from the queue
|
|
|
|
const content = messages[id].shift(); // Dequeues the content for playing
|
|
|
|
|
|
|
|
if (audioCache.has(content)) {
|
|
|
|
// If content is available in the cache, play it
|
2024-06-14 09:30:36 +00:00
|
|
|
|
|
|
|
// Set the emoji for the content if available
|
|
|
|
if (($settings?.showEmojiInCall ?? false) && emojiCache.has(content)) {
|
|
|
|
emoji = emojiCache.get(content);
|
|
|
|
} else {
|
|
|
|
emoji = null;
|
|
|
|
}
|
|
|
|
|
|
|
|
if ($config.audio.tts.engine !== '') {
|
|
|
|
try {
|
|
|
|
console.log(
|
|
|
|
'%c%s',
|
|
|
|
'color: red; font-size: 20px;',
|
|
|
|
`Playing audio for content: ${content}`
|
|
|
|
);
|
|
|
|
|
|
|
|
const audio = audioCache.get(content);
|
|
|
|
await playAudio(audio); // Here ensure that playAudio is indeed correct method to execute
|
|
|
|
console.log(`Played audio for content: ${content}`);
|
|
|
|
await new Promise((resolve) => setTimeout(resolve, 200)); // Wait before retrying to reduce tight loop
|
|
|
|
} catch (error) {
|
|
|
|
console.error('Error playing audio:', error);
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
await speakSpeechSynthesisHandler(content);
|
2024-06-14 03:15:23 +00:00
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// If not available in the cache, push it back to the queue and delay
|
|
|
|
messages[id].unshift(content); // Re-queue the content at the start
|
|
|
|
console.log(`Audio for "${content}" not yet available in the cache, re-queued...`);
|
|
|
|
await new Promise((resolve) => setTimeout(resolve, 200)); // Wait before retrying to reduce tight loop
|
2024-06-07 21:08:04 +00:00
|
|
|
}
|
2024-06-14 03:15:23 +00:00
|
|
|
} else if (finishedMessages[id] && messages[id] && messages[id].length === 0) {
|
|
|
|
// If the message is finished and there are no more messages to process, break the loop
|
2024-06-16 23:50:57 +00:00
|
|
|
assistantSpeaking = false;
|
2024-06-14 03:15:23 +00:00
|
|
|
break;
|
|
|
|
} else {
|
|
|
|
// No messages to process, sleep for a bit
|
|
|
|
await new Promise((resolve) => setTimeout(resolve, 200));
|
2024-06-07 06:29:08 +00:00
|
|
|
}
|
|
|
|
}
|
2024-06-14 03:15:23 +00:00
|
|
|
console.log(`Audio monitoring and playing stopped for message ID ${id}`);
|
2024-06-07 06:29:08 +00:00
|
|
|
};
|
|
|
|
|
2024-06-14 03:15:23 +00:00
|
|
|
onMount(async () => {
	// Keep the screen awake for the duration of the call where supported.
	const setWakeLock = async () => {
		try {
			wakeLock = await navigator.wakeLock.request('screen');
		} catch (err) {
			// The Wake Lock request has failed - usually system related, such as battery.
			console.log(err);
		}

		if (wakeLock) {
			// Add a listener to release the wake lock when the page is unloaded
			wakeLock.addEventListener('release', () => {
				// the wake lock has been released
				console.log('Wake Lock released');
			});
		}
	};

	if ('wakeLock' in navigator) {
		await setWakeLock();

		document.addEventListener('visibilitychange', async () => {
			// Re-request the wake lock if the document becomes visible
			if (wakeLock !== null && document.visibilityState === 'visible') {
				await setWakeLock();
			}
		});
	}

	// Resolve the model record so the template can show its avatar.
	model = $models.find((m) => m.id === modelId);

	startRecording();

	// A new assistant message began streaming: switch playback over to it.
	const chatStartHandler = async (e) => {
		const { id } = e.detail;

		chatStreaming = true;

		if (currentMessageId !== id) {
			console.log(`Received chat start event for message ID ${id}`);

			currentMessageId = id;
			// Abort the playback loop of the previous message before starting
			// a new one.
			if (audioAbortController) {
				audioAbortController.abort();
			}
			audioAbortController = new AbortController();

			assistantSpeaking = true;
			// Start monitoring and playing audio for the message ID
			monitorAndPlayAudio(id, audioAbortController.signal);
		}
	};

	// One sentence of the current assistant message arrived: queue it and
	// kick off audio generation for it.
	const chatEventHandler = async (e) => {
		const { id, content } = e.detail;
		// "id" here is message id
		// if "id" is not the same as "currentMessageId" then do not process
		// "content" here is a sentence from the assistant,
		// there will be many sentences for the same "id"
		if (currentMessageId === id) {
			console.log(`Received chat event for message ID ${id}: ${content}`);

			try {
				if (messages[id] === undefined) {
					messages[id] = [content];
				} else {
					messages[id].push(content);
				}

				console.log(content);

				fetchAudio(content);
			} catch (error) {
				console.error('Failed to fetch or play audio:', error);
			}
		}
	};

	// The assistant message finished streaming.
	const chatFinishHandler = async (e) => {
		const { id, content } = e.detail;
		// "content" here is the entire message from the assistant
		finishedMessages[id] = true;

		chatStreaming = false;
	};

	eventTarget.addEventListener('chat:start', chatStartHandler);
	eventTarget.addEventListener('chat', chatEventHandler);
	eventTarget.addEventListener('chat:finish', chatFinishHandler);

	// Cleanup on unmount: unwire events, stop playback, recording and camera.
	return async () => {
		eventTarget.removeEventListener('chat:start', chatStartHandler);
		eventTarget.removeEventListener('chat', chatEventHandler);
		eventTarget.removeEventListener('chat:finish', chatFinishHandler);

		audioAbortController.abort();
		await tick();

		await stopAllAudio();

		await stopRecordingCallback(false);
		await stopCamera();
	};
});
|
2024-08-22 15:27:22 +00:00
|
|
|
|
|
|
|
// Mirrors the onMount cleanup for the component-destroy path: stop playback,
// recording and the camera.
onDestroy(async () => {
	await stopAllAudio();
	await stopRecordingCallback(false);
	await stopCamera();
});
|
2024-06-07 06:29:08 +00:00
|
|
|
</script>
|
|
|
|
|
|
|
|
{#if $showCallOverlay}
	<div class=" absolute w-full h-screen max-h-[100dvh] flex z-[999] overflow-hidden">
		<div
			class="absolute w-full h-screen max-h-[100dvh] bg-white text-gray-700 dark:bg-black dark:text-gray-300 flex justify-center"
		>
			<div class="max-w-lg w-full h-screen max-h-[100dvh] flex flex-col justify-between p-3 md:p-6">
				<!-- Compact top indicator shown while the camera view occupies the centre;
				     tapping it interrupts assistant playback -->
				{#if camera}
					<button
						type="button"
						class="flex justify-center items-center w-full h-20 min-h-20"
						on:click={() => {
							if (assistantSpeaking) {
								stopAllAudio();
							}
						}}
					>
						{#if emoji}
							<div
								class=" transition-all rounded-full"
								style="font-size:{rmsLevel * 100 > 4
									? '4.5'
									: rmsLevel * 100 > 2
										? '4.25'
										: rmsLevel * 100 > 1
											? '3.75'
											: '3.5'}rem;width: 100%; text-align:center;"
							>
								{emoji}
							</div>
						{:else if loading || assistantSpeaking}
							<svg
								class="size-12 text-gray-900 dark:text-gray-400"
								viewBox="0 0 24 24"
								fill="currentColor"
								xmlns="http://www.w3.org/2000/svg"
								><style>
									.spinner_qM83 {
										animation: spinner_8HQG 1.05s infinite;
									}
									.spinner_oXPr {
										animation-delay: 0.1s;
									}
									.spinner_ZTLf {
										animation-delay: 0.2s;
									}
									@keyframes spinner_8HQG {
										0%,
										57.14% {
											animation-timing-function: cubic-bezier(0.33, 0.66, 0.66, 1);
											transform: translate(0);
										}
										28.57% {
											animation-timing-function: cubic-bezier(0.33, 0, 0.66, 0.33);
											transform: translateY(-6px);
										}
										100% {
											transform: translate(0);
										}
									}
								</style><circle class="spinner_qM83" cx="4" cy="12" r="3" /><circle
									class="spinner_qM83 spinner_oXPr"
									cx="12"
									cy="12"
									r="3"
								/><circle class="spinner_qM83 spinner_ZTLf" cx="20" cy="12" r="3" /></svg
							>
						{:else}
							<div
								class=" {rmsLevel * 100 > 4
									? ' size-[4.5rem]'
									: rmsLevel * 100 > 2
										? ' size-16'
										: rmsLevel * 100 > 1
											? 'size-14'
											: 'size-12'} transition-all rounded-full {(model?.info?.meta
									?.profile_image_url ?? '/static/favicon.png') !== '/static/favicon.png'
									? ' bg-cover bg-center bg-no-repeat'
									: 'bg-black dark:bg-white'} bg-black dark:bg-white"
								style={(model?.info?.meta?.profile_image_url ?? '/static/favicon.png') !==
								'/static/favicon.png'
									? `background-image: url('${model?.info?.meta?.profile_image_url}');`
									: ''}
							/>
						{/if}

						<!-- navbar -->
					</button>
				{/if}

				<div class="flex justify-center items-center flex-1 h-full w-full max-h-full">
					{#if !camera}
						<!-- Large status indicator (emoji / spinner / model avatar);
						     tapping interrupts assistant playback -->
						<button
							type="button"
							on:click={() => {
								if (assistantSpeaking) {
									stopAllAudio();
								}
							}}
						>
							{#if emoji}
								<div
									class=" transition-all rounded-full"
									style="font-size:{rmsLevel * 100 > 4
										? '13'
										: rmsLevel * 100 > 2
											? '12'
											: rmsLevel * 100 > 1
												? '11.5'
												: '11'}rem;width:100%;text-align:center;"
								>
									{emoji}
								</div>
							{:else if loading || assistantSpeaking}
								<svg
									class="size-44 text-gray-900 dark:text-gray-400"
									viewBox="0 0 24 24"
									fill="currentColor"
									xmlns="http://www.w3.org/2000/svg"
									><style>
										.spinner_qM83 {
											animation: spinner_8HQG 1.05s infinite;
										}
										.spinner_oXPr {
											animation-delay: 0.1s;
										}
										.spinner_ZTLf {
											animation-delay: 0.2s;
										}
										@keyframes spinner_8HQG {
											0%,
											57.14% {
												animation-timing-function: cubic-bezier(0.33, 0.66, 0.66, 1);
												transform: translate(0);
											}
											28.57% {
												animation-timing-function: cubic-bezier(0.33, 0, 0.66, 0.33);
												transform: translateY(-6px);
											}
											100% {
												transform: translate(0);
											}
										}
									</style><circle class="spinner_qM83" cx="4" cy="12" r="3" /><circle
										class="spinner_qM83 spinner_oXPr"
										cx="12"
										cy="12"
										r="3"
									/><circle class="spinner_qM83 spinner_ZTLf" cx="20" cy="12" r="3" /></svg
								>
							{:else}
								<div
									class=" {rmsLevel * 100 > 4
										? ' size-52'
										: rmsLevel * 100 > 2
											? 'size-48'
											: rmsLevel * 100 > 1
												? 'size-[11.5rem]'
												: 'size-44'} transition-all rounded-full {(model?.info?.meta
										?.profile_image_url ?? '/static/favicon.png') !== '/static/favicon.png'
										? ' bg-cover bg-center bg-no-repeat'
										: 'bg-black dark:bg-white'} "
									style={(model?.info?.meta?.profile_image_url ?? '/static/favicon.png') !==
									'/static/favicon.png'
										? `background-image: url('${model?.info?.meta?.profile_image_url}');`
										: ''}
								/>
							{/if}
						</button>
					{:else}
						<!-- Live camera / screen-share preview; the hidden canvas is used by
						     takeScreenshot() -->
						<div
							class="relative flex video-container w-full max-h-full pt-2 pb-4 md:py-6 px-2 h-full"
						>
							<video
								id="camera-feed"
								autoplay
								class="rounded-2xl h-full min-w-full object-cover object-center"
								playsinline
							/>

							<canvas id="camera-canvas" style="display:none;" />

							<div class=" absolute top-4 md:top-8 left-4">
								<button
									type="button"
									class="p-1.5 text-white cursor-pointer backdrop-blur-xl bg-black/10 rounded-full"
									on:click={() => {
										stopCamera();
									}}
								>
									<svg
										xmlns="http://www.w3.org/2000/svg"
										viewBox="0 0 16 16"
										fill="currentColor"
										class="size-6"
									>
										<path
											d="M5.28 4.22a.75.75 0 0 0-1.06 1.06L6.94 8l-2.72 2.72a.75.75 0 1 0 1.06 1.06L8 9.06l2.72 2.72a.75.75 0 1 0 1.06-1.06L9.06 8l2.72-2.72a.75.75 0 0 0-1.06-1.06L8 6.94 5.28 4.22Z"
										/>
									</svg>
								</button>
							</div>
						</div>
					{/if}
				</div>

				<!-- Bottom control bar: camera selector, status text, end-call button -->
				<div class="flex justify-between items-center pb-2 w-full">
					<div>
						{#if camera}
							<VideoInputMenu
								devices={videoInputDevices}
								on:change={async (e) => {
									console.log(e.detail);
									selectedVideoInputDeviceId = e.detail;
									await stopVideoStream();
									await startVideoStream();
								}}
							>
								<button class=" p-3 rounded-full bg-gray-50 dark:bg-gray-900" type="button">
									<svg
										xmlns="http://www.w3.org/2000/svg"
										viewBox="0 0 20 20"
										fill="currentColor"
										class="size-5"
									>
										<path
											fill-rule="evenodd"
											d="M15.312 11.424a5.5 5.5 0 0 1-9.201 2.466l-.312-.311h2.433a.75.75 0 0 0 0-1.5H3.989a.75.75 0 0 0-.75.75v4.242a.75.75 0 0 0 1.5 0v-2.43l.31.31a7 7 0 0 0 11.712-3.138.75.75 0 0 0-1.449-.39Zm1.23-3.723a.75.75 0 0 0 .219-.53V2.929a.75.75 0 0 0-1.5 0V5.36l-.31-.31A7 7 0 0 0 3.239 8.188a.75.75 0 1 0 1.448.389A5.5 5.5 0 0 1 13.89 6.11l.311.31h-2.432a.75.75 0 0 0 0 1.5h4.243a.75.75 0 0 0 .53-.219Z"
											clip-rule="evenodd"
										/>
									</svg>
								</button>
							</VideoInputMenu>
						{:else}
							<Tooltip content={$i18n.t('Camera')}>
								<button
									class=" p-3 rounded-full bg-gray-50 dark:bg-gray-900"
									type="button"
									on:click={async () => {
										await navigator.mediaDevices.getUserMedia({ video: true });
										startCamera();
									}}
								>
									<svg
										xmlns="http://www.w3.org/2000/svg"
										fill="none"
										viewBox="0 0 24 24"
										stroke-width="1.5"
										stroke="currentColor"
										class="size-5"
									>
										<path
											stroke-linecap="round"
											stroke-linejoin="round"
											d="M6.827 6.175A2.31 2.31 0 0 1 5.186 7.23c-.38.054-.757.112-1.134.175C2.999 7.58 2.25 8.507 2.25 9.574V18a2.25 2.25 0 0 0 2.25 2.25h15A2.25 2.25 0 0 0 21.75 18V9.574c0-1.067-.75-1.994-1.802-2.169a47.865 47.865 0 0 0-1.134-.175 2.31 2.31 0 0 1-1.64-1.055l-.822-1.316a2.192 2.192 0 0 0-1.736-1.039 48.774 48.774 0 0 0-5.232 0 2.192 2.192 0 0 0-1.736 1.039l-.821 1.316Z"
										/>
										<path
											stroke-linecap="round"
											stroke-linejoin="round"
											d="M16.5 12.75a4.5 4.5 0 1 1-9 0 4.5 4.5 0 0 1 9 0ZM18.75 10.5h.008v.008h-.008V10.5Z"
										/>
									</svg>
								</button>
							</Tooltip>
						{/if}
					</div>

					<div>
						<!-- Status text; also acts as an interrupt tap target -->
						<button
							type="button"
							on:click={() => {
								if (assistantSpeaking) {
									stopAllAudio();
								}
							}}
						>
							<div class=" line-clamp-1 text-sm font-medium">
								{#if loading}
									{$i18n.t('Thinking...')}
								{:else if assistantSpeaking}
									{$i18n.t('Tap to interrupt')}
								{:else}
									{$i18n.t('Listening...')}
								{/if}
							</div>
						</button>
					</div>

					<div>
						<!-- End the call by hiding the overlay (cleanup runs in onDestroy) -->
						<button
							class=" p-3 rounded-full bg-gray-50 dark:bg-gray-900"
							on:click={async () => {
								showCallOverlay.set(false);
							}}
							type="button"
						>
							<svg
								xmlns="http://www.w3.org/2000/svg"
								viewBox="0 0 20 20"
								fill="currentColor"
								class="size-5"
							>
								<path
									d="M6.28 5.22a.75.75 0 0 0-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 1 0 1.06 1.06L10 11.06l3.72 3.72a.75.75 0 1 0 1.06-1.06L11.06 10l3.72-3.72a.75.75 0 0 0-1.06-1.06L10 8.94 6.28 5.22Z"
								/>
							</svg>
						</button>
					</div>
				</div>
			</div>
		</div>
	</div>
{/if}
|