open-webui/src/lib/components/chat/MessageInput/CallOverlay.svelte

864 lines
22 KiB
Svelte
Raw Normal View History

2024-06-07 05:30:19 +00:00
<script lang="ts">
2024-06-08 03:18:48 +00:00
import { config, settings, showCallOverlay } from '$lib/stores';
2024-06-07 05:30:19 +00:00
import { onMount, tick, getContext } from 'svelte';
2024-06-07 07:27:05 +00:00
import { blobToFile, calculateSHA256, extractSentences, findWordIndices } from '$lib/utils';
2024-06-13 04:18:53 +00:00
import { generateEmoji } from '$lib/apis';
2024-06-07 07:27:05 +00:00
import { synthesizeOpenAISpeech, transcribeAudio } from '$lib/apis/audio';
2024-06-13 04:18:53 +00:00
2024-06-07 06:29:08 +00:00
import { toast } from 'svelte-sonner';
2024-06-07 21:49:36 +00:00
2024-06-07 07:28:34 +00:00
import Tooltip from '$lib/components/common/Tooltip.svelte';
2024-06-07 21:49:36 +00:00
import VideoInputMenu from './CallOverlay/VideoInputMenu.svelte';
2024-06-07 06:29:08 +00:00
2024-06-07 05:30:19 +00:00
const i18n = getContext('i18n');
2024-06-13 04:18:53 +00:00
export let eventTarget: EventTarget;
2024-06-13 08:28:15 +00:00
2024-06-07 07:04:47 +00:00
export let submitPrompt: Function;
2024-06-13 08:28:15 +00:00
export let stopResponse: Function;
2024-06-07 21:08:04 +00:00
export let files;
2024-06-07 07:04:47 +00:00
2024-06-13 04:18:53 +00:00
export let chatId;
export let modelId;
2024-06-07 06:29:08 +00:00
let loading = false;
let confirmed = false;
2024-06-13 08:28:15 +00:00
let interrupted = false;
2024-06-07 05:30:19 +00:00
2024-06-13 04:18:53 +00:00
let emoji = null;
2024-06-07 21:08:04 +00:00
let camera = false;
let cameraStream = null;
2024-06-07 07:04:47 +00:00
let assistantSpeaking = false;
2024-06-13 08:28:15 +00:00
let chatStreaming = false;
let assistantMessage = '';
let assistantSentences = [];
let assistantSentenceAudios = {};
let assistantSentenceIdx = -1;
let audioQueue = [];
2024-06-13 09:29:56 +00:00
let emojiQueue = [];
2024-06-13 08:28:15 +00:00
$: assistantSentences = extractSentences(assistantMessage).reduce((mergedTexts, currentText) => {
const lastIndex = mergedTexts.length - 1;
if (lastIndex >= 0) {
const previousText = mergedTexts[lastIndex];
const wordCount = previousText.split(/\s+/).length;
if (wordCount < 2) {
mergedTexts[lastIndex] = previousText + ' ' + currentText;
} else {
mergedTexts.push(currentText);
}
} else {
mergedTexts.push(currentText);
}
return mergedTexts;
}, []);
2024-06-07 05:30:19 +00:00
2024-06-07 07:27:05 +00:00
let currentUtterance = null;
2024-06-07 06:29:08 +00:00
2024-06-13 08:28:15 +00:00
let rmsLevel = 0;
let hasStartedSpeaking = false;
2024-06-07 06:29:08 +00:00
let mediaRecorder;
let audioChunks = [];
2024-06-13 08:28:15 +00:00
let videoInputDevices = [];
let selectedVideoInputDeviceId = null;
// Enumerate attached cameras and, when the browser supports it, append a
// pseudo "Screen Share" device; defaults the selection to the first entry.
const getVideoInputDevices = async () => {
	const allDevices = await navigator.mediaDevices.enumerateDevices();
	videoInputDevices = allDevices.filter(({ kind }) => kind === 'videoinput');

	if (!!navigator.mediaDevices.getDisplayMedia) {
		videoInputDevices = [...videoInputDevices, { deviceId: 'screen', label: 'Screen Share' }];
	}

	console.log(videoInputDevices);

	if (selectedVideoInputDeviceId === null && videoInputDevices.length > 0) {
		selectedVideoInputDeviceId = videoInputDevices[0].deviceId;
	}
};
// Show the camera view and start streaming, unless a stream is already live.
const startCamera = async () => {
	await getVideoInputDevices();

	if (cameraStream !== null) {
		return; // already streaming
	}

	camera = true;
	await tick(); // wait for the <video> element to be rendered
	try {
		await startVideoStream();
	} catch (err) {
		console.error('Error accessing webcam: ', err);
	}
};
// Attach a capture stream (screen share or the selected webcam) to the
// <video id="camera-feed"> element and begin playback.
const startVideoStream = async () => {
	const video = document.getElementById('camera-feed');
	if (!video) {
		return;
	}

	if (selectedVideoInputDeviceId === 'screen') {
		cameraStream = await navigator.mediaDevices.getDisplayMedia({
			video: { cursor: 'always' },
			audio: false
		});
	} else {
		cameraStream = await navigator.mediaDevices.getUserMedia({
			video: {
				deviceId: selectedVideoInputDeviceId ? { exact: selectedVideoInputDeviceId } : undefined
			}
		});
	}

	if (cameraStream) {
		// Re-enumerate: device labels become available once permission is granted.
		await getVideoInputDevices();
		video.srcObject = cameraStream;
		await video.play();
	}
};
// Stop every track on the active capture stream and clear the handle.
const stopVideoStream = async () => {
	if (cameraStream) {
		for (const track of cameraStream.getTracks()) {
			track.stop();
		}
	}
	cameraStream = null;
};
// Capture the current video frame into the hidden canvas and return it as a
// PNG data URL (attached to the prompt when the camera is active).
// Returns undefined when either element is missing.
const takeScreenshot = () => {
	const video = document.getElementById('camera-feed');
	const canvas = document.getElementById('camera-canvas');

	// Guard both elements; previously only the canvas was checked and a
	// missing video element would throw on .videoWidth below.
	if (!video || !canvas) {
		return;
	}

	const context = canvas.getContext('2d');

	// Match the canvas to the video's intrinsic dimensions.
	canvas.width = video.videoWidth;
	canvas.height = video.videoHeight;

	// Draw the current frame from the video onto the canvas.
	context.drawImage(video, 0, 0, video.videoWidth, video.videoHeight);

	// (Debug logging of the full base64 payload removed — it could be megabytes.)
	return canvas.toDataURL('image/png');
};
// Tear down the active video stream and hide the camera view.
const stopCamera = async () => {
	await stopVideoStream();
	camera = false;
};
2024-06-07 06:29:08 +00:00
const MIN_DECIBELS = -45;
const VISUALIZER_BUFFER_LENGTH = 300;
// Function to calculate the RMS level from time domain data
const calculateRMS = (data: Uint8Array) => {
let sumSquares = 0;
for (let i = 0; i < data.length; i++) {
const normalizedValue = (data[i] - 128) / 128; // Normalize the data
sumSquares += normalizedValue * normalizedValue;
}
return Math.sqrt(sumSquares / data.length);
};
const analyseAudio = (stream) => {
const audioContext = new AudioContext();
const audioStreamSource = audioContext.createMediaStreamSource(stream);
const analyser = audioContext.createAnalyser();
analyser.minDecibels = MIN_DECIBELS;
audioStreamSource.connect(analyser);
const bufferLength = analyser.frequencyBinCount;
const domainData = new Uint8Array(bufferLength);
const timeDomainData = new Uint8Array(analyser.fftSize);
let lastSoundTime = Date.now();
2024-06-07 06:36:47 +00:00
hasStartedSpeaking = false;
2024-06-07 06:29:08 +00:00
const detectSound = () => {
const processFrame = () => {
2024-06-07 06:42:31 +00:00
if (!mediaRecorder || !$showCallOverlay) {
return;
}
2024-06-13 08:28:15 +00:00
2024-06-07 06:29:08 +00:00
analyser.getByteTimeDomainData(timeDomainData);
analyser.getByteFrequencyData(domainData);
// Calculate RMS level from time domain data
rmsLevel = calculateRMS(timeDomainData);
// Check if initial speech/noise has started
const hasSound = domainData.some((value) => value > 0);
if (hasSound) {
hasStartedSpeaking = true;
lastSoundTime = Date.now();
2024-06-13 08:28:15 +00:00
// BIG RED TEXT
console.log('%c%s', 'color: red; font-size: 20px;', '🔊 Sound detected');
stopAllAudio();
2024-06-07 06:29:08 +00:00
}
// Start silence detection only after initial speech/noise has been detected
if (hasStartedSpeaking) {
if (Date.now() - lastSoundTime > 2000) {
confirmed = true;
if (mediaRecorder) {
mediaRecorder.stop();
}
}
}
window.requestAnimationFrame(processFrame);
};
window.requestAnimationFrame(processFrame);
};
2024-06-07 05:30:19 +00:00
2024-06-07 06:29:08 +00:00
detectSound();
};
2024-06-13 08:28:15 +00:00
// Send the recorded utterance to the speech-to-text endpoint and, when any
// text comes back, submit it as a prompt.
const transcribeHandler = async (audioBlob) => {
	await tick();

	const file = blobToFile(audioBlob, 'recording.wav');
	const res = await transcribeAudio(localStorage.token, file).catch((error) => {
		toast.error(error);
		return null;
	});

	if (!res) {
		return;
	}

	console.log(res.text);
	if (res.text !== '') {
		const _responses = await submitPrompt(res.text, { _raw: true });
		console.log(_responses);
	}
};
const stopAllAudio = async () => {
interrupted = true;
if (chatStreaming) {
stopResponse();
}
2024-06-07 07:27:05 +00:00
if (currentUtterance) {
speechSynthesis.cancel();
currentUtterance = null;
}
2024-06-07 07:57:53 +00:00
2024-06-13 08:28:15 +00:00
await tick();
2024-06-13 09:29:56 +00:00
emojiQueue = [];
2024-06-13 08:28:15 +00:00
audioQueue = [];
await tick();
2024-06-07 07:57:53 +00:00
2024-06-13 08:28:15 +00:00
const audioElement = document.getElementById('audioElement');
2024-06-13 04:18:53 +00:00
if (audioElement) {
audioElement.pause();
audioElement.currentTime = 0;
}
2024-06-13 08:28:15 +00:00
2024-06-07 07:27:05 +00:00
assistantSpeaking = false;
};
2024-06-13 08:28:15 +00:00
// Speak `content` with the browser's Web Speech API (fallback used when no
// TTS engine is configured). Resolves once the utterance has finished, or
// immediately when the overlay is hidden.
const speakSpeechSynthesisHandler = (content) => {
	if (!$showCallOverlay) {
		return Promise.resolve();
	}

	return new Promise((resolve) => {
		// Voices load asynchronously in some browsers; poll until available.
		const getVoicesLoop = setInterval(async () => {
			const voices = await speechSynthesis.getVoices();
			if (voices.length === 0) {
				return;
			}
			clearInterval(getVoicesLoop);

			const preferredVoiceURI = $settings?.audio?.tts?.voice ?? $config?.audio?.tts?.voice;
			const voice = voices?.filter((v) => v.voiceURI === preferredVoiceURI)?.at(0) ?? undefined;

			currentUtterance = new SpeechSynthesisUtterance(content);
			if (voice) {
				currentUtterance.voice = voice;
			}

			speechSynthesis.speak(currentUtterance);

			currentUtterance.onend = async (e) => {
				// Small gap between utterances before resolving.
				await new Promise((r) => setTimeout(r, 100));
				resolve(e);
			};
		}, 100);
	});
};
const playAudio = (audio) => {
2024-06-07 22:00:42 +00:00
if ($showCallOverlay) {
2024-06-13 08:28:15 +00:00
return new Promise((resolve) => {
2024-06-07 22:00:42 +00:00
const audioElement = document.getElementById('audioElement');
2024-06-07 07:57:53 +00:00
2024-06-13 04:18:53 +00:00
if (audioElement) {
2024-06-13 08:28:15 +00:00
audioElement.src = audio.src;
2024-06-13 04:18:53 +00:00
audioElement.muted = true;
2024-06-08 10:27:56 +00:00
2024-06-13 04:18:53 +00:00
audioElement
.play()
.then(() => {
audioElement.muted = false;
})
.catch((error) => {
2024-06-13 08:28:15 +00:00
console.error(error);
2024-06-13 04:18:53 +00:00
});
2024-06-07 07:57:53 +00:00
2024-06-13 04:18:53 +00:00
audioElement.onended = async (e) => {
2024-06-13 08:28:15 +00:00
await new Promise((r) => setTimeout(r, 100));
resolve(e);
2024-06-13 04:18:53 +00:00
};
}
2024-06-07 22:00:42 +00:00
});
} else {
return Promise.resolve();
}
2024-06-07 07:27:05 +00:00
};
2024-06-13 08:28:15 +00:00
const playAudioHandler = async () => {
console.log('playAudioHandler', audioQueue, assistantSpeaking, audioQueue.length > 0);
if (!assistantSpeaking && !interrupted && audioQueue.length > 0) {
assistantSpeaking = true;
2024-06-13 09:29:56 +00:00
if ($settings?.showEmojiInCall ?? false) {
if (emojiQueue.length > 0) {
emoji = emojiQueue.shift();
emojiQueue = emojiQueue;
}
}
2024-06-13 08:28:15 +00:00
const audioToPlay = audioQueue.shift(); // Shift the audio out from queue before playing.
audioQueue = audioQueue;
await playAudio(audioToPlay);
2024-06-07 07:27:05 +00:00
assistantSpeaking = false;
}
};
2024-06-13 08:28:15 +00:00
const setContentAudio = async (content, idx) => {
if (assistantSentenceAudios[idx] === undefined) {
2024-06-13 09:29:56 +00:00
// Wait for the previous audio to be loaded
if (idx > 0) {
await new Promise((resolve) => {
const check = setInterval(() => {
if (
assistantSentenceAudios[idx - 1] !== undefined &&
assistantSentenceAudios[idx - 1] !== null
) {
clearInterval(check);
resolve();
}
}, 100);
});
}
2024-06-13 08:28:15 +00:00
assistantSentenceAudios[idx] = null;
2024-06-13 09:29:56 +00:00
if ($settings?.showEmojiInCall ?? false) {
const sentenceEmoji = await generateEmoji(localStorage.token, modelId, content);
if (sentenceEmoji) {
// Big red text with content and emoji
console.log('%c%s', 'color: blue; font-size: 10px;', `${sentenceEmoji}: ${content}`);
if (/\p{Extended_Pictographic}/u.test(sentenceEmoji)) {
emojiQueue.push(sentenceEmoji.match(/\p{Extended_Pictographic}/gu)[0]);
emojiQueue = emojiQueue;
}
}
await tick();
}
2024-06-13 08:28:15 +00:00
const res = await synthesizeOpenAISpeech(
localStorage.token,
$settings?.audio?.tts?.voice ?? $config?.audio?.tts?.voice,
content
).catch((error) => {
toast.error(error);
assistantSpeaking = false;
return null;
});
2024-06-13 04:18:53 +00:00
if (res) {
2024-06-13 08:28:15 +00:00
const blob = await res.blob();
const blobUrl = URL.createObjectURL(blob);
const audio = new Audio(blobUrl);
assistantSentenceAudios[idx] = audio;
2024-06-13 09:29:56 +00:00
console.log('%c%s', 'color: red; font-size: 20px;', content);
2024-06-13 08:28:15 +00:00
audioQueue.push(audio);
audioQueue = audioQueue;
2024-06-13 04:18:53 +00:00
}
}
2024-06-13 08:28:15 +00:00
};
2024-06-13 04:18:53 +00:00
2024-06-13 08:28:15 +00:00
const stopRecordingCallback = async (_continue = true) => {
if ($showCallOverlay) {
2024-06-13 09:29:56 +00:00
console.log('%c%s', 'color: red; font-size: 20px;', '🚨 stopRecordingCallback 🚨');
2024-06-13 08:28:15 +00:00
// deep copy the audioChunks array
const _audioChunks = audioChunks.slice(0);
2024-06-07 07:27:05 +00:00
2024-06-13 08:28:15 +00:00
audioChunks = [];
mediaRecorder = false;
2024-06-13 04:18:53 +00:00
2024-06-13 08:28:15 +00:00
if (_continue) {
startRecording();
2024-06-07 07:04:47 +00:00
}
2024-06-07 06:29:08 +00:00
if (confirmed) {
loading = true;
2024-06-13 04:18:53 +00:00
emoji = null;
2024-06-07 06:29:08 +00:00
2024-06-07 21:08:04 +00:00
if (cameraStream) {
const imageUrl = takeScreenshot();
files = [
{
type: 'image',
url: imageUrl
}
];
}
2024-06-13 08:28:15 +00:00
const audioBlob = new Blob(_audioChunks, { type: 'audio/wav' });
2024-06-07 06:29:08 +00:00
await transcribeHandler(audioBlob);
confirmed = false;
loading = false;
}
2024-06-07 06:42:31 +00:00
} else {
audioChunks = [];
mediaRecorder = false;
2024-06-07 06:29:08 +00:00
}
};
const startRecording = async () => {
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
mediaRecorder = new MediaRecorder(stream);
mediaRecorder.onstart = () => {
console.log('Recording started');
audioChunks = [];
analyseAudio(stream);
};
2024-06-07 06:36:47 +00:00
mediaRecorder.ondataavailable = (event) => {
if (hasStartedSpeaking) {
audioChunks.push(event.data);
}
};
2024-06-07 06:29:08 +00:00
mediaRecorder.onstop = async () => {
console.log('Recording stopped');
2024-06-07 06:42:31 +00:00
await stopRecordingCallback();
2024-06-07 06:29:08 +00:00
};
mediaRecorder.start();
};
2024-06-13 09:29:56 +00:00
const resetAssistantMessage = async () => {
interrupted = false;
assistantMessage = '';
assistantSentenceIdx = -1;
assistantSentenceAudios = {}; // Reset audio tracking
audioQueue = []; // Clear the audio queue
audioQueue = audioQueue;
emoji = null;
emojiQueue = [];
emojiQueue = emojiQueue;
};
$: (async () => {
if ($showCallOverlay) {
await resetAssistantMessage();
await tick();
startRecording();
} else {
stopCamera();
stopAllAudio();
stopRecordingCallback(false);
}
})();
2024-06-13 04:18:53 +00:00
2024-06-13 08:28:15 +00:00
$: {
if (audioQueue.length > 0 && !assistantSpeaking) {
playAudioHandler();
}
}
2024-06-13 04:18:53 +00:00
onMount(() => {
eventTarget.addEventListener('chat:start', async (e) => {
2024-06-13 09:29:56 +00:00
if ($showCallOverlay) {
console.log('Chat start event:', e);
await resetAssistantMessage();
await tick();
chatStreaming = true;
}
2024-06-13 04:18:53 +00:00
});
eventTarget.addEventListener('chat', async (e) => {
2024-06-13 09:29:56 +00:00
if ($showCallOverlay) {
const { content } = e.detail;
assistantMessage += content;
await tick();
2024-06-13 08:28:15 +00:00
2024-06-13 09:29:56 +00:00
if (!interrupted) {
if ($config.audio.tts.engine !== '') {
assistantSentenceIdx = assistantSentences.length - 2;
2024-06-13 08:28:15 +00:00
2024-06-13 09:29:56 +00:00
if (assistantSentenceIdx >= 0 && !assistantSentenceAudios[assistantSentenceIdx]) {
await tick();
setContentAudio(assistantSentences[assistantSentenceIdx], assistantSentenceIdx);
}
2024-06-13 08:28:15 +00:00
}
}
2024-06-13 04:18:53 +00:00
2024-06-13 09:29:56 +00:00
chatStreaming = true;
}
2024-06-13 04:18:53 +00:00
});
eventTarget.addEventListener('chat:finish', async (e) => {
2024-06-13 09:29:56 +00:00
if ($showCallOverlay) {
chatStreaming = false;
loading = false;
2024-06-13 08:28:15 +00:00
2024-06-13 09:29:56 +00:00
console.log('Chat finish event:', e);
await tick();
2024-06-13 08:28:15 +00:00
2024-06-13 09:29:56 +00:00
if (!interrupted) {
if ($config.audio.tts.engine !== '') {
for (const [idx, sentence] of assistantSentences.entries()) {
if (!assistantSentenceAudios[idx]) {
await tick();
setContentAudio(sentence, idx);
}
2024-06-13 08:28:15 +00:00
}
2024-06-13 09:29:56 +00:00
} else {
if ($settings?.showEmojiInCall ?? false) {
const res = await generateEmoji(localStorage.token, modelId, assistantMessage);
if (res) {
console.log(res);
if (/\p{Extended_Pictographic}/u.test(res)) {
emoji = res.match(/\p{Extended_Pictographic}/gu)[0];
}
}
}
speakSpeechSynthesisHandler(assistantMessage);
2024-06-13 08:28:15 +00:00
}
}
}
2024-06-13 04:18:53 +00:00
});
});
2024-06-07 06:29:08 +00:00
</script>
<!-- Hidden shared audio element used by playAudio() / stopAllAudio() -->
<audio id="audioElement" src="" style="display: none;" />

{#if $showCallOverlay}
	<div class=" absolute w-full h-screen max-h-[100dvh] flex z-[999] overflow-hidden">
		<div
			class="absolute w-full h-screen max-h-[100dvh] bg-white text-gray-700 dark:bg-black dark:text-gray-300 flex justify-center"
		>
			<div class="max-w-lg w-full h-screen max-h-[100dvh] flex flex-col justify-between p-3 md:p-6">
				<!-- Compact status row (spinner / emoji / pulsing circle) shown above the camera feed -->
				{#if camera}
					<div class="flex justify-center items-center w-full h-20 min-h-20">
						{#if loading}
							<svg
								class="size-12 text-gray-900 dark:text-gray-400"
								viewBox="0 0 24 24"
								fill="currentColor"
								xmlns="http://www.w3.org/2000/svg"
								><style>
									.spinner_qM83 {
										animation: spinner_8HQG 1.05s infinite;
									}
									.spinner_oXPr {
										animation-delay: 0.1s;
									}
									.spinner_ZTLf {
										animation-delay: 0.2s;
									}
									@keyframes spinner_8HQG {
										0%,
										57.14% {
											animation-timing-function: cubic-bezier(0.33, 0.66, 0.66, 1);
											transform: translate(0);
										}
										28.57% {
											animation-timing-function: cubic-bezier(0.33, 0, 0.66, 0.33);
											transform: translateY(-6px);
										}
										100% {
											transform: translate(0);
										}
									}
								</style><circle class="spinner_qM83" cx="4" cy="12" r="3" /><circle
									class="spinner_qM83 spinner_oXPr"
									cx="12"
									cy="12"
									r="3"
								/><circle class="spinner_qM83 spinner_ZTLf" cx="20" cy="12" r="3" /></svg
							>
						{:else if emoji}
							<!-- Emoji scales with the live mic RMS level -->
							<div
								class=" transition-all rounded-full"
								style="font-size:{rmsLevel * 100 > 4
									? '4.5'
									: rmsLevel * 100 > 2
										? '4.25'
										: rmsLevel * 100 > 1
											? '3.75'
											: '3.5'}rem;width: 100%; text-align:center;"
							>
								{emoji}
							</div>
						{:else}
							<!-- Pulsing circle, size driven by mic RMS level -->
							<div
								class=" {rmsLevel * 100 > 4
									? ' size-[4.5rem]'
									: rmsLevel * 100 > 2
										? ' size-16'
										: rmsLevel * 100 > 1
											? 'size-14'
											: 'size-12'} transition-all bg-black dark:bg-white rounded-full"
							/>
						{/if}
					</div>
				{/if}

				<div class="flex justify-center items-center flex-1 h-full w-full max-h-full">
					{#if !camera}
						{#if loading}
							<svg
								class="size-44 text-gray-900 dark:text-gray-400"
								viewBox="0 0 24 24"
								fill="currentColor"
								xmlns="http://www.w3.org/2000/svg"
								><style>
									.spinner_qM83 {
										animation: spinner_8HQG 1.05s infinite;
									}
									.spinner_oXPr {
										animation-delay: 0.1s;
									}
									.spinner_ZTLf {
										animation-delay: 0.2s;
									}
									@keyframes spinner_8HQG {
										0%,
										57.14% {
											animation-timing-function: cubic-bezier(0.33, 0.66, 0.66, 1);
											transform: translate(0);
										}
										28.57% {
											animation-timing-function: cubic-bezier(0.33, 0, 0.66, 0.33);
											transform: translateY(-6px);
										}
										100% {
											transform: translate(0);
										}
									}
								</style><circle class="spinner_qM83" cx="4" cy="12" r="3" /><circle
									class="spinner_qM83 spinner_oXPr"
									cx="12"
									cy="12"
									r="3"
								/><circle class="spinner_qM83 spinner_ZTLf" cx="20" cy="12" r="3" /></svg
							>
						{:else if emoji}
							<div
								class=" transition-all rounded-full"
								style="font-size:{rmsLevel * 100 > 4
									? '13'
									: rmsLevel * 100 > 2
										? '12'
										: rmsLevel * 100 > 1
											? '11.5'
											: '11'}rem;width:100%;text-align:center;"
							>
								{emoji}
							</div>
						{:else}
							<div
								class=" {rmsLevel * 100 > 4
									? ' size-52'
									: rmsLevel * 100 > 2
										? 'size-48'
										: rmsLevel * 100 > 1
											? 'size-[11.5rem]'
											: 'size-44'} transition-all bg-black dark:bg-white rounded-full"
							/>
						{/if}
					{:else}
						<!-- Live camera / screen-share preview -->
						<div class="relative flex video-container w-full max-h-full pt-2 pb-4 md:py-6 px-2 h-full">
							<video
								id="camera-feed"
								autoplay
								class="rounded-2xl h-full min-w-full object-cover object-center"
								playsinline
							/>

							<!-- Hidden canvas used by takeScreenshot() -->
							<canvas id="camera-canvas" style="display:none;" />

							<div class=" absolute top-4 md:top-8 left-4">
								<button
									type="button"
									class="p-1.5 text-white cursor-pointer backdrop-blur-xl bg-black/10 rounded-full"
									on:click={() => {
										stopCamera();
									}}
								>
									<svg
										xmlns="http://www.w3.org/2000/svg"
										viewBox="0 0 16 16"
										fill="currentColor"
										class="size-6"
									>
										<path
											d="M5.28 4.22a.75.75 0 0 0-1.06 1.06L6.94 8l-2.72 2.72a.75.75 0 1 0 1.06 1.06L8 9.06l2.72 2.72a.75.75 0 1 0 1.06-1.06L9.06 8l2.72-2.72a.75.75 0 0 0-1.06-1.06L8 6.94 5.28 4.22Z"
										/>
									</svg>
								</button>
							</div>
						</div>
					{/if}
				</div>

				<!-- Bottom control bar: camera toggle / status label / close -->
				<div class="flex justify-between items-center pb-2 w-full">
					<div>
						{#if camera}
							<VideoInputMenu
								devices={videoInputDevices}
								on:change={async (e) => {
									console.log(e.detail);
									selectedVideoInputDeviceId = e.detail;
									await stopVideoStream();
									await startVideoStream();
								}}
							>
								<button class=" p-3 rounded-full bg-gray-50 dark:bg-gray-900" type="button">
									<svg
										xmlns="http://www.w3.org/2000/svg"
										viewBox="0 0 20 20"
										fill="currentColor"
										class="size-5"
									>
										<path
											fill-rule="evenodd"
											d="M15.312 11.424a5.5 5.5 0 0 1-9.201 2.466l-.312-.311h2.433a.75.75 0 0 0 0-1.5H3.989a.75.75 0 0 0-.75.75v4.242a.75.75 0 0 0 1.5 0v-2.43l.31.31a7 7 0 0 0 11.712-3.138.75.75 0 0 0-1.449-.39Zm1.23-3.723a.75.75 0 0 0 .219-.53V2.929a.75.75 0 0 0-1.5 0V5.36l-.31-.31A7 7 0 0 0 3.239 8.188a.75.75 0 1 0 1.448.389A5.5 5.5 0 0 1 13.89 6.11l.311.31h-2.432a.75.75 0 0 0 0 1.5h4.243a.75.75 0 0 0 .53-.219Z"
											clip-rule="evenodd"
										/>
									</svg>
								</button>
							</VideoInputMenu>
						{:else}
							<Tooltip content={$i18n.t('Camera')}>
								<button
									class=" p-3 rounded-full bg-gray-50 dark:bg-gray-900"
									type="button"
									on:click={async () => {
										await navigator.mediaDevices.getUserMedia({ video: true });
										startCamera();
									}}
								>
									<svg
										xmlns="http://www.w3.org/2000/svg"
										fill="none"
										viewBox="0 0 24 24"
										stroke-width="1.5"
										stroke="currentColor"
										class="size-5"
									>
										<path
											stroke-linecap="round"
											stroke-linejoin="round"
											d="M6.827 6.175A2.31 2.31 0 0 1 5.186 7.23c-.38.054-.757.112-1.134.175C2.999 7.58 2.25 8.507 2.25 9.574V18a2.25 2.25 0 0 0 2.25 2.25h15A2.25 2.25 0 0 0 21.75 18V9.574c0-1.067-.75-1.994-1.802-2.169a47.865 47.865 0 0 0-1.134-.175 2.31 2.31 0 0 1-1.64-1.055l-.822-1.316a2.192 2.192 0 0 0-1.736-1.039 48.774 48.774 0 0 0-5.232 0 2.192 2.192 0 0 0-1.736 1.039l-.821 1.316Z"
										/>
										<path
											stroke-linecap="round"
											stroke-linejoin="round"
											d="M16.5 12.75a4.5 4.5 0 1 1-9 0 4.5 4.5 0 0 1 9 0ZM18.75 10.5h.008v.008h-.008V10.5Z"
										/>
									</svg>
								</button>
							</Tooltip>
						{/if}
					</div>

					<div>
						<button type="button">
							<div class=" line-clamp-1 text-sm font-medium">
								{#if loading}
									{$i18n.t('Thinking...')}
								{:else}
									{$i18n.t('Listening...')}
								{/if}
							</div>
						</button>
					</div>

					<div>
						<button
							class=" p-3 rounded-full bg-gray-50 dark:bg-gray-900"
							on:click={async () => {
								showCallOverlay.set(false);
							}}
							type="button"
						>
							<svg
								xmlns="http://www.w3.org/2000/svg"
								viewBox="0 0 20 20"
								fill="currentColor"
								class="size-5"
							>
								<path
									d="M6.28 5.22a.75.75 0 0 0-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 1 0 1.06 1.06L10 11.06l3.72 3.72a.75.75 0 1 0 1.06-1.06L11.06 10l3.72-3.72a.75.75 0 0 0-1.06-1.06L10 8.94 6.28 5.22Z"
								/>
							</svg>
						</button>
					</div>
				</div>
			</div>
		</div>
	</div>
{/if}