12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003 |
- <script lang="ts">
- import { config, models, settings, showCallOverlay } from '$lib/stores';
- import { onMount, tick, getContext, onDestroy, createEventDispatcher } from 'svelte';
- import { DropdownMenu } from 'bits-ui';
- import Dropdown from '$lib/components/common/Dropdown.svelte';
- import { flyAndScale } from '$lib/utils/transitions';
- const dispatch = createEventDispatcher();
- import { blobToFile } from '$lib/utils';
- import { generateEmoji } from '$lib/apis';
- import { synthesizeOpenAISpeech, transcribeAudio } from '$lib/apis/audio';
- import { toast } from 'svelte-sonner';
- import Tooltip from '$lib/components/common/Tooltip.svelte';
- import VideoInputMenu from './CallOverlay/VideoInputMenu.svelte';
- const i18n = getContext('i18n');
- // --- Component props ---
- export let eventTarget: EventTarget; // source of 'chat:start' / 'chat' / 'chat:finish' events (wired in onMount)
- export let submitPrompt: Function; // submits a transcript as a user prompt
- export let stopResponse: Function; // aborts an in-flight assistant response
- export let files; // chat attachments; receives camera snapshots on voice confirm
- export let chatId;
- export let modelId;
- // --- Component state ---
- let wakeLock = null; // Screen Wake Lock sentinel; keeps the display on during a call
- let model = null; // model record looked up from $models by modelId
- let loading = false; // true while transcription/submission is in flight
- let confirmed = false; // silence detected -> current utterance is final
- let interrupted = false;
- let assistantSpeaking = false; // assistant audio currently playing
- let emoji = null; // emoji displayed for the sentence currently playing
- let camera = false; // whether the camera preview UI is visible
- let cameraStream = null; // active camera/screen MediaStream, or null
- let chatStreaming = false; // assistant response currently streaming in
- let rmsLevel = 0; // mic input level; drives the pulsing avatar size in the template
- let hasStartedSpeaking = false; // speech detected since recording (re)started
- let mediaRecorder; // active MediaRecorder, or false when stopped
- let audioStream = null; // microphone MediaStream, or null
- let audioChunks = []; // buffered recorder chunks for the current utterance
- let videoInputDevices = []; // camera devices (+ synthetic 'screen' entry)
- let selectedVideoInputDeviceId = null; // deviceId chosen in the video input menu
- const getVideoInputDevices = async () => {
- const devices = await navigator.mediaDevices.enumerateDevices();
- videoInputDevices = devices.filter((device) => device.kind === 'videoinput');
- if (navigator.mediaDevices.getDisplayMedia) {
- videoInputDevices = [
- ...videoInputDevices,
- {
- deviceId: 'screen',
- label: 'Screen Share'
- }
- ];
- }
- console.log(videoInputDevices);
- if (selectedVideoInputDeviceId === null && videoInputDevices.length > 0) {
- selectedVideoInputDeviceId = videoInputDevices[0].deviceId;
- }
- };
- const startCamera = async () => {
- await getVideoInputDevices();
- if (cameraStream === null) {
- camera = true;
- await tick();
- try {
- await startVideoStream();
- } catch (err) {
- console.error('Error accessing webcam: ', err);
- }
- }
- };
- // Attach a live media stream to the '#camera-feed' element.
- // 'screen' is the synthetic device id meaning screen capture; any other id
- // is treated as a webcam. Silently does nothing if the element is not in
- // the DOM yet.
- const startVideoStream = async () => {
- const video = document.getElementById('camera-feed');
- if (video) {
- if (selectedVideoInputDeviceId === 'screen') {
- // Screen share: capture the display instead of a camera.
- cameraStream = await navigator.mediaDevices.getDisplayMedia({
- video: {
- cursor: 'always'
- },
- audio: false
- });
- } else {
- // Webcam: pin to the selected device when one is chosen,
- // otherwise let the browser pick a default.
- cameraStream = await navigator.mediaDevices.getUserMedia({
- video: {
- deviceId: selectedVideoInputDeviceId ? { exact: selectedVideoInputDeviceId } : undefined
- }
- });
- }
- if (cameraStream) {
- await getVideoInputDevices(); // refresh: device labels are only exposed after permission is granted
- video.srcObject = cameraStream;
- await video.play();
- }
- }
- };
- const stopVideoStream = async () => {
- if (cameraStream) {
- const tracks = cameraStream.getTracks();
- tracks.forEach((track) => track.stop());
- }
- cameraStream = null;
- };
- const takeScreenshot = () => {
- const video = document.getElementById('camera-feed');
- const canvas = document.getElementById('camera-canvas');
- if (!canvas) {
- return;
- }
- const context = canvas.getContext('2d');
- // Make the canvas match the video dimensions
- canvas.width = video.videoWidth;
- canvas.height = video.videoHeight;
- // Draw the image from the video onto the canvas
- context.drawImage(video, 0, 0, video.videoWidth, video.videoHeight);
- // Convert the canvas to a data base64 URL and console log it
- const dataURL = canvas.toDataURL('image/png');
- console.log(dataURL);
- return dataURL;
- };
- const stopCamera = async () => {
- await stopVideoStream();
- camera = false;
- };
- // Noise floor for the analyser: signals quieter than this are treated as silence.
- const MIN_DECIBELS = -55;
- // NOTE(review): not referenced anywhere in this file — possibly dead, or
- // consumed by code outside this view. Confirm before removing.
- const VISUALIZER_BUFFER_LENGTH = 300;
- const transcribeHandler = async (audioBlob) => {
- // Create a blob from the audio chunks
- await tick();
- const file = blobToFile(audioBlob, 'recording.wav');
- const res = await transcribeAudio(localStorage.token, file).catch((error) => {
- toast.error(error);
- return null;
- });
- if (res) {
- console.log(res.text);
- if (res.text !== '') {
- const _responses = await submitPrompt(res.text, { _raw: true });
- console.log(_responses);
- }
- }
- };
- // Invoked when the MediaRecorder stops (silence detected, overlay closed,
- // or teardown). While the overlay is open it optionally restarts recording
- // and — if silence confirmed the utterance — transcribes the captured
- // chunks. With the overlay closed it just drops the buffer and frees the mic.
- // `_continue`: whether to immediately start a new recording session.
- const stopRecordingCallback = async (_continue = true) => {
- if ($showCallOverlay) {
- console.log('%c%s', 'color: red; font-size: 20px;', '🚨 stopRecordingCallback 🚨');
- // deep copy the audioChunks array
- const _audioChunks = audioChunks.slice(0);
- audioChunks = [];
- mediaRecorder = false;
- if (_continue) {
- // Re-arm the mic right away so the next utterance is captured.
- startRecording();
- }
- if (confirmed) {
- loading = true;
- emoji = null;
- // Attach a snapshot of the camera feed so the model can see what the
- // user is showing.
- if (cameraStream) {
- const imageUrl = takeScreenshot();
- files = [
- {
- type: 'image',
- url: imageUrl
- }
- ];
- }
- const audioBlob = new Blob(_audioChunks, { type: 'audio/wav' });
- await transcribeHandler(audioBlob);
- confirmed = false;
- loading = false;
- }
- } else {
- // Overlay closed: discard buffered audio and release the microphone.
- audioChunks = [];
- mediaRecorder = false;
- if (audioStream) {
- const tracks = audioStream.getTracks();
- tracks.forEach((track) => track.stop());
- }
- audioStream = null;
- }
- };
- const startRecording = async () => {
- audioStream = await navigator.mediaDevices.getUserMedia({ audio: true });
- mediaRecorder = new MediaRecorder(audioStream);
- mediaRecorder.onstart = () => {
- console.log('Recording started');
- audioChunks = [];
- analyseAudio(audioStream);
- };
- mediaRecorder.ondataavailable = (event) => {
- if (hasStartedSpeaking) {
- audioChunks.push(event.data);
- }
- };
- mediaRecorder.onstop = (e) => {
- console.log('Recording stopped', e);
- stopRecordingCallback();
- };
- mediaRecorder.start();
- };
- const stopAudioStream = async () => {
- if (audioStream) {
- const tracks = audioStream.getTracks();
- tracks.forEach((track) => track.stop());
- }
- audioStream = null;
- };
- // Function to calculate the RMS level from time domain data
- const calculateRMS = (data: Uint8Array) => {
- let sumSquares = 0;
- for (let i = 0; i < data.length; i++) {
- const normalizedValue = (data[i] - 128) / 128; // Normalize the data
- sumSquares += normalizedValue * normalizedValue;
- }
- return Math.sqrt(sumSquares / data.length);
- };
- // Continuously analyse the mic stream on an animation-frame loop:
- // updates `rmsLevel` for the UI, detects the start of speech (interrupting
- // assistant audio — barge-in), and stops the recorder after ~2s of silence
- // once speech has occurred. Exits when recording ends or the overlay closes.
- const analyseAudio = (stream) => {
- const audioContext = new AudioContext();
- const audioStreamSource = audioContext.createMediaStreamSource(stream);
- const analyser = audioContext.createAnalyser();
- analyser.minDecibels = MIN_DECIBELS;
- audioStreamSource.connect(analyser);
- const bufferLength = analyser.frequencyBinCount;
- const domainData = new Uint8Array(bufferLength);
- const timeDomainData = new Uint8Array(analyser.fftSize);
- let lastSoundTime = Date.now();
- hasStartedSpeaking = false;
- console.log('🔊 Sound detection started', lastSoundTime, hasStartedSpeaking);
- const detectSound = () => {
- const processFrame = () => {
- // Stop the loop once recording has ended or the overlay closed.
- if (!mediaRecorder || !$showCallOverlay) {
- return;
- }
- if (assistantSpeaking && !($settings?.voiceInterruption ?? false)) {
- // Mute the audio if the assistant is speaking
- // (an inverted decibel window makes the analyser report silence,
- // so the user's echo of the TTS can't trigger speech detection).
- analyser.maxDecibels = 0;
- analyser.minDecibels = -1;
- } else {
- analyser.minDecibels = MIN_DECIBELS;
- analyser.maxDecibels = -30;
- }
- analyser.getByteTimeDomainData(timeDomainData);
- analyser.getByteFrequencyData(domainData);
- // Calculate RMS level from time domain data
- rmsLevel = calculateRMS(timeDomainData);
- // Check if initial speech/noise has started
- const hasSound = domainData.some((value) => value > 0);
- if (hasSound) {
- // BIG RED TEXT
- console.log('%c%s', 'color: red; font-size: 20px;', '🔊 Sound detected');
- if (!hasStartedSpeaking) {
- // First sound after silence: barge-in — cut off the assistant.
- hasStartedSpeaking = true;
- stopAllAudio();
- }
- lastSoundTime = Date.now();
- }
- // Start silence detection only after initial speech/noise has been detected
- if (hasStartedSpeaking) {
- if (Date.now() - lastSoundTime > 2000) {
- // ~2s of silence ends the utterance; the recorder's onstop
- // handler then transcribes it (see stopRecordingCallback).
- confirmed = true;
- if (mediaRecorder) {
- console.log('%c%s', 'color: red; font-size: 20px;', '🔇 Silence detected');
- mediaRecorder.stop();
- return;
- }
- }
- }
- window.requestAnimationFrame(processFrame);
- };
- window.requestAnimationFrame(processFrame);
- };
- detectSound();
- };
- // Message ids whose streaming has finished (id -> true).
- let finishedMessages = {};
- // Id of the assistant message currently being monitored/spoken.
- let currentMessageId = null;
- // Active SpeechSynthesisUtterance when using the browser TTS engine.
- let currentUtterance = null;
- const speakSpeechSynthesisHandler = (content) => {
- if ($showCallOverlay) {
- return new Promise((resolve) => {
- let voices = [];
- const getVoicesLoop = setInterval(async () => {
- voices = await speechSynthesis.getVoices();
- if (voices.length > 0) {
- clearInterval(getVoicesLoop);
- const voice =
- voices
- ?.filter(
- (v) => v.voiceURI === ($settings?.audio?.tts?.voice ?? $config?.audio?.tts?.voice)
- )
- ?.at(0) ?? undefined;
- currentUtterance = new SpeechSynthesisUtterance(content);
- currentUtterance.rate = speechRate;
- if (voice) {
- currentUtterance.voice = voice;
- }
- speechSynthesis.speak(currentUtterance);
- currentUtterance.onend = async (e) => {
- await new Promise((r) => setTimeout(r, 200));
- resolve(e);
- };
- }
- }, 100);
- });
- } else {
- return Promise.resolve();
- }
- };
- const playAudio = (audio) => {
- if ($showCallOverlay) {
- return new Promise((resolve) => {
- const audioElement = document.getElementById('audioElement') as HTMLAudioElement;
- if (audioElement) {
- audioElement.src = audio.src;
- audioElement.muted = true;
- audioElement.playbackRate = speechRate;
- audioElement
- .play()
- .then(() => {
- audioElement.muted = false;
- })
- .catch((error) => {
- console.error(error);
- });
- audioElement.onended = async (e) => {
- await new Promise((r) => setTimeout(r, 100));
- resolve(e);
- };
- }
- });
- } else {
- return Promise.resolve();
- }
- };
- const stopAllAudio = async () => {
- assistantSpeaking = false;
- interrupted = true;
- if (chatStreaming) {
- stopResponse();
- }
- if (currentUtterance) {
- speechSynthesis.cancel();
- currentUtterance = null;
- }
- const audioElement = document.getElementById('audioElement');
- if (audioElement) {
- audioElement.muted = true;
- audioElement.pause();
- audioElement.currentTime = 0;
- }
- };
- // Cancels the audio monitor loop of the previous assistant message.
- let audioAbortController = new AbortController();
- // Audio speed control
- let speechRate = 1; // current playback rate applied to both TTS paths
- let showSpeedMenu = false; // bound to the speed Dropdown's visibility
- // Playback-rate choices offered in the speed dropdown.
- const speedOptions = [2, 1.75, 1.5, 1.25, 1, 0.75, 0.5];
- const setSpeedRate = (rate: number) => {
- speechRate = rate;
- showSpeedMenu = false;
- updateAudioSpeed();
- };
- const updateAudioSpeed = () => {
- if (currentUtterance) {
- currentUtterance.rate = speechRate;
- }
- const audioElement = document.getElementById('audioElement') as HTMLAudioElement;
- if (audioElement) {
- audioElement.playbackRate = speechRate;
- }
- };
- // Audio cache map where key is the content and value is the Audio object.
- // (With browser TTS the value is `true`, used as a "ready" marker.)
- const audioCache = new Map();
- // Sentence content -> emoji generated for it (when emoji display is on).
- const emojiCache = new Map();
- // Resolve the playable audio for a sentence, caching by content.
- // Optionally pre-generates an emoji for the sentence. With an external TTS
- // engine the cache holds a blob-backed Audio object; with browser speech
- // synthesis it holds `true` as a "ready" marker.
- const fetchAudio = async (content) => {
- if (!audioCache.has(content)) {
- try {
- // Set the emoji for the content if needed
- if ($settings?.showEmojiInCall ?? false) {
- // NOTE: this local `emoji` deliberately shadows the component-level
- // `emoji`; the UI value is set later from emojiCache during playback.
- const emoji = await generateEmoji(localStorage.token, modelId, content, chatId);
- if (emoji) {
- emojiCache.set(content, emoji);
- }
- }
- if ($config.audio.tts.engine !== '') {
- // External TTS engine: synthesize and cache a blob-backed Audio.
- const res = await synthesizeOpenAISpeech(
- localStorage.token,
- $settings?.audio?.tts?.voice ?? $config?.audio?.tts?.voice,
- content
- ).catch((error) => {
- console.error(error);
- return null;
- });
- if (res) {
- const blob = await res.blob();
- const blobUrl = URL.createObjectURL(blob);
- audioCache.set(content, new Audio(blobUrl));
- }
- } else {
- // Browser TTS needs no prefetch; mark the content as ready.
- audioCache.set(content, true);
- }
- } catch (error) {
- console.error('Error synthesizing speech:', error);
- }
- }
- return audioCache.get(content);
- };
- // Per-message queues of sentence strings awaiting playback (id -> string[]).
- let messages = {};
- // Drain the sentence queue for message `id`, playing each sentence once its
- // audio is available in the cache. Polls every 200ms and runs until the
- // abort signal fires or the message is finished and fully played.
- const monitorAndPlayAudio = async (id, signal) => {
- while (!signal.aborted) {
- if (messages[id] && messages[id].length > 0) {
- // Retrieve the next content string from the queue
- const content = messages[id].shift(); // Dequeues the content for playing
- if (audioCache.has(content)) {
- // If content is available in the cache, play it
- // Set the emoji for the content if available
- if (($settings?.showEmojiInCall ?? false) && emojiCache.has(content)) {
- emoji = emojiCache.get(content);
- } else {
- emoji = null;
- }
- if ($config.audio.tts.engine !== '') {
- try {
- console.log(
- '%c%s',
- 'color: red; font-size: 20px;',
- `Playing audio for content: ${content}`
- );
- const audio = audioCache.get(content);
- await playAudio(audio); // Here ensure that playAudio is indeed correct method to execute
- console.log(`Played audio for content: ${content}`);
- await new Promise((resolve) => setTimeout(resolve, 200)); // Wait before retrying to reduce tight loop
- } catch (error) {
- console.error('Error playing audio:', error);
- }
- } else {
- // Browser TTS path: speak directly, no cached Audio object.
- await speakSpeechSynthesisHandler(content);
- }
- } else {
- // If not available in the cache, push it back to the queue and delay
- messages[id].unshift(content); // Re-queue the content at the start
- console.log(`Audio for "${content}" not yet available in the cache, re-queued...`);
- await new Promise((resolve) => setTimeout(resolve, 200)); // Wait before retrying to reduce tight loop
- }
- } else if (finishedMessages[id] && messages[id] && messages[id].length === 0) {
- // If the message is finished and there are no more messages to process, break the loop
- assistantSpeaking = false;
- break;
- } else {
- // No messages to process, sleep for a bit
- await new Promise((resolve) => setTimeout(resolve, 200));
- }
- }
- console.log(`Audio monitoring and playing stopped for message ID ${id}`);
- };
- // Component setup: acquire a screen wake lock, start listening on the mic,
- // and wire chat streaming events into the audio playback pipeline.
- onMount(async () => {
- const setWakeLock = async () => {
- try {
- wakeLock = await navigator.wakeLock.request('screen');
- } catch (err) {
- // The Wake Lock request has failed - usually system related, such as battery.
- console.log(err);
- }
- if (wakeLock) {
- // Add a listener to release the wake lock when the page is unloaded
- wakeLock.addEventListener('release', () => {
- // the wake lock has been released
- console.log('Wake Lock released');
- });
- }
- };
- if ('wakeLock' in navigator) {
- await setWakeLock();
- // NOTE(review): this document-level listener is never removed and will
- // outlive the component — consider removing it in the teardown below.
- document.addEventListener('visibilitychange', async () => {
- // Re-request the wake lock if the document becomes visible
- if (wakeLock !== null && document.visibilityState === 'visible') {
- await setWakeLock();
- }
- });
- }
- model = $models.find((m) => m.id === modelId);
- startRecording();
- // A new assistant message began streaming: restart the playback monitor.
- const chatStartHandler = async (e) => {
- const { id } = e.detail;
- chatStreaming = true;
- if (currentMessageId !== id) {
- console.log(`Received chat start event for message ID ${id}`);
- currentMessageId = id;
- // Cancel the monitor of any previous message before starting a new one.
- if (audioAbortController) {
- audioAbortController.abort();
- }
- audioAbortController = new AbortController();
- assistantSpeaking = true;
- // Start monitoring and playing audio for the message ID
- monitorAndPlayAudio(id, audioAbortController.signal);
- }
- };
- // A sentence arrived for the current message: queue it and prefetch audio.
- const chatEventHandler = async (e) => {
- const { id, content } = e.detail;
- // "id" here is message id
- // if "id" is not the same as "currentMessageId" then do not process
- // "content" here is a sentence from the assistant,
- // there will be many sentences for the same "id"
- if (currentMessageId === id) {
- console.log(`Received chat event for message ID ${id}: ${content}`);
- try {
- if (messages[id] === undefined) {
- messages[id] = [content];
- } else {
- messages[id].push(content);
- }
- console.log(content);
- // Kick off TTS prefetch; playback is handled by monitorAndPlayAudio.
- fetchAudio(content);
- } catch (error) {
- console.error('Failed to fetch or play audio:', error);
- }
- }
- };
- const chatFinishHandler = async (e) => {
- const { id, content } = e.detail;
- // "content" here is the entire message from the assistant
- finishedMessages[id] = true;
- chatStreaming = false;
- };
- eventTarget.addEventListener('chat:start', chatStartHandler);
- eventTarget.addEventListener('chat', chatEventHandler);
- eventTarget.addEventListener('chat:finish', chatFinishHandler);
- // Teardown: unhook events, then release audio, mic, and camera resources.
- return async () => {
- eventTarget.removeEventListener('chat:start', chatStartHandler);
- eventTarget.removeEventListener('chat', chatEventHandler);
- eventTarget.removeEventListener('chat:finish', chatFinishHandler);
- audioAbortController.abort();
- await tick();
- await stopAllAudio();
- await stopRecordingCallback(false);
- await stopCamera();
- };
- });
- // Final safety net (mirrors the cleanup returned from onMount) in case the
- // component is destroyed without that teardown running.
- onDestroy(async () => {
- await stopAllAudio();
- await stopRecordingCallback(false);
- await stopCamera();
- });
- </script>
- {#if $showCallOverlay}
- <div class="max-w-lg w-full h-full max-h-[100dvh] flex flex-col justify-between p-3 md:p-6">
- {#if camera}
- <button
- type="button"
- class="flex justify-center items-center w-full h-20 min-h-20"
- on:click={() => {
- if (assistantSpeaking) {
- stopAllAudio();
- }
- }}
- >
- {#if emoji}
- <div
- class=" transition-all rounded-full"
- style="font-size:{rmsLevel * 100 > 4
- ? '4.5'
- : rmsLevel * 100 > 2
- ? '4.25'
- : rmsLevel * 100 > 1
- ? '3.75'
- : '3.5'}rem;width: 100%; text-align:center;"
- >
- {emoji}
- </div>
- {:else if loading || assistantSpeaking}
- <svg
- class="size-12 text-gray-900 dark:text-gray-400"
- viewBox="0 0 24 24"
- fill="currentColor"
- xmlns="http://www.w3.org/2000/svg"
- ><style>
- .spinner_qM83 {
- animation: spinner_8HQG 1.05s infinite;
- }
- .spinner_oXPr {
- animation-delay: 0.1s;
- }
- .spinner_ZTLf {
- animation-delay: 0.2s;
- }
- @keyframes spinner_8HQG {
- 0%,
- 57.14% {
- animation-timing-function: cubic-bezier(0.33, 0.66, 0.66, 1);
- transform: translate(0);
- }
- 28.57% {
- animation-timing-function: cubic-bezier(0.33, 0, 0.66, 0.33);
- transform: translateY(-6px);
- }
- 100% {
- transform: translate(0);
- }
- }
- </style><circle class="spinner_qM83" cx="4" cy="12" r="3" /><circle
- class="spinner_qM83 spinner_oXPr"
- cx="12"
- cy="12"
- r="3"
- /><circle class="spinner_qM83 spinner_ZTLf" cx="20" cy="12" r="3" /></svg
- >
- {:else}
- <div
- class=" {rmsLevel * 100 > 4
- ? ' size-[4.5rem]'
- : rmsLevel * 100 > 2
- ? ' size-16'
- : rmsLevel * 100 > 1
- ? 'size-14'
- : 'size-12'} transition-all rounded-full {(model?.info?.meta
- ?.profile_image_url ?? '/static/favicon.png') !== '/static/favicon.png'
- ? ' bg-cover bg-center bg-no-repeat'
- : 'bg-black dark:bg-white'} bg-black dark:bg-white"
- style={(model?.info?.meta?.profile_image_url ?? '/static/favicon.png') !==
- '/static/favicon.png'
- ? `background-image: url('${model?.info?.meta?.profile_image_url}');`
- : ''}
- />
- {/if}
- <!-- navbar -->
- </button>
- {/if}
- <div class="flex justify-center items-center flex-1 h-full w-full max-h-full">
- {#if !camera}
- <button
- type="button"
- on:click={() => {
- if (assistantSpeaking) {
- stopAllAudio();
- }
- }}
- >
- {#if emoji}
- <div
- class=" transition-all rounded-full"
- style="font-size:{rmsLevel * 100 > 4
- ? '13'
- : rmsLevel * 100 > 2
- ? '12'
- : rmsLevel * 100 > 1
- ? '11.5'
- : '11'}rem;width:100%;text-align:center;"
- >
- {emoji}
- </div>
- {:else if loading || assistantSpeaking}
- <svg
- class="size-44 text-gray-900 dark:text-gray-400"
- viewBox="0 0 24 24"
- fill="currentColor"
- xmlns="http://www.w3.org/2000/svg"
- ><style>
- .spinner_qM83 {
- animation: spinner_8HQG 1.05s infinite;
- }
- .spinner_oXPr {
- animation-delay: 0.1s;
- }
- .spinner_ZTLf {
- animation-delay: 0.2s;
- }
- @keyframes spinner_8HQG {
- 0%,
- 57.14% {
- animation-timing-function: cubic-bezier(0.33, 0.66, 0.66, 1);
- transform: translate(0);
- }
- 28.57% {
- animation-timing-function: cubic-bezier(0.33, 0, 0.66, 0.33);
- transform: translateY(-6px);
- }
- 100% {
- transform: translate(0);
- }
- }
- </style><circle class="spinner_qM83" cx="4" cy="12" r="3" /><circle
- class="spinner_qM83 spinner_oXPr"
- cx="12"
- cy="12"
- r="3"
- /><circle class="spinner_qM83 spinner_ZTLf" cx="20" cy="12" r="3" /></svg
- >
- {:else}
- <div
- class=" {rmsLevel * 100 > 4
- ? ' size-52'
- : rmsLevel * 100 > 2
- ? 'size-48'
- : rmsLevel * 100 > 1
- ? 'size-44'
- : 'size-40'} transition-all rounded-full {(model?.info?.meta
- ?.profile_image_url ?? '/static/favicon.png') !== '/static/favicon.png'
- ? ' bg-cover bg-center bg-no-repeat'
- : 'bg-black dark:bg-white'} "
- style={(model?.info?.meta?.profile_image_url ?? '/static/favicon.png') !==
- '/static/favicon.png'
- ? `background-image: url('${model?.info?.meta?.profile_image_url}');`
- : ''}
- />
- {/if}
- </button>
- {:else}
- <div class="relative flex video-container w-full max-h-full pt-2 pb-4 md:py-6 px-2 h-full">
- <video
- id="camera-feed"
- autoplay
- class="rounded-2xl h-full min-w-full object-cover object-center"
- playsinline
- />
- <canvas id="camera-canvas" style="display:none;" />
- <div class=" absolute top-4 md:top-8 left-4">
- <button
- type="button"
- class="p-1.5 text-white cursor-pointer backdrop-blur-xl bg-black/10 rounded-full"
- on:click={() => {
- stopCamera();
- }}
- >
- <svg
- xmlns="http://www.w3.org/2000/svg"
- viewBox="0 0 16 16"
- fill="currentColor"
- class="size-6"
- >
- <path
- d="M5.28 4.22a.75.75 0 0 0-1.06 1.06L6.94 8l-2.72 2.72a.75.75 0 1 0 1.06 1.06L8 9.06l2.72 2.72a.75.75 0 1 0 1.06-1.06L9.06 8l2.72-2.72a.75.75 0 0 0-1.06-1.06L8 6.94 5.28 4.22Z"
- />
- </svg>
- </button>
- </div>
- </div>
- {/if}
- </div>
- <div class="flex justify-between items-center pb-2 w-full">
- <div>
- {#if camera}
- <VideoInputMenu
- devices={videoInputDevices}
- on:change={async (e) => {
- console.log(e.detail);
- selectedVideoInputDeviceId = e.detail;
- await stopVideoStream();
- await startVideoStream();
- }}
- >
- <button class=" p-3 rounded-full bg-gray-50 dark:bg-gray-900" type="button">
- <svg
- xmlns="http://www.w3.org/2000/svg"
- viewBox="0 0 20 20"
- fill="currentColor"
- class="size-5"
- >
- <path
- fill-rule="evenodd"
- d="M15.312 11.424a5.5 5.5 0 0 1-9.201 2.466l-.312-.311h2.433a.75.75 0 0 0 0-1.5H3.989a.75.75 0 0 0-.75.75v4.242a.75.75 0 0 0 1.5 0v-2.43l.31.31a7 7 0 0 0 11.712-3.138.75.75 0 0 0-1.449-.39Zm1.23-3.723a.75.75 0 0 0 .219-.53V2.929a.75.75 0 0 0-1.5 0V5.36l-.31-.31A7 7 0 0 0 3.239 8.188a.75.75 0 1 0 1.448.389A5.5 5.5 0 0 1 13.89 6.11l.311.31h-2.432a.75.75 0 0 0 0 1.5h4.243a.75.75 0 0 0 .53-.219Z"
- clip-rule="evenodd"
- />
- </svg>
- </button>
- </VideoInputMenu>
- {:else}
<Tooltip content={$i18n.t('Camera')}>
	<button
		class=" p-3 rounded-full bg-gray-50 dark:bg-gray-900"
		type="button"
		on:click={async () => {
			// Trigger the camera permission prompt, then immediately stop
			// the probe stream's tracks — the original discarded this stream
			// without stopping it, leaving the camera device busy (and its
			// indicator lit) alongside the real preview stream. Rejection
			// (permission denied) is also handled instead of escaping.
			try {
				const permissionStream = await navigator.mediaDevices.getUserMedia({ video: true });
				permissionStream.getTracks().forEach((track) => track.stop());
				startCamera();
			} catch (err) {
				console.error('Error accessing camera: ', err);
			}
		}}
	>
		<svg
			xmlns="http://www.w3.org/2000/svg"
			fill="none"
			viewBox="0 0 24 24"
			stroke-width="1.5"
			stroke="currentColor"
			class="size-5"
		>
			<path
				stroke-linecap="round"
				stroke-linejoin="round"
				d="M6.827 6.175A2.31 2.31 0 0 1 5.186 7.23c-.38.054-.757.112-1.134.175C2.999 7.58 2.25 8.507 2.25 9.574V18a2.25 2.25 0 0 0 2.25 2.25h15A2.25 2.25 0 0 0 21.75 18V9.574c0-1.067-.75-1.994-1.802-2.169a47.865 47.865 0 0 0-1.134-.175 2.31 2.31 0 0 1-1.64-1.055l-.822-1.316a2.192 2.192 0 0 0-1.736-1.039 48.774 48.774 0 0 0-5.232 0 2.192 2.192 0 0 0-1.736 1.039l-.821 1.316Z"
			/>
			<path
				stroke-linecap="round"
				stroke-linejoin="round"
				d="M16.5 12.75a4.5 4.5 0 1 1-9 0 4.5 4.5 0 0 1 9 0ZM18.75 10.5h.008v.008h-.008V10.5Z"
			/>
		</svg>
	</button>
</Tooltip>
- {/if}
- </div>
- <div>
- <button
- type="button"
- on:click={() => {
- if (assistantSpeaking) {
- stopAllAudio();
- }
- }}
- >
- <div class=" line-clamp-1 text-sm font-medium">
- {#if loading}
- {$i18n.t('Thinking...')}
- {:else if assistantSpeaking}
- {$i18n.t('Tap to interrupt')}
- {:else}
- {$i18n.t('Listening...')}
- {/if}
- </div>
- </button>
- </div>
- <div class="relative">
- <Dropdown bind:show={showSpeedMenu}>
- <button class="p-2 rounded-full bg-gray-50 dark:bg-gray-900">
- <svg width="24" height="24" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
- <polygon points="8,5 8,19 19,12" fill="currentColor"/>
- <path d="M12 2A10 10 0 0 0 12 22" fill="none" stroke="currentColor" stroke-width="2" stroke-dasharray="2,2"/>
- <path d="M12 2A10 10 0 0 1 12 22" fill="none" stroke="currentColor" stroke-width="2"/>
- </svg>
- </button>
- <div slot="content">
- <DropdownMenu.Content
- class="w-full max-w-[180px] rounded-lg px-1 py-1.5 border border-gray-300/30 dark:border-gray-700/50 z-[9999] bg-white dark:bg-gray-900 dark:text-white shadow-sm"
- sideOffset={6}
- side="top"
- align="start"
- transition={flyAndScale}
- >
- {#each speedOptions as speed}
- <DropdownMenu.Item
- class="flex gap-2 items-center px-3 py-2 text-sm cursor-pointer hover:bg-gray-50 dark:hover:bg-gray-800 rounded-md {speechRate === speed ? 'bg-gray-200 dark:bg-gray-600' : ''}"
- on:click={() => setSpeedRate(speed)}
- >
- <div class="flex items-center">
- <div class="line-clamp-1">
- {speed}x
- </div>
- </div>
- </DropdownMenu.Item>
- {/each}
- </DropdownMenu.Content>
- </div>
- </Dropdown>
- </div>
- <div>
- <button
- class=" p-3 rounded-full bg-gray-50 dark:bg-gray-900"
- on:click={async () => {
- await stopAudioStream();
- await stopVideoStream();
- showCallOverlay.set(false);
- dispatch('close');
- }}
- type="button"
- >
- <svg
- xmlns="http://www.w3.org/2000/svg"
- viewBox="0 0 20 20"
- fill="currentColor"
- class="size-5"
- >
- <path
- d="M6.28 5.22a.75.75 0 0 0-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 1 0 1.06 1.06L10 11.06l3.72 3.72a.75.75 0 1 0 1.06-1.06L11.06 10l3.72-3.72a.75.75 0 0 0-1.06-1.06L10 8.94 6.28 5.22Z"
- />
- </svg>
- </button>
- </div>
- </div>
- </div>
- {/if}
|