@@ -1,9 +1,6 @@
 <script lang="ts">
 	import { config, models, settings, showCallOverlay } from '$lib/stores';
 	import { onMount, tick, getContext, onDestroy, createEventDispatcher } from 'svelte';
-	import { DropdownMenu } from 'bits-ui';
-	import Dropdown from '$lib/components/common/Dropdown.svelte';
-	import { flyAndScale } from '$lib/utils/transitions';

 	const dispatch = createEventDispatcher();

@@ -35,12 +32,10 @@
 	let assistantSpeaking = false;

 	let emoji = null;
-
 	let camera = false;
 	let cameraStream = null;

 	let chatStreaming = false;
-
 	let rmsLevel = 0;
 	let hasStartedSpeaking = false;
 	let mediaRecorder;
@@ -220,32 +215,42 @@
 	};

 	const startRecording = async () => {
-		if (!audioStream) {
-			audioStream = await navigator.mediaDevices.getUserMedia({ audio: true });
-		}
-		mediaRecorder = new MediaRecorder(audioStream);
+		if ($showCallOverlay) {
+			if (!audioStream) {
+				audioStream = await navigator.mediaDevices.getUserMedia({ audio: true });
+			}
+			mediaRecorder = new MediaRecorder(audioStream);

-		mediaRecorder.onstart = () => {
-			console.log('Recording started');
-			audioChunks = [];
-			analyseAudio(audioStream);
-		};
+			mediaRecorder.onstart = () => {
+				console.log('Recording started');
+				audioChunks = [];
+				analyseAudio(audioStream);
+			};

-		mediaRecorder.ondataavailable = (event) => {
-			if (hasStartedSpeaking) {
-				audioChunks.push(event.data);
-			}
-		};
+			mediaRecorder.ondataavailable = (event) => {
+				if (hasStartedSpeaking) {
+					audioChunks.push(event.data);
+				}
+			};

-		mediaRecorder.onstop = (e) => {
-			console.log('Recording stopped', audioStream, e);
-			stopRecordingCallback();
-		};
+			mediaRecorder.onstop = (e) => {
+				console.log('Recording stopped', audioStream, e);
+				stopRecordingCallback();
+			};

-		mediaRecorder.start();
+			mediaRecorder.start();
+		}
 	};

 	const stopAudioStream = async () => {
+		try {
+			if (mediaRecorder) {
+				mediaRecorder.stop();
+			}
+		} catch (error) {
+			console.log('Error stopping audio stream:', error);
+		}
+
 		if (!audioStream) return;

 		audioStream.getAudioTracks().forEach(function (track) {
@@ -640,19 +645,18 @@

 	onDestroy(async () => {
 		await stopAllAudio();
-		stopAudioStream();
+		await stopRecordingCallback(false);
+		await stopCamera();

+		await stopAudioStream();
 		eventTarget.removeEventListener('chat:start', chatStartHandler);
 		eventTarget.removeEventListener('chat', chatEventHandler);
 		eventTarget.removeEventListener('chat:finish', chatFinishHandler);
-
 		audioAbortController.abort();
+
 		await tick();

 		await stopAllAudio();
-
-		await stopRecordingCallback(false);
-		await stopCamera();
 	});
 </script>
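The hunks above come down to two patterns: only start capturing while the call overlay is actually shown, and on teardown stop the MediaRecorder before releasing the microphone tracks. A minimal standalone sketch of that pattern in browser TypeScript follows; the names mirror the component, but this is an illustrative reduction, not the component's actual code.

// Sketch only: guard recording start on overlay visibility, and on teardown
// stop the recorder before stopping the underlying audio tracks.
let mediaRecorder: MediaRecorder | null = null;
let audioStream: MediaStream | null = null;

const startRecording = async (overlayVisible: boolean) => {
	if (!overlayVisible) return; // mirrors the $showCallOverlay guard in the diff

	if (!audioStream) {
		audioStream = await navigator.mediaDevices.getUserMedia({ audio: true });
	}
	mediaRecorder = new MediaRecorder(audioStream);
	mediaRecorder.start();
};

const stopAudioStream = async () => {
	// MediaRecorder.stop() throws if the recorder is already inactive, hence the try/catch.
	try {
		mediaRecorder?.stop();
	} catch (error) {
		console.log('Error stopping audio stream:', error);
	}

	// Release the microphone by stopping every audio track.
	audioStream?.getAudioTracks().forEach((track) => track.stop());
	audioStream = null;
};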