
feat: submit prompt integration

Timothy J. Baek 11 months ago
parent commit 4e640daf83

+ 23 - 4
src/lib/components/chat/Chat.svelte

@@ -298,6 +298,7 @@
 	//////////////////////////
 
 	const submitPrompt = async (userPrompt, _user = null) => {
+		let _responses = [];
 		console.log('submitPrompt', $chatId);
 
 		selectedModels = selectedModels.map((modelId) =>
@@ -379,11 +380,14 @@
 			files = [];
 
 			// Send prompt
-			await sendPrompt(userPrompt, userMessageId);
+			_responses = await sendPrompt(userPrompt, userMessageId);
 		}
+
+		return _responses;
 	};
 
 	const sendPrompt = async (prompt, parentId, modelId = null) => {
+		let _responses = [];
 		const _chatId = JSON.parse(JSON.stringify($chatId));
 
 		await Promise.all(
@@ -470,11 +474,14 @@
 						await getWebSearchResults(model.id, parentId, responseMessageId);
 					}
 
+					let _response = null;
+
 					if (model?.owned_by === 'openai') {
-						await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
+						_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
 					} else if (model) {
-						await sendPromptOllama(model, prompt, responseMessageId, _chatId);
+						_response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
 					}
+					_responses.push(_response);
 
 					console.log('chatEventEmitter', chatEventEmitter);
 
@@ -486,6 +493,8 @@
 		);
 
 		await chats.set(await getChatList(localStorage.token));
+
+		return _responses;
 	};
 
 	const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
@@ -560,6 +569,8 @@
 	};
 
 	const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
+		let _response = null;
+
 		model = model.id;
 
 		const responseMessage = history.messages[responseMessageId];
@@ -670,6 +681,7 @@
 						await chatCompletedHandler(model, messages);
 					}
 
+					_response = responseMessage.content;
 					break;
 				}
 
@@ -806,9 +818,12 @@
 			const _title = await generateChatTitle(userPrompt);
 			await setChatTitle(_chatId, _title);
 		}
+
+		return _response;
 	};
 
 	const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
+		let _response = null;
 		const responseMessage = history.messages[responseMessageId];
 
 		const docs = messages
@@ -925,6 +940,8 @@
 							await chatCompletedHandler(model.id, messages);
 						}
 
+						_response = responseMessage.content;
+
 						break;
 					}
 
@@ -1000,6 +1017,8 @@
 			const _title = await generateChatTitle(userPrompt);
 			await setChatTitle(_chatId, _title);
 		}
+
+		return _response;
 	};
 
 	const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
@@ -1195,7 +1214,7 @@
 	</title>
 </svelte:head>
 
-<CallOverlay />
+<CallOverlay {submitPrompt} />
 
 {#if !chatIdProp || (loaded && chatIdProp)}
 	<div
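
The net effect in Chat.svelte: `submitPrompt`, `sendPrompt`, and the per-backend handlers now resolve to the generated message content instead of returning nothing, so a caller can await the result. A minimal standalone sketch of that flow, assuming heavily reduced signatures (the real handlers also take `responseMessageId` and `_chatId`, stream tokens, and update `history.messages` as they go):

```ts
// Simplified sketch of the new return-value flow; `declare` stubs stand
// in for the component's real handlers.
type Model = { id: string; owned_by?: string };

declare const sendPromptOpenAI: (model: Model, prompt: string) => Promise<string | null>;
declare const sendPromptOllama: (model: Model, prompt: string) => Promise<string | null>;

const sendPrompt = async (prompt: string, models: Model[]): Promise<(string | null)[]> => {
	const _responses: (string | null)[] = [];

	await Promise.all(
		models.map(async (model) => {
			// Each handler now resolves to the final assistant message
			// content rather than returning nothing.
			const _response =
				model?.owned_by === 'openai'
					? await sendPromptOpenAI(model, prompt)
					: await sendPromptOllama(model, prompt);
			_responses.push(_response);
		})
	);

	// submitPrompt forwards this array to its caller, e.g. CallOverlay.
	return _responses;
};
```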

+ 5 - 1
src/lib/components/chat/MessageInput.svelte

@@ -897,7 +897,11 @@
 												class=" text-gray-600 dark:text-gray-300 hover:bg-gray-50 dark:hover:bg-gray-850 transition rounded-full p-2 self-center"
 												type="button"
 												on:click={() => {
-													showCallOverlay.set(true);
+													if (selectedModels.length > 1) {
+														toast.error($i18n.t('Select only one model to call'));
+													} else {
+														showCallOverlay.set(true);
+													}
 												}}
 											>
 												<Headphone className="size-6" />
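
The MessageInput guard exists because the overlay plays back only the first element of the response array, so voice calls are limited to a single model. A standalone sketch of the same check, with `declare` stubs in place of the component's stores and i18n-wrapped toast:

```ts
declare const selectedModels: string[];
declare const showCallOverlay: { set: (value: boolean) => void };
declare const toastError: (message: string) => void; // stands in for toast.error($i18n.t(...))

// Reject multi-model selections before the call overlay opens.
const onCallButtonClick = () => {
	if (selectedModels.length > 1) {
		toastError('Select only one model to call');
	} else {
		showCallOverlay.set(true);
	}
};
```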

+ 31 - 7
src/lib/components/chat/MessageInput/CallOverlay.svelte

@@ -1,5 +1,5 @@
 <script lang="ts">
-	import { showCallOverlay } from '$lib/stores';
+	import { settings, showCallOverlay } from '$lib/stores';
 	import { onMount, tick, getContext } from 'svelte';
 
 	import { blobToFile, calculateSHA256, findWordIndices } from '$lib/utils';
@@ -8,9 +8,14 @@
 
 	const i18n = getContext('i18n');
 
+	export let submitPrompt: Function;
+
 	let loading = false;
 	let confirmed = false;
 
+	let assistantSpeaking = false;
+	let assistantAudio = null;
+
 	let rmsLevel = 0;
 	let hasStartedSpeaking = false;
 
@@ -103,6 +108,14 @@
 				// Check if initial speech/noise has started
 				const hasSound = domainData.some((value) => value > 0);
 				if (hasSound) {
+					if (assistantSpeaking) {
+						speechSynthesis.cancel();
+
+						if (assistantAudio) {
+							assistantAudio.pause();
+							assistantAudio.currentTime = 0;
+						}
+					}
 					hasStartedSpeaking = true;
 					lastSoundTime = Date.now();
 				}
@@ -140,6 +153,22 @@
 
 		if (res) {
 			toast.success(res.text);
+
+			const _responses = await submitPrompt(res.text);
+			console.log(_responses);
+
+			if (_responses.at(0)) {
+				const response = _responses[0];
+				if (response) {
+					assistantSpeaking = true;
+
+					if ($settings?.audio?.TTSEngine ?? '') {
+						speechSynthesis.speak(new SpeechSynthesisUtterance(response));
+					} else {
+						console.log('openai');
+					}
+				}
+			}
 		}
 	};
 
@@ -277,12 +306,7 @@
 					</div>
 
 					<div>
-						<button
-							on:click={() => {
-								loading = !loading;
-							}}
-							type="button"
-						>
+						<button type="button">
 							<div class=" line-clamp-1 text-sm font-medium">
 								{#if loading}
 									Thinking...
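
Taken together, CallOverlay now receives `submitPrompt` as a prop, submits the transcribed speech, and speaks the first returned response; when the user starts talking over the assistant, playback is cancelled via `speechSynthesis.cancel()` plus pausing and rewinding `assistantAudio`. Note that only the `speechSynthesis` branch produces audio in this commit; the other branch is still a `console.log('openai')` stub. A minimal sketch of the speak-and-interrupt pattern, where `speakResponse` and `onUserSound` are hypothetical names for logic that actually lives inline in the component:

```ts
let assistantSpeaking = false;
let assistantAudio: HTMLAudioElement | null = null;

// Speak an assistant response via the browser Web Speech API.
const speakResponse = (text: string) => {
	assistantSpeaking = true;
	speechSynthesis.speak(new SpeechSynthesisUtterance(text));
};

// Called from the audio-level monitoring loop whenever the microphone
// picks up sound: the user talking over the assistant cancels playback.
const onUserSound = () => {
	if (assistantSpeaking) {
		speechSynthesis.cancel();

		if (assistantAudio) {
			assistantAudio.pause();
			assistantAudio.currentTime = 0;
		}
		assistantSpeaking = false;
	}
};
```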