
Merge pull request #184 from ollama-webui/fix

fix: infinite new chat
Timothy Jaeryang Baek 1 year ago
parent
commit
dc7f595448
3 changed files with 144 additions and 40 deletions
  1. src/lib/components/chat/Messages.svelte (+23, -0)
  2. src/routes/(app)/+page.svelte (+59, -20)
  3. src/routes/(app)/c/[id]/+page.svelte (+62, -20)
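
Taken together, the changes in this commit thread a snapshot of the active chat id through the prompt pipeline: submitPrompt copies $chatId into a local _chatId and passes it down through sendPrompt to sendPromptOllama and sendPromptOpenAI, so that $db.updateChatById, the history.replaceState URL rewrite, and title generation keep targeting the chat the prompt was submitted from, even if the $chatId store changes while the response is streaming. A minimal sketch of the pattern, with the message construction and streaming loop elided (all identifiers are taken from the diffs below):

	const submitPrompt = async (userPrompt) => {
		// deep-copy the store value so later navigation cannot redirect the writes
		const _chatId = JSON.parse(JSON.stringify($chatId));

		// ... append the user message to history and, for a brand-new
		// conversation, create the chat record under _chatId ...

		await sendPrompt(userPrompt, userMessageId, _chatId);
	};

	const sendPrompt = async (userPrompt, parentId, _chatId) => {
		// in the real files this call sits inside sendPromptOllama / sendPromptOpenAI;
		// the point is that every persisted update uses the snapshotted id, not $chatId
		await $db.updateChatById(_chatId, { /* ...chat fields as in the diff... */ });
	};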

+ 23 - 0
src/lib/components/chat/Messages.svelte

@@ -663,6 +663,29 @@
 														</button>
 													</div>
 												{/if}
+
+												<!-- <button
+													class="invisible group-hover:visible p-1 rounded dark:hover:bg-gray-800 transition"
+													on:click={() => {
+														editMessageHandler(message.id);
+													}}
+												>
+													<svg
+														xmlns="http://www.w3.org/2000/svg"
+														fill="none"
+														viewBox="0 0 24 24"
+														stroke-width="1.5"
+														stroke="currentColor"
+														class="w-4 h-4"
+													>
+														<path
+															stroke-linecap="round"
+															stroke-linejoin="round"
+															d="M16.862 4.487l1.687-1.688a1.875 1.875 0 112.652 2.652L6.832 19.82a4.5 4.5 0 01-1.897 1.13l-2.685.8.8-2.685a4.5 4.5 0 011.13-1.897L16.863 4.487zm0 0L19.5 7.125"
+														/>
+													</svg>
+												</button> -->
+
 												<button
 													class="{messageIdx + 1 === messages.length
 														? 'visible'

+ 59 - 20
src/routes/(app)/+page.svelte

@@ -46,6 +46,8 @@
 				currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
 		}
 		messages = _messages;
+	} else {
+		messages = [];
 	}
 
 	onMount(async () => {
@@ -80,13 +82,13 @@
 	// Ollama functions
 	//////////////////////////
 
-	const sendPrompt = async (userPrompt, parentId) => {
+	const sendPrompt = async (userPrompt, parentId, _chatId) => {
 		await Promise.all(
 			selectedModels.map(async (model) => {
 				if (model.includes('gpt-')) {
-					await sendPromptOpenAI(model, userPrompt, parentId);
+					await sendPromptOpenAI(model, userPrompt, parentId, _chatId);
 				} else {
-					await sendPromptOllama(model, userPrompt, parentId);
+					await sendPromptOllama(model, userPrompt, parentId, _chatId);
 				}
 			})
 		);
@@ -94,7 +96,7 @@
 		await chats.set(await $db.getChats());
 	};
 
-	const sendPromptOllama = async (model, userPrompt, parentId) => {
+	const sendPromptOllama = async (model, userPrompt, parentId, _chatId) => {
 		console.log('sendPromptOllama');
 		let responseMessageId = uuidv4();
 
@@ -147,6 +149,39 @@
 			})
 		});
 
+		// const res = await fetch(`${$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/chat`, {
+		// 	method: 'POST',
+		// 	headers: {
+		// 		'Content-Type': 'text/event-stream',
+		// 		...($settings.authHeader && { Authorization: $settings.authHeader }),
+		// 		...($user && { Authorization: `Bearer ${localStorage.token}` })
+		// 	},
+		// 	body: JSON.stringify({
+		// 		model: model,
+		// 		messages: [
+		// 			$settings.system
+		// 				? {
+		// 						role: 'system',
+		// 						content: $settings.system
+		// 				  }
+		// 				: undefined,
+		// 			...messages
+		// 		]
+		// 			.filter((message) => message)
+		// 			.map((message) => ({ role: message.role, content: message.content })),
+		// 		options: {
+		// 			seed: $settings.seed ?? undefined,
+		// 			temperature: $settings.temperature ?? undefined,
+		// 			repeat_penalty: $settings.repeat_penalty ?? undefined,
+		// 			top_k: $settings.top_k ?? undefined,
+		// 			top_p: $settings.top_p ?? undefined,
+		// 			num_ctx: $settings.num_ctx ?? undefined,
+		// 			...($settings.options ?? {})
+		// 		},
+		// 		format: $settings.requestFormat ?? undefined
+		// 	})
+		// });
+
 		const reader = res.body
 			.pipeThrough(new TextDecoderStream())
 			.pipeThrough(splitStream('\n'))
@@ -198,7 +233,7 @@
 				window.scrollTo({ top: document.body.scrollHeight });
 			}
 
-			await $db.updateChatById($chatId, {
+			await $db.updateChatById(_chatId, {
 				title: title === '' ? 'New Chat' : title,
 				models: selectedModels,
 				system: $settings.system ?? undefined,
@@ -223,12 +258,12 @@
 		}
 
 		if (messages.length == 2 && messages.at(1).content !== '') {
-			window.history.replaceState(history.state, '', `/c/${$chatId}`);
-			await generateChatTitle($chatId, userPrompt);
+			window.history.replaceState(history.state, '', `/c/${_chatId}`);
+			await generateChatTitle(_chatId, userPrompt);
 		}
 	};
 
-	const sendPromptOpenAI = async (model, userPrompt, parentId) => {
+	const sendPromptOpenAI = async (model, userPrompt, parentId, _chatId) => {
 		if ($settings.OPENAI_API_KEY) {
 			if (models) {
 				let responseMessageId = uuidv4();
@@ -328,7 +363,7 @@
 						window.scrollTo({ top: document.body.scrollHeight });
 					}
 
-					await $db.updateChatById($chatId, {
+					await $db.updateChatById(_chatId, {
 						title: title === '' ? 'New Chat' : title,
 						models: selectedModels,
 						system: $settings.system ?? undefined,
@@ -354,15 +389,16 @@
 				}
 
 				if (messages.length == 2) {
-					window.history.replaceState(history.state, '', `/c/${$chatId}`);
-					await setChatTitle($chatId, userPrompt);
+					window.history.replaceState(history.state, '', `/c/${_chatId}`);
+					await setChatTitle(_chatId, userPrompt);
 				}
 			}
 		}
 	};
 
 	const submitPrompt = async (userPrompt) => {
-		console.log('submitPrompt');
+		const _chatId = JSON.parse(JSON.stringify($chatId));
+		console.log('submitPrompt', _chatId);
 
 		if (selectedModels.includes('')) {
 			toast.error('Model not selected');
@@ -388,12 +424,10 @@
 			history.messages[userMessageId] = userMessage;
 			history.currentId = userMessageId;
 
-			prompt = '';
-			files = [];
-
-			if (messages.length == 0) {
+			await tick();
+			if (messages.length == 1) {
 				await $db.createNewChat({
-					id: $chatId,
+					id: _chatId,
 					title: 'New Chat',
 					models: selectedModels,
 					system: $settings.system ?? undefined,
@@ -411,11 +445,14 @@
 				});
 			}
 
+			prompt = '';
+			files = [];
+
 			setTimeout(() => {
 				window.scrollTo({ top: document.body.scrollHeight, behavior: 'smooth' });
 			}, 50);
 
-			await sendPrompt(userPrompt, userMessageId);
+			await sendPrompt(userPrompt, userMessageId, _chatId);
 		}
 	};
 
@@ -425,7 +462,9 @@
 	};
 
 	const regenerateResponse = async () => {
-		console.log('regenerateResponse');
+		const _chatId = JSON.parse(JSON.stringify($chatId));
+		console.log('regenerateResponse', _chatId);
+
 		if (messages.length != 0 && messages.at(-1).done == true) {
 			messages.splice(messages.length - 1, 1);
 			messages = messages;
@@ -433,7 +472,7 @@
 			let userMessage = messages.at(-1);
 			let userPrompt = userMessage.content;
 
-			await sendPrompt(userPrompt, userMessage.id);
+			await sendPrompt(userPrompt, userMessage.id, _chatId);
 		}
 	};
 

+ 62 - 20
src/routes/(app)/c/[id]/+page.svelte

@@ -46,6 +46,8 @@
 				currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
 		}
 		messages = _messages;
+	} else {
+		messages = [];
 	}
 
 	// onMount(async () => {
@@ -113,13 +115,13 @@
 	// Ollama functions
 	//////////////////////////
 
-	const sendPrompt = async (userPrompt, parentId) => {
+	const sendPrompt = async (userPrompt, parentId, _chatId) => {
 		await Promise.all(
 			selectedModels.map(async (model) => {
 				if (model.includes('gpt-')) {
-					await sendPromptOpenAI(model, userPrompt, parentId);
+					await sendPromptOpenAI(model, userPrompt, parentId, _chatId);
 				} else {
-					await sendPromptOllama(model, userPrompt, parentId);
+					await sendPromptOllama(model, userPrompt, parentId, _chatId);
 				}
 			})
 		);
@@ -127,7 +129,8 @@
 		await chats.set(await $db.getChats());
 	};
 
-	const sendPromptOllama = async (model, userPrompt, parentId) => {
+	const sendPromptOllama = async (model, userPrompt, parentId, _chatId) => {
+		console.log('sendPromptOllama');
 		let responseMessageId = uuidv4();
 
 		let responseMessage = {
@@ -179,6 +182,39 @@
 			})
 		});
 
+		// const res = await fetch(`${$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/chat`, {
+		// 	method: 'POST',
+		// 	headers: {
+		// 		'Content-Type': 'text/event-stream',
+		// 		...($settings.authHeader && { Authorization: $settings.authHeader }),
+		// 		...($user && { Authorization: `Bearer ${localStorage.token}` })
+		// 	},
+		// 	body: JSON.stringify({
+		// 		model: model,
+		// 		messages: [
+		// 			$settings.system
+		// 				? {
+		// 						role: 'system',
+		// 						content: $settings.system
+		// 				  }
+		// 				: undefined,
+		// 			...messages
+		// 		]
+		// 			.filter((message) => message)
+		// 			.map((message) => ({ role: message.role, content: message.content })),
+		// 		options: {
+		// 			seed: $settings.seed ?? undefined,
+		// 			temperature: $settings.temperature ?? undefined,
+		// 			repeat_penalty: $settings.repeat_penalty ?? undefined,
+		// 			top_k: $settings.top_k ?? undefined,
+		// 			top_p: $settings.top_p ?? undefined,
+		// 			num_ctx: $settings.num_ctx ?? undefined,
+		// 			...($settings.options ?? {})
+		// 		},
+		// 		format: $settings.requestFormat ?? undefined
+		// 	})
+		// });
+
 		const reader = res.body
 			.pipeThrough(new TextDecoderStream())
 			.pipeThrough(splitStream('\n'))
@@ -230,7 +266,7 @@
 				window.scrollTo({ top: document.body.scrollHeight });
 			}
 
-			await $db.updateChatById($chatId, {
+			await $db.updateChatById(_chatId, {
 				title: title === '' ? 'New Chat' : title,
 				models: selectedModels,
 				system: $settings.system ?? undefined,
@@ -255,12 +291,12 @@
 		}
 
 		if (messages.length == 2 && messages.at(1).content !== '') {
-			window.history.replaceState(history.state, '', `/c/${$chatId}`);
-			await generateChatTitle($chatId, userPrompt);
+			window.history.replaceState(history.state, '', `/c/${_chatId}`);
+			await generateChatTitle(_chatId, userPrompt);
 		}
 	};
 
-	const sendPromptOpenAI = async (model, userPrompt, parentId) => {
+	const sendPromptOpenAI = async (model, userPrompt, parentId, _chatId) => {
 		if ($settings.OPENAI_API_KEY) {
 			if (models) {
 				let responseMessageId = uuidv4();
@@ -360,7 +396,7 @@
 						window.scrollTo({ top: document.body.scrollHeight });
 					}
 
-					await $db.updateChatById($chatId, {
+					await $db.updateChatById(_chatId, {
 						title: title === '' ? 'New Chat' : title,
 						models: selectedModels,
 						system: $settings.system ?? undefined,
@@ -386,15 +422,16 @@
 				}
 
 				if (messages.length == 2) {
-					window.history.replaceState(history.state, '', `/c/${$chatId}`);
-					await setChatTitle($chatId, userPrompt);
+					window.history.replaceState(history.state, '', `/c/${_chatId}`);
+					await setChatTitle(_chatId, userPrompt);
 				}
 			}
 		}
 	};
 
 	const submitPrompt = async (userPrompt) => {
-		console.log('submitPrompt');
+		const _chatId = JSON.parse(JSON.stringify($chatId));
+		console.log('submitPrompt', _chatId);
 
 		if (selectedModels.includes('')) {
 			toast.error('Model not selected');
@@ -409,7 +446,8 @@
 				parentId: messages.length !== 0 ? messages.at(-1).id : null,
 				childrenIds: [],
 				role: 'user',
-				content: userPrompt
+				content: userPrompt,
+				files: files.length > 0 ? files : undefined
 			};
 
 			if (messages.length !== 0) {
@@ -419,11 +457,10 @@
 			history.messages[userMessageId] = userMessage;
 			history.currentId = userMessageId;
 
-			prompt = '';
-
-			if (messages.length == 0) {
+			await tick();
+			if (messages.length == 1) {
 				await $db.createNewChat({
-					id: $chatId,
+					id: _chatId,
 					title: 'New Chat',
 					models: selectedModels,
 					system: $settings.system ?? undefined,
@@ -441,11 +478,14 @@
 				});
 			}
 
+			prompt = '';
+			files = [];
+
 			setTimeout(() => {
 				window.scrollTo({ top: document.body.scrollHeight, behavior: 'smooth' });
 			}, 50);
 
-			await sendPrompt(userPrompt, userMessageId);
+			await sendPrompt(userPrompt, userMessageId, _chatId);
 		}
 	};
 
@@ -455,7 +495,9 @@
 	};
 
 	const regenerateResponse = async () => {
-		console.log('regenerateResponse');
+		const _chatId = JSON.parse(JSON.stringify($chatId));
+		console.log('regenerateResponse', _chatId);
+
 		if (messages.length != 0 && messages.at(-1).done == true) {
 			messages.splice(messages.length - 1, 1);
 			messages = messages;
@@ -463,7 +505,7 @@
 			let userMessage = messages.at(-1);
 			let userPrompt = userMessage.content;
 
-			await sendPrompt(userPrompt, userMessage.id);
+			await sendPrompt(userPrompt, userMessage.id, _chatId);
 		}
 	};
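
The new-chat creation path after this change can be summarized as the sketch below (grounded in the submitPrompt hunks above; the createNewChat payload is abbreviated). Waiting a tick lets the reactive messages derivation pick up the just-added user message, so messages.length == 1 identifies a fresh conversation and the chat record is created exactly once, keyed by the snapshotted _chatId, which appears to be what stops the repeated "New Chat" entries referenced in the commit title:

	await tick(); // let `messages` recompute from history and include the new user message
	if (messages.length == 1) {
		// exactly one message means this prompt started a fresh chat,
		// so the record is created once under the snapshotted id
		await $db.createNewChat({ id: _chatId, title: 'New Chat' /* ...remaining fields as in the diff... */ });
	}
	// the input is cleared only after the chat record exists
	prompt = '';
	files = [];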