Jelajahi Sumber

feat: add support for using previous messages for query generation

Author: Jun Siang Cheah — 1 year ago — parent commit 466b3e3637

+ 6 - 5
src/lib/apis/openai/index.ts

@@ -1,5 +1,6 @@
 import { OPENAI_API_BASE_URL } from '$lib/constants';
 import { promptTemplate } from '$lib/utils';
+import { type Model, models, settings } from '$lib/stores';
 
 export const getOpenAIUrls = async (token: string = '') => {
 	let error = null;
@@ -322,15 +323,14 @@ export const generateTitle = async (
 
 export const generateSearchQuery = async (
 	token: string = '',
-	// template: string,
 	model: string,
+	previousMessages: string[],
 	prompt: string,
 	url: string = OPENAI_API_BASE_URL
 ): Promise<string | undefined> => {
 	let error = null;
 
 	// TODO: Allow users to specify the prompt
-	// template = promptTemplate(template, prompt);
 
 	// Get the current date in the format "January 20, 2024"
 	const currentDate = new Intl.DateTimeFormat('en-US', {
@@ -344,8 +344,6 @@ export const generateSearchQuery = async (
 		day: '2-digit'
 	}).format(new Date());
 
-	// console.log(template);
-
 	const res = await fetch(`${url}/chat/completions`, {
 		method: 'POST',
 		headers: {
@@ -409,7 +407,10 @@ Current Question: Where is it being hosted?`
 				},
 				{
 					role: 'user',
-					content: `Current Question: ${prompt}`
+					content:
+						(previousMessages.length > 0
+							? `Previous Questions:\n${previousMessages.join('\n')}\n\n`
+							: '') + `Current Question: ${prompt}`
 				}
 			],
 			stream: false,

+ 1 - 1
src/lib/stores/index.ts

@@ -39,7 +39,7 @@ export const showSidebar = writable(false);
 export const showSettings = writable(false);
 export const showChangelog = writable(false);
 
-type Model = OpenAIModel | OllamaModel;
+export type Model = OpenAIModel | OllamaModel;
 
 type OpenAIModel = {
 	id: string;

+ 10 - 4
src/routes/(app)/+page.svelte

@@ -290,7 +290,7 @@
 					}
 
 					if (useWebSearch) {
-						await runWebSearchForPrompt(parentId, responseMessageId, prompt);
+						await runWebSearchForPrompt(parentId, responseMessageId);
 					}
 
 					if (model?.external) {
@@ -307,11 +307,11 @@
 		await chats.set(await getChatList(localStorage.token));
 	};
 
-	const runWebSearchForPrompt = async (parentId: string, responseId: string, prompt: string) => {
+	const runWebSearchForPrompt = async (parentId: string, responseId: string) => {
 		const responseMessage = history.messages[responseId];
 		responseMessage.progress = $i18n.t('Generating search query');
 		messages = messages;
-		const searchQuery = await generateChatSearchQuery(prompt);
+		const searchQuery = await generateChatSearchQuery(parentId);
 		if (!searchQuery) {
 			toast.warning($i18n.t('No search query generated'));
 			responseMessage.progress = undefined;
@@ -861,7 +861,7 @@
 	};
 
 	// TODO: Add support for adding all the user's messages as context, and not just the last message
-	const generateChatSearchQuery = async (userPrompt: string) => {
+	const generateChatSearchQuery = async (messageId: string) => {
 		const model = $models.find((model) => model.id === selectedModels[0]);
 
 		// TODO: rename titleModel to taskModel - this is the model used for non-chat tasks (e.g. title generation, search query generation)
@@ -871,10 +871,16 @@
 				: $settings?.title?.model ?? selectedModels[0];
 		const titleModel = $models.find((model) => model.id === titleModelId);
 
+		const userMessage = history.messages[messageId];
+		const userPrompt = userMessage.content;
+
+		const previousMessages = messages.filter((message) => message.role === 'user').map((message) => message.content);
+
 		console.log(titleModel);
 		return await generateSearchQuery(
 			localStorage.token,
 			titleModelId,
+			previousMessages,
 			userPrompt,
 			titleModel?.external ?? false
 				? titleModel?.source?.toLowerCase() === 'litellm'

+ 10 - 4
src/routes/(app)/c/[id]/+page.svelte

@@ -295,7 +295,7 @@
 					}
 
 					if (useWebSearch) {
-						await runWebSearchForPrompt(parentId, responseMessageId, prompt);
+						await runWebSearchForPrompt(parentId, responseMessageId);
 					}
 
 					if (model?.external) {
@@ -312,11 +312,11 @@
 		await chats.set(await getChatList(localStorage.token));
 	};
 
-	const runWebSearchForPrompt = async (parentId: string, responseId: string, prompt: string) => {
+	const runWebSearchForPrompt = async (parentId: string, responseId: string) => {
 		const responseMessage = history.messages[responseId];
 		responseMessage.progress = $i18n.t('Generating search query');
 		messages = messages;
-		const searchQuery = await generateChatSearchQuery(prompt);
+		const searchQuery = await generateChatSearchQuery(parentId);
 		if (!searchQuery) {
 			toast.warning($i18n.t('No search query generated'));
 			responseMessage.progress = undefined;
@@ -866,7 +866,7 @@
 	};
 
 	// TODO: Add support for adding all the user's messages as context, and not just the last message
-	const generateChatSearchQuery = async (userPrompt: string) => {
+	const generateChatSearchQuery = async (messageId: string) => {
 		const model = $models.find((model) => model.id === selectedModels[0]);
 
 		// TODO: rename titleModel to taskModel - this is the model used for non-chat tasks (e.g. title generation, search query generation)
@@ -876,10 +876,16 @@
 				: $settings?.title?.model ?? selectedModels[0];
 		const titleModel = $models.find((model) => model.id === titleModelId);
 
+		const userMessage = history.messages[messageId];
+		const userPrompt = userMessage.content;
+
+		const previousMessages = messages.filter((message) => message.role === 'user').map((message) => message.content);
+
 		console.log(titleModel);
 		return await generateSearchQuery(
 			localStorage.token,
 			titleModelId,
+			previousMessages,
 			userPrompt,
 			titleModel?.external ?? false
 				? titleModel?.source?.toLowerCase() === 'litellm'