@@ -64,6 +64,8 @@
export let chatIdProp = '';
let loaded = false;

+ const eventTarget = new EventTarget();
+
let stopResponseFlag = false;
let autoScroll = true;
let processing = '';
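
// Editor's note (illustrative sketch, not part of the patch): the hunks below dispatch
// 'chat:start', 'chat' (with a { content } delta in detail), and 'chat:finish' on this
// eventTarget, and the CallOverlay hunk at the end passes it down as a prop. A consumer
// could subscribe roughly like this:
eventTarget.addEventListener('chat:start', () => console.log('response stream started'));
eventTarget.addEventListener('chat', (e) => console.log('delta:', e.detail.content));
eventTarget.addEventListener('chat:finish', () => console.log('response stream finished'));
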
@@ -300,7 +302,7 @@
// Chat functions
//////////////////////////

- const submitPrompt = async (userPrompt, _user = null) => {
+ const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
let _responses = [];
console.log('submitPrompt', $chatId);

@@ -344,7 +346,6 @@
parentId: messages.length !== 0 ? messages.at(-1).id : null,
childrenIds: [],
role: 'user',
- user: _user ?? undefined,
content: userPrompt,
files: _files.length > 0 ? _files : undefined,
timestamp: Math.floor(Date.now() / 1000), // Unix epoch
@@ -362,15 +363,13 @@

// Wait until history/message have been updated
await tick();
-
- // Send prompt
- _responses = await sendPrompt(userPrompt, userMessageId);
+ _responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
}

return _responses;
};

- const sendPrompt = async (prompt, parentId, modelId = null, newChat = true) => {
+ const sendPrompt = async (prompt, parentId, { modelId = null, newChat = false } = {}) => {
let _responses = [];

// If modelId is provided, use it, else use selected model
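
// Editor's note (illustrative sketch, not part of the patch): sendPrompt now takes an
// options object instead of positional modelId/newChat arguments, and newChat defaults
// to false, so callers opt in explicitly. The call shapes used elsewhere in this diff:
await sendPrompt(userPrompt, userMessageId, { newChat: true }); // first prompt of a new chat
await sendPrompt(userPrompt, userMessage.id, { modelId: message.model }); // regenerate with the response's model
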
@@ -490,7 +489,6 @@
responseMessage.userContext = userContext;

const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
-
if (webSearchEnabled) {
await getWebSearchResults(model.id, parentId, responseMessageId);
}
@@ -503,8 +501,6 @@
}
_responses.push(_response);

- console.log('chatEventEmitter', chatEventEmitter);
-
if (chatEventEmitter) clearInterval(chatEventEmitter);
} else {
toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
@@ -513,88 +509,9 @@
);

await chats.set(await getChatList(localStorage.token));
-
return _responses;
};

- const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
- const responseMessage = history.messages[responseId];
-
- responseMessage.statusHistory = [
- {
- done: false,
- action: 'web_search',
- description: $i18n.t('Generating search query')
- }
- ];
- messages = messages;
-
- const prompt = history.messages[parentId].content;
- let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
- (error) => {
- console.log(error);
- return prompt;
- }
- );
-
- if (!searchQuery) {
- toast.warning($i18n.t('No search query generated'));
- responseMessage.statusHistory.push({
- done: true,
- error: true,
- action: 'web_search',
- description: 'No search query generated'
- });
-
- messages = messages;
- }
-
- responseMessage.statusHistory.push({
- done: false,
- action: 'web_search',
- description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
- });
- messages = messages;
-
- const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
- console.log(error);
- toast.error(error);
-
- return null;
- });
-
- if (results) {
- responseMessage.statusHistory.push({
- done: true,
- action: 'web_search',
- description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
- query: searchQuery,
- urls: results.filenames
- });
-
- if (responseMessage?.files ?? undefined === undefined) {
- responseMessage.files = [];
- }
-
- responseMessage.files.push({
- collection_name: results.collection_name,
- name: searchQuery,
- type: 'web_search_results',
- urls: results.filenames
- });
-
- messages = messages;
- } else {
- responseMessage.statusHistory.push({
- done: true,
- error: true,
- action: 'web_search',
- description: 'No search results found'
- });
- messages = messages;
- }
- };
-
const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
let _response = null;

@@ -676,6 +593,8 @@
array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
);

+ eventTarget.dispatchEvent(new CustomEvent('chat:start'));
+
const [res, controller] = await generateChatCompletion(localStorage.token, {
model: model.id,
messages: messagesBody,
@@ -745,6 +664,9 @@
continue;
} else {
responseMessage.content += data.message.content;
+ eventTarget.dispatchEvent(
+ new CustomEvent('chat', { detail: { content: data.message.content } })
+ );
messages = messages;
}
} else {
@@ -771,21 +693,13 @@
messages = messages;

if ($settings.notificationEnabled && !document.hasFocus()) {
- const notification = new Notification(
- selectedModelfile
- ? `${
- selectedModelfile.title.charAt(0).toUpperCase() +
- selectedModelfile.title.slice(1)
- }`
- : `${model.id}`,
- {
- body: responseMessage.content,
- icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
- }
- );
+ const notification = new Notification(`${model.id}`, {
+ body: responseMessage.content,
+ icon: `${WEBUI_BASE_URL}/static/favicon.png`
+ });
}

- if ($settings.responseAutoCopy) {
+ if ($settings?.responseAutoCopy ?? false) {
copyToClipboard(responseMessage.content);
}

|
@@ -846,6 +760,7 @@
|
|
|
|
|
|
stopResponseFlag = false;
|
|
|
await tick();
|
|
|
+ eventTarget.dispatchEvent(new CustomEvent('chat:finish'));
|
|
|
|
|
|
if (autoScroll) {
|
|
|
scrollToBottom();
|
|
@@ -887,6 +802,8 @@

scrollToBottom();

+ eventTarget.dispatchEvent(new CustomEvent('chat:start'));
+
try {
const [res, controller] = await generateOpenAIChatCompletion(
localStorage.token,
@@ -1007,6 +924,7 @@
continue;
} else {
responseMessage.content += value;
+ eventTarget.dispatchEvent(new CustomEvent('chat', { detail: { content: value } }));
messages = messages;
}

|
@@ -1057,6 +975,8 @@
|
|
|
stopResponseFlag = false;
|
|
|
await tick();
|
|
|
|
|
|
+ eventTarget.dispatchEvent(new CustomEvent('chat:finish'));
|
|
|
+
|
|
|
if (autoScroll) {
|
|
|
scrollToBottom();
|
|
|
}
|
|
@@ -1123,9 +1043,12 @@
let userPrompt = userMessage.content;

if ((userMessage?.models ?? [...selectedModels]).length == 1) {
- await sendPrompt(userPrompt, userMessage.id, undefined, false);
+ // If user message has only one model selected, sendPrompt automatically selects it for regeneration
+ await sendPrompt(userPrompt, userMessage.id);
} else {
- await sendPrompt(userPrompt, userMessage.id, message.model, false);
+ // If there are multiple models selected, use the model of the response message for regeneration
+ // e.g. many model chat
+ await sendPrompt(userPrompt, userMessage.id, { modelId: message.model });
}
}
};
@@ -1191,6 +1114,84 @@
}
};

+ const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
+ const responseMessage = history.messages[responseId];
+
+ responseMessage.statusHistory = [
+ {
+ done: false,
+ action: 'web_search',
+ description: $i18n.t('Generating search query')
+ }
+ ];
+ messages = messages;
+
+ const prompt = history.messages[parentId].content;
+ let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
+ (error) => {
+ console.log(error);
+ return prompt;
+ }
+ );
+
+ if (!searchQuery) {
+ toast.warning($i18n.t('No search query generated'));
+ responseMessage.statusHistory.push({
+ done: true,
+ error: true,
+ action: 'web_search',
+ description: 'No search query generated'
+ });
+
+ messages = messages;
+ }
+
+ responseMessage.statusHistory.push({
+ done: false,
+ action: 'web_search',
+ description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
+ });
+ messages = messages;
+
+ const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
+ console.log(error);
+ toast.error(error);
+
+ return null;
+ });
+
+ if (results) {
+ responseMessage.statusHistory.push({
+ done: true,
+ action: 'web_search',
+ description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
+ query: searchQuery,
+ urls: results.filenames
+ });
+
+ if ((responseMessage?.files ?? undefined) === undefined) {
+ responseMessage.files = [];
+ }
+
+ responseMessage.files.push({
+ collection_name: results.collection_name,
+ name: searchQuery,
+ type: 'web_search_results',
+ urls: results.filenames
+ });
+
+ messages = messages;
+ } else {
+ responseMessage.statusHistory.push({
+ done: true,
+ error: true,
+ action: 'web_search',
+ description: 'No search results found'
+ });
+ messages = messages;
+ }
+ };
+
const getTags = async () => {
return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
return [];
@@ -1206,7 +1207,13 @@
</title>
</svelte:head>

-<CallOverlay {submitPrompt} bind:files />
+<CallOverlay
+ {submitPrompt}
+ bind:files
+ modelId={selectedModelIds?.at(0) ?? null}
+ chatId={$chatId}
+ {eventTarget}
+/>

{#if !chatIdProp || (loaded && chatIdProp)}
<div