Chat.svelte 43 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612161316141615161616171618161916201621162216231624162516261627162816291630163116321633163416351636163716381639164016411642164316441645164616471648164916501651165216531654165516561657165816591660
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { getContext, onMount, tick } from 'svelte';
  6. import { goto } from '$app/navigation';
  7. import { page } from '$app/stores';
  8. import type { Writable } from 'svelte/store';
  9. import type { i18n as i18nType } from 'i18next';
  10. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  11. import {
  12. chatId,
  13. chats,
  14. config,
  15. type Model,
  16. models,
  17. settings,
  18. showSidebar,
  19. tags as _tags,
  20. WEBUI_NAME,
  21. banners,
  22. user,
  23. socket,
  24. showCallOverlay,
  25. tools,
  26. pageSkip,
  27. pageLimit
  28. } from '$lib/stores';
  29. import {
  30. convertMessagesToHistory,
  31. copyToClipboard,
  32. extractSentencesForAudio,
  33. getUserPosition,
  34. promptTemplate,
  35. splitStream
  36. } from '$lib/utils';
  37. import { generateChatCompletion } from '$lib/apis/ollama';
  38. import {
  39. addTagById,
  40. createNewChat,
  41. deleteTagById,
  42. getAllChatTags,
  43. getChatById,
  44. getChatList,
  45. getTagsById,
  46. updateChatById
  47. } from '$lib/apis/chats';
  48. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  49. import { runWebSearch } from '$lib/apis/rag';
  50. import { createOpenAITextStream } from '$lib/apis/streaming';
  51. import { queryMemory } from '$lib/apis/memories';
  52. import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
  53. import { chatCompleted, generateTitle, generateSearchQuery, chatAction } from '$lib/apis';
  54. import Banner from '../common/Banner.svelte';
  55. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  56. import Messages from '$lib/components/chat/Messages.svelte';
  57. import Navbar from '$lib/components/layout/Navbar.svelte';
  58. import CallOverlay from './MessageInput/CallOverlay.svelte';
  59. import { error } from '@sveltejs/kit';
  60. import ChatControls from './ChatControls.svelte';
  61. import EventConfirmDialog from '../common/ConfirmDialog.svelte';
// Per-component i18n instance provided via Svelte context by the root layout.
const i18n: Writable<i18nType> = getContext('i18n');

// When set (route param), the component loads and displays the chat with this id.
export let chatIdProp = '';
let loaded = false;

// Local event bus used to notify listeners (e.g. the call overlay) of chat lifecycle events.
const eventTarget = new EventTarget();

let showControls = false;
let stopResponseFlag = false; // set by the UI to abort a streaming response
let autoScroll = true; // keep the message list pinned to the bottom while streaming
let processing = '';
let messagesContainerElement: HTMLDivElement;

// State for the generic confirmation / input dialog driven by socket events.
let showEventConfirmation = false;
let eventConfirmationTitle = '';
let eventConfirmationMessage = '';
let eventConfirmationInput = false;
let eventConfirmationInputPlaceholder = '';
let eventConfirmationInputValue = '';
let eventCallback = null; // socket ack callback to invoke with the dialog result

let showModelSelector = true;
let selectedModels = ['']; // '' acts as the "no model selected" sentinel
let atSelectedModel: Model | undefined; // model picked via "@" mention; overrides selectedModels

let selectedModelIds = [];
// The "@"-mentioned model takes precedence over the dropdown selection.
$: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;

let selectedToolIds = [];
let webSearchEnabled = false;

let chat = null;
let tags = [];
let title = '';
let prompt = '';
let chatFiles = []; // files attached at chat level (doc/file/collection types)
let files = []; // files attached to the message currently being composed

let messages = []; // linear view of the active branch, derived from `history` below
// Message tree: all branches keyed by message id; `currentId` marks the active leaf.
// NOTE: this local `history` shadows `window.history` inside this script.
let history = {
	messages: {},
	currentId: null
};
let params = {};
let valves = {};
// Rebuild the linear `messages` list whenever the active leaf changes:
// walk parent links from `currentId` up to the root, prepending along the way,
// so `messages` is root-first for the currently selected branch.
$: if (history.currentId !== null) {
	let _messages = [];
	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}
// Load the chat whenever the `chatIdProp` route parameter changes;
// redirect home if the chat cannot be loaded (missing or unauthorized).
$: if (chatIdProp) {
	(async () => {
		console.log(chatIdProp);
		if (chatIdProp && (await loadChat())) {
			await tick();
			loaded = true;
			// Defer so the DOM has rendered before scrolling.
			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}
/**
 * Handles server-pushed `chat-events` for the currently open chat: status and
 * citation updates, streamed/replaced content, and confirmation/input dialog
 * requests. Mutates the referenced message in `history` and reassigns
 * `messages` to trigger Svelte reactivity.
 * @param event - socket payload ({ chat_id, message_id, data: { type, data } })
 * @param cb - socket ack callback; stored in `eventCallback` for dialog types
 */
const chatEventHandler = async (event, cb) => {
	// Only react to events addressed to the chat currently on screen.
	if (event.chat_id === $chatId) {
		await tick();
		console.log(event);
		// NOTE(review): assumes `event.message_id` always refers to an existing
		// message; an unknown id would make the mutations below throw.
		let message = history.messages[event.message_id];
		const type = event?.data?.type ?? null;
		const data = event?.data?.data ?? null;
		if (type === 'status') {
			if (message?.statusHistory) {
				message.statusHistory.push(data);
			} else {
				message.statusHistory = [data];
			}
		} else if (type === 'citation') {
			if (message?.citations) {
				message.citations.push(data);
			} else {
				message.citations = [data];
			}
		} else if (type === 'message') {
			// Append streamed content.
			message.content += data.content;
		} else if (type === 'replace') {
			// Overwrite the message content wholesale.
			message.content = data.content;
		} else if (type === 'confirmation') {
			// Open a yes/no dialog; the stored ack callback receives the result.
			eventCallback = cb;
			eventConfirmationInput = false;
			showEventConfirmation = true;
			eventConfirmationTitle = data.title;
			eventConfirmationMessage = data.message;
		} else if (type === 'input') {
			// Open a text-input dialog; the stored ack callback receives the value.
			eventCallback = cb;
			eventConfirmationInput = true;
			showEventConfirmation = true;
			eventConfirmationTitle = data.title;
			eventConfirmationMessage = data.message;
			eventConfirmationInputPlaceholder = data.placeholder;
			eventConfirmationInputValue = data?.value ?? '';
		} else {
			console.log('Unknown message type', data);
		}
		// Self-assignment triggers reactivity for the mutated message objects.
		messages = messages;
	}
};
  167. onMount(async () => {
  168. const onMessageHandler = async (event) => {
  169. if (event.origin === window.origin) {
  170. // Replace with your iframe's origin
  171. console.log('Message received from iframe:', event.data);
  172. if (event.data.type === 'input:prompt') {
  173. console.log(event.data.text);
  174. const inputElement = document.getElementById('chat-textarea');
  175. if (inputElement) {
  176. prompt = event.data.text;
  177. inputElement.focus();
  178. }
  179. }
  180. if (event.data.type === 'action:submit') {
  181. console.log(event.data.text);
  182. if (prompt !== '') {
  183. await tick();
  184. submitPrompt(prompt);
  185. }
  186. }
  187. if (event.data.type === 'input:prompt:submit') {
  188. console.log(event.data.text);
  189. if (prompt !== '') {
  190. await tick();
  191. submitPrompt(event.data.text);
  192. }
  193. }
  194. }
  195. };
  196. window.addEventListener('message', onMessageHandler);
  197. $socket.on('chat-events', chatEventHandler);
  198. if (!$chatId) {
  199. chatId.subscribe(async (value) => {
  200. if (!value) {
  201. await initNewChat();
  202. }
  203. });
  204. } else {
  205. if (!($settings.saveChatHistory ?? true)) {
  206. await goto('/');
  207. }
  208. }
  209. return () => {
  210. window.removeEventListener('message', onMessageHandler);
  211. $socket.off('chat-events');
  212. };
  213. });
  214. //////////////////////////
  215. // Web functions
  216. //////////////////////////
  217. const initNewChat = async () => {
  218. window.history.replaceState(history.state, '', `/`);
  219. await chatId.set('');
  220. autoScroll = true;
  221. title = '';
  222. messages = [];
  223. history = {
  224. messages: {},
  225. currentId: null
  226. };
  227. chatFiles = [];
  228. params = {};
  229. if ($page.url.searchParams.get('models')) {
  230. selectedModels = $page.url.searchParams.get('models')?.split(',');
  231. } else if ($settings?.models) {
  232. selectedModels = $settings?.models;
  233. } else if ($config?.default_models) {
  234. console.log($config?.default_models.split(',') ?? '');
  235. selectedModels = $config?.default_models.split(',');
  236. } else {
  237. selectedModels = [''];
  238. }
  239. if ($page.url.searchParams.get('q')) {
  240. prompt = $page.url.searchParams.get('q') ?? '';
  241. if (prompt) {
  242. await tick();
  243. submitPrompt(prompt);
  244. }
  245. }
  246. selectedModels = selectedModels.map((modelId) =>
  247. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  248. );
  249. const userSettings = await getUserSettings(localStorage.token);
  250. if (userSettings) {
  251. settings.set(userSettings.ui);
  252. } else {
  253. settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  254. }
  255. const chatInput = document.getElementById('chat-textarea');
  256. setTimeout(() => chatInput?.focus(), 0);
  257. };
/**
 * Loads the chat identified by `chatIdProp` into component state.
 * @returns true when the chat and its content were loaded; null (or
 *          undefined, when the fetch failed) otherwise — any falsy result
 *          makes the reactive caller redirect home.
 */
const loadChat = async () => {
	chatId.set(chatIdProp);
	// Redirect home when the chat cannot be fetched (missing or unauthorized).
	chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
		await goto('/');
		return null;
	});
	if (chat) {
		tags = await getTags();
		const chatContent = chat.chat;
		if (chatContent) {
			console.log(chatContent);
			// Legacy chats may store a single model value — normalize to an array.
			selectedModels =
				(chatContent?.models ?? undefined) !== undefined
					? chatContent.models
					: [chatContent.models ?? ''];
			// Legacy chats store a flat message list — convert to the tree form.
			history =
				(chatContent?.history ?? undefined) !== undefined
					? chatContent.history
					: convertMessagesToHistory(chatContent.messages);
			title = chatContent.title;
			// Prefer server-stored UI settings; fall back to the local copy.
			const userSettings = await getUserSettings(localStorage.token);
			if (userSettings) {
				await settings.set(userSettings.ui);
			} else {
				await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
			}
			params = chatContent?.params ?? {};
			chatFiles = chatContent?.files ?? [];
			autoScroll = true;
			await tick();
			// Mark the last message done in case a stream was interrupted mid-save.
			if (messages.length > 0) {
				history.messages[messages.at(-1).id].done = true;
			}
			await tick();
			return true;
		} else {
			return null;
		}
	}
};
  298. const scrollToBottom = async () => {
  299. await tick();
  300. if (messagesContainerElement) {
  301. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  302. }
  303. };
  304. const createMessagesList = (responseMessageId) => {
  305. const message = history.messages[responseMessageId];
  306. if (message.parentId) {
  307. return [...createMessagesList(message.parentId), message];
  308. } else {
  309. return [message];
  310. }
  311. };
/**
 * Post-stream hook: renders mermaid diagrams, notifies the backend that the
 * completion finished, merges any server-side message edits back into
 * `history`, and persists the chat.
 * @param chatId - id of the chat the response belongs to
 * @param modelId - model that produced the response
 * @param responseMessageId - id of the assistant message that completed
 * @param messages - linear message list for the completed branch
 */
const chatCompletedHandler = async (chatId, modelId, responseMessageId, messages) => {
	await mermaid.run({
		querySelector: '.mermaid'
	});
	const res = await chatCompleted(localStorage.token, {
		model: modelId,
		messages: messages.map((m) => ({
			id: m.id,
			role: m.role,
			content: m.content,
			info: m.info ? m.info : undefined,
			timestamp: m.timestamp
		})),
		chat_id: chatId,
		session_id: $socket?.id,
		id: responseMessageId
	}).catch((error) => {
		toast.error(error);
		messages.at(-1).error = { content: error };
		return null;
	});
	if (res !== null) {
		// Update chat history with the new messages
		for (const message of res.messages) {
			history.messages[message.id] = {
				...history.messages[message.id],
				// Keep the client-side text around when the server rewrote it.
				...(history.messages[message.id].content !== message.content
					? { originalContent: history.messages[message.id].content }
					: {}),
				...message
			};
		}
	}
	// Persist only if the user is still on this chat and saving is enabled.
	if ($chatId == chatId) {
		if ($settings.saveChatHistory ?? true) {
			chat = await updateChatById(localStorage.token, chatId, {
				models: selectedModels,
				messages: messages,
				history: history,
				params: params,
				files: chatFiles
			});
			await chats.set(
				await getChatList(localStorage.token, 0, $pageSkip * $pageLimit || $pageLimit)
			);
		}
	}
};
/**
 * Invokes a server-side chat action for a message, merges any resulting
 * message edits into `history`, and persists the chat. Unlike
 * chatCompletedHandler, this sends the component-level `messages` list
 * rather than a caller-provided branch.
 * @param chatId - id of the chat the action applies to
 * @param actionId - id of the registered action to run
 * @param modelId - model associated with the target message
 * @param responseMessageId - id of the message the action targets
 */
const chatActionHandler = async (chatId, actionId, modelId, responseMessageId) => {
	const res = await chatAction(localStorage.token, actionId, {
		model: modelId,
		messages: messages.map((m) => ({
			id: m.id,
			role: m.role,
			content: m.content,
			info: m.info ? m.info : undefined,
			timestamp: m.timestamp
		})),
		chat_id: chatId,
		session_id: $socket?.id,
		id: responseMessageId
	}).catch((error) => {
		toast.error(error);
		messages.at(-1).error = { content: error };
		return null;
	});
	if (res !== null) {
		// Update chat history with the new messages
		for (const message of res.messages) {
			history.messages[message.id] = {
				...history.messages[message.id],
				// Keep the client-side text around when the server rewrote it.
				...(history.messages[message.id].content !== message.content
					? { originalContent: history.messages[message.id].content }
					: {}),
				...message
			};
		}
	}
	// Persist only if the user is still on this chat and saving is enabled.
	if ($chatId == chatId) {
		if ($settings.saveChatHistory ?? true) {
			chat = await updateChatById(localStorage.token, chatId, {
				models: selectedModels,
				messages: messages,
				history: history,
				params: params,
				files: chatFiles
			});
			await chats.set(
				await getChatList(localStorage.token, 0, $pageSkip * $pageLimit || $pageLimit)
			);
		}
	}
};
  405. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  406. return setInterval(() => {
  407. $socket?.emit('usage', {
  408. action: 'chat',
  409. model: modelId,
  410. chat_id: chatId
  411. });
  412. }, 1000);
  413. };
  414. //////////////////////////
  415. // Chat functions
  416. //////////////////////////
/**
 * Validates component state and, if ready, appends the user's prompt as a new
 * message in the history tree, then kicks off generation via sendPrompt.
 * Rejects (with a toast) when no model is selected, the previous response is
 * still streaming or errored, or attached files are still processing.
 * @param userPrompt - text the user submitted
 * @param _raw - currently unused passthrough flag
 * @returns array of model responses (empty when validation failed)
 */
const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
	let _responses = [];
	console.log('submitPrompt', $chatId);
	// Blank out any selected model ids that no longer exist on the server.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);
	if (selectedModels.includes('')) {
		toast.error($i18n.t('Model not selected'));
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done
		console.log('wait');
	} else if (messages.length != 0 && messages.at(-1).error) {
		// Error in response
		toast.error(
			$i18n.t(
				`Oops! There was an error in the previous response. Please try again or contact admin.`
			)
		);
	} else if (
		files.length > 0 &&
		files.filter((file) => file.type !== 'image' && file.status !== 'processed').length > 0
	) {
		// Upload not done
		toast.error(
			$i18n.t(
				`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
			)
		);
	} else {
		// Reset chat input textarea
		const chatTextAreaElement = document.getElementById('chat-textarea');
		if (chatTextAreaElement) {
			chatTextAreaElement.value = '';
			chatTextAreaElement.style.height = '';
		}
		const _files = JSON.parse(JSON.stringify(files));
		// Promote document-type attachments to chat-level files.
		chatFiles.push(..._files.filter((item) => ['doc', 'file', 'collection'].includes(item.type)));
		chatFiles = chatFiles.filter(
			// Remove duplicates
			(item, index, array) =>
				array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
		);
		files = [];
		prompt = '';
		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			content: userPrompt,
			files: _files.length > 0 ? _files : undefined,
			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
			// Deduplicate while preserving selection order.
			models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
		};
		// Add message to history and Set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;
		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}
		// Wait until history/message have been updated
		await tick();
		_responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
	}
	return _responses;
};
/**
 * Creates an assistant placeholder message per selected model and dispatches
 * generation to the matching backend (OpenAI-compatible or Ollama). On the
 * first exchange it also creates the persisted chat record.
 * @param prompt - user prompt text
 * @param parentId - id of the user message the responses branch from
 * @param modelId - optional single-model override (e.g. regenerate)
 * @param newChat - when true and this is the first exchange, persist a new chat
 * @returns array of response strings (one per model)
 */
const sendPrompt = async (prompt, parentId, { modelId = null, newChat = false } = {}) => {
	let _responses = [];
	// If modelId is provided, use it, else use selected model
	let selectedModelIds = modelId
		? [modelId]
		: atSelectedModel !== undefined
		? [atSelectedModel.id]
		: selectedModels;
	// Create response messages for each selected model
	const responseMessageIds = {};
	for (const modelId of selectedModelIds) {
		const model = $models.filter((m) => m.id === modelId).at(0);
		if (model) {
			let responseMessageId = uuidv4();
			let responseMessage = {
				parentId: parentId,
				id: responseMessageId,
				childrenIds: [],
				role: 'assistant',
				content: '',
				model: model.id,
				modelName: model.name ?? model.id,
				userContext: null,
				timestamp: Math.floor(Date.now() / 1000) // Unix epoch
			};
			// Add message to history and Set currentId to messageId
			history.messages[responseMessageId] = responseMessage;
			history.currentId = responseMessageId;
			// Append messageId to childrenIds of parent message
			if (parentId !== null) {
				history.messages[parentId].childrenIds = [
					...history.messages[parentId].childrenIds,
					responseMessageId
				];
			}
			responseMessageIds[modelId] = responseMessageId;
		}
	}
	await tick();
	// Create new chat if only one message in messages
	if (newChat && messages.length == 2) {
		if ($settings.saveChatHistory ?? true) {
			chat = await createNewChat(localStorage.token, {
				id: $chatId,
				title: $i18n.t('New Chat'),
				models: selectedModels,
				system: $settings.system ?? undefined,
				params: params,
				messages: messages,
				history: history,
				tags: [],
				timestamp: Date.now()
			});
			await chats.set(
				await getChatList(localStorage.token, 0, $pageSkip * $pageLimit || $pageLimit)
			);
			await chatId.set(chat.id);
		} else {
			// History saving disabled: use a sentinel id, nothing is persisted.
			await chatId.set('local');
		}
		await tick();
	}
	// Snapshot the chat id so a mid-stream navigation can be detected later.
	const _chatId = JSON.parse(JSON.stringify($chatId));
	// Fan out generation to all selected models in parallel.
	await Promise.all(
		selectedModelIds.map(async (modelId) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);
			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);
				// Warn (but still proceed) when the model declares no vision support.
				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}
				let responseMessageId = responseMessageIds[modelId];
				let responseMessage = history.messages[responseMessageId];
				let userContext = null;
				// Optionally prepend stored memories matching the prompt.
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});
						if (res) {
							if (res.documents[0].length > 0) {
								// Format each memory as "N. [YYYY-MM-DD]. text".
								userContext = res.documents[0].reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[0][index].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];
									return `${acc}${index + 1}. [${createdAtDate}]. ${doc}\n`;
								}, '');
							}
							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;
				// Heartbeat so the backend can track model usage while streaming.
				const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
				if (webSearchEnabled) {
					await getWebSearchResults(model.id, parentId, responseMessageId);
				}
				let _response = null;
				if (model?.owned_by === 'openai') {
					_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					_response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}
				_responses.push(_response);
				if (chatEventEmitter) clearInterval(chatEventEmitter);
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);
	// Refresh the sidebar chat list after responses complete.
	await chats.set(await getChatList(localStorage.token, 0, $pageSkip * $pageLimit || $pageLimit));
	return _responses;
};
  609. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  610. let _response = null;
  611. const responseMessage = history.messages[responseMessageId];
  612. const userMessage = history.messages[responseMessage.parentId];
  613. // Wait until history/message have been updated
  614. await tick();
  615. // Scroll down
  616. scrollToBottom();
  617. const messagesBody = [
  618. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  619. ? {
  620. role: 'system',
  621. content: `${promptTemplate(
  622. params?.system ?? $settings?.system ?? '',
  623. $user.name,
  624. $settings?.userLocation
  625. ? await getAndUpdateUserLocation(localStorage.token)
  626. : undefined
  627. )}${
  628. responseMessage?.userContext ?? null
  629. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  630. : ''
  631. }`
  632. }
  633. : undefined,
  634. ...messages
  635. ]
  636. .filter((message) => message?.content?.trim())
  637. .map((message, idx, arr) => {
  638. // Prepare the base message object
  639. const baseMessage = {
  640. role: message.role,
  641. content: message.content
  642. };
  643. // Extract and format image URLs if any exist
  644. const imageUrls = message.files
  645. ?.filter((file) => file.type === 'image')
  646. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  647. // Add images array only if it contains elements
  648. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  649. baseMessage.images = imageUrls;
  650. }
  651. return baseMessage;
  652. });
  653. let lastImageIndex = -1;
  654. // Find the index of the last object with images
  655. messagesBody.forEach((item, index) => {
  656. if (item.images) {
  657. lastImageIndex = index;
  658. }
  659. });
  660. // Remove images from all but the last one
  661. messagesBody.forEach((item, index) => {
  662. if (index !== lastImageIndex) {
  663. delete item.images;
  664. }
  665. });
  666. let files = JSON.parse(JSON.stringify(chatFiles));
  667. if (model?.info?.meta?.knowledge ?? false) {
  668. files.push(...model.info.meta.knowledge);
  669. }
  670. files.push(
  671. ...(userMessage?.files ?? []).filter((item) =>
  672. ['doc', 'file', 'collection'].includes(item.type)
  673. ),
  674. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  675. );
  676. eventTarget.dispatchEvent(
  677. new CustomEvent('chat:start', {
  678. detail: {
  679. id: responseMessageId
  680. }
  681. })
  682. );
  683. await tick();
  684. const [res, controller] = await generateChatCompletion(localStorage.token, {
  685. stream: true,
  686. model: model.id,
  687. messages: messagesBody,
  688. options: {
  689. ...(params ?? $settings.params ?? {}),
  690. stop:
  691. params?.stop ?? $settings?.params?.stop ?? undefined
  692. ? (params?.stop ?? $settings.params.stop).map((str) =>
  693. decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  694. )
  695. : undefined,
  696. num_predict: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  697. repeat_penalty:
  698. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined
  699. },
  700. format: $settings.requestFormat ?? undefined,
  701. keep_alive: $settings.keepAlive ?? undefined,
  702. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  703. files: files.length > 0 ? files : undefined,
  704. ...(Object.keys(valves).length ? { valves } : {}),
  705. session_id: $socket?.id,
  706. chat_id: $chatId,
  707. id: responseMessageId
  708. });
  709. if (res && res.ok) {
  710. console.log('controller', controller);
  711. const reader = res.body
  712. .pipeThrough(new TextDecoderStream())
  713. .pipeThrough(splitStream('\n'))
  714. .getReader();
  715. while (true) {
  716. const { value, done } = await reader.read();
  717. if (done || stopResponseFlag || _chatId !== $chatId) {
  718. responseMessage.done = true;
  719. messages = messages;
  720. if (stopResponseFlag) {
  721. controller.abort('User: Stop Response');
  722. } else {
  723. const messages = createMessagesList(responseMessageId);
  724. await chatCompletedHandler(_chatId, model.id, responseMessageId, messages);
  725. }
  726. _response = responseMessage.content;
  727. break;
  728. }
  729. try {
  730. let lines = value.split('\n');
  731. for (const line of lines) {
  732. if (line !== '') {
  733. console.log(line);
  734. let data = JSON.parse(line);
  735. if ('citations' in data) {
  736. responseMessage.citations = data.citations;
  737. continue;
  738. }
  739. if ('detail' in data) {
  740. throw data;
  741. }
  742. if (data.done == false) {
  743. if (responseMessage.content == '' && data.message.content == '\n') {
  744. continue;
  745. } else {
  746. responseMessage.content += data.message.content;
  747. const sentences = extractSentencesForAudio(responseMessage.content);
  748. sentences.pop();
  749. // dispatch only last sentence and make sure it hasn't been dispatched before
  750. if (
  751. sentences.length > 0 &&
  752. sentences[sentences.length - 1] !== responseMessage.lastSentence
  753. ) {
  754. responseMessage.lastSentence = sentences[sentences.length - 1];
  755. eventTarget.dispatchEvent(
  756. new CustomEvent('chat', {
  757. detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
  758. })
  759. );
  760. }
  761. messages = messages;
  762. }
  763. } else {
  764. responseMessage.done = true;
  765. if (responseMessage.content == '') {
  766. responseMessage.error = {
  767. code: 400,
  768. content: `Oops! No text generated from Ollama, Please try again.`
  769. };
  770. }
  771. responseMessage.context = data.context ?? null;
  772. responseMessage.info = {
  773. total_duration: data.total_duration,
  774. load_duration: data.load_duration,
  775. sample_count: data.sample_count,
  776. sample_duration: data.sample_duration,
  777. prompt_eval_count: data.prompt_eval_count,
  778. prompt_eval_duration: data.prompt_eval_duration,
  779. eval_count: data.eval_count,
  780. eval_duration: data.eval_duration
  781. };
  782. messages = messages;
  783. if ($settings.notificationEnabled && !document.hasFocus()) {
  784. const notification = new Notification(`${model.id}`, {
  785. body: responseMessage.content,
  786. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  787. });
  788. }
  789. if ($settings?.responseAutoCopy ?? false) {
  790. copyToClipboard(responseMessage.content);
  791. }
  792. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  793. await tick();
  794. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  795. }
  796. }
  797. }
  798. }
  799. } catch (error) {
  800. console.log(error);
  801. if ('detail' in error) {
  802. toast.error(error.detail);
  803. }
  804. break;
  805. }
  806. if (autoScroll) {
  807. scrollToBottom();
  808. }
  809. }
  810. if ($chatId == _chatId) {
  811. if ($settings.saveChatHistory ?? true) {
  812. chat = await updateChatById(localStorage.token, _chatId, {
  813. messages: messages,
  814. history: history,
  815. models: selectedModels,
  816. params: params,
  817. files: chatFiles
  818. });
  819. await chats.set(
  820. await getChatList(localStorage.token, 0, $pageSkip * $pageLimit || $pageLimit)
  821. );
  822. }
  823. }
  824. } else {
  825. if (res !== null) {
  826. const error = await res.json();
  827. console.log(error);
  828. if ('detail' in error) {
  829. toast.error(error.detail);
  830. responseMessage.error = { content: error.detail };
  831. } else {
  832. toast.error(error.error);
  833. responseMessage.error = { content: error.error };
  834. }
  835. } else {
  836. toast.error(
  837. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  838. );
  839. responseMessage.error = {
  840. content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  841. provider: 'Ollama'
  842. })
  843. };
  844. }
  845. responseMessage.done = true;
  846. messages = messages;
  847. }
  848. stopResponseFlag = false;
  849. await tick();
  850. let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
  851. if (lastSentence) {
  852. eventTarget.dispatchEvent(
  853. new CustomEvent('chat', {
  854. detail: { id: responseMessageId, content: lastSentence }
  855. })
  856. );
  857. }
  858. eventTarget.dispatchEvent(
  859. new CustomEvent('chat:finish', {
  860. detail: {
  861. id: responseMessageId,
  862. content: responseMessage.content
  863. }
  864. })
  865. );
  866. if (autoScroll) {
  867. scrollToBottom();
  868. }
  869. if (messages.length == 2 && messages.at(1).content !== '' && selectedModels[0] === model.id) {
  870. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  871. const _title = await generateChatTitle(userPrompt);
  872. await setChatTitle(_chatId, _title);
  873. }
  874. return _response;
  875. };
	/**
	 * Stream a chat completion from an OpenAI-compatible endpoint into the
	 * response message identified by `responseMessageId`, updating the reactive
	 * `messages`/`history` state as chunks arrive.
	 *
	 * Side effects: dispatches 'chat:start' / 'chat' / 'chat:finish' events on
	 * `eventTarget` (consumed e.g. by the call overlay / TTS), optionally shows a
	 * desktop notification, auto-copies / auto-plays the response, persists the
	 * chat, and triggers title generation for a brand-new chat.
	 *
	 * Returns the final response text, or null if the stream errored/was aborted
	 * before completion.
	 */
	const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
		let _response = null;
		const responseMessage = history.messages[responseMessageId];
		const userMessage = history.messages[responseMessage.parentId];

		// Assemble the file payload: chat-level files (deep-copied so the request
		// can't mutate state), model knowledge files, user-attached docs/files/
		// collections, and any web-search results attached to this response.
		let files = JSON.parse(JSON.stringify(chatFiles));
		if (model?.info?.meta?.knowledge ?? false) {
			files.push(...model.info.meta.knowledge);
		}
		files.push(
			...(userMessage?.files ?? []).filter((item) =>
				['doc', 'file', 'collection'].includes(item.type)
			),
			...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
		);

		scrollToBottom();

		eventTarget.dispatchEvent(
			new CustomEvent('chat:start', {
				detail: {
					id: responseMessageId
				}
			})
		);
		await tick();

		try {
			const [res, controller] = await generateOpenAIChatCompletion(
				localStorage.token,
				{
					stream: true,
					model: model.id,
					// Only request usage stats when the model advertises the capability.
					stream_options:
						model.info?.meta?.capabilities?.usage ?? false
							? {
									include_usage: true
								}
							: undefined,
					messages: [
						// Optional system message, built from params/settings plus any
						// per-response user context appended underneath the template.
						params?.system || $settings.system || (responseMessage?.userContext ?? null)
							? {
									role: 'system',
									content: `${promptTemplate(
										params?.system ?? $settings?.system ?? '',
										$user.name,
										$settings?.userLocation
											? await getAndUpdateUserLocation(localStorage.token)
											: undefined
									)}${
										responseMessage?.userContext ?? null
											? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
											: ''
									}`
								}
							: undefined,
						...messages
					]
						// Drop the undefined system slot and any empty-content messages.
						.filter((message) => message?.content?.trim())
						.map((message, idx, arr) => ({
							role: message.role,
							// User messages carrying images become multimodal content arrays;
							// everything else is plain text. The last message prefers its
							// RAG-augmented content (raContent) when present.
							...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
							message.role === 'user'
								? {
										content: [
											{
												type: 'text',
												text:
													arr.length - 1 !== idx
														? message.content
														: message?.raContent ?? message.content
											},
											...message.files
												.filter((file) => file.type === 'image')
												.map((file) => ({
													type: 'image_url',
													image_url: {
														url: file.url
													}
												}))
										]
									}
								: {
										content:
											arr.length - 1 !== idx
												? message.content
												: message?.raContent ?? message.content
									})
						})),
					seed: params?.seed ?? $settings?.params?.seed ?? undefined,
					// Stop sequences are stored URL-encoded with escapes; decode each one.
					stop:
						params?.stop ?? $settings?.params?.stop ?? undefined
							? (params?.stop ?? $settings.params.stop).map((str) =>
									decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
								)
							: undefined,
					temperature: params?.temperature ?? $settings?.params?.temperature ?? undefined,
					top_p: params?.top_p ?? $settings?.params?.top_p ?? undefined,
					frequency_penalty:
						params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined,
					max_tokens: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
					tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
					files: files.length > 0 ? files : undefined,
					...(Object.keys(valves).length ? { valves } : {}),
					session_id: $socket?.id,
					chat_id: $chatId,
					id: responseMessageId
				},
				`${WEBUI_BASE_URL}/api`
			);

			// Wait until history/message have been updated
			await tick();

			scrollToBottom();

			if (res && res.ok && res.body) {
				const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
				let lastUsage = null;

				// Consume SSE updates until the stream finishes, the user stops the
				// response, or the user navigated to a different chat.
				for await (const update of textStream) {
					const { value, done, citations, error, usage } = update;

					if (error) {
						await handleOpenAIError(error, null, model, responseMessage);
						break;
					}

					if (done || stopResponseFlag || _chatId !== $chatId) {
						responseMessage.done = true;
						// Self-assignment triggers Svelte reactivity on the bound array.
						messages = messages;

						if (stopResponseFlag) {
							controller.abort('User: Stop Response');
						} else {
							// Shadows the outer `messages` on purpose: the completion hook
							// gets the linear thread for this response only.
							const messages = createMessagesList(responseMessageId);
							await chatCompletedHandler(_chatId, model.id, responseMessageId, messages);
						}

						_response = responseMessage.content;
						break;
					}

					if (usage) {
						lastUsage = usage;
					}

					if (citations) {
						responseMessage.citations = citations;
						continue;
					}

					// Skip a leading bare newline so responses don't start blank.
					if (responseMessage.content == '' && value == '\n') {
						continue;
					} else {
						responseMessage.content += value;

						const sentences = extractSentencesForAudio(responseMessage.content);
						sentences.pop();

						// dispatch only last sentence and make sure it hasn't been dispatched before
						if (
							sentences.length > 0 &&
							sentences[sentences.length - 1] !== responseMessage.lastSentence
						) {
							responseMessage.lastSentence = sentences[sentences.length - 1];
							eventTarget.dispatchEvent(
								new CustomEvent('chat', {
									detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
								})
							);
						}

						messages = messages;
					}

					if (autoScroll) {
						scrollToBottom();
					}
				}

				// Post-stream conveniences: desktop notification when unfocused,
				// auto-copy, and auto-playback via the message's speak button.
				if ($settings.notificationEnabled && !document.hasFocus()) {
					const notification = new Notification(`${model.id}`, {
						body: responseMessage.content,
						icon: `${WEBUI_BASE_URL}/static/favicon.png`
					});
				}

				if ($settings.responseAutoCopy) {
					copyToClipboard(responseMessage.content);
				}

				if ($settings.responseAutoPlayback && !$showCallOverlay) {
					await tick();
					document.getElementById(`speak-button-${responseMessage.id}`)?.click();
				}

				if (lastUsage) {
					responseMessage.info = { ...lastUsage, openai: true };
				}

				// Persist only if the user is still on this chat and history saving is on.
				if ($chatId == _chatId) {
					if ($settings.saveChatHistory ?? true) {
						chat = await updateChatById(localStorage.token, _chatId, {
							models: selectedModels,
							messages: messages,
							history: history,
							params: params,
							files: chatFiles
						});

						await chats.set(
							await getChatList(localStorage.token, 0, $pageSkip * $pageLimit || $pageLimit)
						);
					}
				}
			} else {
				await handleOpenAIError(null, res, model, responseMessage);
			}
		} catch (error) {
			await handleOpenAIError(error, null, model, responseMessage);
		}
		messages = messages;

		stopResponseFlag = false;
		await tick();

		// Flush the final (possibly partial) sentence to TTS listeners.
		let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
		if (lastSentence) {
			eventTarget.dispatchEvent(
				new CustomEvent('chat', {
					detail: { id: responseMessageId, content: lastSentence }
				})
			);
		}
		eventTarget.dispatchEvent(
			new CustomEvent('chat:finish', {
				detail: {
					id: responseMessageId,
					content: responseMessage.content
				}
			})
		);

		if (autoScroll) {
			scrollToBottom();
		}

		// First exchange of a fresh chat: fix up the URL and generate a title.
		if (messages.length == 2 && selectedModels[0] === model.id) {
			window.history.replaceState(history.state, '', `/c/${_chatId}`);
			const _title = await generateChatTitle(userPrompt);
			await setChatTitle(_chatId, _title);
		}

		return _response;
	};
  1102. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  1103. let errorMessage = '';
  1104. let innerError;
  1105. if (error) {
  1106. innerError = error;
  1107. } else if (res !== null) {
  1108. innerError = await res.json();
  1109. }
  1110. console.error(innerError);
  1111. if ('detail' in innerError) {
  1112. toast.error(innerError.detail);
  1113. errorMessage = innerError.detail;
  1114. } else if ('error' in innerError) {
  1115. if ('message' in innerError.error) {
  1116. toast.error(innerError.error.message);
  1117. errorMessage = innerError.error.message;
  1118. } else {
  1119. toast.error(innerError.error);
  1120. errorMessage = innerError.error;
  1121. }
  1122. } else if ('message' in innerError) {
  1123. toast.error(innerError.message);
  1124. errorMessage = innerError.message;
  1125. }
  1126. responseMessage.error = {
  1127. content:
  1128. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  1129. provider: model.name ?? model.id
  1130. }) +
  1131. '\n' +
  1132. errorMessage
  1133. };
  1134. responseMessage.done = true;
  1135. messages = messages;
  1136. };
  1137. const stopResponse = () => {
  1138. stopResponseFlag = true;
  1139. console.log('stopResponse');
  1140. };
  1141. const regenerateResponse = async (message) => {
  1142. console.log('regenerateResponse');
  1143. if (messages.length != 0) {
  1144. let userMessage = history.messages[message.parentId];
  1145. let userPrompt = userMessage.content;
  1146. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  1147. // If user message has only one model selected, sendPrompt automatically selects it for regeneration
  1148. await sendPrompt(userPrompt, userMessage.id);
  1149. } else {
  1150. // If there are multiple models selected, use the model of the response message for regeneration
  1151. // e.g. many model chat
  1152. await sendPrompt(userPrompt, userMessage.id, { modelId: message.model });
  1153. }
  1154. }
  1155. };
  1156. const continueGeneration = async () => {
  1157. console.log('continueGeneration');
  1158. const _chatId = JSON.parse(JSON.stringify($chatId));
  1159. if (messages.length != 0 && messages.at(-1).done == true) {
  1160. const responseMessage = history.messages[history.currentId];
  1161. responseMessage.done = false;
  1162. await tick();
  1163. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  1164. if (model) {
  1165. if (model?.owned_by === 'openai') {
  1166. await sendPromptOpenAI(
  1167. model,
  1168. history.messages[responseMessage.parentId].content,
  1169. responseMessage.id,
  1170. _chatId
  1171. );
  1172. } else
  1173. await sendPromptOllama(
  1174. model,
  1175. history.messages[responseMessage.parentId].content,
  1176. responseMessage.id,
  1177. _chatId
  1178. );
  1179. }
  1180. } else {
  1181. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  1182. }
  1183. };
  1184. const generateChatTitle = async (userPrompt) => {
  1185. if ($settings?.title?.auto ?? true) {
  1186. const title = await generateTitle(
  1187. localStorage.token,
  1188. selectedModels[0],
  1189. userPrompt,
  1190. $chatId
  1191. ).catch((error) => {
  1192. console.error(error);
  1193. return 'New Chat';
  1194. });
  1195. return title;
  1196. } else {
  1197. return `${userPrompt}`;
  1198. }
  1199. };
  1200. const setChatTitle = async (_chatId, _title) => {
  1201. if (_chatId === $chatId) {
  1202. title = _title;
  1203. }
  1204. if ($settings.saveChatHistory ?? true) {
  1205. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1206. await chats.set(
  1207. await getChatList(localStorage.token, 0, $pageSkip * $pageLimit || $pageLimit)
  1208. );
  1209. }
  1210. };
  1211. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  1212. const responseMessage = history.messages[responseId];
  1213. const userMessage = history.messages[parentId];
  1214. responseMessage.statusHistory = [
  1215. {
  1216. done: false,
  1217. action: 'web_search',
  1218. description: $i18n.t('Generating search query')
  1219. }
  1220. ];
  1221. messages = messages;
  1222. const prompt = userMessage.content;
  1223. let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
  1224. (error) => {
  1225. console.log(error);
  1226. return prompt;
  1227. }
  1228. );
  1229. if (!searchQuery) {
  1230. toast.warning($i18n.t('No search query generated'));
  1231. responseMessage.statusHistory.push({
  1232. done: true,
  1233. error: true,
  1234. action: 'web_search',
  1235. description: 'No search query generated'
  1236. });
  1237. messages = messages;
  1238. }
  1239. responseMessage.statusHistory.push({
  1240. done: false,
  1241. action: 'web_search',
  1242. description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
  1243. });
  1244. messages = messages;
  1245. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  1246. console.log(error);
  1247. toast.error(error);
  1248. return null;
  1249. });
  1250. if (results) {
  1251. responseMessage.statusHistory.push({
  1252. done: true,
  1253. action: 'web_search',
  1254. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  1255. query: searchQuery,
  1256. urls: results.filenames
  1257. });
  1258. if (responseMessage?.files ?? undefined === undefined) {
  1259. responseMessage.files = [];
  1260. }
  1261. responseMessage.files.push({
  1262. collection_name: results.collection_name,
  1263. name: searchQuery,
  1264. type: 'web_search_results',
  1265. urls: results.filenames
  1266. });
  1267. messages = messages;
  1268. } else {
  1269. responseMessage.statusHistory.push({
  1270. done: true,
  1271. error: true,
  1272. action: 'web_search',
  1273. description: 'No search results found'
  1274. });
  1275. messages = messages;
  1276. }
  1277. };
  1278. const getTags = async () => {
  1279. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1280. return [];
  1281. });
  1282. };
  1283. </script>
<!-- Browser tab title: truncate chat titles longer than 30 chars, always suffix the app name. -->
<svelte:head>
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

<!-- Hidden audio element used for response audio playback. -->
<audio id="audioElement" src="" style="display: none;" />
<!-- Confirmation dialog for server-driven events; the result (input value, true on
     plain confirm, false on cancel) is passed back through eventCallback. -->
<EventConfirmDialog
	bind:show={showEventConfirmation}
	title={eventConfirmationTitle}
	message={eventConfirmationMessage}
	input={eventConfirmationInput}
	inputPlaceholder={eventConfirmationInputPlaceholder}
	inputValue={eventConfirmationInputValue}
	on:confirm={(e) => {
		if (e.detail) {
			eventCallback(e.detail);
		} else {
			eventCallback(true);
		}
	}}
	on:cancel={() => {
		eventCallback(false);
	}}
/>
<!-- Voice-call overlay; wired to the first selected model and the shared prompt/stop handlers. -->
{#if $showCallOverlay}
	<CallOverlay
		{submitPrompt}
		{stopResponse}
		bind:files
		modelId={selectedModelIds?.at(0) ?? null}
		chatId={$chatId}
		{eventTarget}
	/>
{/if}
<!-- Main chat layout. Rendered immediately for a new chat (no chatIdProp) or once an
     existing chat has finished loading. -->
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="h-screen max-h-[100dvh] {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		<!-- Optional user-configured background image with a readability gradient on top. -->
		{#if $settings?.backgroundImageUrl ?? null}
			<div
				class="absolute {$showSidebar
					? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
					: ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
				style="background-image: url({$settings.backgroundImageUrl}) "
			/>

			<div
				class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
			/>
		{/if}

		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			bind:showControls
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>

		<!-- Announcement banners: only on an empty, single-model, not-yet-persisted chat;
		     dismissed banner ids are remembered in localStorage. -->
		{#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
			<div
				class="absolute top-[4.25rem] w-full {$showSidebar
					? 'md:max-w-[calc(100%-260px)]'
					: ''} {showControls ? 'lg:pr-[24rem]' : ''} z-20"
			>
				<div class=" flex flex-col gap-1 w-full">
					{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
						<Banner
							{banner}
							on:dismiss={(e) => {
								const bannerId = e.detail;

								localStorage.setItem(
									'dismissedBannerIds',
									JSON.stringify(
										[
											bannerId,
											...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
										].filter((id) => $banners.find((b) => b.id === id))
									)
								);
							}}
						/>
					{/each}
				</div>
			</div>
		{/if}

		<div class="flex flex-col flex-auto z-10">
			<!-- Scrollable message area; autoScroll stays on while the user is within
			     ~5px of the bottom and is disabled when they scroll up. -->
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10 scrollbar-hidden {showControls
					? 'lg:pr-[24rem]'
					: ''}"
				id="messages-container"
				bind:this={messagesContainerElement}
				on:scroll={(e) => {
					autoScroll =
						messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
						messagesContainerElement.clientHeight + 5;
				}}
			>
				<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
					<Messages
						chatId={$chatId}
						{selectedModels}
						{processing}
						bind:history
						bind:messages
						bind:autoScroll
						bind:prompt
						bottomPadding={files.length > 0}
						{sendPrompt}
						{continueGeneration}
						{regenerateResponse}
						{chatActionHandler}
					/>
				</div>
			</div>

			<div class={showControls ? 'lg:pr-[24rem]' : ''}>
				<!-- Prompt input; availableToolIds is the deduplicated union of the tool ids
				     declared by every currently selected model. -->
				<MessageInput
					bind:files
					bind:prompt
					bind:autoScroll
					bind:selectedToolIds
					bind:webSearchEnabled
					bind:atSelectedModel
					availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
						const model = $models.find((m) => m.id === e);
						if (model?.info?.meta?.toolIds ?? false) {
							return [...new Set([...a, ...model.info.meta.toolIds])];
						}
						return a;
					}, [])}
					transparentBackground={$settings?.backgroundImageUrl ?? false}
					{selectedModels}
					{messages}
					{submitPrompt}
					{stopResponse}
				/>
			</div>
		</div>

		<!-- Side panel with per-chat files, params, and valves; fed the resolved model
		     objects for the current selection. -->
		<ChatControls
			models={selectedModelIds.reduce((a, e, i, arr) => {
				const model = $models.find((m) => m.id === e);
				if (model) {
					return [...a, model];
				}
				return a;
			}, [])}
			bind:show={showControls}
			bind:chatFiles
			bind:params
			bind:valves
		/>
	</div>
{/if}