Chat.svelte

  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { getContext, onMount, tick } from 'svelte';
  6. import { goto } from '$app/navigation';
  7. import { page } from '$app/stores';
  8. import type { Writable } from 'svelte/store';
  9. import type { i18n as i18nType } from 'i18next';
  10. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  11. import {
  12. chatId,
  13. chats,
  14. config,
  15. type Model,
  16. models,
  17. settings,
  18. showSidebar,
  19. tags as _tags,
  20. WEBUI_NAME,
  21. banners,
  22. user,
  23. socket,
  24. showCallOverlay,
  25. tools,
  26. currentChatPage
  27. } from '$lib/stores';
  28. import {
  29. convertMessagesToHistory,
  30. copyToClipboard,
  31. extractSentencesForAudio,
  32. getUserPosition,
  33. promptTemplate,
  34. splitStream
  35. } from '$lib/utils';
  36. import { generateChatCompletion } from '$lib/apis/ollama';
  37. import {
  38. addTagById,
  39. createNewChat,
  40. deleteTagById,
  41. getAllChatTags,
  42. getChatById,
  43. getChatList,
  44. getTagsById,
  45. updateChatById
  46. } from '$lib/apis/chats';
  47. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  48. import { runWebSearch } from '$lib/apis/rag';
  49. import { createOpenAITextStream } from '$lib/apis/streaming';
  50. import { queryMemory } from '$lib/apis/memories';
  51. import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
  52. import { chatCompleted, generateTitle, generateSearchQuery, chatAction } from '$lib/apis';
  53. import Banner from '../common/Banner.svelte';
  54. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  55. import Messages from '$lib/components/chat/Messages.svelte';
  56. import Navbar from '$lib/components/layout/Navbar.svelte';
  57. import CallOverlay from './MessageInput/CallOverlay.svelte';
  58. import { error } from '@sveltejs/kit';
  59. import ChatControls from './ChatControls.svelte';
  60. import EventConfirmDialog from '../common/ConfirmDialog.svelte';
  61. const i18n: Writable<i18nType> = getContext('i18n');
  62. export let chatIdProp = '';
  63. let loaded = false;
  64. const eventTarget = new EventTarget();
  65. let showControls = false;
  66. let stopResponseFlag = false;
  67. let autoScroll = true;
  68. let processing = '';
  69. let messagesContainerElement: HTMLDivElement;
  70. let showEventConfirmation = false;
  71. let eventConfirmationTitle = '';
  72. let eventConfirmationMessage = '';
  73. let eventConfirmationInput = false;
  74. let eventConfirmationInputPlaceholder = '';
  75. let eventConfirmationInputValue = '';
  76. let eventCallback = null;
  77. let showModelSelector = true;
  78. let selectedModels = [''];
  79. let atSelectedModel: Model | undefined;
  80. let selectedModelIds = [];
  81. $: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;
  82. let selectedToolIds = [];
  83. let webSearchEnabled = false;
  84. let chat = null;
  85. let tags = [];
  86. let title = '';
  87. let prompt = '';
  88. let chatFiles = [];
  89. let files = [];
  90. let messages = [];
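// Message graph: nodes are keyed by message id, e.g.
//   history.messages[id] = { id, parentId, childrenIds: [], role, content, ... }
// with currentId pointing at the leaf of the branch currently shown in the UI.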
  91. let history = {
  92. messages: {},
  93. currentId: null
  94. };
  95. let params = {};
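// Rebuild the flat `messages` list for the active branch by walking parent links
// from history.currentId back to the root.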
  96. $: if (history.currentId !== null) {
  97. let _messages = [];
  98. let currentMessage = history.messages[history.currentId];
  99. while (currentMessage !== null) {
  100. _messages.unshift({ ...currentMessage });
  101. currentMessage =
  102. currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
  103. }
  104. messages = _messages;
  105. } else {
  106. messages = [];
  107. }
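// (Re)load the chat whenever the route's chat id prop changes; redirect home if it
// cannot be loaded, and focus the input once the chat is ready.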
  108. $: if (chatIdProp) {
  109. (async () => {
  110. console.log(chatIdProp);
  111. if (chatIdProp && (await loadChat())) {
  112. await tick();
  113. loaded = true;
  114. window.setTimeout(() => scrollToBottom(), 0);
  115. const chatInput = document.getElementById('chat-textarea');
  116. chatInput?.focus();
  117. } else {
  118. await goto('/');
  119. }
  120. })();
  121. }
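// Handle socket 'chat-events' for the active chat: status and citation updates,
// streamed message chunks, content replacement, and confirmation/input dialogs.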
  122. const chatEventHandler = async (event, cb) => {
  123. if (event.chat_id === $chatId) {
  124. await tick();
  125. console.log(event);
  126. let message = history.messages[event.message_id];
  127. const type = event?.data?.type ?? null;
  128. const data = event?.data?.data ?? null;
  129. if (type === 'status') {
  130. if (message?.statusHistory) {
  131. message.statusHistory.push(data);
  132. } else {
  133. message.statusHistory = [data];
  134. }
  135. } else if (type === 'citation') {
  136. if (message?.citations) {
  137. message.citations.push(data);
  138. } else {
  139. message.citations = [data];
  140. }
  141. } else if (type === 'message') {
  142. message.content += data.content;
  143. } else if (type === 'replace') {
  144. message.content = data.content;
  145. } else if (type === 'confirmation') {
  146. eventCallback = cb;
  147. eventConfirmationInput = false;
  148. showEventConfirmation = true;
  149. eventConfirmationTitle = data.title;
  150. eventConfirmationMessage = data.message;
  151. } else if (type === 'input') {
  152. eventCallback = cb;
  153. eventConfirmationInput = true;
  154. showEventConfirmation = true;
  155. eventConfirmationTitle = data.title;
  156. eventConfirmationMessage = data.message;
  157. eventConfirmationInputPlaceholder = data.placeholder;
  158. eventConfirmationInputValue = data?.value ?? '';
  159. } else {
  160. console.log('Unknown message type', data);
  161. }
  162. messages = messages;
  163. }
  164. };
  165. onMount(async () => {
  166. const onMessageHandler = async (event) => {
167. if (event.origin === window.origin) {
168. // Only handle messages posted from this same origin (e.g. an embedded iframe served by the app)
  169. console.log('Message received from iframe:', event.data);
  170. if (event.data.type === 'input:prompt') {
  171. console.log(event.data.text);
  172. const inputElement = document.getElementById('chat-textarea');
  173. if (inputElement) {
  174. prompt = event.data.text;
  175. inputElement.focus();
  176. }
  177. }
  178. if (event.data.type === 'action:submit') {
  179. console.log(event.data.text);
  180. if (prompt !== '') {
  181. await tick();
  182. submitPrompt(prompt);
  183. }
  184. }
  185. if (event.data.type === 'input:prompt:submit') {
  186. console.log(event.data.text);
  187. if (prompt !== '') {
  188. await tick();
  189. submitPrompt(event.data.text);
  190. }
  191. }
  192. }
  193. };
  194. window.addEventListener('message', onMessageHandler);
  195. $socket.on('chat-events', chatEventHandler);
  196. if (!$chatId) {
  197. chatId.subscribe(async (value) => {
  198. if (!value) {
  199. await initNewChat();
  200. }
  201. });
  202. } else {
  203. if (!($settings.saveChatHistory ?? true)) {
  204. await goto('/');
  205. }
  206. }
  207. return () => {
  208. window.removeEventListener('message', onMessageHandler);
  209. $socket.off('chat-events');
  210. };
  211. });
  212. //////////////////////////
  213. // Web functions
  214. //////////////////////////
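// Reset all chat state for a fresh conversation, pre-selecting models, prompt and
// tools from the URL query string and re-applying the stored user settings.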
  215. const initNewChat = async () => {
  216. window.history.replaceState(history.state, '', `/`);
  217. await chatId.set('');
  218. autoScroll = true;
  219. title = '';
  220. messages = [];
  221. history = {
  222. messages: {},
  223. currentId: null
  224. };
  225. chatFiles = [];
  226. params = {};
  227. if ($page.url.searchParams.get('models')) {
  228. selectedModels = $page.url.searchParams.get('models')?.split(',');
  229. } else if ($settings?.models) {
  230. selectedModels = $settings?.models;
  231. } else if ($config?.default_models) {
  232. console.log($config?.default_models.split(',') ?? '');
  233. selectedModels = $config?.default_models.split(',');
  234. } else {
  235. selectedModels = [''];
  236. }
  237. if ($page.url.searchParams.get('q')) {
  238. prompt = $page.url.searchParams.get('q') ?? '';
  239. selectedToolIds = ($page.url.searchParams.get('tool_ids') ?? '')
  240. .split(',')
  241. .map((id) => id.trim())
  242. .filter((id) => id);
  243. if (prompt) {
  244. await tick();
  245. submitPrompt(prompt);
  246. }
  247. }
  248. selectedModels = selectedModels.map((modelId) =>
  249. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  250. );
  251. const userSettings = await getUserSettings(localStorage.token);
  252. if (userSettings) {
  253. settings.set(userSettings.ui);
  254. } else {
  255. settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  256. }
  257. const chatInput = document.getElementById('chat-textarea');
  258. setTimeout(() => chatInput?.focus(), 0);
  259. };
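// Fetch the chat identified by chatIdProp and hydrate local state (models, history,
// title, params, files); returns true on success, null when the chat has no content.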
  260. const loadChat = async () => {
  261. chatId.set(chatIdProp);
  262. chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
  263. await goto('/');
  264. return null;
  265. });
  266. if (chat) {
  267. tags = await getTags();
  268. const chatContent = chat.chat;
  269. if (chatContent) {
  270. console.log(chatContent);
  271. selectedModels =
  272. (chatContent?.models ?? undefined) !== undefined
  273. ? chatContent.models
  274. : [chatContent.models ?? ''];
  275. history =
  276. (chatContent?.history ?? undefined) !== undefined
  277. ? chatContent.history
  278. : convertMessagesToHistory(chatContent.messages);
  279. title = chatContent.title;
  280. const userSettings = await getUserSettings(localStorage.token);
  281. if (userSettings) {
  282. await settings.set(userSettings.ui);
  283. } else {
  284. await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  285. }
  286. params = chatContent?.params ?? {};
  287. chatFiles = chatContent?.files ?? [];
  288. autoScroll = true;
  289. await tick();
  290. if (messages.length > 0) {
  291. history.messages[messages.at(-1).id].done = true;
  292. }
  293. await tick();
  294. return true;
  295. } else {
  296. return null;
  297. }
  298. }
  299. };
  300. const scrollToBottom = async () => {
  301. await tick();
  302. if (messagesContainerElement) {
  303. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  304. }
  305. };
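// Collect the message chain from the root down to the given response message.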
  306. const createMessagesList = (responseMessageId) => {
  307. const message = history.messages[responseMessageId];
  308. if (message.parentId) {
  309. return [...createMessagesList(message.parentId), message];
  310. } else {
  311. return [message];
  312. }
  313. };
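// Notify the backend that a completion finished, merge any server-side message edits
// back into the local history (keeping originalContent when changed), and persist the chat.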
  314. const chatCompletedHandler = async (chatId, modelId, responseMessageId, messages) => {
  315. await mermaid.run({
  316. querySelector: '.mermaid'
  317. });
  318. const res = await chatCompleted(localStorage.token, {
  319. model: modelId,
  320. messages: messages.map((m) => ({
  321. id: m.id,
  322. role: m.role,
  323. content: m.content,
  324. info: m.info ? m.info : undefined,
  325. timestamp: m.timestamp
  326. })),
  327. chat_id: chatId,
  328. session_id: $socket?.id,
  329. id: responseMessageId
  330. }).catch((error) => {
  331. toast.error(error);
  332. messages.at(-1).error = { content: error };
  333. return null;
  334. });
  335. if (res !== null) {
  336. // Update chat history with the new messages
  337. for (const message of res.messages) {
  338. history.messages[message.id] = {
  339. ...history.messages[message.id],
  340. ...(history.messages[message.id].content !== message.content
  341. ? { originalContent: history.messages[message.id].content }
  342. : {}),
  343. ...message
  344. };
  345. }
  346. }
  347. if ($chatId == chatId) {
  348. if ($settings.saveChatHistory ?? true) {
  349. chat = await updateChatById(localStorage.token, chatId, {
  350. models: selectedModels,
  351. messages: messages,
  352. history: history,
  353. params: params,
  354. files: chatFiles
  355. });
  356. currentChatPage.set(1);
  357. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  358. }
  359. }
  360. };
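// Run a registered chat action against the current messages and persist the result,
// mirroring chatCompletedHandler's merge-and-save behaviour.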
  361. const chatActionHandler = async (chatId, actionId, modelId, responseMessageId) => {
  362. const res = await chatAction(localStorage.token, actionId, {
  363. model: modelId,
  364. messages: messages.map((m) => ({
  365. id: m.id,
  366. role: m.role,
  367. content: m.content,
  368. info: m.info ? m.info : undefined,
  369. timestamp: m.timestamp
  370. })),
  371. chat_id: chatId,
  372. session_id: $socket?.id,
  373. id: responseMessageId
  374. }).catch((error) => {
  375. toast.error(error);
  376. messages.at(-1).error = { content: error };
  377. return null;
  378. });
  379. if (res !== null) {
  380. // Update chat history with the new messages
  381. for (const message of res.messages) {
  382. history.messages[message.id] = {
  383. ...history.messages[message.id],
  384. ...(history.messages[message.id].content !== message.content
  385. ? { originalContent: history.messages[message.id].content }
  386. : {}),
  387. ...message
  388. };
  389. }
  390. }
  391. if ($chatId == chatId) {
  392. if ($settings.saveChatHistory ?? true) {
  393. chat = await updateChatById(localStorage.token, chatId, {
  394. models: selectedModels,
  395. messages: messages,
  396. history: history,
  397. params: params,
  398. files: chatFiles
  399. });
  400. currentChatPage.set(1);
  401. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  402. }
  403. }
  404. };
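// Emit a 'usage' heartbeat over the socket every second while a response is streaming;
// the caller clears the returned interval when streaming finishes.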
  405. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  406. return setInterval(() => {
  407. $socket?.emit('usage', {
  408. action: 'chat',
  409. model: modelId,
  410. chat_id: chatId
  411. });
  412. }, 1000);
  413. };
  414. //////////////////////////
  415. // Chat functions
  416. //////////////////////////
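// Validate the input state (model selected, previous response finished, files processed),
// append the user message to the history tree, and hand off to sendPrompt.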
  417. const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
  418. let _responses = [];
  419. console.log('submitPrompt', $chatId);
  420. selectedModels = selectedModels.map((modelId) =>
  421. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  422. );
  423. if (selectedModels.includes('')) {
  424. toast.error($i18n.t('Model not selected'));
  425. } else if (messages.length != 0 && messages.at(-1).done != true) {
  426. // Response not done
  427. console.log('wait');
  428. } else if (messages.length != 0 && messages.at(-1).error) {
  429. // Error in response
  430. toast.error(
  431. $i18n.t(
  432. `Oops! There was an error in the previous response. Please try again or contact admin.`
  433. )
  434. );
  435. } else if (
  436. files.length > 0 &&
  437. files.filter((file) => file.type !== 'image' && file.status !== 'processed').length > 0
  438. ) {
  439. // Upload not done
  440. toast.error(
  441. $i18n.t(
  442. `Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
  443. )
  444. );
  445. } else {
  446. // Reset chat input textarea
  447. const chatTextAreaElement = document.getElementById('chat-textarea');
  448. if (chatTextAreaElement) {
  449. chatTextAreaElement.value = '';
  450. chatTextAreaElement.style.height = '';
  451. }
  452. const _files = JSON.parse(JSON.stringify(files));
  453. chatFiles.push(..._files.filter((item) => ['doc', 'file', 'collection'].includes(item.type)));
  454. chatFiles = chatFiles.filter(
  455. // Remove duplicates
  456. (item, index, array) =>
  457. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  458. );
  459. files = [];
  460. prompt = '';
  461. // Create user message
  462. let userMessageId = uuidv4();
  463. let userMessage = {
  464. id: userMessageId,
  465. parentId: messages.length !== 0 ? messages.at(-1).id : null,
  466. childrenIds: [],
  467. role: 'user',
  468. content: userPrompt,
  469. files: _files.length > 0 ? _files : undefined,
  470. timestamp: Math.floor(Date.now() / 1000), // Unix epoch
  471. models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
  472. };
  473. // Add message to history and Set currentId to messageId
  474. history.messages[userMessageId] = userMessage;
  475. history.currentId = userMessageId;
  476. // Append messageId to childrenIds of parent message
  477. if (messages.length !== 0) {
  478. history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
  479. }
  480. // Wait until history/message have been updated
  481. await tick();
  482. _responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
  483. }
  484. return _responses;
  485. };
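// Create one pending assistant message per selected model, create the chat record on the
// first exchange, optionally query memory and web search, then dispatch each prompt to the
// Ollama or OpenAI pipeline.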
  486. const sendPrompt = async (prompt, parentId, { modelId = null, newChat = false } = {}) => {
  487. let _responses = [];
  488. // If modelId is provided, use it, else use selected model
  489. let selectedModelIds = modelId
  490. ? [modelId]
  491. : atSelectedModel !== undefined
  492. ? [atSelectedModel.id]
  493. : selectedModels;
  494. // Create response messages for each selected model
  495. const responseMessageIds = {};
  496. for (const modelId of selectedModelIds) {
  497. const model = $models.filter((m) => m.id === modelId).at(0);
  498. if (model) {
  499. let responseMessageId = uuidv4();
  500. let responseMessage = {
  501. parentId: parentId,
  502. id: responseMessageId,
  503. childrenIds: [],
  504. role: 'assistant',
  505. content: '',
  506. model: model.id,
  507. modelName: model.name ?? model.id,
  508. userContext: null,
  509. timestamp: Math.floor(Date.now() / 1000) // Unix epoch
  510. };
  511. // Add message to history and Set currentId to messageId
  512. history.messages[responseMessageId] = responseMessage;
  513. history.currentId = responseMessageId;
  514. // Append messageId to childrenIds of parent message
  515. if (parentId !== null) {
  516. history.messages[parentId].childrenIds = [
  517. ...history.messages[parentId].childrenIds,
  518. responseMessageId
  519. ];
  520. }
  521. responseMessageIds[modelId] = responseMessageId;
  522. }
  523. }
  524. await tick();
  525. // Create new chat if only one message in messages
  526. if (newChat && messages.length == 2) {
  527. if ($settings.saveChatHistory ?? true) {
  528. chat = await createNewChat(localStorage.token, {
  529. id: $chatId,
  530. title: $i18n.t('New Chat'),
  531. models: selectedModels,
  532. system: $settings.system ?? undefined,
  533. params: params,
  534. messages: messages,
  535. history: history,
  536. tags: [],
  537. timestamp: Date.now()
  538. });
  539. currentChatPage.set(1);
  540. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  541. await chatId.set(chat.id);
  542. } else {
  543. await chatId.set('local');
  544. }
  545. await tick();
  546. }
  547. const _chatId = JSON.parse(JSON.stringify($chatId));
  548. await Promise.all(
  549. selectedModelIds.map(async (modelId) => {
  550. console.log('modelId', modelId);
  551. const model = $models.filter((m) => m.id === modelId).at(0);
  552. if (model) {
  553. // If there are image files, check if model is vision capable
  554. const hasImages = messages.some((message) =>
  555. message.files?.some((file) => file.type === 'image')
  556. );
  557. if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
  558. toast.error(
  559. $i18n.t('Model {{modelName}} is not vision capable', {
  560. modelName: model.name ?? model.id
  561. })
  562. );
  563. }
  564. let responseMessageId = responseMessageIds[modelId];
  565. let responseMessage = history.messages[responseMessageId];
  566. let userContext = null;
  567. if ($settings?.memory ?? false) {
  568. if (userContext === null) {
  569. const res = await queryMemory(localStorage.token, prompt).catch((error) => {
  570. toast.error(error);
  571. return null;
  572. });
  573. if (res) {
  574. if (res.documents[0].length > 0) {
  575. userContext = res.documents[0].reduce((acc, doc, index) => {
  576. const createdAtTimestamp = res.metadatas[0][index].created_at;
  577. const createdAtDate = new Date(createdAtTimestamp * 1000)
  578. .toISOString()
  579. .split('T')[0];
  580. return `${acc}${index + 1}. [${createdAtDate}]. ${doc}\n`;
  581. }, '');
  582. }
  583. console.log(userContext);
  584. }
  585. }
  586. }
  587. responseMessage.userContext = userContext;
  588. const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
  589. if (webSearchEnabled) {
  590. await getWebSearchResults(model.id, parentId, responseMessageId);
  591. }
  592. let _response = null;
  593. if (model?.owned_by === 'openai') {
  594. _response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
  595. } else if (model) {
  596. _response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
  597. }
  598. _responses.push(_response);
  599. if (chatEventEmitter) clearInterval(chatEventEmitter);
  600. } else {
  601. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  602. }
  603. })
  604. );
  605. currentChatPage.set(1);
  606. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  607. return _responses;
  608. };
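// Stream a completion from the Ollama backend: build the message body (system prompt,
// user context, images, files), consume the newline-delimited JSON stream, and update
// the response message as chunks arrive.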
  609. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  610. let _response = null;
  611. const responseMessage = history.messages[responseMessageId];
  612. const userMessage = history.messages[responseMessage.parentId];
  613. // Wait until history/message have been updated
  614. await tick();
  615. // Scroll down
  616. scrollToBottom();
  617. const messagesBody = [
  618. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  619. ? {
  620. role: 'system',
  621. content: `${promptTemplate(
  622. params?.system ?? $settings?.system ?? '',
  623. $user.name,
  624. $settings?.userLocation
  625. ? await getAndUpdateUserLocation(localStorage.token)
  626. : undefined
  627. )}${
  628. responseMessage?.userContext ?? null
  629. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  630. : ''
  631. }`
  632. }
  633. : undefined,
  634. ...messages
  635. ]
  636. .filter((message) => message?.content?.trim())
  637. .map((message, idx, arr) => {
  638. // Prepare the base message object
  639. const baseMessage = {
  640. role: message.role,
  641. content: message.content
  642. };
  643. // Extract and format image URLs if any exist
  644. const imageUrls = message.files
  645. ?.filter((file) => file.type === 'image')
  646. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  647. // Add images array only if it contains elements
  648. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  649. baseMessage.images = imageUrls;
  650. }
  651. return baseMessage;
  652. });
  653. let lastImageIndex = -1;
  654. // Find the index of the last object with images
  655. messagesBody.forEach((item, index) => {
  656. if (item.images) {
  657. lastImageIndex = index;
  658. }
  659. });
  660. // Remove images from all but the last one
  661. messagesBody.forEach((item, index) => {
  662. if (index !== lastImageIndex) {
  663. delete item.images;
  664. }
  665. });
  666. let files = JSON.parse(JSON.stringify(chatFiles));
  667. if (model?.info?.meta?.knowledge ?? false) {
  668. files.push(...model.info.meta.knowledge);
  669. }
  670. files.push(
  671. ...(userMessage?.files ?? []).filter((item) =>
  672. ['doc', 'file', 'collection'].includes(item.type)
  673. ),
  674. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  675. );
  676. eventTarget.dispatchEvent(
  677. new CustomEvent('chat:start', {
  678. detail: {
  679. id: responseMessageId
  680. }
  681. })
  682. );
  683. await tick();
  684. const [res, controller] = await generateChatCompletion(localStorage.token, {
  685. stream: true,
  686. model: model.id,
  687. messages: messagesBody,
  688. options: {
  689. ...(params ?? $settings.params ?? {}),
  690. stop:
  691. params?.stop ?? $settings?.params?.stop ?? undefined
692. ? (params?.stop ? params.stop.split(',').map((token) => token.trim()) : $settings.params.stop).map(
  693. (str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  694. )
  695. : undefined,
  696. num_predict: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  697. repeat_penalty:
  698. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined
  699. },
  700. format: $settings.requestFormat ?? undefined,
  701. keep_alive: $settings.keepAlive ?? undefined,
  702. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  703. files: files.length > 0 ? files : undefined,
  704. session_id: $socket?.id,
  705. chat_id: $chatId,
  706. id: responseMessageId
  707. });
  708. if (res && res.ok) {
  709. console.log('controller', controller);
  710. const reader = res.body
  711. .pipeThrough(new TextDecoderStream())
  712. .pipeThrough(splitStream('\n'))
  713. .getReader();
  714. while (true) {
  715. const { value, done } = await reader.read();
  716. if (done || stopResponseFlag || _chatId !== $chatId) {
  717. responseMessage.done = true;
  718. messages = messages;
  719. if (stopResponseFlag) {
  720. controller.abort('User: Stop Response');
  721. } else {
  722. const messages = createMessagesList(responseMessageId);
  723. await chatCompletedHandler(_chatId, model.id, responseMessageId, messages);
  724. }
  725. _response = responseMessage.content;
  726. break;
  727. }
  728. try {
  729. let lines = value.split('\n');
  730. for (const line of lines) {
  731. if (line !== '') {
  732. console.log(line);
  733. let data = JSON.parse(line);
  734. if ('citations' in data) {
  735. responseMessage.citations = data.citations;
  736. continue;
  737. }
  738. if ('detail' in data) {
  739. throw data;
  740. }
  741. if (data.done == false) {
  742. if (responseMessage.content == '' && data.message.content == '\n') {
  743. continue;
  744. } else {
  745. responseMessage.content += data.message.content;
  746. const sentences = extractSentencesForAudio(responseMessage.content);
  747. sentences.pop();
  748. // dispatch only last sentence and make sure it hasn't been dispatched before
  749. if (
  750. sentences.length > 0 &&
  751. sentences[sentences.length - 1] !== responseMessage.lastSentence
  752. ) {
  753. responseMessage.lastSentence = sentences[sentences.length - 1];
  754. eventTarget.dispatchEvent(
  755. new CustomEvent('chat', {
  756. detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
  757. })
  758. );
  759. }
  760. messages = messages;
  761. }
  762. } else {
  763. responseMessage.done = true;
  764. if (responseMessage.content == '') {
  765. responseMessage.error = {
  766. code: 400,
  767. content: `Oops! No text generated from Ollama, Please try again.`
  768. };
  769. }
  770. responseMessage.context = data.context ?? null;
  771. responseMessage.info = {
  772. total_duration: data.total_duration,
  773. load_duration: data.load_duration,
  774. sample_count: data.sample_count,
  775. sample_duration: data.sample_duration,
  776. prompt_eval_count: data.prompt_eval_count,
  777. prompt_eval_duration: data.prompt_eval_duration,
  778. eval_count: data.eval_count,
  779. eval_duration: data.eval_duration
  780. };
  781. messages = messages;
  782. if ($settings.notificationEnabled && !document.hasFocus()) {
  783. const notification = new Notification(`${model.id}`, {
  784. body: responseMessage.content,
  785. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  786. });
  787. }
  788. if ($settings?.responseAutoCopy ?? false) {
  789. copyToClipboard(responseMessage.content);
  790. }
  791. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  792. await tick();
  793. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  794. }
  795. }
  796. }
  797. }
  798. } catch (error) {
  799. console.log(error);
  800. if ('detail' in error) {
  801. toast.error(error.detail);
  802. }
  803. break;
  804. }
  805. if (autoScroll) {
  806. scrollToBottom();
  807. }
  808. }
  809. if ($chatId == _chatId) {
  810. if ($settings.saveChatHistory ?? true) {
  811. chat = await updateChatById(localStorage.token, _chatId, {
  812. messages: messages,
  813. history: history,
  814. models: selectedModels,
  815. params: params,
  816. files: chatFiles
  817. });
  818. currentChatPage.set(1);
  819. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  820. }
  821. }
  822. } else {
  823. if (res !== null) {
  824. const error = await res.json();
  825. console.log(error);
  826. if ('detail' in error) {
  827. toast.error(error.detail);
  828. responseMessage.error = { content: error.detail };
  829. } else {
  830. toast.error(error.error);
  831. responseMessage.error = { content: error.error };
  832. }
  833. } else {
  834. toast.error(
  835. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  836. );
  837. responseMessage.error = {
  838. content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  839. provider: 'Ollama'
  840. })
  841. };
  842. }
  843. responseMessage.done = true;
  844. messages = messages;
  845. }
  846. stopResponseFlag = false;
  847. await tick();
  848. let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
  849. if (lastSentence) {
  850. eventTarget.dispatchEvent(
  851. new CustomEvent('chat', {
  852. detail: { id: responseMessageId, content: lastSentence }
  853. })
  854. );
  855. }
  856. eventTarget.dispatchEvent(
  857. new CustomEvent('chat:finish', {
  858. detail: {
  859. id: responseMessageId,
  860. content: responseMessage.content
  861. }
  862. })
  863. );
  864. if (autoScroll) {
  865. scrollToBottom();
  866. }
  867. if (messages.length == 2 && messages.at(1).content !== '' && selectedModels[0] === model.id) {
  868. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  869. const _title = await generateChatTitle(userPrompt);
  870. await setChatTitle(_chatId, _title);
  871. }
  872. return _response;
  873. };
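// Stream a completion from the OpenAI-compatible backend, handling usage info, citations,
// sentence-level audio events, and persistence of the finished chat.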
  874. const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
  875. let _response = null;
  876. const responseMessage = history.messages[responseMessageId];
  877. const userMessage = history.messages[responseMessage.parentId];
  878. let files = JSON.parse(JSON.stringify(chatFiles));
  879. if (model?.info?.meta?.knowledge ?? false) {
  880. files.push(...model.info.meta.knowledge);
  881. }
  882. files.push(
  883. ...(userMessage?.files ?? []).filter((item) =>
  884. ['doc', 'file', 'collection'].includes(item.type)
  885. ),
  886. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  887. );
  888. scrollToBottom();
  889. eventTarget.dispatchEvent(
  890. new CustomEvent('chat:start', {
  891. detail: {
  892. id: responseMessageId
  893. }
  894. })
  895. );
  896. await tick();
  897. try {
  898. const [res, controller] = await generateOpenAIChatCompletion(
  899. localStorage.token,
  900. {
  901. stream: true,
  902. model: model.id,
  903. stream_options:
  904. model.info?.meta?.capabilities?.usage ?? false
  905. ? {
  906. include_usage: true
  907. }
  908. : undefined,
  909. messages: [
  910. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  911. ? {
  912. role: 'system',
  913. content: `${promptTemplate(
  914. params?.system ?? $settings?.system ?? '',
  915. $user.name,
  916. $settings?.userLocation
  917. ? await getAndUpdateUserLocation(localStorage.token)
  918. : undefined
  919. )}${
  920. responseMessage?.userContext ?? null
  921. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  922. : ''
  923. }`
  924. }
  925. : undefined,
  926. ...messages
  927. ]
  928. .filter((message) => message?.content?.trim())
  929. .map((message, idx, arr) => ({
  930. role: message.role,
  931. ...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
  932. message.role === 'user'
  933. ? {
  934. content: [
  935. {
  936. type: 'text',
  937. text:
  938. arr.length - 1 !== idx
  939. ? message.content
  940. : message?.raContent ?? message.content
  941. },
  942. ...message.files
  943. .filter((file) => file.type === 'image')
  944. .map((file) => ({
  945. type: 'image_url',
  946. image_url: {
  947. url: file.url
  948. }
  949. }))
  950. ]
  951. }
  952. : {
  953. content:
  954. arr.length - 1 !== idx
  955. ? message.content
  956. : message?.raContent ?? message.content
  957. })
  958. })),
  959. seed: params?.seed ?? $settings?.params?.seed ?? undefined,
  960. stop:
  961. params?.stop ?? $settings?.params?.stop ?? undefined
962. ? (params?.stop ? params.stop.split(',').map((token) => token.trim()) : $settings.params.stop).map(
  963. (str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  964. )
  965. : undefined,
  966. temperature: params?.temperature ?? $settings?.params?.temperature ?? undefined,
  967. top_p: params?.top_p ?? $settings?.params?.top_p ?? undefined,
  968. frequency_penalty:
  969. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined,
  970. max_tokens: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  971. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  972. files: files.length > 0 ? files : undefined,
  973. session_id: $socket?.id,
  974. chat_id: $chatId,
  975. id: responseMessageId
  976. },
  977. `${WEBUI_BASE_URL}/api`
  978. );
  979. // Wait until history/message have been updated
  980. await tick();
  981. scrollToBottom();
  982. if (res && res.ok && res.body) {
  983. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  984. for await (const update of textStream) {
  985. const { value, done, citations, error, usage } = update;
  986. if (error) {
  987. await handleOpenAIError(error, null, model, responseMessage);
  988. break;
  989. }
  990. if (done || stopResponseFlag || _chatId !== $chatId) {
  991. responseMessage.done = true;
  992. messages = messages;
  993. if (stopResponseFlag) {
  994. controller.abort('User: Stop Response');
  995. } else {
  996. const messages = createMessagesList(responseMessageId);
  997. await chatCompletedHandler(_chatId, model.id, responseMessageId, messages);
  998. }
  999. _response = responseMessage.content;
  1000. break;
  1001. }
  1002. if (usage) {
  1003. responseMessage.info = { ...usage, openai: true };
  1004. }
  1005. if (citations) {
  1006. responseMessage.citations = citations;
  1007. continue;
  1008. }
  1009. if (responseMessage.content == '' && value == '\n') {
  1010. continue;
  1011. } else {
  1012. responseMessage.content += value;
  1013. const sentences = extractSentencesForAudio(responseMessage.content);
  1014. sentences.pop();
  1015. // dispatch only last sentence and make sure it hasn't been dispatched before
  1016. if (
  1017. sentences.length > 0 &&
  1018. sentences[sentences.length - 1] !== responseMessage.lastSentence
  1019. ) {
  1020. responseMessage.lastSentence = sentences[sentences.length - 1];
  1021. eventTarget.dispatchEvent(
  1022. new CustomEvent('chat', {
  1023. detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
  1024. })
  1025. );
  1026. }
  1027. messages = messages;
  1028. }
  1029. if (autoScroll) {
  1030. scrollToBottom();
  1031. }
  1032. }
  1033. if ($settings.notificationEnabled && !document.hasFocus()) {
  1034. const notification = new Notification(`${model.id}`, {
  1035. body: responseMessage.content,
  1036. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  1037. });
  1038. }
  1039. if ($settings.responseAutoCopy) {
  1040. copyToClipboard(responseMessage.content);
  1041. }
  1042. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  1043. await tick();
  1044. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  1045. }
  1046. if ($chatId == _chatId) {
  1047. if ($settings.saveChatHistory ?? true) {
  1048. chat = await updateChatById(localStorage.token, _chatId, {
  1049. models: selectedModels,
  1050. messages: messages,
  1051. history: history,
  1052. params: params,
  1053. files: chatFiles
  1054. });
  1055. currentChatPage.set(1);
  1056. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1057. }
  1058. }
  1059. } else {
  1060. await handleOpenAIError(null, res, model, responseMessage);
  1061. }
  1062. } catch (error) {
  1063. await handleOpenAIError(error, null, model, responseMessage);
  1064. }
  1065. messages = messages;
  1066. stopResponseFlag = false;
  1067. await tick();
  1068. let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
  1069. if (lastSentence) {
  1070. eventTarget.dispatchEvent(
  1071. new CustomEvent('chat', {
  1072. detail: { id: responseMessageId, content: lastSentence }
  1073. })
  1074. );
  1075. }
  1076. eventTarget.dispatchEvent(
  1077. new CustomEvent('chat:finish', {
  1078. detail: {
  1079. id: responseMessageId,
  1080. content: responseMessage.content
  1081. }
  1082. })
  1083. );
  1084. if (autoScroll) {
  1085. scrollToBottom();
  1086. }
  1087. if (messages.length == 2 && selectedModels[0] === model.id) {
  1088. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  1089. const _title = await generateChatTitle(userPrompt);
  1090. await setChatTitle(_chatId, _title);
  1091. }
  1092. return _response;
  1093. };
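// Normalize OpenAI-style errors (thrown objects or failed responses) into a toast and an
// error entry on the response message.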
  1094. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  1095. let errorMessage = '';
  1096. let innerError;
  1097. if (error) {
  1098. innerError = error;
  1099. } else if (res !== null) {
  1100. innerError = await res.json();
  1101. }
  1102. console.error(innerError);
  1103. if ('detail' in innerError) {
  1104. toast.error(innerError.detail);
  1105. errorMessage = innerError.detail;
  1106. } else if ('error' in innerError) {
  1107. if ('message' in innerError.error) {
  1108. toast.error(innerError.error.message);
  1109. errorMessage = innerError.error.message;
  1110. } else {
  1111. toast.error(innerError.error);
  1112. errorMessage = innerError.error;
  1113. }
  1114. } else if ('message' in innerError) {
  1115. toast.error(innerError.message);
  1116. errorMessage = innerError.message;
  1117. }
  1118. responseMessage.error = {
  1119. content:
  1120. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  1121. provider: model.name ?? model.id
  1122. }) +
  1123. '\n' +
  1124. errorMessage
  1125. };
  1126. responseMessage.done = true;
  1127. messages = messages;
  1128. };
  1129. const stopResponse = () => {
  1130. stopResponseFlag = true;
  1131. console.log('stopResponse');
  1132. };
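// Re-send the parent user prompt; with multiple models active, regenerate only with the
// model that produced the given response message.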
  1133. const regenerateResponse = async (message) => {
  1134. console.log('regenerateResponse');
  1135. if (messages.length != 0) {
  1136. let userMessage = history.messages[message.parentId];
  1137. let userPrompt = userMessage.content;
  1138. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  1139. // If user message has only one model selected, sendPrompt automatically selects it for regeneration
  1140. await sendPrompt(userPrompt, userMessage.id);
  1141. } else {
  1142. // If there are multiple models selected, use the model of the response message for regeneration
  1143. // e.g. many model chat
  1144. await sendPrompt(userPrompt, userMessage.id, { modelId: message.model });
  1145. }
  1146. }
  1147. };
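// Continue the most recent completed response by re-streaming into the same response message.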
  1148. const continueGeneration = async () => {
  1149. console.log('continueGeneration');
  1150. const _chatId = JSON.parse(JSON.stringify($chatId));
  1151. if (messages.length != 0 && messages.at(-1).done == true) {
  1152. const responseMessage = history.messages[history.currentId];
  1153. responseMessage.done = false;
  1154. await tick();
  1155. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  1156. if (model) {
  1157. if (model?.owned_by === 'openai') {
  1158. await sendPromptOpenAI(
  1159. model,
  1160. history.messages[responseMessage.parentId].content,
  1161. responseMessage.id,
  1162. _chatId
  1163. );
  1164. } else
  1165. await sendPromptOllama(
  1166. model,
  1167. history.messages[responseMessage.parentId].content,
  1168. responseMessage.id,
  1169. _chatId
  1170. );
  1171. }
  1172. } else {
1173. toast.error($i18n.t('There is no completed response to continue'));
  1174. }
  1175. };
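// Ask the backend to auto-generate a chat title, falling back to the raw prompt.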
  1176. const generateChatTitle = async (userPrompt) => {
  1177. if ($settings?.title?.auto ?? true) {
  1178. const title = await generateTitle(
  1179. localStorage.token,
  1180. selectedModels[0],
  1181. userPrompt,
  1182. $chatId
  1183. ).catch((error) => {
  1184. console.error(error);
  1185. return 'New Chat';
  1186. });
  1187. return title;
  1188. } else {
  1189. return `${userPrompt}`;
  1190. }
  1191. };
  1192. const setChatTitle = async (_chatId, _title) => {
  1193. if (_chatId === $chatId) {
  1194. title = _title;
  1195. }
  1196. if ($settings.saveChatHistory ?? true) {
  1197. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1198. currentChatPage.set(1);
  1199. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1200. }
  1201. };
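// Generate a search query from the user message, run the web search, and attach the
// results and progress status entries to the response message.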
  1202. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  1203. const responseMessage = history.messages[responseId];
  1204. const userMessage = history.messages[parentId];
  1205. responseMessage.statusHistory = [
  1206. {
  1207. done: false,
  1208. action: 'web_search',
  1209. description: $i18n.t('Generating search query')
  1210. }
  1211. ];
  1212. messages = messages;
  1213. const prompt = userMessage.content;
  1214. let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
  1215. (error) => {
  1216. console.log(error);
  1217. return prompt;
  1218. }
  1219. );
  1220. if (!searchQuery) {
  1221. toast.warning($i18n.t('No search query generated'));
  1222. responseMessage.statusHistory.push({
  1223. done: true,
  1224. error: true,
  1225. action: 'web_search',
  1226. description: 'No search query generated'
  1227. });
1228. messages = messages;
return;
  1229. }
  1230. responseMessage.statusHistory.push({
  1231. done: false,
  1232. action: 'web_search',
  1233. description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
  1234. });
  1235. messages = messages;
  1236. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  1237. console.log(error);
  1238. toast.error(error);
  1239. return null;
  1240. });
  1241. if (results) {
  1242. responseMessage.statusHistory.push({
  1243. done: true,
  1244. action: 'web_search',
  1245. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  1246. query: searchQuery,
  1247. urls: results.filenames
  1248. });
1249. if ((responseMessage?.files ?? undefined) === undefined) {
  1250. responseMessage.files = [];
  1251. }
  1252. responseMessage.files.push({
  1253. collection_name: results.collection_name,
  1254. name: searchQuery,
  1255. type: 'web_search_results',
  1256. urls: results.filenames
  1257. });
  1258. messages = messages;
  1259. } else {
  1260. responseMessage.statusHistory.push({
  1261. done: true,
  1262. error: true,
  1263. action: 'web_search',
  1264. description: 'No search results found'
  1265. });
  1266. messages = messages;
  1267. }
  1268. };
  1269. const getTags = async () => {
  1270. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1271. return [];
  1272. });
  1273. };
  1274. </script>
  1275. <svelte:head>
  1276. <title>
  1277. {title
  1278. ? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
  1279. : `${$WEBUI_NAME}`}
  1280. </title>
  1281. </svelte:head>
  1282. <audio id="audioElement" src="" style="display: none;" />
  1283. <EventConfirmDialog
  1284. bind:show={showEventConfirmation}
  1285. title={eventConfirmationTitle}
  1286. message={eventConfirmationMessage}
  1287. input={eventConfirmationInput}
  1288. inputPlaceholder={eventConfirmationInputPlaceholder}
  1289. inputValue={eventConfirmationInputValue}
  1290. on:confirm={(e) => {
  1291. if (e.detail) {
  1292. eventCallback(e.detail);
  1293. } else {
  1294. eventCallback(true);
  1295. }
  1296. }}
  1297. on:cancel={() => {
  1298. eventCallback(false);
  1299. }}
  1300. />
  1301. {#if $showCallOverlay}
  1302. <CallOverlay
  1303. {submitPrompt}
  1304. {stopResponse}
  1305. bind:files
  1306. modelId={selectedModelIds?.at(0) ?? null}
  1307. chatId={$chatId}
  1308. {eventTarget}
  1309. />
  1310. {/if}
  1311. {#if !chatIdProp || (loaded && chatIdProp)}
  1312. <div
  1313. class="h-screen max-h-[100dvh] {$showSidebar
  1314. ? 'md:max-w-[calc(100%-260px)]'
  1315. : ''} w-full max-w-full flex flex-col"
  1316. >
  1317. {#if $settings?.backgroundImageUrl ?? null}
  1318. <div
  1319. class="absolute {$showSidebar
  1320. ? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
  1321. : ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
  1322. style="background-image: url({$settings.backgroundImageUrl}) "
  1323. />
  1324. <div
  1325. class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
  1326. />
  1327. {/if}
  1328. <Navbar
  1329. {title}
  1330. bind:selectedModels
  1331. bind:showModelSelector
  1332. bind:showControls
  1333. shareEnabled={messages.length > 0}
  1334. {chat}
  1335. {initNewChat}
  1336. />
  1337. {#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
  1338. <div
  1339. class="absolute top-[4.25rem] w-full {$showSidebar
  1340. ? 'md:max-w-[calc(100%-260px)]'
  1341. : ''} {showControls ? 'lg:pr-[24rem]' : ''} z-20"
  1342. >
  1343. <div class=" flex flex-col gap-1 w-full">
  1344. {#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
  1345. <Banner
  1346. {banner}
  1347. on:dismiss={(e) => {
  1348. const bannerId = e.detail;
  1349. localStorage.setItem(
  1350. 'dismissedBannerIds',
  1351. JSON.stringify(
  1352. [
  1353. bannerId,
  1354. ...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
  1355. ].filter((id) => $banners.find((b) => b.id === id))
  1356. )
  1357. );
  1358. }}
  1359. />
  1360. {/each}
  1361. </div>
  1362. </div>
  1363. {/if}
  1364. <div class="flex flex-col flex-auto z-10">
  1365. <div
  1366. class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10 scrollbar-hidden {showControls
  1367. ? 'lg:pr-[24rem]'
  1368. : ''}"
  1369. id="messages-container"
  1370. bind:this={messagesContainerElement}
  1371. on:scroll={(e) => {
  1372. autoScroll =
  1373. messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
  1374. messagesContainerElement.clientHeight + 5;
  1375. }}
  1376. >
  1377. <div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
  1378. <Messages
  1379. chatId={$chatId}
  1380. {selectedModels}
  1381. {processing}
  1382. bind:history
  1383. bind:messages
  1384. bind:autoScroll
  1385. bind:prompt
  1386. bottomPadding={files.length > 0}
  1387. {sendPrompt}
  1388. {continueGeneration}
  1389. {regenerateResponse}
  1390. {chatActionHandler}
  1391. />
  1392. </div>
  1393. </div>
  1394. <div class={showControls ? 'lg:pr-[24rem]' : ''}>
  1395. <MessageInput
  1396. bind:files
  1397. bind:prompt
  1398. bind:autoScroll
  1399. bind:selectedToolIds
  1400. bind:webSearchEnabled
  1401. bind:atSelectedModel
  1402. availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
  1403. const model = $models.find((m) => m.id === e);
  1404. if (model?.info?.meta?.toolIds ?? false) {
  1405. return [...new Set([...a, ...model.info.meta.toolIds])];
  1406. }
  1407. return a;
  1408. }, [])}
  1409. transparentBackground={$settings?.backgroundImageUrl ?? false}
  1410. {selectedModels}
  1411. {messages}
  1412. {submitPrompt}
  1413. {stopResponse}
  1414. />
  1415. </div>
  1416. </div>
  1417. <ChatControls
  1418. models={selectedModelIds.reduce((a, e, i, arr) => {
  1419. const model = $models.find((m) => m.id === e);
  1420. if (model) {
  1421. return [...a, model];
  1422. }
  1423. return a;
  1424. }, [])}
  1425. bind:show={showControls}
  1426. bind:chatFiles
  1427. bind:params
  1428. />
  1429. </div>
  1430. {/if}