Chat.svelte

<script lang="ts">
import { v4 as uuidv4 } from 'uuid';
import { toast } from 'svelte-sonner';
import mermaid from 'mermaid';
import { PaneGroup, Pane, PaneResizer } from 'paneforge';
import { getContext, onDestroy, onMount, tick } from 'svelte';
import { goto } from '$app/navigation';
import { page } from '$app/stores';
import type { Unsubscriber, Writable } from 'svelte/store';
import type { i18n as i18nType } from 'i18next';
import { WEBUI_BASE_URL } from '$lib/constants';
import {
chatId,
chats,
config,
type Model,
models,
settings,
showSidebar,
WEBUI_NAME,
banners,
user,
socket,
showControls,
showCallOverlay,
currentChatPage,
temporaryChatEnabled,
mobile
} from '$lib/stores';
import {
convertMessagesToHistory,
copyToClipboard,
getMessageContentParts,
extractSentencesForAudio,
promptTemplate,
splitStream
} from '$lib/utils';
import { generateChatCompletion } from '$lib/apis/ollama';
import {
createNewChat,
getChatById,
getChatList,
getTagsById,
updateChatById
} from '$lib/apis/chats';
import { generateOpenAIChatCompletion } from '$lib/apis/openai';
import { runWebSearch } from '$lib/apis/rag';
import { createOpenAITextStream } from '$lib/apis/streaming';
import { queryMemory } from '$lib/apis/memories';
import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
import {
chatCompleted,
generateTitle,
generateSearchQuery,
chatAction,
generateMoACompletion
} from '$lib/apis';
import Banner from '../common/Banner.svelte';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
import Navbar from '$lib/components/layout/Navbar.svelte';
import ChatControls from './ChatControls.svelte';
import EventConfirmDialog from '../common/ConfirmDialog.svelte';
import EllipsisVertical from '../icons/EllipsisVertical.svelte';
const i18n: Writable<i18nType> = getContext('i18n');
export let chatIdProp = '';
let loaded = false;
const eventTarget = new EventTarget();
let controlPane;
let stopResponseFlag = false;
let autoScroll = true;
let processing = '';
let messagesContainerElement: HTMLDivElement;
let showEventConfirmation = false;
let eventConfirmationTitle = '';
let eventConfirmationMessage = '';
let eventConfirmationInput = false;
let eventConfirmationInputPlaceholder = '';
let eventConfirmationInputValue = '';
let eventCallback = null;
let showModelSelector = true;
let selectedModels = [''];
let atSelectedModel: Model | undefined;
let selectedModelIds = [];
$: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;
let selectedToolIds = [];
let webSearchEnabled = false;
let chat = null;
let tags = [];
let title = '';
let prompt = '';
let chatFiles = [];
let files = [];
let messages = [];
let history = {
messages: {},
currentId: null
};
let params = {};
let chatIdUnsubscriber: Unsubscriber | undefined;
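// history stores messages as a tree keyed by id; this reactive block walks parent links from currentId to rebuild the linear messages array shown in the UI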
$: if (history.currentId !== null) {
let _messages = [];
let currentMessage = history.messages[history.currentId];
while (currentMessage) {
_messages.unshift({ ...currentMessage });
currentMessage =
currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
}
// This is most likely causing the performance issue
messages = _messages;
} else {
messages = [];
}
$: if (chatIdProp) {
(async () => {
console.log(chatIdProp);
if (chatIdProp && (await loadChat())) {
await tick();
loaded = true;
window.setTimeout(() => scrollToBottom(), 0);
const chatInput = document.getElementById('chat-textarea');
chatInput?.focus();
} else {
await goto('/');
}
})();
}
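// Jump to a message: follow the newest child at each level so the latest branch is shown, then scroll it into view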
const showMessage = async (message) => {
let _messageId = JSON.parse(JSON.stringify(message.id));
let messageChildrenIds = history.messages[_messageId].childrenIds;
while (messageChildrenIds.length !== 0) {
_messageId = messageChildrenIds.at(-1);
messageChildrenIds = history.messages[_messageId].childrenIds;
}
history.currentId = _messageId;
await tick();
await tick();
await tick();
const messageElement = document.getElementById(`message-${message.id}`);
if (messageElement) {
messageElement.scrollIntoView({ behavior: 'smooth' });
}
};
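// Socket handler for server-sent chat events (status, citation, message deltas, replace, action, confirmation, input) targeting the active chat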
const chatEventHandler = async (event, cb) => {
if (event.chat_id === $chatId) {
await tick();
console.log(event);
let message = history.messages[event.message_id];
const type = event?.data?.type ?? null;
const data = event?.data?.data ?? null;
if (type === 'status') {
if (message?.statusHistory) {
message.statusHistory.push(data);
} else {
message.statusHistory = [data];
}
} else if (type === 'citation') {
if (message?.citations) {
message.citations.push(data);
} else {
message.citations = [data];
}
} else if (type === 'message') {
message.content += data.content;
} else if (type === 'replace') {
message.content = data.content;
} else if (type === 'action') {
if (data.action === 'continue') {
const continueButton = document.getElementById('continue-response-button');
if (continueButton) {
continueButton.click();
}
}
} else if (type === 'confirmation') {
eventCallback = cb;
eventConfirmationInput = false;
showEventConfirmation = true;
eventConfirmationTitle = data.title;
eventConfirmationMessage = data.message;
} else if (type === 'input') {
eventCallback = cb;
eventConfirmationInput = true;
showEventConfirmation = true;
eventConfirmationTitle = data.title;
eventConfirmationMessage = data.message;
eventConfirmationInputPlaceholder = data.placeholder;
eventConfirmationInputValue = data?.value ?? '';
} else {
console.log('Unknown message type', data);
}
messages = messages;
}
};
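// window postMessage bridge so an embedding page can set the prompt and trigger submission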
const onMessageHandler = async (event: {
origin: string;
data: { type: string; text: string };
}) => {
// Only handle messages from the same origin; replace window.origin with your iframe's origin if embedding
if (event.origin !== window.origin) {
return;
}
if (event.data.type === 'input:prompt') {
console.debug(event.data.text);
const inputElement = document.getElementById('chat-textarea');
if (inputElement) {
prompt = event.data.text;
inputElement.focus();
}
}
if (event.data.type === 'action:submit') {
console.debug(event.data.text);
if (prompt !== '') {
await tick();
submitPrompt(prompt);
}
}
if (event.data.type === 'input:prompt:submit') {
console.debug(event.data.text);
if (prompt !== '') {
await tick();
submitPrompt(event.data.text);
}
}
};
onMount(async () => {
window.addEventListener('message', onMessageHandler);
$socket?.on('chat-events', chatEventHandler);
if (!$chatId) {
chatIdUnsubscriber = chatId.subscribe(async (value) => {
if (!value) {
await initNewChat();
}
});
} else {
if ($temporaryChatEnabled) {
await goto('/');
}
}
});
onDestroy(() => {
chatIdUnsubscriber?.();
window.removeEventListener('message', onMessageHandler);
$socket?.off('chat-events');
});
//////////////////////////
// Web functions
//////////////////////////
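// Model selection precedence for a fresh chat: ?models query param, then user settings, then the server's default_models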
const initNewChat = async () => {
if ($page.url.pathname.includes('/c/')) {
window.history.replaceState(history.state, '', `/`);
}
await chatId.set('');
autoScroll = true;
title = '';
messages = [];
history = {
messages: {},
currentId: null
};
chatFiles = [];
params = {};
if ($page.url.searchParams.get('models')) {
selectedModels = $page.url.searchParams.get('models')?.split(',');
} else if ($settings?.models) {
selectedModels = $settings?.models;
} else if ($config?.default_models) {
console.log($config?.default_models.split(',') ?? '');
selectedModels = $config?.default_models.split(',');
} else {
selectedModels = [''];
}
if ($page.url.searchParams.get('web-search') === 'true') {
webSearchEnabled = true;
}
if ($page.url.searchParams.get('q')) {
prompt = $page.url.searchParams.get('q') ?? '';
selectedToolIds = ($page.url.searchParams.get('tool_ids') ?? '')
.split(',')
.map((id) => id.trim())
.filter((id) => id);
if (prompt) {
await tick();
submitPrompt(prompt);
}
}
if ($page.url.searchParams.get('call') === 'true') {
showCallOverlay.set(true);
}
selectedModels = selectedModels.map((modelId) =>
$models.map((m) => m.id).includes(modelId) ? modelId : ''
);
const userSettings = await getUserSettings(localStorage.token);
if (userSettings) {
settings.set(userSettings.ui);
} else {
settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
}
const chatInput = document.getElementById('chat-textarea');
setTimeout(() => chatInput?.focus(), 0);
};
const loadChat = async () => {
chatId.set(chatIdProp);
chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
await goto('/');
return null;
});
if (chat) {
tags = await getTags();
const chatContent = chat.chat;
if (chatContent) {
console.log(chatContent);
selectedModels =
(chatContent?.models ?? undefined) !== undefined
? chatContent.models
: [chatContent.models ?? ''];
history =
(chatContent?.history ?? undefined) !== undefined
? chatContent.history
: convertMessagesToHistory(chatContent.messages);
title = chatContent.title;
const userSettings = await getUserSettings(localStorage.token);
if (userSettings) {
await settings.set(userSettings.ui);
} else {
await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
}
params = chatContent?.params ?? {};
chatFiles = chatContent?.files ?? [];
autoScroll = true;
await tick();
if (messages.length > 0) {
history.messages[messages.at(-1).id].done = true;
}
await tick();
return true;
} else {
return null;
}
}
};
const scrollToBottom = async () => {
await tick();
if (messagesContainerElement) {
messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
}
};
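// Recursively collect the chain of ancestors for a response message (oldest first)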
const createMessagesList = (responseMessageId) => {
const message = history.messages[responseMessageId];
if (message.parentId) {
return [...createMessagesList(message.parentId), message];
} else {
return [message];
}
};
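// Called when a model finishes responding: render any mermaid blocks, notify the backend, merge returned message updates, and persist the chat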
const chatCompletedHandler = async (chatId, modelId, responseMessageId, messages) => {
await mermaid.run({
querySelector: '.mermaid'
});
const res = await chatCompleted(localStorage.token, {
model: modelId,
messages: messages.map((m) => ({
id: m.id,
role: m.role,
content: m.content,
info: m.info ? m.info : undefined,
timestamp: m.timestamp
})),
chat_id: chatId,
session_id: $socket?.id,
id: responseMessageId
}).catch((error) => {
toast.error(error);
messages.at(-1).error = { content: error };
return null;
});
if (res !== null) {
// Update chat history with the new messages
for (const message of res.messages) {
history.messages[message.id] = {
...history.messages[message.id],
...(history.messages[message.id].content !== message.content
? { originalContent: history.messages[message.id].content }
: {}),
...message
};
}
}
if ($chatId == chatId) {
if (!$temporaryChatEnabled) {
chat = await updateChatById(localStorage.token, chatId, {
models: selectedModels,
messages: messages,
history: history,
params: params,
files: chatFiles
});
currentChatPage.set(1);
await chats.set(await getChatList(localStorage.token, $currentChatPage));
}
}
};
const chatActionHandler = async (chatId, actionId, modelId, responseMessageId, event = null) => {
const res = await chatAction(localStorage.token, actionId, {
model: modelId,
messages: messages.map((m) => ({
id: m.id,
role: m.role,
content: m.content,
info: m.info ? m.info : undefined,
timestamp: m.timestamp
})),
...(event ? { event: event } : {}),
chat_id: chatId,
session_id: $socket?.id,
id: responseMessageId
}).catch((error) => {
toast.error(error);
messages.at(-1).error = { content: error };
return null;
});
if (res !== null) {
// Update chat history with the new messages
for (const message of res.messages) {
history.messages[message.id] = {
...history.messages[message.id],
...(history.messages[message.id].content !== message.content
? { originalContent: history.messages[message.id].content }
: {}),
...message
};
}
}
if ($chatId == chatId) {
if (!$temporaryChatEnabled) {
chat = await updateChatById(localStorage.token, chatId, {
models: selectedModels,
messages: messages,
history: history,
params: params,
files: chatFiles
});
currentChatPage.set(1);
await chats.set(await getChatList(localStorage.token, $currentChatPage));
}
}
};
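// Emits a usage heartbeat over the socket every second while a response is being generated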
const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
return setInterval(() => {
$socket?.emit('usage', {
action: 'chat',
model: modelId,
chat_id: chatId
});
}, 1000);
};
//////////////////////////
// Chat functions
//////////////////////////
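// Validates the prompt (model selected, previous response finished, file uploads processed, file count limit) before creating the user message and dispatching sendPrompt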
const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
let _responses = [];
console.log('submitPrompt', $chatId);
selectedModels = selectedModels.map((modelId) =>
$models.map((m) => m.id).includes(modelId) ? modelId : ''
);
if (selectedModels.includes('')) {
toast.error($i18n.t('Model not selected'));
} else if (messages.length != 0 && messages.at(-1).done != true) {
// Response not done
console.log('wait');
} else if (messages.length != 0 && messages.at(-1).error) {
// Error in response
toast.error(
$i18n.t(
`Oops! There was an error in the previous response. Please try again or contact admin.`
)
);
} else if (
files.length > 0 &&
files.filter((file) => file.type !== 'image' && file.status !== 'processed').length > 0
) {
// Upload not done
toast.error(
$i18n.t(
`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
)
);
} else if (
($config?.file?.max_count ?? null) !== null &&
files.length + chatFiles.length > $config?.file?.max_count
) {
console.log(chatFiles.length, files.length);
toast.error(
$i18n.t(`You can only chat with a maximum of {{maxCount}} file(s) at a time.`, {
maxCount: $config?.file?.max_count
})
);
} else {
// Reset chat input textarea
const chatTextAreaElement = document.getElementById('chat-textarea');
if (chatTextAreaElement) {
chatTextAreaElement.value = '';
chatTextAreaElement.style.height = '';
}
const _files = JSON.parse(JSON.stringify(files));
chatFiles.push(..._files.filter((item) => ['doc', 'file', 'collection'].includes(item.type)));
chatFiles = chatFiles.filter(
// Remove duplicates
(item, index, array) =>
array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
);
files = [];
prompt = '';
// Create user message
let userMessageId = uuidv4();
let userMessage = {
id: userMessageId,
parentId: messages.length !== 0 ? messages.at(-1).id : null,
childrenIds: [],
role: 'user',
content: userPrompt,
files: _files.length > 0 ? _files : undefined,
timestamp: Math.floor(Date.now() / 1000), // Unix epoch
models: selectedModels
};
// Add message to history and Set currentId to messageId
history.messages[userMessageId] = userMessage;
history.currentId = userMessageId;
// Append messageId to childrenIds of parent message
if (messages.length !== 0) {
history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
}
// Wait until history/message have been updated
await tick();
_responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
}
return _responses;
};
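// Fans the prompt out to every selected model: creates a response message per model, creates the chat on the first exchange, optionally runs web search, then streams via the Ollama or OpenAI path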
const sendPrompt = async (
prompt: string,
parentId: string,
{ modelId = null, modelIdx = null, newChat = false } = {}
) => {
let _responses: string[] = [];
// If modelId is provided, use it, else use selected model
let selectedModelIds = modelId
? [modelId]
: atSelectedModel !== undefined
? [atSelectedModel.id]
: selectedModels;
// Create response messages for each selected model
const responseMessageIds: Record<PropertyKey, string> = {};
for (const [_modelIdx, modelId] of selectedModelIds.entries()) {
const model = $models.filter((m) => m.id === modelId).at(0);
if (model) {
let responseMessageId = uuidv4();
let responseMessage = {
parentId: parentId,
id: responseMessageId,
childrenIds: [],
role: 'assistant',
content: '',
model: model.id,
modelName: model.name ?? model.id,
modelIdx: modelIdx ? modelIdx : _modelIdx,
userContext: null,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
};
// Add message to history and Set currentId to messageId
history.messages[responseMessageId] = responseMessage;
history.currentId = responseMessageId;
// Append messageId to childrenIds of parent message
if (parentId !== null) {
history.messages[parentId].childrenIds = [
...history.messages[parentId].childrenIds,
responseMessageId
];
}
responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`] = responseMessageId;
}
}
await tick();
// Create new chat if only one message in messages
if (newChat && messages.length == 2) {
if (!$temporaryChatEnabled) {
chat = await createNewChat(localStorage.token, {
id: $chatId,
title: $i18n.t('New Chat'),
models: selectedModels,
system: $settings.system ?? undefined,
params: params,
messages: messages,
history: history,
tags: [],
timestamp: Date.now()
});
currentChatPage.set(1);
await chats.set(await getChatList(localStorage.token, $currentChatPage));
await chatId.set(chat.id);
} else {
await chatId.set('local');
}
await tick();
}
const _chatId = JSON.parse(JSON.stringify($chatId));
await Promise.all(
selectedModelIds.map(async (modelId, _modelIdx) => {
console.log('modelId', modelId);
const model = $models.filter((m) => m.id === modelId).at(0);
if (model) {
// If there are image files, check if model is vision capable
const hasImages = messages.some((message) =>
message.files?.some((file) => file.type === 'image')
);
if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
toast.error(
$i18n.t('Model {{modelName}} is not vision capable', {
modelName: model.name ?? model.id
})
);
}
let responseMessageId =
responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`];
let responseMessage = history.messages[responseMessageId];
let userContext = null;
if ($settings?.memory ?? false) {
if (userContext === null) {
const res = await queryMemory(localStorage.token, prompt).catch((error) => {
toast.error(error);
return null;
});
if (res) {
if (res.documents[0].length > 0) {
userContext = res.documents[0].reduce((acc, doc, index) => {
const createdAtTimestamp = res.metadatas[0][index].created_at;
const createdAtDate = new Date(createdAtTimestamp * 1000)
.toISOString()
.split('T')[0];
return `${acc}${index + 1}. [${createdAtDate}]. ${doc}\n`;
}, '');
}
console.log(userContext);
}
}
}
responseMessage.userContext = userContext;
const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
scrollToBottom();
if (webSearchEnabled) {
await getWebSearchResults(model.id, parentId, responseMessageId);
}
let _response = null;
if (model?.owned_by === 'openai') {
_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
} else if (model) {
_response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
}
_responses.push(_response);
if (chatEventEmitter) clearInterval(chatEventEmitter);
} else {
toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
}
})
);
currentChatPage.set(1);
chats.set(await getChatList(localStorage.token, $currentChatPage));
return _responses;
};
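// Ollama path: builds the request body (templated system prompt, images kept only on the most recent message that has them) and streams the response, updating the message incrementally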
const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
let _response: string | null = null;
const responseMessage = history.messages[responseMessageId];
const userMessage = history.messages[responseMessage.parentId];
// Wait until history/message have been updated
await tick();
// Scroll down
scrollToBottom();
const messagesBody = [
params?.system || $settings.system || (responseMessage?.userContext ?? null)
? {
role: 'system',
content: `${promptTemplate(
params?.system ?? $settings?.system ?? '',
$user.name,
$settings?.userLocation
? await getAndUpdateUserLocation(localStorage.token)
: undefined
)}${
(responseMessage?.userContext ?? null)
? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
: ''
}`
}
: undefined,
...messages
]
.filter((message) => message?.content?.trim())
.map((message) => {
// Prepare the base message object
const baseMessage = {
role: message.role,
content: message.content
};
// Extract and format image URLs if any exist
const imageUrls = message.files
?.filter((file) => file.type === 'image')
.map((file) => file.url.slice(file.url.indexOf(',') + 1));
// Add images array only if it contains elements
if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
baseMessage.images = imageUrls;
}
return baseMessage;
});
let lastImageIndex = -1;
// Find the index of the last object with images
messagesBody.forEach((item, index) => {
if (item.images) {
lastImageIndex = index;
}
});
// Remove images from all but the last one
messagesBody.forEach((item, index) => {
if (index !== lastImageIndex) {
delete item.images;
}
});
let files = JSON.parse(JSON.stringify(chatFiles));
if (model?.info?.meta?.knowledge ?? false) {
// Only initialize and add status if knowledge exists
responseMessage.statusHistory = [
{
action: 'knowledge_search',
description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
searchQuery: userMessage.content
}),
done: false
}
];
files.push(...model.info.meta.knowledge);
messages = messages; // Trigger Svelte update
}
files.push(
...(userMessage?.files ?? []).filter((item) =>
['doc', 'file', 'collection'].includes(item.type)
),
...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
);
scrollToBottom();
eventTarget.dispatchEvent(
new CustomEvent('chat:start', {
detail: {
id: responseMessageId
}
})
);
await tick();
const stream = $settings?.streamResponse ?? true;
const [res, controller] = await generateChatCompletion(localStorage.token, {
stream: stream,
model: model.id,
messages: messagesBody,
options: {
...{ ...($settings?.params ?? {}), ...params },
stop:
(params?.stop ?? $settings?.params?.stop ?? undefined)
? (params?.stop.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
(str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
)
: undefined,
num_predict: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
repeat_penalty:
params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined
},
format: $settings.requestFormat ?? undefined,
keep_alive: $settings.keepAlive ?? undefined,
tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
files: files.length > 0 ? files : undefined,
session_id: $socket?.id,
chat_id: $chatId,
id: responseMessageId
});
if (res && res.ok) {
if (!stream) {
const response = await res.json();
console.log(response);
responseMessage.content = response.message.content;
responseMessage.info = {
eval_count: response.eval_count,
eval_duration: response.eval_duration,
load_duration: response.load_duration,
prompt_eval_count: response.prompt_eval_count,
prompt_eval_duration: response.prompt_eval_duration,
total_duration: response.total_duration
};
responseMessage.done = true;
} else {
console.log('controller', controller);
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
while (true) {
const { value, done } = await reader.read();
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
if (stopResponseFlag) {
controller.abort('User: Stop Response');
}
_response = responseMessage.content;
break;
}
try {
let lines = value.split('\n');
for (const line of lines) {
if (line !== '') {
console.log(line);
let data = JSON.parse(line);
if ('citations' in data) {
responseMessage.citations = data.citations;
// Only remove status if it was initially set
if (model?.info?.meta?.knowledge ?? false) {
responseMessage.statusHistory = responseMessage.statusHistory.filter(
(status) => status.action !== 'knowledge_search'
);
}
continue;
}
if ('detail' in data) {
throw data;
}
if (data.done == false) {
if (responseMessage.content == '' && data.message.content == '\n') {
continue;
} else {
responseMessage.content += data.message.content;
if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
navigator.vibrate(5);
}
const messageContentParts = getMessageContentParts(
responseMessage.content,
$config?.audio?.tts?.split_on ?? 'punctuation'
);
messageContentParts.pop();
// dispatch only last sentence and make sure it hasn't been dispatched before
if (
messageContentParts.length > 0 &&
messageContentParts[messageContentParts.length - 1] !==
responseMessage.lastSentence
) {
responseMessage.lastSentence =
messageContentParts[messageContentParts.length - 1];
eventTarget.dispatchEvent(
new CustomEvent('chat', {
detail: {
id: responseMessageId,
content: messageContentParts[messageContentParts.length - 1]
}
})
);
}
messages = messages;
}
} else {
responseMessage.done = true;
if (responseMessage.content == '') {
responseMessage.error = {
code: 400,
content: `Oops! No text generated from Ollama, Please try again.`
};
}
responseMessage.context = data.context ?? null;
responseMessage.info = {
total_duration: data.total_duration,
load_duration: data.load_duration,
sample_count: data.sample_count,
sample_duration: data.sample_duration,
prompt_eval_count: data.prompt_eval_count,
prompt_eval_duration: data.prompt_eval_duration,
eval_count: data.eval_count,
eval_duration: data.eval_duration
};
messages = messages;
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(`${model.id}`, {
body: responseMessage.content,
icon: `${WEBUI_BASE_URL}/static/favicon.png`
});
}
if ($settings?.responseAutoCopy ?? false) {
copyToClipboard(responseMessage.content);
}
if ($settings.responseAutoPlayback && !$showCallOverlay) {
await tick();
document.getElementById(`speak-button-${responseMessage.id}`)?.click();
}
}
}
}
} catch (error) {
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
}
break;
}
if (autoScroll) {
scrollToBottom();
}
}
}
await chatCompletedHandler(
_chatId,
model.id,
responseMessageId,
createMessagesList(responseMessageId)
);
} else {
if (res !== null) {
const error = await res.json();
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
responseMessage.error = { content: error.detail };
} else {
toast.error(error.error);
responseMessage.error = { content: error.error };
}
} else {
toast.error(
$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
);
responseMessage.error = {
content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
provider: 'Ollama'
})
};
}
responseMessage.done = true;
if (responseMessage.statusHistory) {
responseMessage.statusHistory = responseMessage.statusHistory.filter(
(status) => status.action !== 'knowledge_search'
);
}
messages = messages;
}
await saveChatHandler(_chatId);
stopResponseFlag = false;
await tick();
let lastMessageContentPart =
getMessageContentParts(
responseMessage.content,
$config?.audio?.tts?.split_on ?? 'punctuation'
)?.at(-1) ?? '';
if (lastMessageContentPart) {
eventTarget.dispatchEvent(
new CustomEvent('chat', {
detail: { id: responseMessageId, content: lastMessageContentPart }
})
);
}
eventTarget.dispatchEvent(
new CustomEvent('chat:finish', {
detail: {
id: responseMessageId,
content: responseMessage.content
}
})
);
if (autoScroll) {
scrollToBottom();
}
if (messages.length == 2 && messages.at(1).content !== '' && selectedModels[0] === model.id) {
window.history.replaceState(history.state, '', `/c/${_chatId}`);
const _title = await generateChatTitle(userPrompt);
await setChatTitle(_chatId, _title);
}
return _response;
};
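// OpenAI-compatible path: builds chat.completions-style messages (image_url parts for vision input) and streams the response through createOpenAITextStream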
const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
let _response = null;
const responseMessage = history.messages[responseMessageId];
const userMessage = history.messages[responseMessage.parentId];
let files = JSON.parse(JSON.stringify(chatFiles));
if (model?.info?.meta?.knowledge ?? false) {
// Only initialize and add status if knowledge exists
responseMessage.statusHistory = [
{
action: 'knowledge_search',
description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
searchQuery: userMessage.content
}),
done: false
}
];
files.push(...model.info.meta.knowledge);
messages = messages; // Trigger Svelte update
}
files.push(
...(userMessage?.files ?? []).filter((item) =>
['doc', 'file', 'collection'].includes(item.type)
),
...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
);
scrollToBottom();
eventTarget.dispatchEvent(
new CustomEvent('chat:start', {
detail: {
id: responseMessageId
}
})
);
await tick();
try {
const stream = $settings?.streamResponse ?? true;
const [res, controller] = await generateOpenAIChatCompletion(
localStorage.token,
{
stream: stream,
model: model.id,
...(stream && (model.info?.meta?.capabilities?.usage ?? false)
? {
stream_options: {
include_usage: true
}
}
: {}),
messages: [
params?.system || $settings.system || (responseMessage?.userContext ?? null)
? {
role: 'system',
content: `${promptTemplate(
params?.system ?? $settings?.system ?? '',
$user.name,
$settings?.userLocation
? await getAndUpdateUserLocation(localStorage.token)
: undefined
)}${
(responseMessage?.userContext ?? null)
? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
: ''
}`
}
: undefined,
...messages
]
.filter((message) => message?.content?.trim())
.map((message, idx, arr) => ({
role: message.role,
...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
message.role === 'user'
? {
content: [
{
type: 'text',
text:
arr.length - 1 !== idx
? message.content
: (message?.raContent ?? message.content)
},
...message.files
.filter((file) => file.type === 'image')
.map((file) => ({
type: 'image_url',
image_url: {
url: file.url
}
}))
]
}
: {
content:
arr.length - 1 !== idx
? message.content
: (message?.raContent ?? message.content)
})
})),
seed: params?.seed ?? $settings?.params?.seed ?? undefined,
stop:
(params?.stop ?? $settings?.params?.stop ?? undefined)
? (params?.stop.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
(str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
)
: undefined,
temperature: params?.temperature ?? $settings?.params?.temperature ?? undefined,
top_p: params?.top_p ?? $settings?.params?.top_p ?? undefined,
frequency_penalty:
params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined,
max_tokens: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
files: files.length > 0 ? files : undefined,
session_id: $socket?.id,
chat_id: $chatId,
id: responseMessageId
},
`${WEBUI_BASE_URL}/api`
);
// Wait until history/message have been updated
await tick();
scrollToBottom();
if (res && res.ok && res.body) {
if (!stream) {
const response = await res.json();
console.log(response);
responseMessage.content = response.choices[0].message.content;
responseMessage.info = { ...response.usage, openai: true };
responseMessage.done = true;
} else {
const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
for await (const update of textStream) {
const { value, done, citations, error, usage } = update;
if (error) {
await handleOpenAIError(error, null, model, responseMessage);
break;
}
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
if (stopResponseFlag) {
controller.abort('User: Stop Response');
}
_response = responseMessage.content;
break;
}
if (usage) {
responseMessage.info = { ...usage, openai: true };
}
if (citations) {
responseMessage.citations = citations;
// Only remove status if it was initially set
if (model?.info?.meta?.knowledge ?? false) {
responseMessage.statusHistory = responseMessage.statusHistory.filter(
(status) => status.action !== 'knowledge_search'
);
}
continue;
}
if (responseMessage.content == '' && value == '\n') {
continue;
} else {
responseMessage.content += value;
if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
navigator.vibrate(5);
}
const messageContentParts = getMessageContentParts(
responseMessage.content,
$config?.audio?.tts?.split_on ?? 'punctuation'
);
messageContentParts.pop();
// dispatch only last sentence and make sure it hasn't been dispatched before
if (
messageContentParts.length > 0 &&
messageContentParts[messageContentParts.length - 1] !== responseMessage.lastSentence
) {
responseMessage.lastSentence = messageContentParts[messageContentParts.length - 1];
eventTarget.dispatchEvent(
new CustomEvent('chat', {
detail: {
id: responseMessageId,
content: messageContentParts[messageContentParts.length - 1]
}
})
);
}
messages = messages;
}
if (autoScroll) {
scrollToBottom();
}
}
}
await chatCompletedHandler(
_chatId,
model.id,
responseMessageId,
createMessagesList(responseMessageId)
);
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(`${model.id}`, {
body: responseMessage.content,
icon: `${WEBUI_BASE_URL}/static/favicon.png`
});
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
if ($settings.responseAutoPlayback && !$showCallOverlay) {
await tick();
document.getElementById(`speak-button-${responseMessage.id}`)?.click();
}
} else {
await handleOpenAIError(null, res, model, responseMessage);
}
} catch (error) {
await handleOpenAIError(error, null, model, responseMessage);
}
await saveChatHandler(_chatId);
messages = messages;
stopResponseFlag = false;
await tick();
let lastMessageContentPart =
getMessageContentParts(
responseMessage.content,
$config?.audio?.tts?.split_on ?? 'punctuation'
)?.at(-1) ?? '';
if (lastMessageContentPart) {
eventTarget.dispatchEvent(
new CustomEvent('chat', {
detail: { id: responseMessageId, content: lastMessageContentPart }
})
);
}
eventTarget.dispatchEvent(
new CustomEvent('chat:finish', {
detail: {
id: responseMessageId,
content: responseMessage.content
}
})
);
if (autoScroll) {
scrollToBottom();
}
if (messages.length == 2 && selectedModels[0] === model.id) {
window.history.replaceState(history.state, '', `/c/${_chatId}`);
const _title = await generateChatTitle(userPrompt);
await setChatTitle(_chatId, _title);
}
return _response;
};
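// Normalizes errors from either a thrown exception or a failed response, shows a toast, and marks the response message as errored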
const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
let errorMessage = '';
let innerError;
if (error) {
innerError = error;
} else if (res !== null) {
innerError = await res.json();
}
console.error(innerError);
if ('detail' in innerError) {
toast.error(innerError.detail);
errorMessage = innerError.detail;
} else if ('error' in innerError) {
if ('message' in innerError.error) {
toast.error(innerError.error.message);
errorMessage = innerError.error.message;
} else {
toast.error(innerError.error);
errorMessage = innerError.error;
}
} else if ('message' in innerError) {
toast.error(innerError.message);
errorMessage = innerError.message;
}
responseMessage.error = {
content:
$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
provider: model.name ?? model.id
}) +
'\n' +
errorMessage
};
responseMessage.done = true;
if (responseMessage.statusHistory) {
responseMessage.statusHistory = responseMessage.statusHistory.filter(
(status) => status.action !== 'knowledge_search'
);
}
messages = messages;
};
const stopResponse = () => {
stopResponseFlag = true;
console.log('stopResponse');
};
const regenerateResponse = async (message) => {
console.log('regenerateResponse');
if (messages.length != 0) {
let userMessage = history.messages[message.parentId];
let userPrompt = userMessage.content;
if ((userMessage?.models ?? [...selectedModels]).length == 1) {
// If user message has only one model selected, sendPrompt automatically selects it for regeneration
await sendPrompt(userPrompt, userMessage.id);
} else {
// If there are multiple models selected, use the model of the response message for regeneration
// e.g. many model chat
await sendPrompt(userPrompt, userMessage.id, {
modelId: message.model,
modelIdx: message.modelIdx
});
}
}
};
const continueGeneration = async () => {
console.log('continueGeneration');
const _chatId = JSON.parse(JSON.stringify($chatId));
if (messages.length != 0 && messages.at(-1).done == true) {
const responseMessage = history.messages[history.currentId];
responseMessage.done = false;
await tick();
const model = $models.filter((m) => m.id === responseMessage.model).at(0);
if (model) {
if (model?.owned_by === 'openai') {
await sendPromptOpenAI(
model,
history.messages[responseMessage.parentId].content,
responseMessage.id,
_chatId
);
} else
await sendPromptOllama(
model,
history.messages[responseMessage.parentId].content,
responseMessage.id,
_chatId
);
} else {
toast.error($i18n.t(`Model {{modelId}} not found`, { modelId: responseMessage.model }));
}
} else {
console.log('continueGeneration: last message is not done yet');
}
};
const generateChatTitle = async (userPrompt) => {
if ($settings?.title?.auto ?? true) {
const title = await generateTitle(
localStorage.token,
selectedModels[0],
userPrompt,
$chatId
).catch((error) => {
console.error(error);
return 'New Chat';
});
return title;
} else {
return `${userPrompt}`;
}
};
const setChatTitle = async (_chatId, _title) => {
if (_chatId === $chatId) {
title = _title;
}
if (!$temporaryChatEnabled) {
chat = await updateChatById(localStorage.token, _chatId, { title: _title });
currentChatPage.set(1);
await chats.set(await getChatList(localStorage.token, $currentChatPage));
}
};
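// Generates a search query from the conversation, runs the web search, and attaches the result collection to the response message as a web_search_results file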
const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
const responseMessage = history.messages[responseId];
const userMessage = history.messages[parentId];
responseMessage.statusHistory = [
{
done: false,
action: 'web_search',
description: $i18n.t('Generating search query')
}
];
messages = messages;
const prompt = userMessage.content;
let searchQuery = await generateSearchQuery(
localStorage.token,
model,
messages.filter((message) => message?.content?.trim()),
prompt
).catch((error) => {
console.log(error);
return prompt;
});
if (!searchQuery || searchQuery == '') {
responseMessage.statusHistory.push({
done: true,
error: true,
action: 'web_search',
description: $i18n.t('No search query generated')
});
messages = messages;
return;
}
responseMessage.statusHistory.push({
done: false,
action: 'web_search',
description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
});
messages = messages;
const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
console.log(error);
toast.error(error);
return null;
});
if (results) {
responseMessage.statusHistory.push({
done: true,
action: 'web_search',
description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
query: searchQuery,
urls: results.filenames
});
if ((responseMessage?.files ?? undefined) === undefined) {
responseMessage.files = [];
}
responseMessage.files.push({
collection_name: results.collection_name,
name: searchQuery,
type: 'web_search_results',
urls: results.filenames
});
messages = messages;
} else {
responseMessage.statusHistory.push({
done: true,
error: true,
action: 'web_search',
description: 'No search results found'
});
messages = messages;
}
};
const getTags = async () => {
return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
return [];
});
};
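// Persists the current messages/history/params to the backend unless temporary chat mode is enabled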
const saveChatHandler = async (_chatId) => {
if ($chatId == _chatId) {
if (!$temporaryChatEnabled) {
chat = await updateChatById(localStorage.token, _chatId, {
messages: messages,
history: history,
models: selectedModels,
params: params,
files: chatFiles
});
currentChatPage.set(1);
await chats.set(await getChatList(localStorage.token, $currentChatPage));
}
}
};
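// Merges multi-model responses with a mixture-of-agents (MoA) completion, streaming the merged text into message.merged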
const mergeResponses = async (messageId, responses, _chatId) => {
console.log('mergeResponses', messageId, responses);
const message = history.messages[messageId];
const mergedResponse = {
status: true,
content: ''
};
message.merged = mergedResponse;
messages = messages;
try {
const [res, controller] = await generateMoACompletion(
localStorage.token,
message.model,
history.messages[message.parentId].content,
responses
);
if (res && res.ok && res.body) {
const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
for await (const update of textStream) {
const { value, done, citations, error, usage } = update;
if (error || done) {
break;
}
if (mergedResponse.content == '' && value == '\n') {
continue;
} else {
mergedResponse.content += value;
messages = messages;
}
if (autoScroll) {
scrollToBottom();
}
}
await saveChatHandler(_chatId);
} else {
console.error(res);
}
} catch (e) {
console.error(e);
}
};
</script>
<svelte:head>
<title>
{title
? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
: `${$WEBUI_NAME}`}
</title>
</svelte:head>
<audio id="audioElement" src="" style="display: none;" />
<EventConfirmDialog
bind:show={showEventConfirmation}
title={eventConfirmationTitle}
message={eventConfirmationMessage}
input={eventConfirmationInput}
inputPlaceholder={eventConfirmationInputPlaceholder}
inputValue={eventConfirmationInputValue}
on:confirm={(e) => {
if (e.detail) {
eventCallback(e.detail);
} else {
eventCallback(true);
}
}}
on:cancel={() => {
eventCallback(false);
}}
/>
{#if !chatIdProp || (loaded && chatIdProp)}
<div
class="h-screen max-h-[100dvh] {$showSidebar
? 'md:max-w-[calc(100%-260px)]'
: ''} w-full max-w-full flex flex-col"
>
{#if $settings?.backgroundImageUrl ?? null}
<div
class="absolute {$showSidebar
? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
: ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
style="background-image: url({$settings.backgroundImageUrl}) "
/>
<div
class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
/>
{/if}
<Navbar
{title}
bind:selectedModels
bind:showModelSelector
shareEnabled={messages.length > 0}
{controlPane}
{chat}
{initNewChat}
/>
<PaneGroup direction="horizontal" class="w-full h-full">
<Pane defaultSize={50} class="h-full flex w-full relative">
{#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
<div class="absolute top-3 left-0 right-0 w-full z-20">
<div class=" flex flex-col gap-1 w-full">
{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
<Banner
{banner}
on:dismiss={(e) => {
const bannerId = e.detail;
localStorage.setItem(
'dismissedBannerIds',
JSON.stringify(
[
bannerId,
...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
].filter((id) => $banners.find((b) => b.id === id))
)
);
}}
/>
{/each}
</div>
</div>
{/if}
<div class="flex flex-col flex-auto z-10 w-full">
<div
class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10 scrollbar-hidden"
id="messages-container"
bind:this={messagesContainerElement}
on:scroll={(e) => {
autoScroll =
messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
messagesContainerElement.clientHeight + 5;
}}
>
<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
<Messages
chatId={$chatId}
{selectedModels}
{processing}
bind:history
bind:messages
bind:autoScroll
bind:prompt
bottomPadding={files.length > 0}
{sendPrompt}
{continueGeneration}
{regenerateResponse}
{mergeResponses}
{chatActionHandler}
{showMessage}
/>
</div>
</div>
<div class="">
<MessageInput
bind:files
bind:prompt
bind:autoScroll
bind:selectedToolIds
bind:webSearchEnabled
bind:atSelectedModel
availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
const model = $models.find((m) => m.id === e);
if (model?.info?.meta?.toolIds ?? false) {
return [...new Set([...a, ...model.info.meta.toolIds])];
}
return a;
}, [])}
transparentBackground={$settings?.backgroundImageUrl ?? false}
{selectedModels}
{messages}
{submitPrompt}
{stopResponse}
on:call={() => {
showControls.set(true);
}}
/>
</div>
</div>
</Pane>
<ChatControls
models={selectedModelIds.reduce((a, e, i, arr) => {
const model = $models.find((m) => m.id === e);
if (model) {
return [...a, model];
}
return a;
}, [])}
bind:history
bind:chatFiles
bind:params
bind:files
bind:pane={controlPane}
{submitPrompt}
{stopResponse}
{showMessage}
modelId={selectedModelIds?.at(0) ?? null}
chatId={$chatId}
{eventTarget}
/>
</PaneGroup>
</div>
{/if}