Chat.svelte 47 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { getContext, onMount, tick } from 'svelte';
  6. import { goto } from '$app/navigation';
  7. import { page } from '$app/stores';
  8. import type { Writable } from 'svelte/store';
  9. import type { i18n as i18nType } from 'i18next';
  10. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  11. import {
  12. chatId,
  13. chats,
  14. config,
  15. type Model,
  16. models,
  17. settings,
  18. showSidebar,
  19. tags as _tags,
  20. WEBUI_NAME,
  21. banners,
  22. user,
  23. socket,
  24. showCallOverlay,
  25. tools,
  26. currentChatPage,
  27. temporaryChatEnabled
  28. } from '$lib/stores';
  29. import {
  30. convertMessagesToHistory,
  31. copyToClipboard,
  32. extractSentencesForAudio,
  33. getUserPosition,
  34. promptTemplate,
  35. splitStream
  36. } from '$lib/utils';
  37. import { generateChatCompletion } from '$lib/apis/ollama';
  38. import {
  39. addTagById,
  40. createNewChat,
  41. deleteTagById,
  42. getAllChatTags,
  43. getChatById,
  44. getChatList,
  45. getTagsById,
  46. updateChatById
  47. } from '$lib/apis/chats';
  48. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  49. import { runWebSearch } from '$lib/apis/rag';
  50. import { createOpenAITextStream } from '$lib/apis/streaming';
  51. import { queryMemory } from '$lib/apis/memories';
  52. import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
  53. import {
  54. chatCompleted,
  55. generateTitle,
  56. generateSearchQuery,
  57. chatAction,
  58. generateMoACompletion
  59. } from '$lib/apis';
  60. import Banner from '../common/Banner.svelte';
  61. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  62. import Messages from '$lib/components/chat/Messages.svelte';
  63. import Navbar from '$lib/components/layout/Navbar.svelte';
  64. import CallOverlay from './MessageInput/CallOverlay.svelte';
  65. import { error } from '@sveltejs/kit';
  66. import ChatControls from './ChatControls.svelte';
  67. import EventConfirmDialog from '../common/ConfirmDialog.svelte';
const i18n: Writable<i18nType> = getContext('i18n');

// Chat id provided by the route (`/c/<id>`); empty string means a brand-new chat.
export let chatIdProp = '';
let loaded = false;

// Local event bus used to notify child components (e.g. CallOverlay) about
// streaming lifecycle events dispatched elsewhere in this component.
const eventTarget = new EventTarget();

let showControls = false;
let stopResponseFlag = false; // set by the UI to abort an in-flight response stream
let autoScroll = true;
let processing = '';
let messagesContainerElement: HTMLDivElement;

// State backing the server-driven confirmation / input dialog
// (populated by chatEventHandler on 'confirmation' and 'input' events).
let showEventConfirmation = false;
let eventConfirmationTitle = '';
let eventConfirmationMessage = '';
let eventConfirmationInput = false;
let eventConfirmationInputPlaceholder = '';
let eventConfirmationInputValue = '';
let eventCallback = null;

let showModelSelector = true;
let selectedModels = [''];
let atSelectedModel: Model | undefined;

let selectedModelIds = [];
// An "@model" mention overrides the dropdown selection for the next turn.
$: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;

let selectedToolIds = [];
let webSearchEnabled = false;

let chat = null;
let tags = [];

let title = '';
let prompt = '';
let chatFiles = [];
let files = [];
let messages = [];

// Message graph: `messages` (the linear thread shown in the UI) is derived
// from this structure by walking parent links back from `currentId`.
// NOTE: this local `history` shadows `window.history` inside the component.
let history = {
	messages: {},
	currentId: null
};
let params = {};
// Derive the linear `messages` array by walking parent links from the
// currently-selected leaf (`history.currentId`) back to the root.
$: if (history.currentId !== null) {
	let _messages = [];

	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		// Copy each node so downstream mutations don't alias the graph entries.
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}
// Load the chat whenever the route-provided id changes; redirect to the home
// page when the chat cannot be loaded (not found / not authorized).
$: if (chatIdProp) {
	(async () => {
		console.log(chatIdProp);
		if (chatIdProp && (await loadChat())) {
			await tick();
			loaded = true;

			// Defer so the scroll happens after the messages have rendered.
			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}
  129. const chatEventHandler = async (event, cb) => {
  130. if (event.chat_id === $chatId) {
  131. await tick();
  132. console.log(event);
  133. let message = history.messages[event.message_id];
  134. const type = event?.data?.type ?? null;
  135. const data = event?.data?.data ?? null;
  136. if (type === 'status') {
  137. if (message?.statusHistory) {
  138. message.statusHistory.push(data);
  139. } else {
  140. message.statusHistory = [data];
  141. }
  142. } else if (type === 'citation') {
  143. if (message?.citations) {
  144. message.citations.push(data);
  145. } else {
  146. message.citations = [data];
  147. }
  148. } else if (type === 'message') {
  149. message.content += data.content;
  150. } else if (type === 'replace') {
  151. message.content = data.content;
  152. } else if (type === 'action') {
  153. if (data.action === 'continue') {
  154. const continueButton = document.getElementById('continue-response-button');
  155. if (continueButton) {
  156. continueButton.click();
  157. }
  158. }
  159. } else if (type === 'confirmation') {
  160. eventCallback = cb;
  161. eventConfirmationInput = false;
  162. showEventConfirmation = true;
  163. eventConfirmationTitle = data.title;
  164. eventConfirmationMessage = data.message;
  165. } else if (type === 'input') {
  166. eventCallback = cb;
  167. eventConfirmationInput = true;
  168. showEventConfirmation = true;
  169. eventConfirmationTitle = data.title;
  170. eventConfirmationMessage = data.message;
  171. eventConfirmationInputPlaceholder = data.placeholder;
  172. eventConfirmationInputValue = data?.value ?? '';
  173. } else {
  174. console.log('Unknown message type', data);
  175. }
  176. messages = messages;
  177. }
  178. };
  179. onMount(async () => {
  180. const onMessageHandler = async (event) => {
  181. if (event.origin === window.origin) {
  182. // Replace with your iframe's origin
  183. console.log('Message received from iframe:', event.data);
  184. if (event.data.type === 'input:prompt') {
  185. console.log(event.data.text);
  186. const inputElement = document.getElementById('chat-textarea');
  187. if (inputElement) {
  188. prompt = event.data.text;
  189. inputElement.focus();
  190. }
  191. }
  192. if (event.data.type === 'action:submit') {
  193. console.log(event.data.text);
  194. if (prompt !== '') {
  195. await tick();
  196. submitPrompt(prompt);
  197. }
  198. }
  199. if (event.data.type === 'input:prompt:submit') {
  200. console.log(event.data.text);
  201. if (prompt !== '') {
  202. await tick();
  203. submitPrompt(event.data.text);
  204. }
  205. }
  206. }
  207. };
  208. window.addEventListener('message', onMessageHandler);
  209. $socket.on('chat-events', chatEventHandler);
  210. if (!$chatId) {
  211. chatId.subscribe(async (value) => {
  212. if (!value) {
  213. await initNewChat();
  214. }
  215. });
  216. } else {
  217. if ($temporaryChatEnabled) {
  218. await goto('/');
  219. }
  220. }
  221. return () => {
  222. window.removeEventListener('message', onMessageHandler);
  223. $socket.off('chat-events');
  224. };
  225. });
  226. //////////////////////////
  227. // Web functions
  228. //////////////////////////
  229. const initNewChat = async () => {
  230. if ($page.url.pathname.includes('/c/')) {
  231. window.history.replaceState(history.state, '', `/`);
  232. }
  233. await chatId.set('');
  234. autoScroll = true;
  235. title = '';
  236. messages = [];
  237. history = {
  238. messages: {},
  239. currentId: null
  240. };
  241. chatFiles = [];
  242. params = {};
  243. if ($page.url.searchParams.get('models')) {
  244. selectedModels = $page.url.searchParams.get('models')?.split(',');
  245. } else if ($settings?.models) {
  246. selectedModels = $settings?.models;
  247. } else if ($config?.default_models) {
  248. console.log($config?.default_models.split(',') ?? '');
  249. selectedModels = $config?.default_models.split(',');
  250. } else {
  251. selectedModels = [''];
  252. }
  253. if ($page.url.searchParams.get('q')) {
  254. prompt = $page.url.searchParams.get('q') ?? '';
  255. selectedToolIds = ($page.url.searchParams.get('tool_ids') ?? '')
  256. .split(',')
  257. .map((id) => id.trim())
  258. .filter((id) => id);
  259. if (prompt) {
  260. await tick();
  261. submitPrompt(prompt);
  262. }
  263. }
  264. if ($page.url.searchParams.get('call') === 'true') {
  265. showCallOverlay.set(true);
  266. }
  267. selectedModels = selectedModels.map((modelId) =>
  268. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  269. );
  270. const userSettings = await getUserSettings(localStorage.token);
  271. if (userSettings) {
  272. settings.set(userSettings.ui);
  273. } else {
  274. settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  275. }
  276. const chatInput = document.getElementById('chat-textarea');
  277. setTimeout(() => chatInput?.focus(), 0);
  278. };
  279. const loadChat = async () => {
  280. chatId.set(chatIdProp);
  281. chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
  282. await goto('/');
  283. return null;
  284. });
  285. if (chat) {
  286. tags = await getTags();
  287. const chatContent = chat.chat;
  288. if (chatContent) {
  289. console.log(chatContent);
  290. selectedModels =
  291. (chatContent?.models ?? undefined) !== undefined
  292. ? chatContent.models
  293. : [chatContent.models ?? ''];
  294. history =
  295. (chatContent?.history ?? undefined) !== undefined
  296. ? chatContent.history
  297. : convertMessagesToHistory(chatContent.messages);
  298. title = chatContent.title;
  299. const userSettings = await getUserSettings(localStorage.token);
  300. if (userSettings) {
  301. await settings.set(userSettings.ui);
  302. } else {
  303. await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  304. }
  305. params = chatContent?.params ?? {};
  306. chatFiles = chatContent?.files ?? [];
  307. autoScroll = true;
  308. await tick();
  309. if (messages.length > 0) {
  310. history.messages[messages.at(-1).id].done = true;
  311. }
  312. await tick();
  313. return true;
  314. } else {
  315. return null;
  316. }
  317. }
  318. };
  319. const scrollToBottom = async () => {
  320. await tick();
  321. if (messagesContainerElement) {
  322. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  323. }
  324. };
  325. const createMessagesList = (responseMessageId) => {
  326. const message = history.messages[responseMessageId];
  327. if (message.parentId) {
  328. return [...createMessagesList(message.parentId), message];
  329. } else {
  330. return [message];
  331. }
  332. };
  333. const chatCompletedHandler = async (chatId, modelId, responseMessageId, messages) => {
  334. await mermaid.run({
  335. querySelector: '.mermaid'
  336. });
  337. const res = await chatCompleted(localStorage.token, {
  338. model: modelId,
  339. messages: messages.map((m) => ({
  340. id: m.id,
  341. role: m.role,
  342. content: m.content,
  343. info: m.info ? m.info : undefined,
  344. timestamp: m.timestamp
  345. })),
  346. chat_id: chatId,
  347. session_id: $socket?.id,
  348. id: responseMessageId
  349. }).catch((error) => {
  350. toast.error(error);
  351. messages.at(-1).error = { content: error };
  352. return null;
  353. });
  354. if (res !== null) {
  355. // Update chat history with the new messages
  356. for (const message of res.messages) {
  357. history.messages[message.id] = {
  358. ...history.messages[message.id],
  359. ...(history.messages[message.id].content !== message.content
  360. ? { originalContent: history.messages[message.id].content }
  361. : {}),
  362. ...message
  363. };
  364. }
  365. }
  366. if ($chatId == chatId) {
  367. if (!$temporaryChatEnabled) {
  368. chat = await updateChatById(localStorage.token, chatId, {
  369. models: selectedModels,
  370. messages: messages,
  371. history: history,
  372. params: params,
  373. files: chatFiles
  374. });
  375. currentChatPage.set(1);
  376. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  377. }
  378. }
  379. };
  380. const chatActionHandler = async (chatId, actionId, modelId, responseMessageId, event = null) => {
  381. const res = await chatAction(localStorage.token, actionId, {
  382. model: modelId,
  383. messages: messages.map((m) => ({
  384. id: m.id,
  385. role: m.role,
  386. content: m.content,
  387. info: m.info ? m.info : undefined,
  388. timestamp: m.timestamp
  389. })),
  390. ...(event ? { event: event } : {}),
  391. chat_id: chatId,
  392. session_id: $socket?.id,
  393. id: responseMessageId
  394. }).catch((error) => {
  395. toast.error(error);
  396. messages.at(-1).error = { content: error };
  397. return null;
  398. });
  399. if (res !== null) {
  400. // Update chat history with the new messages
  401. for (const message of res.messages) {
  402. history.messages[message.id] = {
  403. ...history.messages[message.id],
  404. ...(history.messages[message.id].content !== message.content
  405. ? { originalContent: history.messages[message.id].content }
  406. : {}),
  407. ...message
  408. };
  409. }
  410. }
  411. if ($chatId == chatId) {
  412. if (!$temporaryChatEnabled) {
  413. chat = await updateChatById(localStorage.token, chatId, {
  414. models: selectedModels,
  415. messages: messages,
  416. history: history,
  417. params: params,
  418. files: chatFiles
  419. });
  420. currentChatPage.set(1);
  421. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  422. }
  423. }
  424. };
  425. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  426. return setInterval(() => {
  427. $socket?.emit('usage', {
  428. action: 'chat',
  429. model: modelId,
  430. chat_id: chatId
  431. });
  432. }, 1000);
  433. };
  434. //////////////////////////
  435. // Chat functions
  436. //////////////////////////
/**
 * Validates preconditions, appends the user's message to the history graph,
 * then fans the prompt out to the selected models via sendPrompt().
 * Returns the array of model responses (empty when validation fails).
 */
const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
	let _responses = [];
	console.log('submitPrompt', $chatId);

	// Blank out any selected model id that is no longer available.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	if (selectedModels.includes('')) {
		toast.error($i18n.t('Model not selected'));
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done
		console.log('wait');
	} else if (messages.length != 0 && messages.at(-1).error) {
		// Error in response
		toast.error(
			$i18n.t(
				`Oops! There was an error in the previous response. Please try again or contact admin.`
			)
		);
	} else if (
		files.length > 0 &&
		files.filter((file) => file.type !== 'image' && file.status !== 'processed').length > 0
	) {
		// Upload not done
		toast.error(
			$i18n.t(
				`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
			)
		);
	} else {
		// Reset chat input textarea
		const chatTextAreaElement = document.getElementById('chat-textarea');

		if (chatTextAreaElement) {
			chatTextAreaElement.value = '';
			chatTextAreaElement.style.height = '';
		}

		const _files = JSON.parse(JSON.stringify(files));
		// Document-like attachments are kept on the chat itself (deduplicated
		// below); image files travel with the individual message instead.
		chatFiles.push(..._files.filter((item) => ['doc', 'file', 'collection'].includes(item.type)));
		chatFiles = chatFiles.filter(
			// Remove duplicates
			(item, index, array) =>
				array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
		);

		files = [];
		prompt = '';

		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			content: userPrompt,
			files: _files.length > 0 ? _files : undefined,
			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
			models: selectedModels
		};

		// Add message to history and Set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;

		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}

		// Wait until history/message have been updated
		await tick();

		// newChat=true lets sendPrompt create the persisted chat record when
		// this is the first exchange.
		_responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
	}

	return _responses;
};
/**
 * Dispatches `prompt` to every selected model in parallel: first creates one
 * assistant placeholder message per model in the history graph, optionally
 * creates the persisted chat record (newChat), then streams each model's
 * response via sendPromptOpenAI/sendPromptOllama. Returns the responses.
 */
const sendPrompt = async (
	prompt,
	parentId,
	{ modelId = null, modelIdx = null, newChat = false } = {}
) => {
	let _responses = [];

	// If modelId is provided, use it, else use selected model
	let selectedModelIds = modelId
		? [modelId]
		: atSelectedModel !== undefined
			? [atSelectedModel.id]
			: selectedModels;

	// Create response messages for each selected model
	const responseMessageIds = {};
	for (const [_modelIdx, modelId] of selectedModelIds.entries()) {
		const model = $models.filter((m) => m.id === modelId).at(0);

		if (model) {
			let responseMessageId = uuidv4();
			let responseMessage = {
				parentId: parentId,
				id: responseMessageId,
				childrenIds: [],
				role: 'assistant',
				content: '',
				model: model.id,
				modelName: model.name ?? model.id,
				modelIdx: modelIdx ? modelIdx : _modelIdx,
				userContext: null,
				timestamp: Math.floor(Date.now() / 1000) // Unix epoch
			};

			// Add message to history and Set currentId to messageId
			history.messages[responseMessageId] = responseMessage;
			history.currentId = responseMessageId;

			// Append messageId to childrenIds of parent message
			if (parentId !== null) {
				history.messages[parentId].childrenIds = [
					...history.messages[parentId].childrenIds,
					responseMessageId
				];
			}

			// Keyed by "<modelId>-<idx>" so duplicate model selections stay distinct.
			responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`] = responseMessageId;
		}
	}
	await tick();

	// Create new chat if only one message in messages
	if (newChat && messages.length == 2) {
		if (!$temporaryChatEnabled) {
			chat = await createNewChat(localStorage.token, {
				id: $chatId,
				title: $i18n.t('New Chat'),
				models: selectedModels,
				system: $settings.system ?? undefined,
				params: params,
				messages: messages,
				history: history,
				tags: [],
				timestamp: Date.now()
			});

			currentChatPage.set(1);
			await chats.set(await getChatList(localStorage.token, $currentChatPage));
			await chatId.set(chat.id);
		} else {
			// Temporary chats are never persisted server-side.
			await chatId.set('local');
		}
		await tick();
	}

	// Snapshot the chat id so the streaming handlers can detect when the user
	// has navigated to a different chat mid-stream.
	const _chatId = JSON.parse(JSON.stringify($chatId));

	await Promise.all(
		selectedModelIds.map(async (modelId, _modelIdx) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);

			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);

				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}

				let responseMessageId =
					responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`];
				let responseMessage = history.messages[responseMessageId];

				let userContext = null;
				// When the memory feature is enabled, fetch memories relevant to
				// the prompt and format them as a dated numbered list.
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});

						if (res) {
							if (res.documents[0].length > 0) {
								userContext = res.documents[0].reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[0][index].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];

									return `${acc}${index + 1}. [${createdAtDate}]. ${doc}\n`;
								}, '');
							}

							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;

				// Heartbeat interval reporting model usage; cleared when done.
				const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);

				if (webSearchEnabled) {
					await getWebSearchResults(model.id, parentId, responseMessageId);
				}

				let _response = null;
				if (model?.owned_by === 'openai') {
					_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					_response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}
				_responses.push(_response);

				if (chatEventEmitter) clearInterval(chatEventEmitter);
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);

	currentChatPage.set(1);
	await chats.set(await getChatList(localStorage.token, $currentChatPage));

	return _responses;
};
  635. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  636. let _response = null;
  637. const responseMessage = history.messages[responseMessageId];
  638. const userMessage = history.messages[responseMessage.parentId];
  639. // Wait until history/message have been updated
  640. await tick();
  641. // Scroll down
  642. scrollToBottom();
  643. const messagesBody = [
  644. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  645. ? {
  646. role: 'system',
  647. content: `${promptTemplate(
  648. params?.system ?? $settings?.system ?? '',
  649. $user.name,
  650. $settings?.userLocation
  651. ? await getAndUpdateUserLocation(localStorage.token)
  652. : undefined
  653. )}${
  654. (responseMessage?.userContext ?? null)
  655. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  656. : ''
  657. }`
  658. }
  659. : undefined,
  660. ...messages
  661. ]
  662. .filter((message) => message?.content?.trim())
  663. .map((message, idx, arr) => {
  664. // Prepare the base message object
  665. const baseMessage = {
  666. role: message.role,
  667. content: message.content
  668. };
  669. // Extract and format image URLs if any exist
  670. const imageUrls = message.files
  671. ?.filter((file) => file.type === 'image')
  672. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  673. // Add images array only if it contains elements
  674. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  675. baseMessage.images = imageUrls;
  676. }
  677. return baseMessage;
  678. });
  679. let lastImageIndex = -1;
  680. // Find the index of the last object with images
  681. messagesBody.forEach((item, index) => {
  682. if (item.images) {
  683. lastImageIndex = index;
  684. }
  685. });
  686. // Remove images from all but the last one
  687. messagesBody.forEach((item, index) => {
  688. if (index !== lastImageIndex) {
  689. delete item.images;
  690. }
  691. });
  692. let files = JSON.parse(JSON.stringify(chatFiles));
  693. if (model?.info?.meta?.knowledge ?? false) {
  694. // Only initialize and add status if knowledge exists
  695. responseMessage.statusHistory = [
  696. {
  697. action: 'knowledge_search',
  698. description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
  699. searchQuery: userMessage.content
  700. }),
  701. done: false
  702. }
  703. ];
  704. files.push(...model.info.meta.knowledge);
  705. messages = messages; // Trigger Svelte update
  706. }
  707. files.push(
  708. ...(userMessage?.files ?? []).filter((item) =>
  709. ['doc', 'file', 'collection'].includes(item.type)
  710. ),
  711. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  712. );
  713. scrollToBottom();
  714. eventTarget.dispatchEvent(
  715. new CustomEvent('chat:start', {
  716. detail: {
  717. id: responseMessageId
  718. }
  719. })
  720. );
  721. await tick();
  722. const [res, controller] = await generateChatCompletion(localStorage.token, {
  723. stream: true,
  724. model: model.id,
  725. messages: messagesBody,
  726. options: {
  727. ...(params ?? $settings.params ?? {}),
  728. stop:
  729. (params?.stop ?? $settings?.params?.stop ?? undefined)
  730. ? (params?.stop.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
  731. (str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  732. )
  733. : undefined,
  734. num_predict: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  735. repeat_penalty:
  736. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined
  737. },
  738. format: $settings.requestFormat ?? undefined,
  739. keep_alive: $settings.keepAlive ?? undefined,
  740. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  741. files: files.length > 0 ? files : undefined,
  742. session_id: $socket?.id,
  743. chat_id: $chatId,
  744. id: responseMessageId
  745. });
  746. if (res && res.ok) {
  747. console.log('controller', controller);
  748. const reader = res.body
  749. .pipeThrough(new TextDecoderStream())
  750. .pipeThrough(splitStream('\n'))
  751. .getReader();
  752. while (true) {
  753. const { value, done } = await reader.read();
  754. if (done || stopResponseFlag || _chatId !== $chatId) {
  755. responseMessage.done = true;
  756. messages = messages;
  757. if (stopResponseFlag) {
  758. controller.abort('User: Stop Response');
  759. } else {
  760. const messages = createMessagesList(responseMessageId);
  761. await chatCompletedHandler(_chatId, model.id, responseMessageId, messages);
  762. }
  763. _response = responseMessage.content;
  764. break;
  765. }
  766. try {
  767. let lines = value.split('\n');
  768. for (const line of lines) {
  769. if (line !== '') {
  770. console.log(line);
  771. let data = JSON.parse(line);
  772. if ('citations' in data) {
  773. responseMessage.citations = data.citations;
  774. // Only remove status if it was initially set
  775. if (model?.info?.meta?.knowledge ?? false) {
  776. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  777. (status) => status.action !== 'knowledge_search'
  778. );
  779. }
  780. continue;
  781. }
  782. if ('detail' in data) {
  783. throw data;
  784. }
  785. if (data.done == false) {
  786. if (responseMessage.content == '' && data.message.content == '\n') {
  787. continue;
  788. } else {
  789. responseMessage.content += data.message.content;
  790. if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
  791. navigator.vibrate(5);
  792. }
  793. const sentences = extractSentencesForAudio(responseMessage.content);
  794. sentences.pop();
  795. // dispatch only last sentence and make sure it hasn't been dispatched before
  796. if (
  797. sentences.length > 0 &&
  798. sentences[sentences.length - 1] !== responseMessage.lastSentence
  799. ) {
  800. responseMessage.lastSentence = sentences[sentences.length - 1];
  801. eventTarget.dispatchEvent(
  802. new CustomEvent('chat', {
  803. detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
  804. })
  805. );
  806. }
  807. messages = messages;
  808. }
  809. } else {
  810. responseMessage.done = true;
  811. if (responseMessage.content == '') {
  812. responseMessage.error = {
  813. code: 400,
  814. content: `Oops! No text generated from Ollama, Please try again.`
  815. };
  816. }
  817. responseMessage.context = data.context ?? null;
  818. responseMessage.info = {
  819. total_duration: data.total_duration,
  820. load_duration: data.load_duration,
  821. sample_count: data.sample_count,
  822. sample_duration: data.sample_duration,
  823. prompt_eval_count: data.prompt_eval_count,
  824. prompt_eval_duration: data.prompt_eval_duration,
  825. eval_count: data.eval_count,
  826. eval_duration: data.eval_duration
  827. };
  828. messages = messages;
  829. if ($settings.notificationEnabled && !document.hasFocus()) {
  830. const notification = new Notification(`${model.id}`, {
  831. body: responseMessage.content,
  832. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  833. });
  834. }
  835. if ($settings?.responseAutoCopy ?? false) {
  836. copyToClipboard(responseMessage.content);
  837. }
  838. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  839. await tick();
  840. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  841. }
  842. }
  843. }
  844. }
  845. } catch (error) {
  846. console.log(error);
  847. if ('detail' in error) {
  848. toast.error(error.detail);
  849. }
  850. break;
  851. }
  852. if (autoScroll) {
  853. scrollToBottom();
  854. }
  855. }
  856. if ($chatId == _chatId) {
  857. if ($settings.saveChatHistory ?? true) {
  858. chat = await updateChatById(localStorage.token, _chatId, {
  859. messages: messages,
  860. history: history,
  861. models: selectedModels,
  862. params: params,
  863. files: chatFiles
  864. });
  865. currentChatPage.set(1);
  866. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  867. }
  868. }
  869. } else {
  870. if (res !== null) {
  871. const error = await res.json();
  872. console.log(error);
  873. if ('detail' in error) {
  874. toast.error(error.detail);
  875. responseMessage.error = { content: error.detail };
  876. } else {
  877. toast.error(error.error);
  878. responseMessage.error = { content: error.error };
  879. }
  880. } else {
  881. toast.error(
  882. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  883. );
  884. responseMessage.error = {
  885. content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  886. provider: 'Ollama'
  887. })
  888. };
  889. }
  890. responseMessage.done = true;
  891. messages = messages;
  892. }
  893. stopResponseFlag = false;
  894. await tick();
  895. let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
  896. if (lastSentence) {
  897. eventTarget.dispatchEvent(
  898. new CustomEvent('chat', {
  899. detail: { id: responseMessageId, content: lastSentence }
  900. })
  901. );
  902. }
  903. eventTarget.dispatchEvent(
  904. new CustomEvent('chat:finish', {
  905. detail: {
  906. id: responseMessageId,
  907. content: responseMessage.content
  908. }
  909. })
  910. );
  911. if (autoScroll) {
  912. scrollToBottom();
  913. }
  914. if (messages.length == 2 && messages.at(1).content !== '' && selectedModels[0] === model.id) {
  915. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  916. const _title = await generateChatTitle(userPrompt);
  917. await setChatTitle(_chatId, _title);
  918. }
  919. return _response;
  920. };
	// Stream a chat completion for `model` from the OpenAI-compatible backend into
	// the response message identified by `responseMessageId`, updating the Svelte UI
	// as chunks arrive. Returns the final response text, or null on failure/abort.
	const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
		let _response = null;
		const responseMessage = history.messages[responseMessageId];
		const userMessage = history.messages[responseMessage.parentId];

		// Deep copy so per-request file additions don't mutate the shared chatFiles
		let files = JSON.parse(JSON.stringify(chatFiles));
		if (model?.info?.meta?.knowledge ?? false) {
			// Only initialize and add status if knowledge exists
			responseMessage.statusHistory = [
				{
					action: 'knowledge_search',
					description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
						searchQuery: userMessage.content
					}),
					done: false
				}
			];
			files.push(...model.info.meta.knowledge);
			messages = messages; // Trigger Svelte update
		}
		// Forward user-attached docs/files/collections plus any web-search results
		files.push(
			...(userMessage?.files ?? []).filter((item) =>
				['doc', 'file', 'collection'].includes(item.type)
			),
			...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
		);

		scrollToBottom();

		eventTarget.dispatchEvent(
			new CustomEvent('chat:start', {
				detail: {
					id: responseMessageId
				}
			})
		);
		await tick();

		try {
			const [res, controller] = await generateOpenAIChatCompletion(
				localStorage.token,
				{
					stream: true,
					model: model.id,
					// Request usage stats only when the model declares the capability
					stream_options:
						(model.info?.meta?.capabilities?.usage ?? false)
							? {
									include_usage: true
								}
							: undefined,
					messages: [
						// Optional system message: prompt template plus any user context
						params?.system || $settings.system || (responseMessage?.userContext ?? null)
							? {
									role: 'system',
									content: `${promptTemplate(
										params?.system ?? $settings?.system ?? '',
										$user.name,
										$settings?.userLocation
											? await getAndUpdateUserLocation(localStorage.token)
											: undefined
									)}${
										(responseMessage?.userContext ?? null)
											? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
											: ''
									}`
								}
							: undefined,
						...messages
					]
						.filter((message) => message?.content?.trim())
						.map((message, idx, arr) => ({
							role: message.role,
							// User messages carrying images become multi-part content arrays;
							// the last message may use the retrieval-augmented `raContent`
							...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
							message.role === 'user'
								? {
										content: [
											{
												type: 'text',
												text:
													arr.length - 1 !== idx
														? message.content
														: (message?.raContent ?? message.content)
											},
											...message.files
												.filter((file) => file.type === 'image')
												.map((file) => ({
													type: 'image_url',
													image_url: {
														url: file.url
													}
												}))
										]
									}
								: {
										content:
											arr.length - 1 !== idx
												? message.content
												: (message?.raContent ?? message.content)
									})
						})),
					seed: params?.seed ?? $settings?.params?.seed ?? undefined,
					// `stop` is a comma-separated string; split, trim, and JSON-unescape each token
					stop:
						(params?.stop ?? $settings?.params?.stop ?? undefined)
							? (params?.stop.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
									(str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
								)
							: undefined,
					temperature: params?.temperature ?? $settings?.params?.temperature ?? undefined,
					top_p: params?.top_p ?? $settings?.params?.top_p ?? undefined,
					frequency_penalty:
						params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined,
					max_tokens: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
					tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
					files: files.length > 0 ? files : undefined,
					session_id: $socket?.id,
					chat_id: $chatId,
					id: responseMessageId
				},
				`${WEBUI_BASE_URL}/api`
			);

			// Wait until history/message have been updated
			await tick();

			scrollToBottom();

			if (res && res.ok && res.body) {
				const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);

				for await (const update of textStream) {
					const { value, done, citations, error, usage } = update;
					if (error) {
						await handleOpenAIError(error, null, model, responseMessage);
						break;
					}
					// Stop on stream end, user abort, or when the user switched chats
					if (done || stopResponseFlag || _chatId !== $chatId) {
						responseMessage.done = true;
						messages = messages;

						if (stopResponseFlag) {
							controller.abort('User: Stop Response');
						} else {
							const messages = createMessagesList(responseMessageId);
							await chatCompletedHandler(_chatId, model.id, responseMessageId, messages);
						}

						_response = responseMessage.content;
						break;
					}

					if (usage) {
						responseMessage.info = { ...usage, openai: true };
					}

					if (citations) {
						responseMessage.citations = citations;
						// Only remove status if it was initially set
						if (model?.info?.meta?.knowledge ?? false) {
							responseMessage.statusHistory = responseMessage.statusHistory.filter(
								(status) => status.action !== 'knowledge_search'
							);
						}
						continue;
					}

					// Skip a leading bare newline; otherwise append the chunk
					if (responseMessage.content == '' && value == '\n') {
						continue;
					} else {
						responseMessage.content += value;

						if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
							navigator.vibrate(5);
						}

						// Drop the (possibly incomplete) trailing sentence; it is flushed
						// after the loop ends
						const sentences = extractSentencesForAudio(responseMessage.content);
						sentences.pop();

						// dispatch only last sentence and make sure it hasn't been dispatched before
						if (
							sentences.length > 0 &&
							sentences[sentences.length - 1] !== responseMessage.lastSentence
						) {
							responseMessage.lastSentence = sentences[sentences.length - 1];
							eventTarget.dispatchEvent(
								new CustomEvent('chat', {
									detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
								})
							);
						}

						messages = messages; // Trigger Svelte update
					}

					if (autoScroll) {
						scrollToBottom();
					}
				}

				if ($settings.notificationEnabled && !document.hasFocus()) {
					const notification = new Notification(`${model.id}`, {
						body: responseMessage.content,
						icon: `${WEBUI_BASE_URL}/static/favicon.png`
					});
				}

				if ($settings.responseAutoCopy) {
					copyToClipboard(responseMessage.content);
				}

				if ($settings.responseAutoPlayback && !$showCallOverlay) {
					await tick();
					document.getElementById(`speak-button-${responseMessage.id}`)?.click();
				}

				// Persist the chat only if the user is still viewing it
				if ($chatId == _chatId) {
					if ($settings.saveChatHistory ?? true) {
						chat = await updateChatById(localStorage.token, _chatId, {
							models: selectedModels,
							messages: messages,
							history: history,
							params: params,
							files: chatFiles
						});

						currentChatPage.set(1);
						await chats.set(await getChatList(localStorage.token, $currentChatPage));
					}
				}
			} else {
				await handleOpenAIError(null, res, model, responseMessage);
			}
		} catch (error) {
			await handleOpenAIError(error, null, model, responseMessage);
		}
		messages = messages;

		stopResponseFlag = false;
		await tick();

		// Flush the final (possibly partial) sentence to TTS listeners
		let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
		if (lastSentence) {
			eventTarget.dispatchEvent(
				new CustomEvent('chat', {
					detail: { id: responseMessageId, content: lastSentence }
				})
			);
		}

		eventTarget.dispatchEvent(
			new CustomEvent('chat:finish', {
				detail: {
					id: responseMessageId,
					content: responseMessage.content
				}
			})
		);

		if (autoScroll) {
			scrollToBottom();
		}

		// First exchange in a fresh chat: set the URL and auto-generate a title
		if (messages.length == 2 && selectedModels[0] === model.id) {
			window.history.replaceState(history.state, '', `/c/${_chatId}`);

			const _title = await generateChatTitle(userPrompt);
			await setChatTitle(_chatId, _title);
		}

		return _response;
	};
  1161. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  1162. let errorMessage = '';
  1163. let innerError;
  1164. if (error) {
  1165. innerError = error;
  1166. } else if (res !== null) {
  1167. innerError = await res.json();
  1168. }
  1169. console.error(innerError);
  1170. if ('detail' in innerError) {
  1171. toast.error(innerError.detail);
  1172. errorMessage = innerError.detail;
  1173. } else if ('error' in innerError) {
  1174. if ('message' in innerError.error) {
  1175. toast.error(innerError.error.message);
  1176. errorMessage = innerError.error.message;
  1177. } else {
  1178. toast.error(innerError.error);
  1179. errorMessage = innerError.error;
  1180. }
  1181. } else if ('message' in innerError) {
  1182. toast.error(innerError.message);
  1183. errorMessage = innerError.message;
  1184. }
  1185. responseMessage.error = {
  1186. content:
  1187. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  1188. provider: model.name ?? model.id
  1189. }) +
  1190. '\n' +
  1191. errorMessage
  1192. };
  1193. responseMessage.done = true;
  1194. messages = messages;
  1195. };
  1196. const stopResponse = () => {
  1197. stopResponseFlag = true;
  1198. console.log('stopResponse');
  1199. };
  1200. const regenerateResponse = async (message) => {
  1201. console.log('regenerateResponse');
  1202. if (messages.length != 0) {
  1203. let userMessage = history.messages[message.parentId];
  1204. let userPrompt = userMessage.content;
  1205. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  1206. // If user message has only one model selected, sendPrompt automatically selects it for regeneration
  1207. await sendPrompt(userPrompt, userMessage.id);
  1208. } else {
  1209. // If there are multiple models selected, use the model of the response message for regeneration
  1210. // e.g. many model chat
  1211. await sendPrompt(userPrompt, userMessage.id, {
  1212. modelId: message.model,
  1213. modelIdx: message.modelIdx
  1214. });
  1215. }
  1216. }
  1217. };
  1218. const continueGeneration = async () => {
  1219. console.log('continueGeneration');
  1220. const _chatId = JSON.parse(JSON.stringify($chatId));
  1221. if (messages.length != 0 && messages.at(-1).done == true) {
  1222. const responseMessage = history.messages[history.currentId];
  1223. responseMessage.done = false;
  1224. await tick();
  1225. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  1226. if (model) {
  1227. if (model?.owned_by === 'openai') {
  1228. await sendPromptOpenAI(
  1229. model,
  1230. history.messages[responseMessage.parentId].content,
  1231. responseMessage.id,
  1232. _chatId
  1233. );
  1234. } else
  1235. await sendPromptOllama(
  1236. model,
  1237. history.messages[responseMessage.parentId].content,
  1238. responseMessage.id,
  1239. _chatId
  1240. );
  1241. }
  1242. } else {
  1243. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  1244. }
  1245. };
  1246. const generateChatTitle = async (userPrompt) => {
  1247. if ($settings?.title?.auto ?? true) {
  1248. const title = await generateTitle(
  1249. localStorage.token,
  1250. selectedModels[0],
  1251. userPrompt,
  1252. $chatId
  1253. ).catch((error) => {
  1254. console.error(error);
  1255. return 'New Chat';
  1256. });
  1257. return title;
  1258. } else {
  1259. return `${userPrompt}`;
  1260. }
  1261. };
  1262. const setChatTitle = async (_chatId, _title) => {
  1263. if (_chatId === $chatId) {
  1264. title = _title;
  1265. }
  1266. if (!$temporaryChatEnabled) {
  1267. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1268. currentChatPage.set(1);
  1269. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1270. }
  1271. };
  1272. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  1273. const responseMessage = history.messages[responseId];
  1274. const userMessage = history.messages[parentId];
  1275. responseMessage.statusHistory = [
  1276. {
  1277. done: false,
  1278. action: 'web_search',
  1279. description: $i18n.t('Generating search query')
  1280. }
  1281. ];
  1282. messages = messages;
  1283. const prompt = userMessage.content;
  1284. let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
  1285. (error) => {
  1286. console.log(error);
  1287. return prompt;
  1288. }
  1289. );
  1290. if (!searchQuery) {
  1291. toast.warning($i18n.t('No search query generated'));
  1292. responseMessage.statusHistory.push({
  1293. done: true,
  1294. error: true,
  1295. action: 'web_search',
  1296. description: 'No search query generated'
  1297. });
  1298. messages = messages;
  1299. }
  1300. responseMessage.statusHistory.push({
  1301. done: false,
  1302. action: 'web_search',
  1303. description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
  1304. });
  1305. messages = messages;
  1306. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  1307. console.log(error);
  1308. toast.error(error);
  1309. return null;
  1310. });
  1311. if (results) {
  1312. responseMessage.statusHistory.push({
  1313. done: true,
  1314. action: 'web_search',
  1315. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  1316. query: searchQuery,
  1317. urls: results.filenames
  1318. });
  1319. if (responseMessage?.files ?? undefined === undefined) {
  1320. responseMessage.files = [];
  1321. }
  1322. responseMessage.files.push({
  1323. collection_name: results.collection_name,
  1324. name: searchQuery,
  1325. type: 'web_search_results',
  1326. urls: results.filenames
  1327. });
  1328. messages = messages;
  1329. } else {
  1330. responseMessage.statusHistory.push({
  1331. done: true,
  1332. error: true,
  1333. action: 'web_search',
  1334. description: 'No search results found'
  1335. });
  1336. messages = messages;
  1337. }
  1338. };
  1339. const getTags = async () => {
  1340. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1341. return [];
  1342. });
  1343. };
  1344. const saveChatHandler = async (_chatId) => {
  1345. if ($chatId == _chatId) {
  1346. if (!$temporaryChatEnabled) {
  1347. chat = await updateChatById(localStorage.token, _chatId, {
  1348. messages: messages,
  1349. history: history,
  1350. models: selectedModels,
  1351. params: params,
  1352. files: chatFiles
  1353. });
  1354. currentChatPage.set(1);
  1355. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1356. }
  1357. }
  1358. };
  1359. const mergeResponses = async (messageId, responses, _chatId) => {
  1360. console.log('mergeResponses', messageId, responses);
  1361. const message = history.messages[messageId];
  1362. const mergedResponse = {
  1363. status: true,
  1364. content: ''
  1365. };
  1366. message.merged = mergedResponse;
  1367. messages = messages;
  1368. try {
  1369. const [res, controller] = await generateMoACompletion(
  1370. localStorage.token,
  1371. message.model,
  1372. history.messages[message.parentId].content,
  1373. responses
  1374. );
  1375. if (res && res.ok && res.body) {
  1376. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  1377. for await (const update of textStream) {
  1378. const { value, done, citations, error, usage } = update;
  1379. if (error || done) {
  1380. break;
  1381. }
  1382. if (mergedResponse.content == '' && value == '\n') {
  1383. continue;
  1384. } else {
  1385. mergedResponse.content += value;
  1386. messages = messages;
  1387. }
  1388. if (autoScroll) {
  1389. scrollToBottom();
  1390. }
  1391. }
  1392. await saveChatHandler(_chatId);
  1393. } else {
  1394. console.error(res);
  1395. }
  1396. } catch (e) {
  1397. console.error(e);
  1398. }
  1399. };
  1400. </script>
<svelte:head>
	<!-- Tab title: truncated chat title + app name, or just the app name -->
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

<!-- Hidden audio element used for TTS playback -->
<audio id="audioElement" src="" style="display: none;" />

<!-- Confirmation dialog driven by backend events; resolves via eventCallback -->
<EventConfirmDialog
	bind:show={showEventConfirmation}
	title={eventConfirmationTitle}
	message={eventConfirmationMessage}
	input={eventConfirmationInput}
	inputPlaceholder={eventConfirmationInputPlaceholder}
	inputValue={eventConfirmationInputValue}
	on:confirm={(e) => {
		if (e.detail) {
			eventCallback(e.detail);
		} else {
			eventCallback(true);
		}
	}}
	on:cancel={() => {
		eventCallback(false);
	}}
/>

<!-- Voice-call overlay, bound to the first selected model -->
{#if $showCallOverlay}
	<CallOverlay
		{submitPrompt}
		{stopResponse}
		bind:files
		modelId={selectedModelIds?.at(0) ?? null}
		chatId={$chatId}
		{eventTarget}
	/>
{/if}

<!-- Render once an existing chat has loaded (or immediately for a new chat) -->
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="h-screen max-h-[100dvh] {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		{#if $settings?.backgroundImageUrl ?? null}
			<div
				class="absolute {$showSidebar
					? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
					: ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
				style="background-image: url({$settings.backgroundImageUrl}) "
			/>

			<!-- Gradient overlay keeps text readable over the background image -->
			<div
				class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
			/>
		{/if}

		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			bind:showControls
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>

		<!-- Dismissible announcement banners, shown only on an empty new chat -->
		{#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
			<div
				class="absolute top-[4.25rem] w-full {$showSidebar
					? 'md:max-w-[calc(100%-260px)]'
					: ''} {showControls ? 'lg:pr-[24rem]' : ''} z-20"
			>
				<div class=" flex flex-col gap-1 w-full">
					{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
						<Banner
							{banner}
							on:dismiss={(e) => {
								const bannerId = e.detail;

								<!-- Persist dismissal; prune ids of banners that no longer exist -->
								localStorage.setItem(
									'dismissedBannerIds',
									JSON.stringify(
										[
											bannerId,
											...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
										].filter((id) => $banners.find((b) => b.id === id))
									)
								);
							}}
						/>
					{/each}
				</div>
			</div>
		{/if}

		<div class="flex flex-col flex-auto z-10">
			<!-- Scrollable message area; scrolling near the bottom re-enables autoScroll -->
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10 scrollbar-hidden {showControls
					? 'lg:pr-[24rem]'
					: ''}"
				id="messages-container"
				bind:this={messagesContainerElement}
				on:scroll={(e) => {
					autoScroll =
						messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
						messagesContainerElement.clientHeight + 5;
				}}
			>
				<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
					<Messages
						chatId={$chatId}
						{selectedModels}
						{processing}
						bind:history
						bind:messages
						bind:autoScroll
						bind:prompt
						bottomPadding={files.length > 0}
						{sendPrompt}
						{continueGeneration}
						{regenerateResponse}
						{mergeResponses}
						{chatActionHandler}
					/>
				</div>
			</div>

			<div class={showControls ? 'lg:pr-[24rem]' : ''}>
				<MessageInput
					bind:files
					bind:prompt
					bind:autoScroll
					bind:selectedToolIds
					bind:webSearchEnabled
					bind:atSelectedModel
					availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
						const model = $models.find((m) => m.id === e);
						if (model?.info?.meta?.toolIds ?? false) {
							return [...new Set([...a, ...model.info.meta.toolIds])];
						}
						return a;
					}, [])}
					transparentBackground={$settings?.backgroundImageUrl ?? false}
					{selectedModels}
					{messages}
					{submitPrompt}
					{stopResponse}
				/>
			</div>
		</div>

		<!-- Side panel with per-chat controls, fed the resolved model objects -->
		<ChatControls
			models={selectedModelIds.reduce((a, e, i, arr) => {
				const model = $models.find((m) => m.id === e);
				if (model) {
					return [...a, model];
				}
				return a;
			}, [])}
			bind:show={showControls}
			bind:chatFiles
			bind:params
		/>
	</div>
{/if}