Chat.svelte

  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { PaneGroup, Pane, PaneResizer } from 'paneforge';
  6. import { getContext, onDestroy, onMount, tick } from 'svelte';
  7. import { goto } from '$app/navigation';
  8. import { page } from '$app/stores';
  9. import type { Unsubscriber, Writable } from 'svelte/store';
  10. import type { i18n as i18nType } from 'i18next';
  11. import { WEBUI_BASE_URL } from '$lib/constants';
  12. import {
  13. chatId,
  14. chats,
  15. config,
  16. type Model,
  17. models,
  18. settings,
  19. showSidebar,
  20. WEBUI_NAME,
  21. banners,
  22. user,
  23. socket,
  24. showControls,
  25. showCallOverlay,
  26. currentChatPage,
  27. temporaryChatEnabled,
  28. mobile,
  29. showOverview
  30. } from '$lib/stores';
  31. import {
  32. convertMessagesToHistory,
  33. copyToClipboard,
  34. getMessageContentParts,
  35. extractSentencesForAudio,
  36. promptTemplate,
  37. splitStream
  38. } from '$lib/utils';
  39. import { generateChatCompletion } from '$lib/apis/ollama';
  40. import {
  41. createNewChat,
  42. getChatById,
  43. getChatList,
  44. getTagsById,
  45. updateChatById
  46. } from '$lib/apis/chats';
  47. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  48. import { runWebSearch } from '$lib/apis/rag';
  49. import { createOpenAITextStream } from '$lib/apis/streaming';
  50. import { queryMemory } from '$lib/apis/memories';
  51. import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
  52. import {
  53. chatCompleted,
  54. generateTitle,
  55. generateSearchQuery,
  56. chatAction,
  57. generateMoACompletion
  58. } from '$lib/apis';
  59. import Banner from '../common/Banner.svelte';
  60. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  61. import Messages from '$lib/components/chat/Messages.svelte';
  62. import Navbar from '$lib/components/layout/Navbar.svelte';
  63. import ChatControls from './ChatControls.svelte';
  64. import EventConfirmDialog from '../common/ConfirmDialog.svelte';
  65. import EllipsisVertical from '../icons/EllipsisVertical.svelte';
  66. const i18n: Writable<i18nType> = getContext('i18n');
  67. export let chatIdProp = '';
  68. let loaded = false;
  69. const eventTarget = new EventTarget();
  70. let controlPane;
  71. let stopResponseFlag = false;
  72. let autoScroll = true;
  73. let processing = '';
  74. let messagesContainerElement: HTMLDivElement;
  75. let showEventConfirmation = false;
  76. let eventConfirmationTitle = '';
  77. let eventConfirmationMessage = '';
  78. let eventConfirmationInput = false;
  79. let eventConfirmationInputPlaceholder = '';
  80. let eventConfirmationInputValue = '';
  81. let eventCallback = null;
  82. let showModelSelector = true;
  83. let selectedModels = [''];
  84. let atSelectedModel: Model | undefined;
  85. let selectedModelIds = [];
  86. $: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;
  87. let selectedToolIds = [];
  88. let webSearchEnabled = false;
  89. let chat = null;
  90. let tags = [];
  91. let title = '';
  92. let prompt = '';
  93. let chatFiles = [];
  94. let files = [];
  95. let messages = [];
  96. let history = {
  97. messages: {},
  98. currentId: null
  99. };
  100. let params = {};
  101. let chatIdUnsubscriber: Unsubscriber | undefined;
  102. $: if (history.currentId !== null) {
  103. let _messages = [];
  104. let currentMessage = history.messages[history.currentId];
  105. while (currentMessage) {
  106. _messages.unshift({ ...currentMessage });
  107. currentMessage =
  108. currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
  109. }
  110. // This is most likely causing the performance issue
  111. messages = _messages;
  112. } else {
  113. messages = [];
  114. }
  115. $: if (chatIdProp) {
  116. (async () => {
  117. console.log(chatIdProp);
  118. if (chatIdProp && (await loadChat())) {
  119. await tick();
  120. loaded = true;
  121. window.setTimeout(() => scrollToBottom(), 0);
  122. const chatInput = document.getElementById('chat-textarea');
  123. chatInput?.focus();
  124. } else {
  125. await goto('/');
  126. }
  127. })();
  128. }
  129. const showMessage = async (message) => {
  130. const _chatId = JSON.parse(JSON.stringify($chatId));
  131. let _messageId = JSON.parse(JSON.stringify(message.id));
  132. let messageChildrenIds = history.messages[_messageId].childrenIds;
  133. while (messageChildrenIds.length !== 0) {
  134. _messageId = messageChildrenIds.at(-1);
  135. messageChildrenIds = history.messages[_messageId].childrenIds;
  136. }
  137. history.currentId = _messageId;
  138. await tick();
  139. await tick();
  140. await tick();
  141. const messageElement = document.getElementById(`message-${message.id}`);
  142. if (messageElement) {
  143. messageElement.scrollIntoView({ behavior: 'smooth' });
  144. }
  145. await tick();
  146. saveChatHandler(_chatId);
  147. };
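// Handle server-sent socket 'chat-events' for the active chat: status/citation updates, streamed
// message deltas, content replacement, UI actions, and confirmation/input dialogs.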
  148. const chatEventHandler = async (event, cb) => {
  149. if (event.chat_id === $chatId) {
  150. await tick();
  151. console.log(event);
  152. let message = history.messages[event.message_id];
  153. const type = event?.data?.type ?? null;
  154. const data = event?.data?.data ?? null;
  155. if (type === 'status') {
  156. if (message?.statusHistory) {
  157. message.statusHistory.push(data);
  158. } else {
  159. message.statusHistory = [data];
  160. }
  161. } else if (type === 'citation') {
  162. if (message?.citations) {
  163. message.citations.push(data);
  164. } else {
  165. message.citations = [data];
  166. }
  167. } else if (type === 'message') {
  168. message.content += data.content;
  169. } else if (type === 'replace') {
  170. message.content = data.content;
  171. } else if (type === 'action') {
  172. if (data.action === 'continue') {
  173. const continueButton = document.getElementById('continue-response-button');
  174. if (continueButton) {
  175. continueButton.click();
  176. }
  177. }
  178. } else if (type === 'confirmation') {
  179. eventCallback = cb;
  180. eventConfirmationInput = false;
  181. showEventConfirmation = true;
  182. eventConfirmationTitle = data.title;
  183. eventConfirmationMessage = data.message;
  184. } else if (type === 'input') {
  185. eventCallback = cb;
  186. eventConfirmationInput = true;
  187. showEventConfirmation = true;
  188. eventConfirmationTitle = data.title;
  189. eventConfirmationMessage = data.message;
  190. eventConfirmationInputPlaceholder = data.placeholder;
  191. eventConfirmationInputValue = data?.value ?? '';
  192. } else {
  193. console.log('Unknown message type', data);
  194. }
  195. messages = messages;
  196. }
  197. };
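// Handle window postMessage events from an embedding page (same origin only): set the prompt,
// submit the current prompt, or set and submit the provided text in one step.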
  198. const onMessageHandler = async (event: {
  199. origin: string;
  200. data: { type: string; text: string };
  201. }) => {
202. // Only accept messages from this window's origin (replace with your iframe's origin if embedding)
203. if (event.origin !== window.origin) {
204. return;
205. }
  206. if (event.data.type === 'input:prompt') {
  207. console.debug(event.data.text);
  208. const inputElement = document.getElementById('chat-textarea');
  209. if (inputElement) {
  210. prompt = event.data.text;
  211. inputElement.focus();
  212. }
  213. }
  214. if (event.data.type === 'action:submit') {
  215. console.debug(event.data.text);
  216. if (prompt !== '') {
  217. await tick();
  218. submitPrompt(prompt);
  219. }
  220. }
  221. if (event.data.type === 'input:prompt:submit') {
  222. console.debug(event.data.text);
  223. if (prompt !== '') {
  224. await tick();
  225. submitPrompt(event.data.text);
  226. }
  227. }
  228. };
  229. onMount(async () => {
  230. window.addEventListener('message', onMessageHandler);
  231. $socket?.on('chat-events', chatEventHandler);
  232. if (!$chatId) {
  233. chatIdUnsubscriber = chatId.subscribe(async (value) => {
  234. if (!value) {
  235. await initNewChat();
  236. }
  237. });
  238. } else {
  239. if ($temporaryChatEnabled) {
  240. await goto('/');
  241. }
  242. }
  243. showControls.subscribe(async (value) => {
  244. if (controlPane && !$mobile) {
  245. try {
  246. if (value) {
  247. const currentSize = controlPane.getSize();
  248. if (currentSize === 0) {
  249. const size = parseInt(localStorage?.chatControlsSize ?? '30');
  250. controlPane.resize(size ? size : 30);
  251. }
  252. } else {
  253. controlPane.resize(0);
  254. }
  255. } catch (e) {
  256. // ignore
  257. }
  258. }
  259. if (!value) {
  260. showCallOverlay.set(false);
  261. showOverview.set(false);
  262. }
  263. });
  264. });
  265. onDestroy(() => {
  266. chatIdUnsubscriber?.();
  267. window.removeEventListener('message', onMessageHandler);
268. $socket?.off('chat-events', chatEventHandler);
  269. });
  270. //////////////////////////
  271. // Web functions
  272. //////////////////////////
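// Reset state for a brand-new chat and apply URL query parameters (models, q, tool_ids,
// web-search, call), then reload user settings and focus the chat input.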
  273. const initNewChat = async () => {
  274. if ($page.url.pathname.includes('/c/')) {
  275. window.history.replaceState(history.state, '', `/`);
  276. }
  277. await chatId.set('');
  278. autoScroll = true;
  279. title = '';
  280. messages = [];
  281. history = {
  282. messages: {},
  283. currentId: null
  284. };
  285. chatFiles = [];
  286. params = {};
  287. if ($page.url.searchParams.get('models')) {
  288. selectedModels = $page.url.searchParams.get('models')?.split(',');
  289. } else if ($settings?.models) {
  290. selectedModels = $settings?.models;
  291. } else if ($config?.default_models) {
  292. console.log($config?.default_models.split(',') ?? '');
  293. selectedModels = $config?.default_models.split(',');
  294. } else {
  295. selectedModels = [''];
  296. }
  297. if ($page.url.searchParams.get('web-search') === 'true') {
  298. webSearchEnabled = true;
  299. }
  300. if ($page.url.searchParams.get('q')) {
  301. prompt = $page.url.searchParams.get('q') ?? '';
  302. selectedToolIds = ($page.url.searchParams.get('tool_ids') ?? '')
  303. .split(',')
  304. .map((id) => id.trim())
  305. .filter((id) => id);
  306. if (prompt) {
  307. await tick();
  308. submitPrompt(prompt);
  309. }
  310. }
  311. if ($page.url.searchParams.get('call') === 'true') {
  312. showCallOverlay.set(true);
  313. }
  314. selectedModels = selectedModels.map((modelId) =>
  315. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  316. );
  317. const userSettings = await getUserSettings(localStorage.token);
  318. if (userSettings) {
  319. settings.set(userSettings.ui);
  320. } else {
  321. settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  322. }
  323. const chatInput = document.getElementById('chat-textarea');
  324. setTimeout(() => chatInput?.focus(), 0);
  325. };
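// Load an existing chat by id and restore its models, history, title, params, and files;
// returns true on success, otherwise null/undefined so the caller can redirect.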
  326. const loadChat = async () => {
  327. chatId.set(chatIdProp);
  328. chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
  329. await goto('/');
  330. return null;
  331. });
  332. if (chat) {
  333. tags = await getTags();
  334. const chatContent = chat.chat;
  335. if (chatContent) {
  336. console.log(chatContent);
  337. selectedModels =
  338. (chatContent?.models ?? undefined) !== undefined
  339. ? chatContent.models
  340. : [chatContent.models ?? ''];
  341. history =
  342. (chatContent?.history ?? undefined) !== undefined
  343. ? chatContent.history
  344. : convertMessagesToHistory(chatContent.messages);
  345. title = chatContent.title;
  346. const userSettings = await getUserSettings(localStorage.token);
  347. if (userSettings) {
  348. await settings.set(userSettings.ui);
  349. } else {
  350. await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  351. }
  352. params = chatContent?.params ?? {};
  353. chatFiles = chatContent?.files ?? [];
  354. autoScroll = true;
  355. await tick();
  356. if (messages.length > 0) {
  357. history.messages[messages.at(-1).id].done = true;
  358. }
  359. await tick();
  360. return true;
  361. } else {
  362. return null;
  363. }
  364. }
  365. };
  366. const scrollToBottom = async () => {
  367. await tick();
  368. if (messagesContainerElement) {
  369. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  370. }
  371. };
  372. const createMessagesList = (responseMessageId) => {
  373. const message = history.messages[responseMessageId];
  374. if (message.parentId) {
  375. return [...createMessagesList(message.parentId), message];
  376. } else {
  377. return [message];
  378. }
  379. };
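// Render any pending Mermaid diagrams, notify the backend that the completion finished,
// merge server-side message edits back into local history, and persist the chat.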
  380. const chatCompletedHandler = async (chatId, modelId, responseMessageId, messages) => {
  381. await mermaid.run({
  382. querySelector: '.mermaid'
  383. });
  384. const res = await chatCompleted(localStorage.token, {
  385. model: modelId,
  386. messages: messages.map((m) => ({
  387. id: m.id,
  388. role: m.role,
  389. content: m.content,
  390. info: m.info ? m.info : undefined,
  391. timestamp: m.timestamp
  392. })),
  393. chat_id: chatId,
  394. session_id: $socket?.id,
  395. id: responseMessageId
  396. }).catch((error) => {
  397. toast.error(error);
  398. messages.at(-1).error = { content: error };
  399. return null;
  400. });
  401. if (res !== null) {
  402. // Update chat history with the new messages
  403. for (const message of res.messages) {
  404. history.messages[message.id] = {
  405. ...history.messages[message.id],
  406. ...(history.messages[message.id].content !== message.content
  407. ? { originalContent: history.messages[message.id].content }
  408. : {}),
  409. ...message
  410. };
  411. }
  412. }
  413. if ($chatId == chatId) {
  414. if (!$temporaryChatEnabled) {
  415. chat = await updateChatById(localStorage.token, chatId, {
  416. models: selectedModels,
  417. messages: messages,
  418. history: history,
  419. params: params,
  420. files: chatFiles
  421. });
  422. currentChatPage.set(1);
  423. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  424. }
  425. }
  426. };
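// Invoke a registered backend action (actionId) on the chat, merge the returned message
// updates into history, and persist the chat if it is still the active one.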
  427. const chatActionHandler = async (chatId, actionId, modelId, responseMessageId, event = null) => {
  428. const res = await chatAction(localStorage.token, actionId, {
  429. model: modelId,
  430. messages: messages.map((m) => ({
  431. id: m.id,
  432. role: m.role,
  433. content: m.content,
  434. info: m.info ? m.info : undefined,
  435. timestamp: m.timestamp
  436. })),
  437. ...(event ? { event: event } : {}),
  438. chat_id: chatId,
  439. session_id: $socket?.id,
  440. id: responseMessageId
  441. }).catch((error) => {
  442. toast.error(error);
  443. messages.at(-1).error = { content: error };
  444. return null;
  445. });
  446. if (res !== null) {
  447. // Update chat history with the new messages
  448. for (const message of res.messages) {
  449. history.messages[message.id] = {
  450. ...history.messages[message.id],
  451. ...(history.messages[message.id].content !== message.content
  452. ? { originalContent: history.messages[message.id].content }
  453. : {}),
  454. ...message
  455. };
  456. }
  457. }
  458. if ($chatId == chatId) {
  459. if (!$temporaryChatEnabled) {
  460. chat = await updateChatById(localStorage.token, chatId, {
  461. models: selectedModels,
  462. messages: messages,
  463. history: history,
  464. params: params,
  465. files: chatFiles
  466. });
  467. currentChatPage.set(1);
  468. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  469. }
  470. }
  471. };
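// Emit a 'usage' heartbeat for the active model once per second while a response is in flight;
// the caller clears the returned interval when streaming finishes.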
  472. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  473. return setInterval(() => {
  474. $socket?.emit('usage', {
  475. action: 'chat',
  476. model: modelId,
  477. chat_id: chatId
  478. });
  479. }, 1000);
  480. };
  481. //////////////////////////
  482. // Chat functions
  483. //////////////////////////
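// Validate the prompt (model selected, previous response done, uploads processed, file-count
// limit), append the user message to history, and hand off to sendPrompt.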
  484. const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
  485. let _responses = [];
  486. console.log('submitPrompt', $chatId);
  487. selectedModels = selectedModels.map((modelId) =>
  488. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  489. );
  490. if (selectedModels.includes('')) {
  491. toast.error($i18n.t('Model not selected'));
  492. } else if (messages.length != 0 && messages.at(-1).done != true) {
  493. // Response not done
  494. console.log('wait');
  495. } else if (messages.length != 0 && messages.at(-1).error) {
  496. // Error in response
  497. toast.error(
  498. $i18n.t(
  499. `Oops! There was an error in the previous response. Please try again or contact admin.`
  500. )
  501. );
  502. } else if (
  503. files.length > 0 &&
  504. files.filter((file) => file.type !== 'image' && file.status !== 'processed').length > 0
  505. ) {
  506. // Upload not done
  507. toast.error(
  508. $i18n.t(
  509. `Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
  510. )
  511. );
  512. } else if (
  513. ($config?.file?.max_count ?? null) !== null &&
  514. files.length + chatFiles.length > $config?.file?.max_count
  515. ) {
  516. console.log(chatFiles.length, files.length);
  517. toast.error(
  518. $i18n.t(`You can only chat with a maximum of {{maxCount}} file(s) at a time.`, {
  519. maxCount: $config?.file?.max_count
  520. })
  521. );
  522. } else {
  523. // Reset chat input textarea
  524. const chatTextAreaElement = document.getElementById('chat-textarea');
  525. if (chatTextAreaElement) {
  526. chatTextAreaElement.value = '';
  527. chatTextAreaElement.style.height = '';
  528. }
  529. const _files = JSON.parse(JSON.stringify(files));
  530. chatFiles.push(..._files.filter((item) => ['doc', 'file', 'collection'].includes(item.type)));
  531. chatFiles = chatFiles.filter(
  532. // Remove duplicates
  533. (item, index, array) =>
  534. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  535. );
  536. files = [];
  537. prompt = '';
  538. // Create user message
  539. let userMessageId = uuidv4();
  540. let userMessage = {
  541. id: userMessageId,
  542. parentId: messages.length !== 0 ? messages.at(-1).id : null,
  543. childrenIds: [],
  544. role: 'user',
  545. content: userPrompt,
  546. files: _files.length > 0 ? _files : undefined,
  547. timestamp: Math.floor(Date.now() / 1000), // Unix epoch
  548. models: selectedModels
  549. };
  550. // Add message to history and Set currentId to messageId
  551. history.messages[userMessageId] = userMessage;
  552. history.currentId = userMessageId;
  553. // Append messageId to childrenIds of parent message
  554. if (messages.length !== 0) {
  555. history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
  556. }
  557. // Wait until history/message have been updated
  558. await tick();
  559. _responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
  560. }
  561. return _responses;
  562. };
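// Create a placeholder assistant message per selected model, create the chat on the first
// exchange, optionally run web search, emit a usage heartbeat, and dispatch each model to the
// Ollama or OpenAI sender.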
  563. const sendPrompt = async (
  564. prompt: string,
  565. parentId: string,
  566. { modelId = null, modelIdx = null, newChat = false } = {}
  567. ) => {
  568. let _responses: string[] = [];
  569. // If modelId is provided, use it, else use selected model
  570. let selectedModelIds = modelId
  571. ? [modelId]
  572. : atSelectedModel !== undefined
  573. ? [atSelectedModel.id]
  574. : selectedModels;
  575. // Create response messages for each selected model
  576. const responseMessageIds: Record<PropertyKey, string> = {};
  577. for (const [_modelIdx, modelId] of selectedModelIds.entries()) {
  578. const model = $models.filter((m) => m.id === modelId).at(0);
  579. if (model) {
  580. let responseMessageId = uuidv4();
  581. let responseMessage = {
  582. parentId: parentId,
  583. id: responseMessageId,
  584. childrenIds: [],
  585. role: 'assistant',
  586. content: '',
  587. model: model.id,
  588. modelName: model.name ?? model.id,
  589. modelIdx: modelIdx ? modelIdx : _modelIdx,
  590. userContext: null,
  591. timestamp: Math.floor(Date.now() / 1000) // Unix epoch
  592. };
  593. // Add message to history and Set currentId to messageId
  594. history.messages[responseMessageId] = responseMessage;
  595. history.currentId = responseMessageId;
  596. // Append messageId to childrenIds of parent message
  597. if (parentId !== null) {
  598. history.messages[parentId].childrenIds = [
  599. ...history.messages[parentId].childrenIds,
  600. responseMessageId
  601. ];
  602. }
  603. responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`] = responseMessageId;
  604. }
  605. }
  606. await tick();
  607. // Create new chat if only one message in messages
  608. if (newChat && messages.length == 2) {
  609. if (!$temporaryChatEnabled) {
  610. chat = await createNewChat(localStorage.token, {
  611. id: $chatId,
  612. title: $i18n.t('New Chat'),
  613. models: selectedModels,
  614. system: $settings.system ?? undefined,
  615. params: params,
  616. messages: messages,
  617. history: history,
  618. tags: [],
  619. timestamp: Date.now()
  620. });
  621. currentChatPage.set(1);
  622. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  623. await chatId.set(chat.id);
  624. } else {
  625. await chatId.set('local');
  626. }
  627. await tick();
  628. }
  629. const _chatId = JSON.parse(JSON.stringify($chatId));
  630. await Promise.all(
  631. selectedModelIds.map(async (modelId, _modelIdx) => {
  632. console.log('modelId', modelId);
  633. const model = $models.filter((m) => m.id === modelId).at(0);
  634. if (model) {
  635. // If there are image files, check if model is vision capable
  636. const hasImages = messages.some((message) =>
  637. message.files?.some((file) => file.type === 'image')
  638. );
  639. if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
  640. toast.error(
  641. $i18n.t('Model {{modelName}} is not vision capable', {
  642. modelName: model.name ?? model.id
  643. })
  644. );
  645. }
  646. let responseMessageId =
  647. responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`];
  648. let responseMessage = history.messages[responseMessageId];
  649. let userContext = null;
  650. if ($settings?.memory ?? false) {
  651. if (userContext === null) {
  652. const res = await queryMemory(localStorage.token, prompt).catch((error) => {
  653. toast.error(error);
  654. return null;
  655. });
  656. if (res) {
  657. if (res.documents[0].length > 0) {
  658. userContext = res.documents[0].reduce((acc, doc, index) => {
  659. const createdAtTimestamp = res.metadatas[0][index].created_at;
  660. const createdAtDate = new Date(createdAtTimestamp * 1000)
  661. .toISOString()
  662. .split('T')[0];
  663. return `${acc}${index + 1}. [${createdAtDate}]. ${doc}\n`;
  664. }, '');
  665. }
  666. console.log(userContext);
  667. }
  668. }
  669. }
  670. responseMessage.userContext = userContext;
  671. const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
  672. scrollToBottom();
  673. if (webSearchEnabled) {
  674. await getWebSearchResults(model.id, parentId, responseMessageId);
  675. }
  676. let _response = null;
  677. if (model?.owned_by === 'openai') {
  678. _response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
  679. } else if (model) {
  680. _response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
  681. }
  682. _responses.push(_response);
  683. if (chatEventEmitter) clearInterval(chatEventEmitter);
  684. } else {
  685. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  686. }
  687. })
  688. );
  689. currentChatPage.set(1);
  690. chats.set(await getChatList(localStorage.token, $currentChatPage));
  691. return _responses;
  692. };
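// Stream a completion from the Ollama backend: build the message body (system prompt, images,
// knowledge files), stream deltas into the response message, emit TTS sentence events, and
// save the chat and set its title when finished.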
  693. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  694. let _response: string | null = null;
  695. const responseMessage = history.messages[responseMessageId];
  696. const userMessage = history.messages[responseMessage.parentId];
  697. // Wait until history/message have been updated
  698. await tick();
  699. // Scroll down
  700. scrollToBottom();
  701. const messagesBody = [
  702. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  703. ? {
  704. role: 'system',
  705. content: `${promptTemplate(
  706. params?.system ?? $settings?.system ?? '',
  707. $user.name,
  708. $settings?.userLocation
  709. ? await getAndUpdateUserLocation(localStorage.token)
  710. : undefined
  711. )}${
  712. (responseMessage?.userContext ?? null)
  713. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  714. : ''
  715. }`
  716. }
  717. : undefined,
  718. ...messages
  719. ]
  720. .filter((message) => message?.content?.trim())
  721. .map((message) => {
  722. // Prepare the base message object
  723. const baseMessage = {
  724. role: message.role,
  725. content: message.content
  726. };
  727. // Extract and format image URLs if any exist
  728. const imageUrls = message.files
  729. ?.filter((file) => file.type === 'image')
  730. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  731. // Add images array only if it contains elements
  732. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  733. baseMessage.images = imageUrls;
  734. }
  735. return baseMessage;
  736. });
  737. let lastImageIndex = -1;
  738. // Find the index of the last object with images
  739. messagesBody.forEach((item, index) => {
  740. if (item.images) {
  741. lastImageIndex = index;
  742. }
  743. });
  744. // Remove images from all but the last one
  745. messagesBody.forEach((item, index) => {
  746. if (index !== lastImageIndex) {
  747. delete item.images;
  748. }
  749. });
  750. let files = JSON.parse(JSON.stringify(chatFiles));
  751. if (model?.info?.meta?.knowledge ?? false) {
  752. // Only initialize and add status if knowledge exists
  753. responseMessage.statusHistory = [
  754. {
  755. action: 'knowledge_search',
  756. description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
  757. searchQuery: userMessage.content
  758. }),
  759. done: false
  760. }
  761. ];
  762. files.push(...model.info.meta.knowledge);
  763. messages = messages; // Trigger Svelte update
  764. }
  765. files.push(
  766. ...(userMessage?.files ?? []).filter((item) =>
  767. ['doc', 'file', 'collection'].includes(item.type)
  768. ),
  769. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  770. );
  771. scrollToBottom();
  772. eventTarget.dispatchEvent(
  773. new CustomEvent('chat:start', {
  774. detail: {
  775. id: responseMessageId
  776. }
  777. })
  778. );
  779. await tick();
  780. const stream = $settings?.streamResponse ?? true;
  781. const [res, controller] = await generateChatCompletion(localStorage.token, {
  782. stream: stream,
  783. model: model.id,
  784. messages: messagesBody,
  785. options: {
  786. ...{ ...($settings?.params ?? {}), ...params },
  787. stop:
  788. (params?.stop ?? $settings?.params?.stop ?? undefined)
  789. ? (params?.stop.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
  790. (str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  791. )
  792. : undefined,
  793. num_predict: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  794. repeat_penalty:
  795. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined
  796. },
  797. format: $settings.requestFormat ?? undefined,
  798. keep_alive: $settings.keepAlive ?? undefined,
  799. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  800. files: files.length > 0 ? files : undefined,
  801. session_id: $socket?.id,
  802. chat_id: $chatId,
  803. id: responseMessageId
  804. });
  805. if (res && res.ok) {
  806. if (!stream) {
  807. const response = await res.json();
  808. console.log(response);
  809. responseMessage.content = response.message.content;
  810. responseMessage.info = {
  811. eval_count: response.eval_count,
  812. eval_duration: response.eval_duration,
  813. load_duration: response.load_duration,
  814. prompt_eval_count: response.prompt_eval_count,
  815. prompt_eval_duration: response.prompt_eval_duration,
  816. total_duration: response.total_duration
  817. };
  818. responseMessage.done = true;
  819. } else {
  820. console.log('controller', controller);
  821. const reader = res.body
  822. .pipeThrough(new TextDecoderStream())
  823. .pipeThrough(splitStream('\n'))
  824. .getReader();
  825. while (true) {
  826. const { value, done } = await reader.read();
  827. if (done || stopResponseFlag || _chatId !== $chatId) {
  828. responseMessage.done = true;
  829. messages = messages;
  830. if (stopResponseFlag) {
  831. controller.abort('User: Stop Response');
  832. }
  833. _response = responseMessage.content;
  834. break;
  835. }
  836. try {
  837. let lines = value.split('\n');
  838. for (const line of lines) {
  839. if (line !== '') {
  840. console.log(line);
  841. let data = JSON.parse(line);
  842. if ('citations' in data) {
  843. responseMessage.citations = data.citations;
  844. // Only remove status if it was initially set
  845. if (model?.info?.meta?.knowledge ?? false) {
  846. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  847. (status) => status.action !== 'knowledge_search'
  848. );
  849. }
  850. continue;
  851. }
  852. if ('detail' in data) {
  853. throw data;
  854. }
  855. if (data.done == false) {
  856. if (responseMessage.content == '' && data.message.content == '\n') {
  857. continue;
  858. } else {
  859. responseMessage.content += data.message.content;
  860. if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
  861. navigator.vibrate(5);
  862. }
  863. const messageContentParts = getMessageContentParts(
  864. responseMessage.content,
  865. $config?.audio?.tts?.split_on ?? 'punctuation'
  866. );
  867. messageContentParts.pop();
  868. // dispatch only last sentence and make sure it hasn't been dispatched before
  869. if (
  870. messageContentParts.length > 0 &&
  871. messageContentParts[messageContentParts.length - 1] !==
  872. responseMessage.lastSentence
  873. ) {
  874. responseMessage.lastSentence =
  875. messageContentParts[messageContentParts.length - 1];
  876. eventTarget.dispatchEvent(
  877. new CustomEvent('chat', {
  878. detail: {
  879. id: responseMessageId,
  880. content: messageContentParts[messageContentParts.length - 1]
  881. }
  882. })
  883. );
  884. }
  885. messages = messages;
  886. }
  887. } else {
  888. responseMessage.done = true;
  889. if (responseMessage.content == '') {
  890. responseMessage.error = {
  891. code: 400,
892. content: `Oops! No text was generated from Ollama. Please try again.`
  893. };
  894. }
  895. responseMessage.context = data.context ?? null;
  896. responseMessage.info = {
  897. total_duration: data.total_duration,
  898. load_duration: data.load_duration,
  899. sample_count: data.sample_count,
  900. sample_duration: data.sample_duration,
  901. prompt_eval_count: data.prompt_eval_count,
  902. prompt_eval_duration: data.prompt_eval_duration,
  903. eval_count: data.eval_count,
  904. eval_duration: data.eval_duration
  905. };
  906. messages = messages;
  907. if ($settings.notificationEnabled && !document.hasFocus()) {
  908. const notification = new Notification(`${model.id}`, {
  909. body: responseMessage.content,
  910. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  911. });
  912. }
  913. if ($settings?.responseAutoCopy ?? false) {
  914. copyToClipboard(responseMessage.content);
  915. }
  916. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  917. await tick();
  918. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  919. }
  920. }
  921. }
  922. }
  923. } catch (error) {
  924. console.log(error);
  925. if ('detail' in error) {
  926. toast.error(error.detail);
  927. }
  928. break;
  929. }
  930. if (autoScroll) {
  931. scrollToBottom();
  932. }
  933. }
  934. }
  935. await chatCompletedHandler(
  936. _chatId,
  937. model.id,
  938. responseMessageId,
  939. createMessagesList(responseMessageId)
  940. );
  941. } else {
  942. if (res !== null) {
  943. const error = await res.json();
  944. console.log(error);
  945. if ('detail' in error) {
  946. toast.error(error.detail);
  947. responseMessage.error = { content: error.detail };
  948. } else {
  949. toast.error(error.error);
  950. responseMessage.error = { content: error.error };
  951. }
  952. } else {
  953. toast.error(
  954. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  955. );
  956. responseMessage.error = {
  957. content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  958. provider: 'Ollama'
  959. })
  960. };
  961. }
  962. responseMessage.done = true;
  963. if (responseMessage.statusHistory) {
  964. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  965. (status) => status.action !== 'knowledge_search'
  966. );
  967. }
  968. messages = messages;
  969. }
  970. await saveChatHandler(_chatId);
  971. stopResponseFlag = false;
  972. await tick();
  973. let lastMessageContentPart =
  974. getMessageContentParts(
  975. responseMessage.content,
  976. $config?.audio?.tts?.split_on ?? 'punctuation'
  977. )?.at(-1) ?? '';
  978. if (lastMessageContentPart) {
  979. eventTarget.dispatchEvent(
  980. new CustomEvent('chat', {
  981. detail: { id: responseMessageId, content: lastMessageContentPart }
  982. })
  983. );
  984. }
  985. eventTarget.dispatchEvent(
  986. new CustomEvent('chat:finish', {
  987. detail: {
  988. id: responseMessageId,
  989. content: responseMessage.content
  990. }
  991. })
  992. );
  993. if (autoScroll) {
  994. scrollToBottom();
  995. }
  996. if (messages.length == 2 && messages.at(1).content !== '' && selectedModels[0] === model.id) {
  997. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  998. const _title = await generateChatTitle(userPrompt);
  999. await setChatTitle(_chatId, _title);
  1000. }
  1001. return _response;
  1002. };
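// Same flow for OpenAI-compatible backends: build multimodal message content, parse the SSE
// text stream, and handle usage, citations, and errors along the way.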
  1003. const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
  1004. let _response = null;
  1005. const responseMessage = history.messages[responseMessageId];
  1006. const userMessage = history.messages[responseMessage.parentId];
  1007. let files = JSON.parse(JSON.stringify(chatFiles));
  1008. if (model?.info?.meta?.knowledge ?? false) {
  1009. // Only initialize and add status if knowledge exists
  1010. responseMessage.statusHistory = [
  1011. {
  1012. action: 'knowledge_search',
  1013. description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
  1014. searchQuery: userMessage.content
  1015. }),
  1016. done: false
  1017. }
  1018. ];
  1019. files.push(...model.info.meta.knowledge);
  1020. messages = messages; // Trigger Svelte update
  1021. }
  1022. files.push(
  1023. ...(userMessage?.files ?? []).filter((item) =>
  1024. ['doc', 'file', 'collection'].includes(item.type)
  1025. ),
  1026. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  1027. );
  1028. scrollToBottom();
  1029. eventTarget.dispatchEvent(
  1030. new CustomEvent('chat:start', {
  1031. detail: {
  1032. id: responseMessageId
  1033. }
  1034. })
  1035. );
  1036. await tick();
  1037. try {
  1038. const stream = $settings?.streamResponse ?? true;
  1039. const [res, controller] = await generateOpenAIChatCompletion(
  1040. localStorage.token,
  1041. {
  1042. stream: stream,
  1043. model: model.id,
  1044. ...(stream && (model.info?.meta?.capabilities?.usage ?? false)
  1045. ? {
  1046. stream_options: {
  1047. include_usage: true
  1048. }
  1049. }
  1050. : {}),
  1051. messages: [
  1052. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  1053. ? {
  1054. role: 'system',
  1055. content: `${promptTemplate(
  1056. params?.system ?? $settings?.system ?? '',
  1057. $user.name,
  1058. $settings?.userLocation
  1059. ? await getAndUpdateUserLocation(localStorage.token)
  1060. : undefined
  1061. )}${
  1062. (responseMessage?.userContext ?? null)
  1063. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  1064. : ''
  1065. }`
  1066. }
  1067. : undefined,
  1068. ...messages
  1069. ]
  1070. .filter((message) => message?.content?.trim())
  1071. .map((message, idx, arr) => ({
  1072. role: message.role,
  1073. ...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
  1074. message.role === 'user'
  1075. ? {
  1076. content: [
  1077. {
  1078. type: 'text',
  1079. text:
  1080. arr.length - 1 !== idx
  1081. ? message.content
  1082. : (message?.raContent ?? message.content)
  1083. },
  1084. ...message.files
  1085. .filter((file) => file.type === 'image')
  1086. .map((file) => ({
  1087. type: 'image_url',
  1088. image_url: {
  1089. url: file.url
  1090. }
  1091. }))
  1092. ]
  1093. }
  1094. : {
  1095. content:
  1096. arr.length - 1 !== idx
  1097. ? message.content
  1098. : (message?.raContent ?? message.content)
  1099. })
  1100. })),
  1101. seed: params?.seed ?? $settings?.params?.seed ?? undefined,
  1102. stop:
  1103. (params?.stop ?? $settings?.params?.stop ?? undefined)
  1104. ? (params?.stop.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
  1105. (str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  1106. )
  1107. : undefined,
  1108. temperature: params?.temperature ?? $settings?.params?.temperature ?? undefined,
  1109. top_p: params?.top_p ?? $settings?.params?.top_p ?? undefined,
  1110. frequency_penalty:
  1111. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined,
  1112. max_tokens: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  1113. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  1114. files: files.length > 0 ? files : undefined,
  1115. session_id: $socket?.id,
  1116. chat_id: $chatId,
  1117. id: responseMessageId
  1118. },
  1119. `${WEBUI_BASE_URL}/api`
  1120. );
  1121. // Wait until history/message have been updated
  1122. await tick();
  1123. scrollToBottom();
  1124. if (res && res.ok && res.body) {
  1125. if (!stream) {
  1126. const response = await res.json();
  1127. console.log(response);
  1128. responseMessage.content = response.choices[0].message.content;
  1129. responseMessage.info = { ...response.usage, openai: true };
  1130. responseMessage.done = true;
  1131. } else {
  1132. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  1133. for await (const update of textStream) {
  1134. const { value, done, citations, error, usage } = update;
  1135. if (error) {
  1136. await handleOpenAIError(error, null, model, responseMessage);
  1137. break;
  1138. }
  1139. if (done || stopResponseFlag || _chatId !== $chatId) {
  1140. responseMessage.done = true;
  1141. messages = messages;
  1142. if (stopResponseFlag) {
  1143. controller.abort('User: Stop Response');
  1144. }
  1145. _response = responseMessage.content;
  1146. break;
  1147. }
  1148. if (usage) {
  1149. responseMessage.info = { ...usage, openai: true };
  1150. }
  1151. if (citations) {
  1152. responseMessage.citations = citations;
  1153. // Only remove status if it was initially set
  1154. if (model?.info?.meta?.knowledge ?? false) {
  1155. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  1156. (status) => status.action !== 'knowledge_search'
  1157. );
  1158. }
  1159. continue;
  1160. }
  1161. if (responseMessage.content == '' && value == '\n') {
  1162. continue;
  1163. } else {
  1164. responseMessage.content += value;
  1165. if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
  1166. navigator.vibrate(5);
  1167. }
  1168. const messageContentParts = getMessageContentParts(
  1169. responseMessage.content,
  1170. $config?.audio?.tts?.split_on ?? 'punctuation'
  1171. );
  1172. messageContentParts.pop();
  1173. // dispatch only last sentence and make sure it hasn't been dispatched before
  1174. if (
  1175. messageContentParts.length > 0 &&
  1176. messageContentParts[messageContentParts.length - 1] !== responseMessage.lastSentence
  1177. ) {
  1178. responseMessage.lastSentence = messageContentParts[messageContentParts.length - 1];
  1179. eventTarget.dispatchEvent(
  1180. new CustomEvent('chat', {
  1181. detail: {
  1182. id: responseMessageId,
  1183. content: messageContentParts[messageContentParts.length - 1]
  1184. }
  1185. })
  1186. );
  1187. }
  1188. messages = messages;
  1189. }
  1190. if (autoScroll) {
  1191. scrollToBottom();
  1192. }
  1193. }
  1194. }
  1195. await chatCompletedHandler(
  1196. _chatId,
  1197. model.id,
  1198. responseMessageId,
  1199. createMessagesList(responseMessageId)
  1200. );
  1201. if ($settings.notificationEnabled && !document.hasFocus()) {
  1202. const notification = new Notification(`${model.id}`, {
  1203. body: responseMessage.content,
  1204. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  1205. });
  1206. }
  1207. if ($settings.responseAutoCopy) {
  1208. copyToClipboard(responseMessage.content);
  1209. }
  1210. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  1211. await tick();
  1212. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  1213. }
  1214. } else {
  1215. await handleOpenAIError(null, res, model, responseMessage);
  1216. }
  1217. } catch (error) {
  1218. await handleOpenAIError(error, null, model, responseMessage);
  1219. }
  1220. await saveChatHandler(_chatId);
  1221. messages = messages;
  1222. stopResponseFlag = false;
  1223. await tick();
  1224. let lastMessageContentPart =
  1225. getMessageContentParts(
  1226. responseMessage.content,
  1227. $config?.audio?.tts?.split_on ?? 'punctuation'
  1228. )?.at(-1) ?? '';
  1229. if (lastMessageContentPart) {
  1230. eventTarget.dispatchEvent(
  1231. new CustomEvent('chat', {
  1232. detail: { id: responseMessageId, content: lastMessageContentPart }
  1233. })
  1234. );
  1235. }
  1236. eventTarget.dispatchEvent(
  1237. new CustomEvent('chat:finish', {
  1238. detail: {
  1239. id: responseMessageId,
  1240. content: responseMessage.content
  1241. }
  1242. })
  1243. );
  1244. if (autoScroll) {
  1245. scrollToBottom();
  1246. }
  1247. if (messages.length == 2 && selectedModels[0] === model.id) {
  1248. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  1249. const _title = await generateChatTitle(userPrompt);
  1250. await setChatTitle(_chatId, _title);
  1251. }
  1252. return _response;
  1253. };
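// Normalize OpenAI-style error payloads (detail, error.message, or message), show a toast,
// and mark the response message as failed.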
  1254. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  1255. let errorMessage = '';
  1256. let innerError;
  1257. if (error) {
  1258. innerError = error;
  1259. } else if (res !== null) {
  1260. innerError = await res.json();
  1261. }
  1262. console.error(innerError);
  1263. if ('detail' in innerError) {
  1264. toast.error(innerError.detail);
  1265. errorMessage = innerError.detail;
  1266. } else if ('error' in innerError) {
  1267. if ('message' in innerError.error) {
  1268. toast.error(innerError.error.message);
  1269. errorMessage = innerError.error.message;
  1270. } else {
  1271. toast.error(innerError.error);
  1272. errorMessage = innerError.error;
  1273. }
  1274. } else if ('message' in innerError) {
  1275. toast.error(innerError.message);
  1276. errorMessage = innerError.message;
  1277. }
  1278. responseMessage.error = {
  1279. content:
  1280. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  1281. provider: model.name ?? model.id
  1282. }) +
  1283. '\n' +
  1284. errorMessage
  1285. };
  1286. responseMessage.done = true;
  1287. if (responseMessage.statusHistory) {
  1288. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  1289. (status) => status.action !== 'knowledge_search'
  1290. );
  1291. }
  1292. messages = messages;
  1293. };
  1294. const stopResponse = () => {
  1295. stopResponseFlag = true;
  1296. console.log('stopResponse');
  1297. };
  1298. const regenerateResponse = async (message) => {
  1299. console.log('regenerateResponse');
  1300. if (messages.length != 0) {
  1301. let userMessage = history.messages[message.parentId];
  1302. let userPrompt = userMessage.content;
  1303. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  1304. // If user message has only one model selected, sendPrompt automatically selects it for regeneration
  1305. await sendPrompt(userPrompt, userMessage.id);
  1306. } else {
  1307. // If there are multiple models selected, use the model of the response message for regeneration
  1308. // e.g. many model chat
  1309. await sendPrompt(userPrompt, userMessage.id, {
  1310. modelId: message.model,
  1311. modelIdx: message.modelIdx
  1312. });
  1313. }
  1314. }
  1315. };
  1316. const continueGeneration = async () => {
  1317. console.log('continueGeneration');
  1318. const _chatId = JSON.parse(JSON.stringify($chatId));
  1319. if (messages.length != 0 && messages.at(-1).done == true) {
  1320. const responseMessage = history.messages[history.currentId];
  1321. responseMessage.done = false;
  1322. await tick();
  1323. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  1324. if (model) {
  1325. if (model?.owned_by === 'openai') {
  1326. await sendPromptOpenAI(
  1327. model,
  1328. history.messages[responseMessage.parentId].content,
  1329. responseMessage.id,
  1330. _chatId
  1331. );
  1332. } else
  1333. await sendPromptOllama(
  1334. model,
  1335. history.messages[responseMessage.parentId].content,
  1336. responseMessage.id,
  1337. _chatId
  1338. );
1339. } else {
1340. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId: responseMessage.model }));
1341. }
1342. }
  1343. };
  1344. const generateChatTitle = async (userPrompt) => {
  1345. if ($settings?.title?.auto ?? true) {
  1346. const title = await generateTitle(
  1347. localStorage.token,
  1348. selectedModels[0],
  1349. userPrompt,
  1350. $chatId
  1351. ).catch((error) => {
  1352. console.error(error);
  1353. return 'New Chat';
  1354. });
  1355. return title;
  1356. } else {
  1357. return `${userPrompt}`;
  1358. }
  1359. };
  1360. const setChatTitle = async (_chatId, _title) => {
  1361. if (_chatId === $chatId) {
  1362. title = _title;
  1363. }
  1364. if (!$temporaryChatEnabled) {
  1365. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1366. currentChatPage.set(1);
  1367. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1368. }
  1369. };
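// Generate a search query from the conversation, run the web search, and attach the results
// and status updates to the response message.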
  1370. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  1371. const responseMessage = history.messages[responseId];
  1372. const userMessage = history.messages[parentId];
  1373. responseMessage.statusHistory = [
  1374. {
  1375. done: false,
  1376. action: 'web_search',
  1377. description: $i18n.t('Generating search query')
  1378. }
  1379. ];
  1380. messages = messages;
  1381. const prompt = userMessage.content;
  1382. let searchQuery = await generateSearchQuery(
  1383. localStorage.token,
  1384. model,
  1385. messages.filter((message) => message?.content?.trim()),
  1386. prompt
  1387. ).catch((error) => {
  1388. console.log(error);
  1389. return prompt;
  1390. });
  1391. if (!searchQuery || searchQuery == '') {
  1392. responseMessage.statusHistory.push({
  1393. done: true,
  1394. error: true,
  1395. action: 'web_search',
  1396. description: $i18n.t('No search query generated')
  1397. });
  1398. messages = messages;
  1399. return;
  1400. }
  1401. responseMessage.statusHistory.push({
  1402. done: false,
  1403. action: 'web_search',
  1404. description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
  1405. });
  1406. messages = messages;
  1407. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  1408. console.log(error);
  1409. toast.error(error);
  1410. return null;
  1411. });
  1412. if (results) {
  1413. responseMessage.statusHistory.push({
  1414. done: true,
  1415. action: 'web_search',
  1416. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  1417. query: searchQuery,
  1418. urls: results.filenames
  1419. });
1420. if ((responseMessage?.files ?? undefined) === undefined) {
  1421. responseMessage.files = [];
  1422. }
  1423. responseMessage.files.push({
  1424. collection_name: results.collection_name,
  1425. name: searchQuery,
  1426. type: 'web_search_results',
  1427. urls: results.filenames
  1428. });
  1429. messages = messages;
  1430. } else {
  1431. responseMessage.statusHistory.push({
  1432. done: true,
  1433. error: true,
  1434. action: 'web_search',
1435. description: $i18n.t('No search results found')
  1436. });
  1437. messages = messages;
  1438. }
  1439. };
  1440. const getTags = async () => {
  1441. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1442. return [];
  1443. });
  1444. };
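// Persist the current chat (messages, history, models, params, files) and refresh the sidebar
// chat list, skipping temporary chats.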
  1445. const saveChatHandler = async (_chatId) => {
  1446. if ($chatId == _chatId) {
  1447. if (!$temporaryChatEnabled) {
  1448. chat = await updateChatById(localStorage.token, _chatId, {
  1449. messages: messages,
  1450. history: history,
  1451. models: selectedModels,
  1452. params: params,
  1453. files: chatFiles
  1454. });
  1455. currentChatPage.set(1);
  1456. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1457. }
  1458. }
  1459. };
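// Merge multiple model responses into a single answer via the mixture-of-agents (MoA)
// completion endpoint, streaming the merged content into the message.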
  1460. const mergeResponses = async (messageId, responses, _chatId) => {
  1461. console.log('mergeResponses', messageId, responses);
  1462. const message = history.messages[messageId];
  1463. const mergedResponse = {
  1464. status: true,
  1465. content: ''
  1466. };
  1467. message.merged = mergedResponse;
  1468. messages = messages;
  1469. try {
  1470. const [res, controller] = await generateMoACompletion(
  1471. localStorage.token,
  1472. message.model,
  1473. history.messages[message.parentId].content,
  1474. responses
  1475. );
  1476. if (res && res.ok && res.body) {
  1477. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  1478. for await (const update of textStream) {
  1479. const { value, done, citations, error, usage } = update;
  1480. if (error || done) {
  1481. break;
  1482. }
  1483. if (mergedResponse.content == '' && value == '\n') {
  1484. continue;
  1485. } else {
  1486. mergedResponse.content += value;
  1487. messages = messages;
  1488. }
  1489. if (autoScroll) {
  1490. scrollToBottom();
  1491. }
  1492. }
  1493. await saveChatHandler(_chatId);
  1494. } else {
  1495. console.error(res);
  1496. }
  1497. } catch (e) {
  1498. console.error(e);
  1499. }
  1500. };
  1501. </script>
  1502. <svelte:head>
  1503. <title>
  1504. {title
  1505. ? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
  1506. : `${$WEBUI_NAME}`}
  1507. </title>
  1508. </svelte:head>
  1509. <audio id="audioElement" src="" style="display: none;" />
  1510. <EventConfirmDialog
  1511. bind:show={showEventConfirmation}
  1512. title={eventConfirmationTitle}
  1513. message={eventConfirmationMessage}
  1514. input={eventConfirmationInput}
  1515. inputPlaceholder={eventConfirmationInputPlaceholder}
  1516. inputValue={eventConfirmationInputValue}
  1517. on:confirm={(e) => {
  1518. if (e.detail) {
  1519. eventCallback(e.detail);
  1520. } else {
  1521. eventCallback(true);
  1522. }
  1523. }}
  1524. on:cancel={() => {
  1525. eventCallback(false);
  1526. }}
  1527. />
  1528. {#if !chatIdProp || (loaded && chatIdProp)}
  1529. <div
  1530. class="h-screen max-h-[100dvh] {$showSidebar
  1531. ? 'md:max-w-[calc(100%-260px)]'
  1532. : ''} w-full max-w-full flex flex-col"
  1533. >
  1534. {#if $settings?.backgroundImageUrl ?? null}
  1535. <div
  1536. class="absolute {$showSidebar
  1537. ? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
  1538. : ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
  1539. style="background-image: url({$settings.backgroundImageUrl}) "
  1540. />
  1541. <div
  1542. class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
  1543. />
  1544. {/if}
  1545. <Navbar
  1546. {title}
  1547. bind:selectedModels
  1548. bind:showModelSelector
  1549. shareEnabled={messages.length > 0}
  1550. {chat}
  1551. {initNewChat}
  1552. />
  1553. <PaneGroup direction="horizontal" class="w-full h-full">
  1554. <Pane defaultSize={50} class="h-full flex w-full relative">
  1555. {#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
  1556. <div class="absolute top-3 left-0 right-0 w-full z-20">
  1557. <div class=" flex flex-col gap-1 w-full">
  1558. {#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
  1559. <Banner
  1560. {banner}
  1561. on:dismiss={(e) => {
  1562. const bannerId = e.detail;
  1563. localStorage.setItem(
  1564. 'dismissedBannerIds',
  1565. JSON.stringify(
  1566. [
  1567. bannerId,
  1568. ...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
  1569. ].filter((id) => $banners.find((b) => b.id === id))
  1570. )
  1571. );
  1572. }}
  1573. />
  1574. {/each}
  1575. </div>
  1576. </div>
  1577. {/if}
  1578. <div class="flex flex-col flex-auto z-10 w-full">
  1579. <div
  1580. class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10 scrollbar-hidden"
  1581. id="messages-container"
  1582. bind:this={messagesContainerElement}
  1583. on:scroll={(e) => {
  1584. autoScroll =
  1585. messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
  1586. messagesContainerElement.clientHeight + 5;
  1587. }}
  1588. >
  1589. <div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
  1590. <Messages
  1591. chatId={$chatId}
  1592. {selectedModels}
  1593. {processing}
  1594. bind:history
  1595. bind:messages
  1596. bind:autoScroll
  1597. bind:prompt
  1598. bottomPadding={files.length > 0}
  1599. {sendPrompt}
  1600. {continueGeneration}
  1601. {regenerateResponse}
  1602. {mergeResponses}
  1603. {chatActionHandler}
  1604. {showMessage}
  1605. />
  1606. </div>
  1607. </div>
  1608. <div class="">
  1609. <MessageInput
  1610. bind:files
  1611. bind:prompt
  1612. bind:autoScroll
  1613. bind:selectedToolIds
  1614. bind:webSearchEnabled
  1615. bind:atSelectedModel
  1616. availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
  1617. const model = $models.find((m) => m.id === e);
  1618. if (model?.info?.meta?.toolIds ?? false) {
  1619. return [...new Set([...a, ...model.info.meta.toolIds])];
  1620. }
  1621. return a;
  1622. }, [])}
  1623. transparentBackground={$settings?.backgroundImageUrl ?? false}
  1624. {selectedModels}
  1625. {messages}
  1626. {submitPrompt}
  1627. {stopResponse}
  1628. on:call={async () => {
  1629. await showControls.set(true);
  1630. }}
  1631. />
  1632. </div>
  1633. </div>
  1634. </Pane>
  1635. <ChatControls
  1636. models={selectedModelIds.reduce((a, e, i, arr) => {
  1637. const model = $models.find((m) => m.id === e);
  1638. if (model) {
  1639. return [...a, model];
  1640. }
  1641. return a;
  1642. }, [])}
  1643. bind:history
  1644. bind:chatFiles
  1645. bind:params
  1646. bind:files
  1647. bind:pane={controlPane}
  1648. {submitPrompt}
  1649. {stopResponse}
  1650. {showMessage}
  1651. modelId={selectedModelIds?.at(0) ?? null}
  1652. chatId={$chatId}
  1653. {eventTarget}
  1654. />
  1655. </PaneGroup>
  1656. </div>
  1657. {/if}