Chat.svelte

<script lang="ts">
	import { v4 as uuidv4 } from 'uuid';
	import { toast } from 'svelte-sonner';
	import mermaid from 'mermaid';
	import { PaneGroup, Pane, PaneResizer } from 'paneforge';

	import { getContext, onDestroy, onMount, tick } from 'svelte';
	import { goto } from '$app/navigation';
	import { page } from '$app/stores';

	import type { Unsubscriber, Writable } from 'svelte/store';
	import type { i18n as i18nType } from 'i18next';

	import { WEBUI_BASE_URL } from '$lib/constants';
	import {
		chatId,
		chats,
		config,
		type Model,
		models,
		settings,
		showSidebar,
		WEBUI_NAME,
		banners,
		user,
		socket,
		showControls,
		showCallOverlay,
		currentChatPage,
		temporaryChatEnabled,
		mobile,
		showOverview,
		chatTitle
	} from '$lib/stores';
	import {
		convertMessagesToHistory,
		copyToClipboard,
		getMessageContentParts,
		extractSentencesForAudio,
		promptTemplate,
		splitStream
	} from '$lib/utils';

	import { generateChatCompletion } from '$lib/apis/ollama';
	import {
		createNewChat,
		getChatById,
		getChatList,
		getTagsById,
		updateChatById
	} from '$lib/apis/chats';
	import { generateOpenAIChatCompletion } from '$lib/apis/openai';
	import { processWebSearch } from '$lib/apis/retrieval';
	import { createOpenAITextStream } from '$lib/apis/streaming';
	import { queryMemory } from '$lib/apis/memories';
	import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
	import {
		chatCompleted,
		generateTitle,
		generateSearchQuery,
		chatAction,
		generateMoACompletion
	} from '$lib/apis';

	import Banner from '../common/Banner.svelte';
	import MessageInput from '$lib/components/chat/MessageInput.svelte';
	import Messages from '$lib/components/chat/Messages.svelte';
	import Navbar from '$lib/components/layout/Navbar.svelte';
	import ChatControls from './ChatControls.svelte';
	import EventConfirmDialog from '../common/ConfirmDialog.svelte';
	import Placeholder from './Placeholder.svelte';

	const i18n: Writable<i18nType> = getContext('i18n');

	export let chatIdProp = '';

	let loaded = false;
	const eventTarget = new EventTarget();
	let controlPane;

	let stopResponseFlag = false;
	let autoScroll = true;
	let processing = '';
	let messagesContainerElement: HTMLDivElement;

	let showEventConfirmation = false;
	let eventConfirmationTitle = '';
	let eventConfirmationMessage = '';
	let eventConfirmationInput = false;
	let eventConfirmationInputPlaceholder = '';
	let eventConfirmationInputValue = '';
	let eventCallback = null;

	let chatIdUnsubscriber: Unsubscriber | undefined;

	let selectedModels = [''];
	let atSelectedModel: Model | undefined;
	let selectedModelIds = [];
	$: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;

	let selectedToolIds = [];
	let webSearchEnabled = false;

	let chat = null;
	let tags = [];

	let history = {
		messages: {},
		currentId: null
	};

	// Chat Input
	let prompt = '';
	let chatFiles = [];
	let files = [];
	let params = {};

	$: if (chatIdProp) {
		(async () => {
			console.log(chatIdProp);
			if (chatIdProp && (await loadChat())) {
				await tick();
				loaded = true;

				window.setTimeout(() => scrollToBottom(), 0);
				const chatInput = document.getElementById('chat-textarea');
				chatInput?.focus();
			} else {
				await goto('/');
			}
		})();
	}
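	// Jump to a message: follow its newest descendants down to the leaf, make that the
	// current node, scroll the clicked message into view, and persist the chat.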
	const showMessage = async (message) => {
		const _chatId = JSON.parse(JSON.stringify($chatId));
		let _messageId = JSON.parse(JSON.stringify(message.id));

		let messageChildrenIds = history.messages[_messageId].childrenIds;
		while (messageChildrenIds.length !== 0) {
			_messageId = messageChildrenIds.at(-1);
			messageChildrenIds = history.messages[_messageId].childrenIds;
		}

		history.currentId = _messageId;

		await tick();
		await tick();
		await tick();

		const messageElement = document.getElementById(`message-${message.id}`);
		if (messageElement) {
			messageElement.scrollIntoView({ behavior: 'smooth' });
		}

		await tick();
		saveChatHandler(_chatId);
	};
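	// Socket 'chat-events' handler: applies status, citation, message, replace, action,
	// confirmation, and input events from the backend to the matching message in history.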
	const chatEventHandler = async (event, cb) => {
		if (event.chat_id === $chatId) {
			await tick();
			console.log(event);
			let message = history.messages[event.message_id];

			const type = event?.data?.type ?? null;
			const data = event?.data?.data ?? null;

			if (type === 'status') {
				if (message?.statusHistory) {
					message.statusHistory.push(data);
				} else {
					message.statusHistory = [data];
				}
			} else if (type === 'citation') {
				if (message?.citations) {
					message.citations.push(data);
				} else {
					message.citations = [data];
				}
			} else if (type === 'message') {
				message.content += data.content;
			} else if (type === 'replace') {
				message.content = data.content;
			} else if (type === 'action') {
				if (data.action === 'continue') {
					const continueButton = document.getElementById('continue-response-button');
					if (continueButton) {
						continueButton.click();
					}
				}
			} else if (type === 'confirmation') {
				eventCallback = cb;

				eventConfirmationInput = false;
				showEventConfirmation = true;

				eventConfirmationTitle = data.title;
				eventConfirmationMessage = data.message;
			} else if (type === 'input') {
				eventCallback = cb;

				eventConfirmationInput = true;
				showEventConfirmation = true;

				eventConfirmationTitle = data.title;
				eventConfirmationMessage = data.message;
				eventConfirmationInputPlaceholder = data.placeholder;
				eventConfirmationInputValue = data?.value ?? '';
			} else {
				console.log('Unknown message type', data);
			}

			history.messages[event.message_id] = message;
		}
	};

	const onMessageHandler = async (event: {
		origin: string;
		data: { type: string; text: string };
	}) => {
		if (event.origin !== window.origin) {
			return;
		}

		// Replace with your iframe's origin
		if (event.data.type === 'input:prompt') {
			console.debug(event.data.text);

			const inputElement = document.getElementById('chat-textarea');
			if (inputElement) {
				prompt = event.data.text;
				inputElement.focus();
			}
		}

		if (event.data.type === 'action:submit') {
			console.debug(event.data.text);

			if (prompt !== '') {
				await tick();
				submitPrompt(prompt);
			}
		}

		if (event.data.type === 'input:prompt:submit') {
			console.debug(event.data.text);

			if (prompt !== '') {
				await tick();
				submitPrompt(event.data.text);
			}
		}
	};

	onMount(async () => {
		window.addEventListener('message', onMessageHandler);
		$socket?.on('chat-events', chatEventHandler);

		if (!$chatId) {
			chatIdUnsubscriber = chatId.subscribe(async (value) => {
				if (!value) {
					await initNewChat();
				}
			});
		} else {
			if ($temporaryChatEnabled) {
				await goto('/');
			}
		}

		showControls.subscribe(async (value) => {
			if (controlPane && !$mobile) {
				try {
					if (value) {
						const currentSize = controlPane.getSize();
						if (currentSize === 0) {
							const size = parseInt(localStorage?.chatControlsSize ?? '30');
							controlPane.resize(size ? size : 30);
						}
					} else {
						controlPane.resize(0);
					}
				} catch (e) {
					// ignore
				}
			}

			if (!value) {
				showCallOverlay.set(false);
				showOverview.set(false);
			}
		});

		const chatInput = document.getElementById('chat-textarea');
		chatInput?.focus();

		chats.subscribe(() => {});
	});

	onDestroy(() => {
		chatIdUnsubscriber?.();
		window.removeEventListener('message', onMessageHandler);
		$socket?.off('chat-events');
	});

	//////////////////////////
	// Web functions
	//////////////////////////

	const initNewChat = async () => {
		if ($page.url.pathname.includes('/c/')) {
			window.history.replaceState(history.state, '', `/`);
		}

		autoScroll = true;

		await chatId.set('');
		await chatTitle.set('');

		history = {
			messages: {},
			currentId: null
		};

		chatFiles = [];
		params = {};

		if ($page.url.searchParams.get('models')) {
			selectedModels = $page.url.searchParams.get('models')?.split(',');
		} else if ($page.url.searchParams.get('model')) {
			selectedModels = $page.url.searchParams.get('model')?.split(',');
		} else if ($settings?.models) {
			selectedModels = $settings?.models;
		} else if ($config?.default_models) {
			console.log($config?.default_models.split(',') ?? '');
			selectedModels = $config?.default_models.split(',');
		} else {
			selectedModels = [''];
		}

		if ($page.url.searchParams.get('web-search') === 'true') {
			webSearchEnabled = true;
		}

		if ($page.url.searchParams.get('q')) {
			prompt = $page.url.searchParams.get('q') ?? '';
			selectedToolIds = ($page.url.searchParams.get('tool_ids') ?? '')
				.split(',')
				.map((id) => id.trim())
				.filter((id) => id);

			if (prompt) {
				await tick();
				submitPrompt(prompt);
			}
		}

		if ($page.url.searchParams.get('call') === 'true') {
			showCallOverlay.set(true);
			showControls.set(true);
		}

		selectedModels = selectedModels.map((modelId) =>
			$models.map((m) => m.id).includes(modelId) ? modelId : ''
		);

		const userSettings = await getUserSettings(localStorage.token);

		if (userSettings) {
			settings.set(userSettings.ui);
		} else {
			settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
		}

		const chatInput = document.getElementById('chat-textarea');
		setTimeout(() => chatInput?.focus(), 0);
	};
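	// Load an existing chat by chatIdProp and restore its models, history, params, and
	// files into component state; returns true on success.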
	const loadChat = async () => {
		chatId.set(chatIdProp);
		chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
			await goto('/');
			return null;
		});

		if (chat) {
			tags = await getTags();

			const chatContent = chat.chat;

			if (chatContent) {
				console.log(chatContent);

				selectedModels =
					(chatContent?.models ?? undefined) !== undefined
						? chatContent.models
						: [chatContent.models ?? ''];
				history =
					(chatContent?.history ?? undefined) !== undefined
						? chatContent.history
						: convertMessagesToHistory(chatContent.messages);

				chatTitle.set(chatContent.title);

				const userSettings = await getUserSettings(localStorage.token);

				if (userSettings) {
					await settings.set(userSettings.ui);
				} else {
					await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
				}

				params = chatContent?.params ?? {};
				chatFiles = chatContent?.files ?? [];

				autoScroll = true;
				await tick();

				if (history.currentId) {
					history.messages[history.currentId].done = true;
				}
				await tick();

				return true;
			} else {
				return null;
			}
		}
	};

	const scrollToBottom = async () => {
		await tick();
		if (messagesContainerElement) {
			messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
		}
	};
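	// Walk the parent chain from the given message id back to the root and return the
	// messages as a flat, ordered list.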
	const createMessagesList = (responseMessageId) => {
		if (responseMessageId === null) {
			return [];
		}

		const message = history.messages[responseMessageId];
		if (message?.parentId) {
			return [...createMessagesList(message.parentId), message];
		} else {
			return [message];
		}
	};

	const chatCompletedHandler = async (chatId, modelId, responseMessageId, messages) => {
		await mermaid.run({
			querySelector: '.mermaid'
		});

		const res = await chatCompleted(localStorage.token, {
			model: modelId,
			messages: messages.map((m) => ({
				id: m.id,
				role: m.role,
				content: m.content,
				info: m.info ? m.info : undefined,
				timestamp: m.timestamp
			})),
			chat_id: chatId,
			session_id: $socket?.id,
			id: responseMessageId
		}).catch((error) => {
			toast.error(error);
			messages.at(-1).error = { content: error };

			return null;
		});

		if (res !== null) {
			// Update chat history with the new messages
			for (const message of res.messages) {
				history.messages[message.id] = {
					...history.messages[message.id],
					...(history.messages[message.id].content !== message.content
						? { originalContent: history.messages[message.id].content }
						: {}),
					...message
				};
			}
		}

		await tick();

		if ($chatId == chatId) {
			if (!$temporaryChatEnabled) {
				chat = await updateChatById(localStorage.token, chatId, {
					models: selectedModels,
					messages: messages,
					history: history,
					params: params,
					files: chatFiles
				});

				currentChatPage.set(1);
				await chats.set(await getChatList(localStorage.token, $currentChatPage));
			}
		}
	};

	const chatActionHandler = async (chatId, actionId, modelId, responseMessageId, event = null) => {
		const messages = createMessagesList(responseMessageId);

		const res = await chatAction(localStorage.token, actionId, {
			model: modelId,
			messages: messages.map((m) => ({
				id: m.id,
				role: m.role,
				content: m.content,
				info: m.info ? m.info : undefined,
				timestamp: m.timestamp
			})),
			...(event ? { event: event } : {}),
			chat_id: chatId,
			session_id: $socket?.id,
			id: responseMessageId
		}).catch((error) => {
			toast.error(error);
			messages.at(-1).error = { content: error };
			return null;
		});

		if (res !== null) {
			// Update chat history with the new messages
			for (const message of res.messages) {
				history.messages[message.id] = {
					...history.messages[message.id],
					...(history.messages[message.id].content !== message.content
						? { originalContent: history.messages[message.id].content }
						: {}),
					...message
				};
			}
		}

		if ($chatId == chatId) {
			if (!$temporaryChatEnabled) {
				chat = await updateChatById(localStorage.token, chatId, {
					models: selectedModels,
					messages: messages,
					history: history,
					params: params,
					files: chatFiles
				});

				currentChatPage.set(1);
				await chats.set(await getChatList(localStorage.token, $currentChatPage));
			}
		}
	};

	const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
		return setInterval(() => {
			$socket?.emit('usage', {
				action: 'chat',
				model: modelId,
				chat_id: chatId
			});
		}, 1000);
	};

	const createMessagePair = async (userPrompt) => {
		prompt = '';
		if (selectedModels.length === 0) {
			toast.error($i18n.t('Model not selected'));
		} else {
			const modelId = selectedModels[0];
			const model = $models.filter((m) => m.id === modelId).at(0);

			const messages = createMessagesList(history.currentId);
			const parentMessage = messages.length !== 0 ? messages.at(-1) : null;

			const userMessageId = uuidv4();
			const responseMessageId = uuidv4();

			const userMessage = {
				id: userMessageId,
				parentId: parentMessage ? parentMessage.id : null,
				childrenIds: [responseMessageId],
				role: 'user',
				content: userPrompt ? userPrompt : `[PROMPT] ${userMessageId}`,
				timestamp: Math.floor(Date.now() / 1000)
			};

			const responseMessage = {
				id: responseMessageId,
				parentId: userMessageId,
				childrenIds: [],
				role: 'assistant',
				content: `[RESPONSE] ${responseMessageId}`,
				done: true,
				model: modelId,
				modelName: model.name ?? model.id,
				modelIdx: 0,
				timestamp: Math.floor(Date.now() / 1000)
			};

			if (parentMessage) {
				parentMessage.childrenIds.push(userMessageId);
				history.messages[parentMessage.id] = parentMessage;
			}
			history.messages[userMessageId] = userMessage;
			history.messages[responseMessageId] = responseMessage;

			history.currentId = responseMessageId;

			await tick();

			if (autoScroll) {
				scrollToBottom();
			}

			if (messages.length === 0) {
				await initChatHandler();
			} else {
				await saveChatHandler($chatId);
			}
		}
	};

	//////////////////////////
	// Chat functions
	//////////////////////////
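	// Validate the current input (model selected, previous response finished, uploads
	// complete, file limit respected), append the user message to history, then hand off
	// to sendPrompt.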
	const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
		let _responses = [];
		console.log('submitPrompt', $chatId);

		const messages = createMessagesList(history.currentId);
		selectedModels = selectedModels.map((modelId) =>
			$models.map((m) => m.id).includes(modelId) ? modelId : ''
		);

		if (selectedModels.includes('')) {
			toast.error($i18n.t('Model not selected'));
		} else if (messages.length != 0 && messages.at(-1).done != true) {
			// Response not done
			console.log('wait');
		} else if (messages.length != 0 && messages.at(-1).error) {
			// Error in response
			toast.error(
				$i18n.t(
					`Oops! There was an error in the previous response. Please try again or contact admin.`
				)
			);
		} else if (
			files.length > 0 &&
			files.filter((file) => file.type !== 'image' && file.status === 'uploading').length > 0
		) {
			// Upload not done
			toast.error(
				$i18n.t(
					`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
				)
			);
		} else if (
			($config?.file?.max_count ?? null) !== null &&
			files.length + chatFiles.length > $config?.file?.max_count
		) {
			console.log(chatFiles.length, files.length);
			toast.error(
				$i18n.t(`You can only chat with a maximum of {{maxCount}} file(s) at a time.`, {
					maxCount: $config?.file?.max_count
				})
			);
		} else {
			// Reset chat input textarea
			const chatTextAreaElement = document.getElementById('chat-textarea');

			if (chatTextAreaElement) {
				chatTextAreaElement.value = '';
				chatTextAreaElement.style.height = '';
			}

			const _files = JSON.parse(JSON.stringify(files));
			chatFiles.push(..._files.filter((item) => ['doc', 'file', 'collection'].includes(item.type)));
			chatFiles = chatFiles.filter(
				// Remove duplicates
				(item, index, array) =>
					array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
			);

			files = [];
			prompt = '';

			// Create user message
			let userMessageId = uuidv4();
			let userMessage = {
				id: userMessageId,
				parentId: messages.length !== 0 ? messages.at(-1).id : null,
				childrenIds: [],
				role: 'user',
				content: userPrompt,
				files: _files.length > 0 ? _files : undefined,
				timestamp: Math.floor(Date.now() / 1000), // Unix epoch
				models: selectedModels
			};

			// Add message to history and Set currentId to messageId
			history.messages[userMessageId] = userMessage;
			history.currentId = userMessageId;

			// Append messageId to childrenIds of parent message
			if (messages.length !== 0) {
				history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
			}

			// Wait until history/message have been updated
			await tick();
			_responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
		}

		return _responses;
	};
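	// Create one assistant placeholder message per selected model, then dispatch the
	// prompt to each model (Ollama or OpenAI-compatible backend) in parallel.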
	const sendPrompt = async (
		prompt: string,
		parentId: string,
		{ modelId = null, modelIdx = null, newChat = false } = {}
	) => {
		// Create new chat if newChat is true and first user message
		if (
			newChat &&
			history.messages[history.currentId].parentId === null &&
			history.messages[history.currentId].role === 'user'
		) {
			await initChatHandler();
		}

		let _responses: string[] = [];

		// If modelId is provided, use it, else use selected model
		let selectedModelIds = modelId
			? [modelId]
			: atSelectedModel !== undefined
				? [atSelectedModel.id]
				: selectedModels;

		// Create response messages for each selected model
		const responseMessageIds: Record<PropertyKey, string> = {};
		for (const [_modelIdx, modelId] of selectedModelIds.entries()) {
			const model = $models.filter((m) => m.id === modelId).at(0);

			if (model) {
				let responseMessageId = uuidv4();
				let responseMessage = {
					parentId: parentId,
					id: responseMessageId,
					childrenIds: [],
					role: 'assistant',
					content: '',
					model: model.id,
					modelName: model.name ?? model.id,
					modelIdx: modelIdx ? modelIdx : _modelIdx,
					userContext: null,
					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
				};

				// Add message to history and Set currentId to messageId
				history.messages[responseMessageId] = responseMessage;
				history.currentId = responseMessageId;

				// Append messageId to childrenIds of parent message
				if (parentId !== null) {
					history.messages[parentId].childrenIds = [
						...history.messages[parentId].childrenIds,
						responseMessageId
					];
				}

				responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`] = responseMessageId;
			}
		}
		await tick();

		const _chatId = JSON.parse(JSON.stringify($chatId));
		await Promise.all(
			selectedModelIds.map(async (modelId, _modelIdx) => {
				console.log('modelId', modelId);
				const model = $models.filter((m) => m.id === modelId).at(0);

				if (model) {
					const messages = createMessagesList(parentId);
					// If there are image files, check if model is vision capable
					const hasImages = messages.some((message) =>
						message.files?.some((file) => file.type === 'image')
					);

					if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
						toast.error(
							$i18n.t('Model {{modelName}} is not vision capable', {
								modelName: model.name ?? model.id
							})
						);
					}

					let responseMessageId =
						responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`];
					let responseMessage = history.messages[responseMessageId];

					let userContext = null;
					if ($settings?.memory ?? false) {
						if (userContext === null) {
							const res = await queryMemory(localStorage.token, prompt).catch((error) => {
								toast.error(error);
								return null;
							});

							if (res) {
								if (res.documents[0].length > 0) {
									userContext = res.documents[0].reduce((acc, doc, index) => {
										const createdAtTimestamp = res.metadatas[0][index].created_at;
										const createdAtDate = new Date(createdAtTimestamp * 1000)
											.toISOString()
											.split('T')[0];
										return `${acc}${index + 1}. [${createdAtDate}]. ${doc}\n`;
									}, '');
								}

								console.log(userContext);
							}
						}
					}
					responseMessage.userContext = userContext;

					const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);

					scrollToBottom();

					if (webSearchEnabled) {
						await getWebSearchResults(model.id, parentId, responseMessageId);
					}

					let _response = null;
					if (model?.owned_by === 'openai') {
						_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
					} else if (model) {
						_response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
					}
					_responses.push(_response);

					if (chatEventEmitter) clearInterval(chatEventEmitter);
				} else {
					toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
				}
			})
		);

		currentChatPage.set(1);
		chats.set(await getChatList(localStorage.token, $currentChatPage));

		return _responses;
	};
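	// Stream a completion from the Ollama backend into the response message, handling
	// images, knowledge/file attachments, citations, and TTS/notification side effects.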
	const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
		let _response: string | null = null;

		const responseMessage = history.messages[responseMessageId];
		const userMessage = history.messages[responseMessage.parentId];

		// Wait until history/message have been updated
		await tick();

		// Scroll down
		scrollToBottom();

		const messagesBody = [
			params?.system || $settings.system || (responseMessage?.userContext ?? null)
				? {
						role: 'system',
						content: `${promptTemplate(
							params?.system ?? $settings?.system ?? '',
							$user.name,
							$settings?.userLocation
								? await getAndUpdateUserLocation(localStorage.token)
								: undefined
						)}${
							(responseMessage?.userContext ?? null)
								? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
								: ''
						}`
					}
				: undefined,
			...createMessagesList(responseMessageId)
		]
			.filter((message) => message?.content?.trim())
			.map((message) => {
				// Prepare the base message object
				const baseMessage = {
					role: message.role,
					content: message.content
				};

				// Extract and format image URLs if any exist
				const imageUrls = message.files
					?.filter((file) => file.type === 'image')
					.map((file) => file.url.slice(file.url.indexOf(',') + 1));

				// Add images array only if it contains elements
				if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
					baseMessage.images = imageUrls;
				}
				return baseMessage;
			});

		let lastImageIndex = -1;

		// Find the index of the last object with images
		messagesBody.forEach((item, index) => {
			if (item.images) {
				lastImageIndex = index;
			}
		});

		// Remove images from all but the last one
		messagesBody.forEach((item, index) => {
			if (index !== lastImageIndex) {
				delete item.images;
			}
		});

		let files = JSON.parse(JSON.stringify(chatFiles));
		if (model?.info?.meta?.knowledge ?? false) {
			// Only initialize and add status if knowledge exists
			responseMessage.statusHistory = [
				{
					action: 'knowledge_search',
					description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
						searchQuery: userMessage.content
					}),
					done: false
				}
			];

			files.push(
				...model.info.meta.knowledge.map((item) => {
					if (item?.collection_name) {
						return {
							id: item.collection_name,
							name: item.name,
							legacy: true
						};
					} else if (item?.collection_names) {
						return {
							name: item.name,
							type: 'collection',
							collection_names: item.collection_names,
							legacy: true
						};
					} else {
						return item;
					}
				})
			);

			history.messages[responseMessageId] = responseMessage;
		}

		files.push(
			...(userMessage?.files ?? []).filter((item) =>
				['doc', 'file', 'collection'].includes(item.type)
			),
			...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
		);

		// Remove duplicates
		files = files.filter(
			(item, index, array) =>
				array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
		);

		scrollToBottom();

		eventTarget.dispatchEvent(
			new CustomEvent('chat:start', {
				detail: {
					id: responseMessageId
				}
			})
		);

		await tick();

		const stream =
			model?.info?.params?.stream_response ??
			$settings?.params?.stream_response ??
			params?.stream_response ??
			true;
		const [res, controller] = await generateChatCompletion(localStorage.token, {
			stream: stream,
			model: model.id,
			messages: messagesBody,
			options: {
				...{ ...($settings?.params ?? {}), ...params },
				stop:
					(params?.stop ?? $settings?.params?.stop ?? undefined)
						? (params?.stop.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
								(str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
							)
						: undefined,
				num_predict: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
				repeat_penalty:
					params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined
			},
			format: $settings.requestFormat ?? undefined,
			keep_alive: $settings.keepAlive ?? undefined,
			tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
			files: files.length > 0 ? files : undefined,
			session_id: $socket?.id,
			chat_id: $chatId,
			id: responseMessageId
		});

		if (res && res.ok) {
			if (!stream) {
				const response = await res.json();
				console.log(response);

				responseMessage.content = response.message.content;
				responseMessage.info = {
					eval_count: response.eval_count,
					eval_duration: response.eval_duration,
					load_duration: response.load_duration,
					prompt_eval_count: response.prompt_eval_count,
					prompt_eval_duration: response.prompt_eval_duration,
					total_duration: response.total_duration
				};
				responseMessage.done = true;
			} else {
				console.log('controller', controller);

				const reader = res.body
					.pipeThrough(new TextDecoderStream())
					.pipeThrough(splitStream('\n'))
					.getReader();

				while (true) {
					const { value, done } = await reader.read();
					if (done || stopResponseFlag || _chatId !== $chatId) {
						responseMessage.done = true;
						history.messages[responseMessageId] = responseMessage;

						if (stopResponseFlag) {
							controller.abort('User: Stop Response');
						}

						_response = responseMessage.content;
						break;
					}

					try {
						let lines = value.split('\n');

						for (const line of lines) {
							if (line !== '') {
								console.log(line);
								let data = JSON.parse(line);

								if ('citations' in data) {
									responseMessage.citations = data.citations;
									// Only remove status if it was initially set
									if (model?.info?.meta?.knowledge ?? false) {
										responseMessage.statusHistory = responseMessage.statusHistory.filter(
											(status) => status.action !== 'knowledge_search'
										);
									}
									continue;
								}

								if ('detail' in data) {
									throw data;
								}

								if (data.done == false) {
									if (responseMessage.content == '' && data.message.content == '\n') {
										continue;
									} else {
										responseMessage.content += data.message.content;

										if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
											navigator.vibrate(5);
										}

										const messageContentParts = getMessageContentParts(
											responseMessage.content,
											$config?.audio?.tts?.split_on ?? 'punctuation'
										);
										messageContentParts.pop();

										// dispatch only last sentence and make sure it hasn't been dispatched before
										if (
											messageContentParts.length > 0 &&
											messageContentParts[messageContentParts.length - 1] !==
												responseMessage.lastSentence
										) {
											responseMessage.lastSentence =
												messageContentParts[messageContentParts.length - 1];
											eventTarget.dispatchEvent(
												new CustomEvent('chat', {
													detail: {
														id: responseMessageId,
														content: messageContentParts[messageContentParts.length - 1]
													}
												})
											);
										}

										history.messages[responseMessageId] = responseMessage;
									}
								} else {
									responseMessage.done = true;

									if (responseMessage.content == '') {
										responseMessage.error = {
											code: 400,
											content: `Oops! No text generated from Ollama, Please try again.`
										};
									}

									responseMessage.context = data.context ?? null;
									responseMessage.info = {
										total_duration: data.total_duration,
										load_duration: data.load_duration,
										sample_count: data.sample_count,
										sample_duration: data.sample_duration,
										prompt_eval_count: data.prompt_eval_count,
										prompt_eval_duration: data.prompt_eval_duration,
										eval_count: data.eval_count,
										eval_duration: data.eval_duration
									};

									history.messages[responseMessageId] = responseMessage;

									if ($settings.notificationEnabled && !document.hasFocus()) {
										const notification = new Notification(`${model.id}`, {
											body: responseMessage.content,
											icon: `${WEBUI_BASE_URL}/static/favicon.png`
										});
									}

									if ($settings?.responseAutoCopy ?? false) {
										copyToClipboard(responseMessage.content);
									}

									if ($settings.responseAutoPlayback && !$showCallOverlay) {
										await tick();
										document.getElementById(`speak-button-${responseMessage.id}`)?.click();
									}
								}
							}
						}
					} catch (error) {
						console.log(error);
						if ('detail' in error) {
							toast.error(error.detail);
						}
						break;
					}

					if (autoScroll) {
						scrollToBottom();
					}
				}
			}
		} else {
			if (res !== null) {
				const error = await res.json();
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
					responseMessage.error = { content: error.detail };
				} else {
					toast.error(error.error);
					responseMessage.error = { content: error.error };
				}
			} else {
				toast.error(
					$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
				);
				responseMessage.error = {
					content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
						provider: 'Ollama'
					})
				};
			}
			responseMessage.done = true;

			if (responseMessage.statusHistory) {
				responseMessage.statusHistory = responseMessage.statusHistory.filter(
					(status) => status.action !== 'knowledge_search'
				);
			}
		}
		await saveChatHandler(_chatId);

		history.messages[responseMessageId] = responseMessage;

		await chatCompletedHandler(
			_chatId,
			model.id,
			responseMessageId,
			createMessagesList(responseMessageId)
		);

		stopResponseFlag = false;
		await tick();

		let lastMessageContentPart =
			getMessageContentParts(
				responseMessage.content,
				$config?.audio?.tts?.split_on ?? 'punctuation'
			)?.at(-1) ?? '';
		if (lastMessageContentPart) {
			eventTarget.dispatchEvent(
				new CustomEvent('chat', {
					detail: { id: responseMessageId, content: lastMessageContentPart }
				})
			);
		}

		eventTarget.dispatchEvent(
			new CustomEvent('chat:finish', {
				detail: {
					id: responseMessageId,
					content: responseMessage.content
				}
			})
		);

		if (autoScroll) {
			scrollToBottom();
		}

		const messages = createMessagesList(responseMessageId);
		if (messages.length == 2 && messages.at(-1).content !== '' && selectedModels[0] === model.id) {
			window.history.replaceState(history.state, '', `/c/${_chatId}`);
			const title = await generateChatTitle(userPrompt);
			await setChatTitle(_chatId, title);
		}

		return _response;
	};
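	// Stream a completion from an OpenAI-compatible backend into the response message;
	// mirrors sendPromptOllama but builds the OpenAI chat/completions payload.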
	const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
		let _response = null;

		const responseMessage = history.messages[responseMessageId];
		const userMessage = history.messages[responseMessage.parentId];

		let files = JSON.parse(JSON.stringify(chatFiles));
		if (model?.info?.meta?.knowledge ?? false) {
			// Only initialize and add status if knowledge exists
			responseMessage.statusHistory = [
				{
					action: 'knowledge_search',
					description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
						searchQuery: userMessage.content
					}),
					done: false
				}
			];

			files.push(
				...model.info.meta.knowledge.map((item) => {
					if (item?.collection_name) {
						return {
							id: item.collection_name,
							name: item.name,
							legacy: true
						};
					} else if (item?.collection_names) {
						return {
							name: item.name,
							type: 'collection',
							collection_names: item.collection_names,
							legacy: true
						};
					} else {
						return item;
					}
				})
			);

			history.messages[responseMessageId] = responseMessage;
		}

		files.push(
			...(userMessage?.files ?? []).filter((item) =>
				['doc', 'file', 'collection'].includes(item.type)
			),
			...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
		);
		// Remove duplicates
		files = files.filter(
			(item, index, array) =>
				array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
		);

		scrollToBottom();

		eventTarget.dispatchEvent(
			new CustomEvent('chat:start', {
				detail: {
					id: responseMessageId
				}
			})
		);
		await tick();

		try {
			const stream =
				model?.info?.params?.stream_response ??
				$settings?.params?.stream_response ??
				params?.stream_response ??
				true;
			const [res, controller] = await generateOpenAIChatCompletion(
				localStorage.token,
				{
					stream: stream,
					model: model.id,
					...(stream && (model.info?.meta?.capabilities?.usage ?? false)
						? {
								stream_options: {
									include_usage: true
								}
							}
						: {}),
					messages: [
						params?.system || $settings.system || (responseMessage?.userContext ?? null)
							? {
									role: 'system',
									content: `${promptTemplate(
										params?.system ?? $settings?.system ?? '',
										$user.name,
										$settings?.userLocation
											? await getAndUpdateUserLocation(localStorage.token)
											: undefined
									)}${
										(responseMessage?.userContext ?? null)
											? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
											: ''
									}`
								}
							: undefined,
						...createMessagesList(responseMessageId)
					]
						.filter((message) => message?.content?.trim())
						.map((message, idx, arr) => ({
							role: message.role,
							...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
							message.role === 'user'
								? {
										content: [
											{
												type: 'text',
												text:
													arr.length - 1 !== idx
														? message.content
														: (message?.raContent ?? message.content)
											},
											...message.files
												.filter((file) => file.type === 'image')
												.map((file) => ({
													type: 'image_url',
													image_url: {
														url: file.url
													}
												}))
										]
									}
								: {
										content:
											arr.length - 1 !== idx
												? message.content
												: (message?.raContent ?? message.content)
									})
						})),
					seed: params?.seed ?? $settings?.params?.seed ?? undefined,
					stop:
						(params?.stop ?? $settings?.params?.stop ?? undefined)
							? (params?.stop.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
									(str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
								)
							: undefined,
					temperature: params?.temperature ?? $settings?.params?.temperature ?? undefined,
					top_p: params?.top_p ?? $settings?.params?.top_p ?? undefined,
					frequency_penalty:
						params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined,
					max_tokens: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
					tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
					files: files.length > 0 ? files : undefined,
					session_id: $socket?.id,
					chat_id: $chatId,
					id: responseMessageId
				},
				`${WEBUI_BASE_URL}/api`
			);

			// Wait until history/message have been updated
			await tick();

			scrollToBottom();

			if (res && res.ok && res.body) {
				if (!stream) {
					const response = await res.json();
					console.log(response);

					responseMessage.content = response.choices[0].message.content;
					responseMessage.info = { ...response.usage, openai: true };
					responseMessage.done = true;
				} else {
					const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);

					for await (const update of textStream) {
						const { value, done, citations, error, usage } = update;
						if (error) {
							await handleOpenAIError(error, null, model, responseMessage);
							break;
						}
						if (done || stopResponseFlag || _chatId !== $chatId) {
							responseMessage.done = true;
							history.messages[responseMessageId] = responseMessage;

							if (stopResponseFlag) {
								controller.abort('User: Stop Response');
							}

							_response = responseMessage.content;
							break;
						}

						if (usage) {
							responseMessage.info = { ...usage, openai: true, usage };
						}

						if (citations) {
							responseMessage.citations = citations;
							// Only remove status if it was initially set
							if (model?.info?.meta?.knowledge ?? false) {
								responseMessage.statusHistory = responseMessage.statusHistory.filter(
									(status) => status.action !== 'knowledge_search'
								);
							}
							continue;
						}

						if (responseMessage.content == '' && value == '\n') {
							continue;
						} else {
							responseMessage.content += value;

							if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
								navigator.vibrate(5);
							}

							const messageContentParts = getMessageContentParts(
								responseMessage.content,
								$config?.audio?.tts?.split_on ?? 'punctuation'
							);
							messageContentParts.pop();

							// dispatch only last sentence and make sure it hasn't been dispatched before
							if (
								messageContentParts.length > 0 &&
								messageContentParts[messageContentParts.length - 1] !== responseMessage.lastSentence
							) {
								responseMessage.lastSentence = messageContentParts[messageContentParts.length - 1];
								eventTarget.dispatchEvent(
									new CustomEvent('chat', {
										detail: {
											id: responseMessageId,
											content: messageContentParts[messageContentParts.length - 1]
										}
									})
								);
							}

							history.messages[responseMessageId] = responseMessage;
						}

						if (autoScroll) {
							scrollToBottom();
						}
					}
				}

				if ($settings.notificationEnabled && !document.hasFocus()) {
					const notification = new Notification(`${model.id}`, {
						body: responseMessage.content,
						icon: `${WEBUI_BASE_URL}/static/favicon.png`
					});
				}

				if ($settings.responseAutoCopy) {
					copyToClipboard(responseMessage.content);
				}

				if ($settings.responseAutoPlayback && !$showCallOverlay) {
					await tick();
					document.getElementById(`speak-button-${responseMessage.id}`)?.click();
				}
			} else {
				await handleOpenAIError(null, res, model, responseMessage);
			}
		} catch (error) {
			await handleOpenAIError(error, null, model, responseMessage);
		}

		await saveChatHandler(_chatId);

		history.messages[responseMessageId] = responseMessage;

		await chatCompletedHandler(
			_chatId,
			model.id,
			responseMessageId,
			createMessagesList(responseMessageId)
		);

		stopResponseFlag = false;
		await tick();

		let lastMessageContentPart =
			getMessageContentParts(
				responseMessage.content,
				$config?.audio?.tts?.split_on ?? 'punctuation'
			)?.at(-1) ?? '';
		if (lastMessageContentPart) {
			eventTarget.dispatchEvent(
				new CustomEvent('chat', {
					detail: { id: responseMessageId, content: lastMessageContentPart }
				})
			);
		}

		eventTarget.dispatchEvent(
			new CustomEvent('chat:finish', {
				detail: {
					id: responseMessageId,
					content: responseMessage.content
				}
			})
		);

		if (autoScroll) {
			scrollToBottom();
		}

		const messages = createMessagesList(responseMessageId);
		if (messages.length == 2 && selectedModels[0] === model.id) {
			window.history.replaceState(history.state, '', `/c/${_chatId}`);
			const title = await generateChatTitle(userPrompt);
			await setChatTitle(_chatId, title);
		}

		return _response;
	};
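	// Normalize an error (thrown, or parsed from the response body) into a toast and an
	// error entry on the response message.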
  1345. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  1346. let errorMessage = '';
  1347. let innerError;
  1348. if (error) {
  1349. innerError = error;
  1350. } else if (res !== null) {
  1351. innerError = await res.json();
  1352. }
  1353. console.error(innerError);
  1354. if ('detail' in innerError) {
  1355. toast.error(innerError.detail);
  1356. errorMessage = innerError.detail;
  1357. } else if ('error' in innerError) {
  1358. if ('message' in innerError.error) {
  1359. toast.error(innerError.error.message);
  1360. errorMessage = innerError.error.message;
  1361. } else {
  1362. toast.error(innerError.error);
  1363. errorMessage = innerError.error;
  1364. }
  1365. } else if ('message' in innerError) {
  1366. toast.error(innerError.message);
  1367. errorMessage = innerError.message;
  1368. }
  1369. responseMessage.error = {
  1370. content:
  1371. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  1372. provider: model.name ?? model.id
  1373. }) +
  1374. '\n' +
  1375. errorMessage
  1376. };
  1377. responseMessage.done = true;
  1378. if (responseMessage.statusHistory) {
  1379. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  1380. (status) => status.action !== 'knowledge_search'
  1381. );
  1382. }
  1383. history.messages[responseMessage.id] = responseMessage;
  1384. };
  1385. const stopResponse = () => {
  1386. stopResponseFlag = true;
  1387. console.log('stopResponse');
  1388. };
  1389. const regenerateResponse = async (message) => {
  1390. console.log('regenerateResponse');
  1391. if (history.currentId) {
  1392. let userMessage = history.messages[message.parentId];
  1393. let userPrompt = userMessage.content;
  1394. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  1395. // If user message has only one model selected, sendPrompt automatically selects it for regeneration
  1396. await sendPrompt(userPrompt, userMessage.id);
  1397. } else {
  1398. // If there are multiple models selected, use the model of the response message for regeneration
  1399. // e.g. many model chat
  1400. await sendPrompt(userPrompt, userMessage.id, {
  1401. modelId: message.model,
  1402. modelIdx: message.modelIdx
  1403. });
  1404. }
  1405. }
  1406. };
	const continueResponse = async () => {
		console.log('continueResponse');
		const _chatId = JSON.parse(JSON.stringify($chatId));

		if (history.currentId && history.messages[history.currentId].done == true) {
			const responseMessage = history.messages[history.currentId];
			responseMessage.done = false;
			await tick();

			const model = $models.filter((m) => m.id === responseMessage.model).at(0);

			if (model) {
				if (model?.owned_by === 'openai') {
					await sendPromptOpenAI(
						model,
						history.messages[responseMessage.parentId].content,
						responseMessage.id,
						_chatId
					);
				} else {
					await sendPromptOllama(
						model,
						history.messages[responseMessage.parentId].content,
						responseMessage.id,
						_chatId
					);
				}
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId: responseMessage.model }));
			}
		}
	};
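
	// Streams a Mixture-of-Agents completion that merges the per-model responses into a single
	// `merged` message, then persists the chat.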
	const mergeResponses = async (messageId, responses, _chatId) => {
		console.log('mergeResponses', messageId, responses);
		const message = history.messages[messageId];

		const mergedResponse = {
			status: true,
			content: ''
		};
		message.merged = mergedResponse;
		history.messages[messageId] = message;

		try {
			const [res, controller] = await generateMoACompletion(
				localStorage.token,
				message.model,
				history.messages[message.parentId].content,
				responses
			);

			if (res && res.ok && res.body) {
				const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);

				for await (const update of textStream) {
					const { value, done, citations, error, usage } = update;

					if (error || done) {
						break;
					}

					if (mergedResponse.content == '' && value == '\n') {
						continue;
					} else {
						mergedResponse.content += value;
						history.messages[messageId] = message;
					}

					if (autoScroll) {
						scrollToBottom();
					}
				}

				await saveChatHandler(_chatId);
			} else {
				console.error(res);
			}
		} catch (e) {
			console.error(e);
		}
	};
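
	// Generates a chat title from the first user prompt; falls back to the prompt itself when
	// auto-titling is disabled, or to 'New Chat' when title generation fails.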
	const generateChatTitle = async (userPrompt) => {
		if ($settings?.title?.auto ?? true) {
			const title = await generateTitle(
				localStorage.token,
				selectedModels[0],
				userPrompt,
				$chatId
			).catch((error) => {
				console.error(error);
				return 'New Chat';
			});

			return title;
		} else {
			return `${userPrompt}`;
		}
	};
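
	// Applies the title to the current chat store and, for persistent chats, saves it and
	// refreshes the chat list.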
	const setChatTitle = async (_chatId, title) => {
		if (_chatId === $chatId) {
			chatTitle.set(title);
		}

		if (!$temporaryChatEnabled) {
			chat = await updateChatById(localStorage.token, _chatId, { title: title });

			currentChatPage.set(1);
			await chats.set(await getChatList(localStorage.token, $currentChatPage));
		}
	};
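
	// Runs the web-search pipeline for a response message: generate a search query, execute the
	// search, and attach the results and status updates to the message.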
	const getWebSearchResults = async (
		model: string,
		parentId: string,
		responseMessageId: string
	) => {
		const responseMessage = history.messages[responseMessageId];
		const userMessage = history.messages[parentId];
		const messages = createMessagesList(history.currentId);

		responseMessage.statusHistory = [
			{
				done: false,
				action: 'web_search',
				description: $i18n.t('Generating search query')
			}
		];
		history.messages[responseMessageId] = responseMessage;

		const prompt = userMessage.content;

		let searchQuery = await generateSearchQuery(
			localStorage.token,
			model,
			messages.filter((message) => message?.content?.trim()),
			prompt
		).catch((error) => {
			console.log(error);
			return prompt;
		});

		if (!searchQuery || searchQuery == '') {
			responseMessage.statusHistory.push({
				done: true,
				error: true,
				action: 'web_search',
				description: $i18n.t('No search query generated')
			});
			history.messages[responseMessageId] = responseMessage;
			return;
		}

		responseMessage.statusHistory.push({
			done: false,
			action: 'web_search',
			description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
		});
		history.messages[responseMessageId] = responseMessage;

		const results = await processWebSearch(localStorage.token, searchQuery).catch((error) => {
			console.log(error);
			toast.error(error);

			return null;
		});

		if (results) {
			responseMessage.statusHistory.push({
				done: true,
				action: 'web_search',
				description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
				query: searchQuery,
				urls: results.filenames
			});

			if ((responseMessage?.files ?? undefined) === undefined) {
				responseMessage.files = [];
			}

			responseMessage.files.push({
				collection_name: results.collection_name,
				name: searchQuery,
				type: 'web_search_results',
				urls: results.filenames
			});
			history.messages[responseMessageId] = responseMessage;
		} else {
			responseMessage.statusHistory.push({
				done: true,
				error: true,
				action: 'web_search',
				description: $i18n.t('No search results found')
			});
			history.messages[responseMessageId] = responseMessage;
		}
	};
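
	// Fetches the tags of the current chat; returns an empty list on failure.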
	const getTags = async () => {
		return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
			return [];
		});
	};
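
	// Creates a new persisted chat (or a 'local' one when temporary chat is enabled) and updates
	// the chat list and chat id stores.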
	const initChatHandler = async () => {
		if (!$temporaryChatEnabled) {
			chat = await createNewChat(localStorage.token, {
				id: $chatId,
				title: $i18n.t('New Chat'),
				models: selectedModels,
				system: $settings.system ?? undefined,
				params: params,
				history: history,
				tags: [],
				timestamp: Date.now()
			});

			currentChatPage.set(1);
			await chats.set(await getChatList(localStorage.token, $currentChatPage));
			await chatId.set(chat.id);
		} else {
			await chatId.set('local');
		}
		await tick();
	};
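
	// Persists the current models, history, params and files for the active chat, skipping
	// temporary chats.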
	const saveChatHandler = async (_chatId) => {
		if ($chatId == _chatId) {
			if (!$temporaryChatEnabled) {
				chat = await updateChatById(localStorage.token, _chatId, {
					models: selectedModels,
					history: history,
					messages: createMessagesList(history.currentId),
					params: params,
					files: chatFiles
				});

				currentChatPage.set(1);
				await chats.set(await getChatList(localStorage.token, $currentChatPage));
			}
		}
	};
</script>

<svelte:head>
	<title>
		{$chatTitle
			? `${$chatTitle.length > 30 ? `${$chatTitle.slice(0, 30)}...` : $chatTitle} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

<audio id="audioElement" src="" style="display: none;" />

<EventConfirmDialog
	bind:show={showEventConfirmation}
	title={eventConfirmationTitle}
	message={eventConfirmationMessage}
	input={eventConfirmationInput}
	inputPlaceholder={eventConfirmationInputPlaceholder}
	inputValue={eventConfirmationInputValue}
	on:confirm={(e) => {
		if (e.detail) {
			eventCallback(e.detail);
		} else {
			eventCallback(true);
		}
	}}
	on:cancel={() => {
		eventCallback(false);
	}}
/>
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="h-screen max-h-[100dvh] {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		{#if $settings?.backgroundImageUrl ?? null}
			<div
				class="absolute {$showSidebar
					? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
					: ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
				style="background-image: url({$settings.backgroundImageUrl}) "
			/>

			<div
				class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
			/>
		{/if}

		<Navbar
			{chat}
			title={$chatTitle}
			bind:selectedModels
			shareEnabled={!!history.currentId}
			{initNewChat}
		/>

		<PaneGroup direction="horizontal" class="w-full h-full">
			<Pane defaultSize={50} class="h-full flex w-full relative">
				{#if $banners.length > 0 && !history.currentId && !$chatId && selectedModels.length <= 1}
					<div class="absolute top-12 left-0 right-0 w-full z-30">
						<div class="flex flex-col gap-1 w-full">
							{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
								<Banner
									{banner}
									on:dismiss={(e) => {
										const bannerId = e.detail;

										localStorage.setItem(
											'dismissedBannerIds',
											JSON.stringify(
												[
													bannerId,
													...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
												].filter((id) => $banners.find((b) => b.id === id))
											)
										);
									}}
								/>
							{/each}
						</div>
					</div>
				{/if}

				<div class="flex flex-col flex-auto z-10 w-full">
					{#if $settings?.landingPageMode === 'chat' || createMessagesList(history.currentId).length > 0}
						<div
							class="pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10 scrollbar-hidden"
							id="messages-container"
							bind:this={messagesContainerElement}
							on:scroll={(e) => {
								autoScroll =
									messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
									messagesContainerElement.clientHeight + 5;
							}}
						>
							<div class="h-full w-full flex flex-col">
								<Messages
									chatId={$chatId}
									bind:history
									bind:autoScroll
									bind:prompt
									{selectedModels}
									{sendPrompt}
									{showMessage}
									{continueResponse}
									{regenerateResponse}
									{mergeResponses}
									{chatActionHandler}
									bottomPadding={files.length > 0}
									on:submit={async (e) => {
										if (e.detail) {
											// New user message
											let userPrompt = e.detail.prompt;
											let userMessageId = uuidv4();

											let userMessage = {
												id: userMessageId,
												parentId: e.detail.parentId,
												childrenIds: [],
												role: 'user',
												content: userPrompt,
												models: selectedModels
											};

											let messageParentId = e.detail.parentId;

											if (messageParentId !== null) {
												history.messages[messageParentId].childrenIds = [
													...history.messages[messageParentId].childrenIds,
													userMessageId
												];
											}

											history.messages[userMessageId] = userMessage;
											history.currentId = userMessageId;

											await tick();
											await sendPrompt(userPrompt, userMessageId);
										}
									}}
								/>
							</div>
						</div>

						<div class="pb-[1.6rem]">
							<MessageInput
								{history}
								{selectedModels}
								bind:files
								bind:prompt
								bind:autoScroll
								bind:selectedToolIds
								bind:webSearchEnabled
								bind:atSelectedModel
								availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
									const model = $models.find((m) => m.id === e);
									if (model?.info?.meta?.toolIds ?? false) {
										return [...new Set([...a, ...model.info.meta.toolIds])];
									}
									return a;
								}, [])}
								transparentBackground={$settings?.backgroundImageUrl ?? false}
								{stopResponse}
								{createMessagePair}
								on:submit={async (e) => {
									if (e.detail) {
										prompt = '';
										await tick();
										submitPrompt(e.detail);
									}
								}}
							/>

							<div
								class="absolute bottom-1.5 text-xs text-gray-500 text-center line-clamp-1 right-0 left-0"
							>
								{$i18n.t('LLMs can make mistakes. Verify important information.')}
							</div>
						</div>
					{:else}
						<Placeholder
							{history}
							{selectedModels}
							bind:files
							bind:prompt
							bind:autoScroll
							bind:selectedToolIds
							bind:webSearchEnabled
							bind:atSelectedModel
							availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
								const model = $models.find((m) => m.id === e);
								if (model?.info?.meta?.toolIds ?? false) {
									return [...new Set([...a, ...model.info.meta.toolIds])];
								}
								return a;
							}, [])}
							transparentBackground={$settings?.backgroundImageUrl ?? false}
							{stopResponse}
							{createMessagePair}
							on:submit={async (e) => {
								if (e.detail) {
									prompt = '';
									await tick();
									submitPrompt(e.detail);
								}
							}}
						/>
					{/if}
				</div>
			</Pane>

			<ChatControls
				bind:history
				bind:chatFiles
				bind:params
				bind:files
				bind:pane={controlPane}
				chatId={$chatId}
				modelId={selectedModelIds?.at(0) ?? null}
				models={selectedModelIds.reduce((a, e, i, arr) => {
					const model = $models.find((m) => m.id === e);
					if (model) {
						return [...a, model];
					}
					return a;
				}, [])}
				{submitPrompt}
				{stopResponse}
				{showMessage}
				{eventTarget}
			/>
		</PaneGroup>
	</div>
{/if}