Chat.svelte

  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { PaneGroup, Pane, PaneResizer } from 'paneforge';
  6. import { getContext, onDestroy, onMount, tick } from 'svelte';
  7. const i18n: Writable<i18nType> = getContext('i18n');
  8. import { goto } from '$app/navigation';
  9. import { page } from '$app/stores';
  10. import type { Unsubscriber, Writable } from 'svelte/store';
  11. import type { i18n as i18nType } from 'i18next';
  12. import { WEBUI_BASE_URL } from '$lib/constants';
  13. import {
  14. chatId,
  15. chats,
  16. config,
  17. type Model,
  18. models,
  19. settings,
  20. showSidebar,
  21. WEBUI_NAME,
  22. banners,
  23. user,
  24. socket,
  25. showControls,
  26. showCallOverlay,
  27. currentChatPage,
  28. temporaryChatEnabled,
  29. mobile,
  30. showOverview,
  31. chatTitle,
  32. showArtifacts
  33. } from '$lib/stores';
  34. import {
  35. convertMessagesToHistory,
  36. copyToClipboard,
  37. getMessageContentParts,
  38. extractSentencesForAudio,
  39. promptTemplate,
  40. splitStream
  41. } from '$lib/utils';
  42. import { generateChatCompletion } from '$lib/apis/ollama';
  43. import {
  44. createNewChat,
  45. getChatById,
  46. getChatList,
  47. getTagsById,
  48. updateChatById
  49. } from '$lib/apis/chats';
  50. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  51. import { processWebSearch } from '$lib/apis/retrieval';
  52. import { createOpenAITextStream } from '$lib/apis/streaming';
  53. import { queryMemory } from '$lib/apis/memories';
  54. import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
  55. import {
  56. chatCompleted,
  57. generateTitle,
  58. generateSearchQuery,
  59. chatAction,
  60. generateMoACompletion
  61. } from '$lib/apis';
  62. import Banner from '../common/Banner.svelte';
  63. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  64. import Messages from '$lib/components/chat/Messages.svelte';
  65. import Navbar from '$lib/components/layout/Navbar.svelte';
  66. import ChatControls from './ChatControls.svelte';
  67. import EventConfirmDialog from '../common/ConfirmDialog.svelte';
  68. import Placeholder from './Placeholder.svelte';
  69. export let chatIdProp = '';
  70. let loaded = false;
  71. const eventTarget = new EventTarget();
  72. let controlPane;
  73. let stopResponseFlag = false;
  74. let autoScroll = true;
  75. let processing = '';
  76. let messagesContainerElement: HTMLDivElement;
  77. let showEventConfirmation = false;
  78. let eventConfirmationTitle = '';
  79. let eventConfirmationMessage = '';
  80. let eventConfirmationInput = false;
  81. let eventConfirmationInputPlaceholder = '';
  82. let eventConfirmationInputValue = '';
  83. let eventCallback = null;
  84. let chatIdUnsubscriber: Unsubscriber | undefined;
  85. let selectedModels = [''];
  86. let atSelectedModel: Model | undefined;
  87. let selectedModelIds = [];
  88. $: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;
  89. let selectedToolIds = [];
  90. let webSearchEnabled = false;
  91. let chat = null;
  92. let tags = [];
  93. let history = {
  94. messages: {},
  95. currentId: null
  96. };
  97. // Chat Input
  98. let prompt = '';
  99. let chatFiles = [];
  100. let files = [];
  101. let params = {};
  102. $: if (chatIdProp) {
  103. (async () => {
  104. console.log(chatIdProp);
  105. if (chatIdProp && (await loadChat())) {
  106. await tick();
  107. loaded = true;
  108. window.setTimeout(() => scrollToBottom(), 0);
  109. const chatInput = document.getElementById('chat-textarea');
  110. chatInput?.focus();
  111. } else {
  112. await goto('/');
  113. }
  114. })();
  115. }
  116. const showMessage = async (message) => {
  117. const _chatId = JSON.parse(JSON.stringify($chatId));
  118. let _messageId = JSON.parse(JSON.stringify(message.id));
  119. let messageChildrenIds = history.messages[_messageId].childrenIds;
  120. while (messageChildrenIds.length !== 0) {
  121. _messageId = messageChildrenIds.at(-1);
  122. messageChildrenIds = history.messages[_messageId].childrenIds;
  123. }
  124. history.currentId = _messageId;
  125. await tick();
  126. await tick();
  127. await tick();
  128. const messageElement = document.getElementById(`message-${message.id}`);
  129. if (messageElement) {
  130. messageElement.scrollIntoView({ behavior: 'smooth' });
  131. }
  132. await tick();
  133. saveChatHandler(_chatId);
  134. };
  135. const chatEventHandler = async (event, cb) => {
  136. if (event.chat_id === $chatId) {
  137. await tick();
  138. console.log(event);
  139. let message = history.messages[event.message_id];
  140. const type = event?.data?.type ?? null;
  141. const data = event?.data?.data ?? null;
  142. if (type === 'status') {
  143. if (message?.statusHistory) {
  144. message.statusHistory.push(data);
  145. } else {
  146. message.statusHistory = [data];
  147. }
  148. } else if (type === 'citation') {
  149. if (message?.citations) {
  150. message.citations.push(data);
  151. } else {
  152. message.citations = [data];
  153. }
  154. } else if (type === 'message') {
  155. message.content += data.content;
  156. } else if (type === 'replace') {
  157. message.content = data.content;
  158. } else if (type === 'action') {
  159. if (data.action === 'continue') {
  160. const continueButton = document.getElementById('continue-response-button');
  161. if (continueButton) {
  162. continueButton.click();
  163. }
  164. }
  165. } else if (type === 'confirmation') {
  166. eventCallback = cb;
  167. eventConfirmationInput = false;
  168. showEventConfirmation = true;
  169. eventConfirmationTitle = data.title;
  170. eventConfirmationMessage = data.message;
  171. } else if (type === 'input') {
  172. eventCallback = cb;
  173. eventConfirmationInput = true;
  174. showEventConfirmation = true;
  175. eventConfirmationTitle = data.title;
  176. eventConfirmationMessage = data.message;
  177. eventConfirmationInputPlaceholder = data.placeholder;
  178. eventConfirmationInputValue = data?.value ?? '';
  179. } else {
  180. console.log('Unknown event type', type, data);
  181. }
  182. history.messages[event.message_id] = message;
  183. }
  184. };
  185. const onMessageHandler = async (event: {
  186. origin: string;
  187. data: { type: string; text: string };
  188. }) => {
  189. if (event.origin !== window.origin) {
  190. return;
  191. }
  192. // The origin check above only accepts same-origin messages; adjust it if the host page embeds this UI from a different origin
  193. if (event.data.type === 'input:prompt') {
  194. console.debug(event.data.text);
  195. const inputElement = document.getElementById('chat-textarea');
  196. if (inputElement) {
  197. prompt = event.data.text;
  198. inputElement.focus();
  199. }
  200. }
  201. if (event.data.type === 'action:submit') {
  202. console.debug(event.data.text);
  203. if (prompt !== '') {
  204. await tick();
  205. submitPrompt(prompt);
  206. }
  207. }
  208. if (event.data.type === 'input:prompt:submit') {
  209. console.debug(event.data.text);
  210. if (prompt !== '') {
  211. await tick();
  212. submitPrompt(event.data.text);
  213. }
  214. }
  215. };
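// Illustrative sketch (not part of the component): because the handler above only accepts
// messages from the same origin, a same-origin host page that embeds this UI in an iframe
// could drive it via postMessage. The `chat-frame` element id is an assumption for this example.
//
//   const chatFrame = document.getElementById('chat-frame') as HTMLIFrameElement | null;
//   chatFrame?.contentWindow?.postMessage({ type: 'input:prompt', text: 'Hello there' }, window.origin);
//   chatFrame?.contentWindow?.postMessage({ type: 'action:submit', text: '' }, window.origin);
//   // or set and submit in one step:
//   chatFrame?.contentWindow?.postMessage({ type: 'input:prompt:submit', text: 'Hello there' }, window.origin);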
  216. onMount(async () => {
  217. window.addEventListener('message', onMessageHandler);
  218. $socket?.on('chat-events', chatEventHandler);
  219. if (!$chatId) {
  220. chatIdUnsubscriber = chatId.subscribe(async (value) => {
  221. if (!value) {
  222. await initNewChat();
  223. }
  224. });
  225. } else {
  226. if ($temporaryChatEnabled) {
  227. await goto('/');
  228. }
  229. }
  230. showControls.subscribe(async (value) => {
  231. if (controlPane && !$mobile) {
  232. try {
  233. if (value) {
  234. const currentSize = controlPane.getSize();
  235. if (currentSize === 0) {
  236. const size = parseInt(localStorage?.chatControlsSize ?? '30');
  237. controlPane.resize(size ? size : 30);
  238. }
  239. } else {
  240. controlPane.resize(0);
  241. }
  242. } catch (e) {
  243. // ignore
  244. }
  245. }
  246. if (!value) {
  247. showCallOverlay.set(false);
  248. showOverview.set(false);
  249. }
  250. });
  251. const chatInput = document.getElementById('chat-textarea');
  252. chatInput?.focus();
  253. chats.subscribe(() => {});
  254. });
  255. onDestroy(() => {
  256. chatIdUnsubscriber?.();
  257. window.removeEventListener('message', onMessageHandler);
  258. $socket?.off('chat-events');
  259. });
  260. //////////////////////////
  261. // Web functions
  262. //////////////////////////
  263. const initNewChat = async () => {
  264. await showControls.set(false);
  265. await showCallOverlay.set(false);
  266. await showOverview.set(false);
  267. await showArtifacts.set(false);
  268. if ($page.url.pathname.includes('/c/')) {
  269. window.history.replaceState(history.state, '', `/`);
  270. }
  271. autoScroll = true;
  272. await chatId.set('');
  273. await chatTitle.set('');
  274. history = {
  275. messages: {},
  276. currentId: null
  277. };
  278. chatFiles = [];
  279. params = {};
  280. if ($page.url.searchParams.get('models')) {
  281. selectedModels = $page.url.searchParams.get('models')?.split(',');
  282. } else if ($page.url.searchParams.get('model')) {
  283. selectedModels = $page.url.searchParams.get('model')?.split(',');
  284. } else if ($settings?.models) {
  285. selectedModels = $settings?.models;
  286. } else if ($config?.default_models) {
  287. console.log($config?.default_models.split(',') ?? '');
  288. selectedModels = $config?.default_models.split(',');
  289. } else {
  290. selectedModels = [''];
  291. }
  292. if ($page.url.searchParams.get('web-search') === 'true') {
  293. webSearchEnabled = true;
  294. }
  295. if ($page.url.searchParams.get('tools')) {
  296. selectedToolIds = $page.url.searchParams
  297. .get('tools')
  298. ?.split(',')
  299. .map((id) => id.trim())
  300. .filter((id) => id);
  301. } else if ($page.url.searchParams.get('tool-ids')) {
  302. selectedToolIds = $page.url.searchParams
  303. .get('tool-ids')
  304. ?.split(',')
  305. .map((id) => id.trim())
  306. .filter((id) => id);
  307. }
  308. if ($page.url.searchParams.get('q')) {
  309. prompt = $page.url.searchParams.get('q') ?? '';
  310. if (prompt) {
  311. await tick();
  312. submitPrompt(prompt);
  313. }
  314. }
  315. if ($page.url.searchParams.get('call') === 'true') {
  316. showCallOverlay.set(true);
  317. showControls.set(true);
  318. }
  319. selectedModels = selectedModels.map((modelId) =>
  320. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  321. );
  322. const userSettings = await getUserSettings(localStorage.token);
  323. if (userSettings) {
  324. settings.set(userSettings.ui);
  325. } else {
  326. settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  327. }
  328. const chatInput = document.getElementById('chat-textarea');
  329. setTimeout(() => chatInput?.focus(), 0);
  330. };
  331. const loadChat = async () => {
  332. chatId.set(chatIdProp);
  333. chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
  334. await goto('/');
  335. return null;
  336. });
  337. if (chat) {
  338. tags = await getTags();
  339. const chatContent = chat.chat;
  340. if (chatContent) {
  341. console.log(chatContent);
  342. selectedModels =
  343. (chatContent?.models ?? undefined) !== undefined
  344. ? chatContent.models
  345. : [chatContent.models ?? ''];
  346. history =
  347. (chatContent?.history ?? undefined) !== undefined
  348. ? chatContent.history
  349. : convertMessagesToHistory(chatContent.messages);
  350. chatTitle.set(chatContent.title);
  351. const userSettings = await getUserSettings(localStorage.token);
  352. if (userSettings) {
  353. await settings.set(userSettings.ui);
  354. } else {
  355. await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  356. }
  357. params = chatContent?.params ?? {};
  358. chatFiles = chatContent?.files ?? [];
  359. autoScroll = true;
  360. await tick();
  361. if (history.currentId) {
  362. history.messages[history.currentId].done = true;
  363. }
  364. await tick();
  365. return true;
  366. } else {
  367. return null;
  368. }
  369. }
  370. };
  371. const scrollToBottom = async () => {
  372. await tick();
  373. if (messagesContainerElement) {
  374. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  375. }
  376. };
  377. const createMessagesList = (responseMessageId) => {
  378. if (responseMessageId === null) {
  379. return [];
  380. }
  381. const message = history.messages[responseMessageId];
  382. if (message?.parentId) {
  383. return [...createMessagesList(message.parentId), message];
  384. } else {
  385. return [message];
  386. }
  387. };
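// Example of the history shape this walks (illustration only, ids are made up):
//   history.messages = {
//     u1: { id: 'u1', parentId: null, childrenIds: ['a1'], role: 'user', content: 'Hi' },
//     a1: { id: 'a1', parentId: 'u1', childrenIds: [], role: 'assistant', content: 'Hello!' }
//   };
//   createMessagesList('a1'); // -> [history.messages.u1, history.messages.a1]
// i.e. it follows parentId links from the given message up to the root and returns that
// branch of the conversation tree in chronological order.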
  388. const chatCompletedHandler = async (chatId, modelId, responseMessageId, messages) => {
  389. await mermaid.run({
  390. querySelector: '.mermaid'
  391. });
  392. const res = await chatCompleted(localStorage.token, {
  393. model: modelId,
  394. messages: messages.map((m) => ({
  395. id: m.id,
  396. role: m.role,
  397. content: m.content,
  398. info: m.info ? m.info : undefined,
  399. timestamp: m.timestamp
  400. })),
  401. chat_id: chatId,
  402. session_id: $socket?.id,
  403. id: responseMessageId
  404. }).catch((error) => {
  405. toast.error(error);
  406. messages.at(-1).error = { content: error };
  407. return null;
  408. });
  409. if (res !== null) {
  410. // Update chat history with the new messages
  411. for (const message of res.messages) {
  412. history.messages[message.id] = {
  413. ...history.messages[message.id],
  414. ...(history.messages[message.id].content !== message.content
  415. ? { originalContent: history.messages[message.id].content }
  416. : {}),
  417. ...message
  418. };
  419. }
  420. }
  421. await tick();
  422. if ($chatId == chatId) {
  423. if (!$temporaryChatEnabled) {
  424. chat = await updateChatById(localStorage.token, chatId, {
  425. models: selectedModels,
  426. messages: messages,
  427. history: history,
  428. params: params,
  429. files: chatFiles
  430. });
  431. currentChatPage.set(1);
  432. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  433. }
  434. }
  435. };
  436. const chatActionHandler = async (chatId, actionId, modelId, responseMessageId, event = null) => {
  437. const messages = createMessagesList(responseMessageId);
  438. const res = await chatAction(localStorage.token, actionId, {
  439. model: modelId,
  440. messages: messages.map((m) => ({
  441. id: m.id,
  442. role: m.role,
  443. content: m.content,
  444. info: m.info ? m.info : undefined,
  445. timestamp: m.timestamp
  446. })),
  447. ...(event ? { event: event } : {}),
  448. chat_id: chatId,
  449. session_id: $socket?.id,
  450. id: responseMessageId
  451. }).catch((error) => {
  452. toast.error(error);
  453. messages.at(-1).error = { content: error };
  454. return null;
  455. });
  456. if (res !== null) {
  457. // Update chat history with the new messages
  458. for (const message of res.messages) {
  459. history.messages[message.id] = {
  460. ...history.messages[message.id],
  461. ...(history.messages[message.id].content !== message.content
  462. ? { originalContent: history.messages[message.id].content }
  463. : {}),
  464. ...message
  465. };
  466. }
  467. }
  468. if ($chatId == chatId) {
  469. if (!$temporaryChatEnabled) {
  470. chat = await updateChatById(localStorage.token, chatId, {
  471. models: selectedModels,
  472. messages: messages,
  473. history: history,
  474. params: params,
  475. files: chatFiles
  476. });
  477. currentChatPage.set(1);
  478. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  479. }
  480. }
  481. };
  482. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  483. return setInterval(() => {
  484. $socket?.emit('usage', {
  485. action: 'chat',
  486. model: modelId,
  487. chat_id: chatId
  488. });
  489. }, 1000);
  490. };
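// Note: the interval id returned above is captured as `chatEventEmitter` in sendPrompt and
// cleared with clearInterval() once the model response settles, so the 'usage' heartbeat is
// only emitted while a completion is in flight.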
  491. const createMessagePair = async (userPrompt) => {
  492. prompt = '';
  493. if (selectedModels.length === 0) {
  494. toast.error($i18n.t('Model not selected'));
  495. } else {
  496. const modelId = selectedModels[0];
  497. const model = $models.filter((m) => m.id === modelId).at(0);
  498. const messages = createMessagesList(history.currentId);
  499. const parentMessage = messages.length !== 0 ? messages.at(-1) : null;
  500. const userMessageId = uuidv4();
  501. const responseMessageId = uuidv4();
  502. const userMessage = {
  503. id: userMessageId,
  504. parentId: parentMessage ? parentMessage.id : null,
  505. childrenIds: [responseMessageId],
  506. role: 'user',
  507. content: userPrompt ? userPrompt : `[PROMPT] ${userMessageId}`,
  508. timestamp: Math.floor(Date.now() / 1000)
  509. };
  510. const responseMessage = {
  511. id: responseMessageId,
  512. parentId: userMessageId,
  513. childrenIds: [],
  514. role: 'assistant',
  515. content: `[RESPONSE] ${responseMessageId}`,
  516. done: true,
  517. model: modelId,
  518. modelName: model.name ?? model.id,
  519. modelIdx: 0,
  520. timestamp: Math.floor(Date.now() / 1000)
  521. };
  522. if (parentMessage) {
  523. parentMessage.childrenIds.push(userMessageId);
  524. history.messages[parentMessage.id] = parentMessage;
  525. }
  526. history.messages[userMessageId] = userMessage;
  527. history.messages[responseMessageId] = responseMessage;
  528. history.currentId = responseMessageId;
  529. await tick();
  530. if (autoScroll) {
  531. scrollToBottom();
  532. }
  533. if (messages.length === 0) {
  534. await initChatHandler();
  535. } else {
  536. await saveChatHandler($chatId);
  537. }
  538. }
  539. };
  540. //////////////////////////
  541. // Chat functions
  542. //////////////////////////
  543. const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
  544. let _responses = [];
  545. console.log('submitPrompt', $chatId);
  546. const messages = createMessagesList(history.currentId);
  547. selectedModels = selectedModels.map((modelId) =>
  548. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  549. );
  550. if (selectedModels.includes('')) {
  551. toast.error($i18n.t('Model not selected'));
  552. } else if (messages.length != 0 && messages.at(-1).done != true) {
  553. // Response not done
  554. console.log('wait');
  555. } else if (messages.length != 0 && messages.at(-1).error) {
  556. // Error in response
  557. toast.error(
  558. $i18n.t(
  559. `Oops! There was an error in the previous response. Please try again or contact admin.`
  560. )
  561. );
  562. } else if (
  563. files.length > 0 &&
  564. files.filter((file) => file.type !== 'image' && file.status === 'uploading').length > 0
  565. ) {
  566. // Upload not done
  567. toast.error(
  568. $i18n.t(
  569. `Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
  570. )
  571. );
  572. } else if (
  573. ($config?.file?.max_count ?? null) !== null &&
  574. files.length + chatFiles.length > $config?.file?.max_count
  575. ) {
  576. console.log(chatFiles.length, files.length);
  577. toast.error(
  578. $i18n.t(`You can only chat with a maximum of {{maxCount}} file(s) at a time.`, {
  579. maxCount: $config?.file?.max_count
  580. })
  581. );
  582. } else {
  583. // Reset chat input textarea
  584. const chatTextAreaElement = document.getElementById('chat-textarea');
  585. if (chatTextAreaElement) {
  586. chatTextAreaElement.value = '';
  587. chatTextAreaElement.style.height = '';
  588. }
  589. const _files = JSON.parse(JSON.stringify(files));
  590. chatFiles.push(..._files.filter((item) => ['doc', 'file', 'collection'].includes(item.type)));
  591. chatFiles = chatFiles.filter(
  592. // Remove duplicates
  593. (item, index, array) =>
  594. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  595. );
  596. files = [];
  597. prompt = '';
  598. // Create user message
  599. let userMessageId = uuidv4();
  600. let userMessage = {
  601. id: userMessageId,
  602. parentId: messages.length !== 0 ? messages.at(-1).id : null,
  603. childrenIds: [],
  604. role: 'user',
  605. content: userPrompt,
  606. files: _files.length > 0 ? _files : undefined,
  607. timestamp: Math.floor(Date.now() / 1000), // Unix timestamp (seconds)
  608. models: selectedModels
  609. };
  610. // Add message to history and set currentId to messageId
  611. history.messages[userMessageId] = userMessage;
  612. history.currentId = userMessageId;
  613. // Append messageId to childrenIds of parent message
  614. if (messages.length !== 0) {
  615. history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
  616. }
  617. // Wait until history/message have been updated
  618. await tick();
  619. _responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
  620. }
  621. return _responses;
  622. };
  623. const sendPrompt = async (
  624. prompt: string,
  625. parentId: string,
  626. { modelId = null, modelIdx = null, newChat = false } = {}
  627. ) => {
  628. // Create new chat if newChat is true and first user message
  629. if (
  630. newChat &&
  631. history.messages[history.currentId].parentId === null &&
  632. history.messages[history.currentId].role === 'user'
  633. ) {
  634. await initChatHandler();
  635. }
  636. let _responses: string[] = [];
  637. // If modelId is provided, use it, else use selected model
  638. let selectedModelIds = modelId
  639. ? [modelId]
  640. : atSelectedModel !== undefined
  641. ? [atSelectedModel.id]
  642. : selectedModels;
  643. // Create response messages for each selected model
  644. const responseMessageIds: Record<PropertyKey, string> = {};
  645. for (const [_modelIdx, modelId] of selectedModelIds.entries()) {
  646. const model = $models.filter((m) => m.id === modelId).at(0);
  647. if (model) {
  648. let responseMessageId = uuidv4();
  649. let responseMessage = {
  650. parentId: parentId,
  651. id: responseMessageId,
  652. childrenIds: [],
  653. role: 'assistant',
  654. content: '',
  655. model: model.id,
  656. modelName: model.name ?? model.id,
  657. modelIdx: modelIdx ? modelIdx : _modelIdx,
  658. userContext: null,
  659. timestamp: Math.floor(Date.now() / 1000) // Unix timestamp (seconds)
  660. };
  661. // Add message to history and set currentId to messageId
  662. history.messages[responseMessageId] = responseMessage;
  663. history.currentId = responseMessageId;
  664. // Append messageId to childrenIds of parent message
  665. if (parentId !== null) {
  666. history.messages[parentId].childrenIds = [
  667. ...history.messages[parentId].childrenIds,
  668. responseMessageId
  669. ];
  670. }
  671. responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`] = responseMessageId;
  672. }
  673. }
  674. await tick();
  675. const _chatId = JSON.parse(JSON.stringify($chatId));
  676. await Promise.all(
  677. selectedModelIds.map(async (modelId, _modelIdx) => {
  678. console.log('modelId', modelId);
  679. const model = $models.filter((m) => m.id === modelId).at(0);
  680. if (model) {
  681. const messages = createMessagesList(parentId);
  682. // If there are image files, check if model is vision capable
  683. const hasImages = messages.some((message) =>
  684. message.files?.some((file) => file.type === 'image')
  685. );
  686. if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
  687. toast.error(
  688. $i18n.t('Model {{modelName}} is not vision capable', {
  689. modelName: model.name ?? model.id
  690. })
  691. );
  692. }
  693. let responseMessageId =
  694. responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`];
  695. let responseMessage = history.messages[responseMessageId];
  696. let userContext = null;
  697. if ($settings?.memory ?? false) {
  698. if (userContext === null) {
  699. const res = await queryMemory(localStorage.token, prompt).catch((error) => {
  700. toast.error(error);
  701. return null;
  702. });
  703. if (res) {
  704. if (res.documents[0].length > 0) {
  705. userContext = res.documents[0].reduce((acc, doc, index) => {
  706. const createdAtTimestamp = res.metadatas[0][index].created_at;
  707. const createdAtDate = new Date(createdAtTimestamp * 1000)
  708. .toISOString()
  709. .split('T')[0];
  710. return `${acc}${index + 1}. [${createdAtDate}]. ${doc}\n`;
  711. }, '');
  712. }
  713. console.log(userContext);
  714. }
  715. }
  716. }
  717. responseMessage.userContext = userContext;
  718. const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
  719. scrollToBottom();
  720. if (webSearchEnabled) {
  721. await getWebSearchResults(model.id, parentId, responseMessageId);
  722. }
  723. let _response = null;
  724. if (model?.owned_by === 'openai') {
  725. _response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
  726. } else if (model) {
  727. _response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
  728. }
  729. _responses.push(_response);
  730. if (chatEventEmitter) clearInterval(chatEventEmitter);
  731. } else {
  732. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  733. }
  734. })
  735. );
  736. currentChatPage.set(1);
  737. chats.set(await getChatList(localStorage.token, $currentChatPage));
  738. return _responses;
  739. };
  740. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  741. let _response: string | null = null;
  742. const responseMessage = history.messages[responseMessageId];
  743. const userMessage = history.messages[responseMessage.parentId];
  744. // Wait until history/message have been updated
  745. await tick();
  746. // Scroll down
  747. scrollToBottom();
  748. const messagesBody = [
  749. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  750. ? {
  751. role: 'system',
  752. content: `${promptTemplate(
  753. params?.system ?? $settings?.system ?? '',
  754. $user.name,
  755. $settings?.userLocation
  756. ? await getAndUpdateUserLocation(localStorage.token)
  757. : undefined
  758. )}${
  759. (responseMessage?.userContext ?? null)
  760. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  761. : ''
  762. }`
  763. }
  764. : undefined,
  765. ...createMessagesList(responseMessageId)
  766. ]
  767. .filter((message) => message?.content?.trim())
  768. .map((message) => {
  769. // Prepare the base message object
  770. const baseMessage = {
  771. role: message.role,
  772. content: message.content
  773. };
  774. // Extract and format image URLs if any exist
  775. const imageUrls = message.files
  776. ?.filter((file) => file.type === 'image')
  777. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  778. // Add images array only if it contains elements
  779. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  780. baseMessage.images = imageUrls;
  781. }
  782. return baseMessage;
  783. });
  784. let lastImageIndex = -1;
  785. // Find the index of the last object with images
  786. messagesBody.forEach((item, index) => {
  787. if (item.images) {
  788. lastImageIndex = index;
  789. }
  790. });
  791. // Remove images from all but the last one
  792. messagesBody.forEach((item, index) => {
  793. if (index !== lastImageIndex) {
  794. delete item.images;
  795. }
  796. });
  797. let files = JSON.parse(JSON.stringify(chatFiles));
  798. if (model?.info?.meta?.knowledge ?? false) {
  799. // Only initialize and add status if knowledge exists
  800. responseMessage.statusHistory = [
  801. {
  802. action: 'knowledge_search',
  803. description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
  804. searchQuery: userMessage.content
  805. }),
  806. done: false
  807. }
  808. ];
  809. files.push(
  810. ...model.info.meta.knowledge.map((item) => {
  811. if (item?.collection_name) {
  812. return {
  813. id: item.collection_name,
  814. name: item.name,
  815. legacy: true
  816. };
  817. } else if (item?.collection_names) {
  818. return {
  819. name: item.name,
  820. type: 'collection',
  821. collection_names: item.collection_names,
  822. legacy: true
  823. };
  824. } else {
  825. return item;
  826. }
  827. })
  828. );
  829. history.messages[responseMessageId] = responseMessage;
  830. }
  831. files.push(
  832. ...(userMessage?.files ?? []).filter((item) =>
  833. ['doc', 'file', 'collection'].includes(item.type)
  834. ),
  835. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  836. );
  837. // Remove duplicates
  838. files = files.filter(
  839. (item, index, array) =>
  840. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  841. );
  842. scrollToBottom();
  843. eventTarget.dispatchEvent(
  844. new CustomEvent('chat:start', {
  845. detail: {
  846. id: responseMessageId
  847. }
  848. })
  849. );
  850. await tick();
  851. const stream =
  852. model?.info?.params?.stream_response ??
  853. $settings?.params?.stream_response ??
  854. params?.stream_response ??
  855. true;
  856. const [res, controller] = await generateChatCompletion(localStorage.token, {
  857. stream: stream,
  858. model: model.id,
  859. messages: messagesBody,
  860. options: {
  861. ...{ ...($settings?.params ?? {}), ...params },
  862. stop:
  863. (params?.stop ?? $settings?.params?.stop ?? undefined)
  864. ? (params?.stop ? params.stop.split(',').map((token) => token.trim()) : $settings.params.stop).map(
  865. (str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  866. )
  867. : undefined,
  868. num_predict: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  869. repeat_penalty:
  870. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined
  871. },
  872. format: $settings.requestFormat ?? undefined,
  873. keep_alive: $settings.keepAlive ?? undefined,
  874. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  875. files: files.length > 0 ? files : undefined,
  876. session_id: $socket?.id,
  877. chat_id: $chatId,
  878. id: responseMessageId
  879. });
  880. if (res && res.ok) {
  881. if (!stream) {
  882. const response = await res.json();
  883. console.log(response);
  884. responseMessage.content = response.message.content;
  885. responseMessage.info = {
  886. eval_count: response.eval_count,
  887. eval_duration: response.eval_duration,
  888. load_duration: response.load_duration,
  889. prompt_eval_count: response.prompt_eval_count,
  890. prompt_eval_duration: response.prompt_eval_duration,
  891. total_duration: response.total_duration
  892. };
  893. responseMessage.done = true;
  894. } else {
  895. console.log('controller', controller);
  896. const reader = res.body
  897. .pipeThrough(new TextDecoderStream())
  898. .pipeThrough(splitStream('\n'))
  899. .getReader();
  900. while (true) {
  901. const { value, done } = await reader.read();
  902. if (done || stopResponseFlag || _chatId !== $chatId) {
  903. responseMessage.done = true;
  904. history.messages[responseMessageId] = responseMessage;
  905. if (stopResponseFlag) {
  906. controller.abort('User: Stop Response');
  907. }
  908. _response = responseMessage.content;
  909. break;
  910. }
  911. try {
  912. let lines = value.split('\n');
  913. for (const line of lines) {
  914. if (line !== '') {
  915. console.log(line);
  916. let data = JSON.parse(line);
  917. if ('citations' in data) {
  918. responseMessage.citations = data.citations;
  919. // Only remove status if it was initially set
  920. if (model?.info?.meta?.knowledge ?? false) {
  921. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  922. (status) => status.action !== 'knowledge_search'
  923. );
  924. }
  925. continue;
  926. }
  927. if ('detail' in data) {
  928. throw data;
  929. }
  930. if (data.done == false) {
  931. if (responseMessage.content == '' && data.message.content == '\n') {
  932. continue;
  933. } else {
  934. responseMessage.content += data.message.content;
  935. if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
  936. navigator.vibrate(5);
  937. }
  938. const messageContentParts = getMessageContentParts(
  939. responseMessage.content,
  940. $config?.audio?.tts?.split_on ?? 'punctuation'
  941. );
  942. messageContentParts.pop();
  943. // dispatch only last sentence and make sure it hasn't been dispatched before
  944. if (
  945. messageContentParts.length > 0 &&
  946. messageContentParts[messageContentParts.length - 1] !==
  947. responseMessage.lastSentence
  948. ) {
  949. responseMessage.lastSentence =
  950. messageContentParts[messageContentParts.length - 1];
  951. eventTarget.dispatchEvent(
  952. new CustomEvent('chat', {
  953. detail: {
  954. id: responseMessageId,
  955. content: messageContentParts[messageContentParts.length - 1]
  956. }
  957. })
  958. );
  959. }
  960. history.messages[responseMessageId] = responseMessage;
  961. }
  962. } else {
  963. responseMessage.done = true;
  964. if (responseMessage.content == '') {
  965. responseMessage.error = {
  966. code: 400,
  967. content: `Oops! No text was generated from Ollama. Please try again.`
  968. };
  969. }
  970. responseMessage.context = data.context ?? null;
  971. responseMessage.info = {
  972. total_duration: data.total_duration,
  973. load_duration: data.load_duration,
  974. sample_count: data.sample_count,
  975. sample_duration: data.sample_duration,
  976. prompt_eval_count: data.prompt_eval_count,
  977. prompt_eval_duration: data.prompt_eval_duration,
  978. eval_count: data.eval_count,
  979. eval_duration: data.eval_duration
  980. };
  981. history.messages[responseMessageId] = responseMessage;
  982. if ($settings.notificationEnabled && !document.hasFocus()) {
  983. const notification = new Notification(`${model.id}`, {
  984. body: responseMessage.content,
  985. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  986. });
  987. }
  988. if ($settings?.responseAutoCopy ?? false) {
  989. copyToClipboard(responseMessage.content);
  990. }
  991. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  992. await tick();
  993. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  994. }
  995. }
  996. }
  997. }
  998. } catch (error) {
  999. console.log(error);
  1000. if ('detail' in error) {
  1001. toast.error(error.detail);
  1002. }
  1003. break;
  1004. }
  1005. if (autoScroll) {
  1006. scrollToBottom();
  1007. }
  1008. }
  1009. }
  1010. } else {
  1011. if (res !== null) {
  1012. const error = await res.json();
  1013. console.log(error);
  1014. if ('detail' in error) {
  1015. toast.error(error.detail);
  1016. responseMessage.error = { content: error.detail };
  1017. } else {
  1018. toast.error(error.error);
  1019. responseMessage.error = { content: error.error };
  1020. }
  1021. } else {
  1022. toast.error(
  1023. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  1024. );
  1025. responseMessage.error = {
  1026. content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  1027. provider: 'Ollama'
  1028. })
  1029. };
  1030. }
  1031. responseMessage.done = true;
  1032. if (responseMessage.statusHistory) {
  1033. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  1034. (status) => status.action !== 'knowledge_search'
  1035. );
  1036. }
  1037. }
  1038. await saveChatHandler(_chatId);
  1039. history.messages[responseMessageId] = responseMessage;
  1040. await chatCompletedHandler(
  1041. _chatId,
  1042. model.id,
  1043. responseMessageId,
  1044. createMessagesList(responseMessageId)
  1045. );
  1046. stopResponseFlag = false;
  1047. await tick();
  1048. let lastMessageContentPart =
  1049. getMessageContentParts(
  1050. responseMessage.content,
  1051. $config?.audio?.tts?.split_on ?? 'punctuation'
  1052. )?.at(-1) ?? '';
  1053. if (lastMessageContentPart) {
  1054. eventTarget.dispatchEvent(
  1055. new CustomEvent('chat', {
  1056. detail: { id: responseMessageId, content: lastMessageContentPart }
  1057. })
  1058. );
  1059. }
  1060. eventTarget.dispatchEvent(
  1061. new CustomEvent('chat:finish', {
  1062. detail: {
  1063. id: responseMessageId,
  1064. content: responseMessage.content
  1065. }
  1066. })
  1067. );
  1068. if (autoScroll) {
  1069. scrollToBottom();
  1070. }
  1071. const messages = createMessagesList(responseMessageId);
  1072. if (messages.length == 2 && messages.at(-1).content !== '' && selectedModels[0] === model.id) {
  1073. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  1074. const title = await generateChatTitle(userPrompt);
  1075. await setChatTitle(_chatId, title);
  1076. }
  1077. return _response;
  1078. };
  1079. const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
  1080. let _response = null;
  1081. const responseMessage = history.messages[responseMessageId];
  1082. const userMessage = history.messages[responseMessage.parentId];
  1083. let files = JSON.parse(JSON.stringify(chatFiles));
  1084. if (model?.info?.meta?.knowledge ?? false) {
  1085. // Only initialize and add status if knowledge exists
  1086. responseMessage.statusHistory = [
  1087. {
  1088. action: 'knowledge_search',
  1089. description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
  1090. searchQuery: userMessage.content
  1091. }),
  1092. done: false
  1093. }
  1094. ];
  1095. files.push(
  1096. ...model.info.meta.knowledge.map((item) => {
  1097. if (item?.collection_name) {
  1098. return {
  1099. id: item.collection_name,
  1100. name: item.name,
  1101. legacy: true
  1102. };
  1103. } else if (item?.collection_names) {
  1104. return {
  1105. name: item.name,
  1106. type: 'collection',
  1107. collection_names: item.collection_names,
  1108. legacy: true
  1109. };
  1110. } else {
  1111. return item;
  1112. }
  1113. })
  1114. );
  1115. history.messages[responseMessageId] = responseMessage;
  1116. }
  1117. files.push(
  1118. ...(userMessage?.files ?? []).filter((item) =>
  1119. ['doc', 'file', 'collection'].includes(item.type)
  1120. ),
  1121. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  1122. );
  1123. // Remove duplicates
  1124. files = files.filter(
  1125. (item, index, array) =>
  1126. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  1127. );
  1128. scrollToBottom();
  1129. eventTarget.dispatchEvent(
  1130. new CustomEvent('chat:start', {
  1131. detail: {
  1132. id: responseMessageId
  1133. }
  1134. })
  1135. );
  1136. await tick();
  1137. try {
  1138. const stream =
  1139. model?.info?.params?.stream_response ??
  1140. $settings?.params?.stream_response ??
  1141. params?.stream_response ??
  1142. true;
  1143. const [res, controller] = await generateOpenAIChatCompletion(
  1144. localStorage.token,
  1145. {
  1146. stream: stream,
  1147. model: model.id,
  1148. ...(stream && (model.info?.meta?.capabilities?.usage ?? false)
  1149. ? {
  1150. stream_options: {
  1151. include_usage: true
  1152. }
  1153. }
  1154. : {}),
  1155. messages: [
  1156. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  1157. ? {
  1158. role: 'system',
  1159. content: `${promptTemplate(
  1160. params?.system ?? $settings?.system ?? '',
  1161. $user.name,
  1162. $settings?.userLocation
  1163. ? await getAndUpdateUserLocation(localStorage.token)
  1164. : undefined
  1165. )}${
  1166. (responseMessage?.userContext ?? null)
  1167. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  1168. : ''
  1169. }`
  1170. }
  1171. : undefined,
  1172. ...createMessagesList(responseMessageId)
  1173. ]
  1174. .filter((message) => message?.content?.trim())
  1175. .map((message, idx, arr) => ({
  1176. role: message.role,
  1177. ...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
  1178. message.role === 'user'
  1179. ? {
  1180. content: [
  1181. {
  1182. type: 'text',
  1183. text:
  1184. arr.length - 1 !== idx
  1185. ? message.content
  1186. : (message?.raContent ?? message.content)
  1187. },
  1188. ...message.files
  1189. .filter((file) => file.type === 'image')
  1190. .map((file) => ({
  1191. type: 'image_url',
  1192. image_url: {
  1193. url: file.url
  1194. }
  1195. }))
  1196. ]
  1197. }
  1198. : {
  1199. content:
  1200. arr.length - 1 !== idx
  1201. ? message.content
  1202. : (message?.raContent ?? message.content)
  1203. })
  1204. })),
  1205. seed: params?.seed ?? $settings?.params?.seed ?? undefined,
  1206. stop:
  1207. (params?.stop ?? $settings?.params?.stop ?? undefined)
  1208. ? (params?.stop ? params.stop.split(',').map((token) => token.trim()) : $settings.params.stop).map(
  1209. (str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  1210. )
  1211. : undefined,
  1212. temperature: params?.temperature ?? $settings?.params?.temperature ?? undefined,
  1213. top_p: params?.top_p ?? $settings?.params?.top_p ?? undefined,
  1214. frequency_penalty:
  1215. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined,
  1216. max_tokens: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  1217. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  1218. files: files.length > 0 ? files : undefined,
  1219. session_id: $socket?.id,
  1220. chat_id: $chatId,
  1221. id: responseMessageId
  1222. },
  1223. `${WEBUI_BASE_URL}/api`
  1224. );
  1225. // Wait until history/message have been updated
  1226. await tick();
  1227. scrollToBottom();
  1228. if (res && res.ok && res.body) {
  1229. if (!stream) {
  1230. const response = await res.json();
  1231. console.log(response);
  1232. responseMessage.content = response.choices[0].message.content;
  1233. responseMessage.info = { ...response.usage, openai: true };
  1234. responseMessage.done = true;
  1235. } else {
  1236. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  1237. for await (const update of textStream) {
  1238. const { value, done, citations, error, usage } = update;
  1239. if (error) {
  1240. await handleOpenAIError(error, null, model, responseMessage);
  1241. break;
  1242. }
  1243. if (done || stopResponseFlag || _chatId !== $chatId) {
  1244. responseMessage.done = true;
  1245. history.messages[responseMessageId] = responseMessage;
  1246. if (stopResponseFlag) {
  1247. controller.abort('User: Stop Response');
  1248. }
  1249. _response = responseMessage.content;
  1250. break;
  1251. }
  1252. if (usage) {
  1253. responseMessage.info = { ...usage, openai: true, usage };
  1254. }
  1255. if (citations) {
  1256. responseMessage.citations = citations;
  1257. // Only remove status if it was initially set
  1258. if (model?.info?.meta?.knowledge ?? false) {
  1259. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  1260. (status) => status.action !== 'knowledge_search'
  1261. );
  1262. }
  1263. continue;
  1264. }
  1265. if (responseMessage.content == '' && value == '\n') {
  1266. continue;
  1267. } else {
  1268. responseMessage.content += value;
  1269. if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
  1270. navigator.vibrate(5);
  1271. }
  1272. const messageContentParts = getMessageContentParts(
  1273. responseMessage.content,
  1274. $config?.audio?.tts?.split_on ?? 'punctuation'
  1275. );
  1276. messageContentParts.pop();
  1277. // dispatch only last sentence and make sure it hasn't been dispatched before
  1278. if (
  1279. messageContentParts.length > 0 &&
  1280. messageContentParts[messageContentParts.length - 1] !== responseMessage.lastSentence
  1281. ) {
  1282. responseMessage.lastSentence = messageContentParts[messageContentParts.length - 1];
  1283. eventTarget.dispatchEvent(
  1284. new CustomEvent('chat', {
  1285. detail: {
  1286. id: responseMessageId,
  1287. content: messageContentParts[messageContentParts.length - 1]
  1288. }
  1289. })
  1290. );
  1291. }
  1292. history.messages[responseMessageId] = responseMessage;
  1293. }
  1294. if (autoScroll) {
  1295. scrollToBottom();
  1296. }
  1297. }
  1298. }
  1299. if ($settings.notificationEnabled && !document.hasFocus()) {
  1300. const notification = new Notification(`${model.id}`, {
  1301. body: responseMessage.content,
  1302. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  1303. });
  1304. }
  1305. if ($settings.responseAutoCopy) {
  1306. copyToClipboard(responseMessage.content);
  1307. }
  1308. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  1309. await tick();
  1310. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  1311. }
  1312. } else {
  1313. await handleOpenAIError(null, res, model, responseMessage);
  1314. }
  1315. } catch (error) {
  1316. await handleOpenAIError(error, null, model, responseMessage);
  1317. }
  1318. await saveChatHandler(_chatId);
  1319. history.messages[responseMessageId] = responseMessage;
  1320. await chatCompletedHandler(
  1321. _chatId,
  1322. model.id,
  1323. responseMessageId,
  1324. createMessagesList(responseMessageId)
  1325. );
  1326. stopResponseFlag = false;
  1327. await tick();
  1328. let lastMessageContentPart =
  1329. getMessageContentParts(
  1330. responseMessage.content,
  1331. $config?.audio?.tts?.split_on ?? 'punctuation'
  1332. )?.at(-1) ?? '';
  1333. if (lastMessageContentPart) {
  1334. eventTarget.dispatchEvent(
  1335. new CustomEvent('chat', {
  1336. detail: { id: responseMessageId, content: lastMessageContentPart }
  1337. })
  1338. );
  1339. }
  1340. eventTarget.dispatchEvent(
  1341. new CustomEvent('chat:finish', {
  1342. detail: {
  1343. id: responseMessageId,
  1344. content: responseMessage.content
  1345. }
  1346. })
  1347. );
  1348. if (autoScroll) {
  1349. scrollToBottom();
  1350. }
  1351. const messages = createMessagesList(responseMessageId);
  1352. if (messages.length == 2 && selectedModels[0] === model.id) {
  1353. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  1354. const title = await generateChatTitle(userPrompt);
  1355. await setChatTitle(_chatId, title);
  1356. }
  1357. return _response;
  1358. };
  1359. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  1360. let errorMessage = '';
  1361. let innerError;
  1362. if (error) {
  1363. innerError = error;
  1364. } else if (res !== null) {
  1365. innerError = await res.json();
  1366. }
  1367. console.error(innerError);
  1368. if ('detail' in innerError) {
  1369. toast.error(innerError.detail);
  1370. errorMessage = innerError.detail;
  1371. } else if ('error' in innerError) {
  1372. if ('message' in innerError.error) {
  1373. toast.error(innerError.error.message);
  1374. errorMessage = innerError.error.message;
  1375. } else {
  1376. toast.error(innerError.error);
  1377. errorMessage = innerError.error;
  1378. }
  1379. } else if ('message' in innerError) {
  1380. toast.error(innerError.message);
  1381. errorMessage = innerError.message;
  1382. }
  1383. responseMessage.error = {
  1384. content:
  1385. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  1386. provider: model.name ?? model.id
  1387. }) +
  1388. '\n' +
  1389. errorMessage
  1390. };
  1391. responseMessage.done = true;
  1392. if (responseMessage.statusHistory) {
  1393. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  1394. (status) => status.action !== 'knowledge_search'
  1395. );
  1396. }
  1397. history.messages[responseMessage.id] = responseMessage;
  1398. };
  1399. const stopResponse = () => {
  1400. stopResponseFlag = true;
  1401. console.log('stopResponse');
  1402. };
  1403. const regenerateResponse = async (message) => {
  1404. console.log('regenerateResponse');
  1405. if (history.currentId) {
  1406. let userMessage = history.messages[message.parentId];
  1407. let userPrompt = userMessage.content;
  1408. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  1409. // If user message has only one model selected, sendPrompt automatically selects it for regeneration
  1410. await sendPrompt(userPrompt, userMessage.id);
  1411. } else {
  1412. // If there are multiple models selected, use the model of the response message for regeneration
  1413. // e.g. many model chat
  1414. await sendPrompt(userPrompt, userMessage.id, {
  1415. modelId: message.model,
  1416. modelIdx: message.modelIdx
  1417. });
  1418. }
  1419. }
  1420. };
	const continueResponse = async () => {
		console.log('continueResponse');
		const _chatId = JSON.parse(JSON.stringify($chatId));

		if (history.currentId && history.messages[history.currentId].done == true) {
			const responseMessage = history.messages[history.currentId];
			responseMessage.done = false;
			await tick();

			const model = $models.filter((m) => m.id === responseMessage.model).at(0);

			if (model) {
				if (model?.owned_by === 'openai') {
					await sendPromptOpenAI(
						model,
						history.messages[responseMessage.parentId].content,
						responseMessage.id,
						_chatId
					);
				} else {
					await sendPromptOllama(
						model,
						history.messages[responseMessage.parentId].content,
						responseMessage.id,
						_chatId
					);
				}
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId: responseMessage.model }));
			}
		}
	};
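	// Merge multiple model responses into one via a MoA (mixture-of-agents) completion, streaming
	// the merged text into the message's `merged` field.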
	const mergeResponses = async (messageId, responses, _chatId) => {
		console.log('mergeResponses', messageId, responses);
		const message = history.messages[messageId];

		const mergedResponse = {
			status: true,
			content: ''
		};
		message.merged = mergedResponse;
		history.messages[messageId] = message;

		try {
			const [res, controller] = await generateMoACompletion(
				localStorage.token,
				message.model,
				history.messages[message.parentId].content,
				responses
			);

			if (res && res.ok && res.body) {
				const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);

				for await (const update of textStream) {
					const { value, done, citations, error, usage } = update;
					if (error || done) {
						break;
					}

					if (mergedResponse.content == '' && value == '\n') {
						continue;
					} else {
						mergedResponse.content += value;
						history.messages[messageId] = message;
					}

					if (autoScroll) {
						scrollToBottom();
					}
				}

				await saveChatHandler(_chatId);
			} else {
				console.error(res);
			}
		} catch (e) {
			console.error(e);
		}
	};
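	// Generate a chat title from the first user prompt, or fall back to the prompt itself when
	// automatic titling is disabled in settings.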
	const generateChatTitle = async (userPrompt) => {
		if ($settings?.title?.auto ?? true) {
			const title = await generateTitle(
				localStorage.token,
				selectedModels[0],
				userPrompt,
				$chatId
			).catch((error) => {
				console.error(error);
				return 'New Chat';
			});

			return title;
		} else {
			return `${userPrompt}`;
		}
	};
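	// Update the title in local state and, unless this is a temporary chat, persist it and refresh
	// the chat list.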
	const setChatTitle = async (_chatId, title) => {
		if (_chatId === $chatId) {
			chatTitle.set(title);
		}

		if (!$temporaryChatEnabled) {
			chat = await updateChatById(localStorage.token, _chatId, { title: title });

			currentChatPage.set(1);
			await chats.set(await getChatList(localStorage.token, $currentChatPage));
		}
	};
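	// Run a web search for the user message: generate a query, search, and attach the results to
	// the response message as files, reporting progress through statusHistory.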
	const getWebSearchResults = async (
		model: string,
		parentId: string,
		responseMessageId: string
	) => {
		const responseMessage = history.messages[responseMessageId];
		const userMessage = history.messages[parentId];
		const messages = createMessagesList(history.currentId);

		responseMessage.statusHistory = [
			{
				done: false,
				action: 'web_search',
				description: $i18n.t('Generating search query')
			}
		];
		history.messages[responseMessageId] = responseMessage;

		const prompt = userMessage.content;
		let searchQuery = await generateSearchQuery(
			localStorage.token,
			model,
			messages.filter((message) => message?.content?.trim()),
			prompt
		).catch((error) => {
			console.log(error);
			return prompt;
		});

		if (!searchQuery || searchQuery == '') {
			responseMessage.statusHistory.push({
				done: true,
				error: true,
				action: 'web_search',
				description: $i18n.t('No search query generated')
			});
			history.messages[responseMessageId] = responseMessage;
			return;
		}

		responseMessage.statusHistory.push({
			done: false,
			action: 'web_search',
			description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
		});
		history.messages[responseMessageId] = responseMessage;

		const results = await processWebSearch(localStorage.token, searchQuery).catch((error) => {
			console.log(error);
			toast.error(error);
			return null;
		});

		if (results) {
			responseMessage.statusHistory.push({
				done: true,
				action: 'web_search',
				description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
				query: searchQuery,
				urls: results.filenames
			});

			if (responseMessage.files === undefined) {
				responseMessage.files = [];
			}
			responseMessage.files.push({
				collection_name: results.collection_name,
				name: searchQuery,
				type: 'web_search_results',
				urls: results.filenames
			});
			history.messages[responseMessageId] = responseMessage;
		} else {
			responseMessage.statusHistory.push({
				done: true,
				error: true,
				action: 'web_search',
				description: $i18n.t('No search results found')
			});
			history.messages[responseMessageId] = responseMessage;
		}
	};
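	// Fetch the tags for the current chat; fall back to an empty list on failure.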
	const getTags = async () => {
		return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
			return [];
		});
	};
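	// Create a new chat record for the current history (or switch to the 'local' id when temporary
	// chat mode is enabled) and refresh the chat list.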
	const initChatHandler = async () => {
		if (!$temporaryChatEnabled) {
			chat = await createNewChat(localStorage.token, {
				id: $chatId,
				title: $i18n.t('New Chat'),
				models: selectedModels,
				system: $settings.system ?? undefined,
				params: params,
				history: history,
				tags: [],
				timestamp: Date.now()
			});

			currentChatPage.set(1);
			await chats.set(await getChatList(localStorage.token, $currentChatPage));
			await chatId.set(chat.id);
		} else {
			await chatId.set('local');
		}
		await tick();
	};
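	// Persist the current models, history, params, and files for the chat, provided the chat being
	// saved is still the active one and temporary chat mode is off.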
	const saveChatHandler = async (_chatId) => {
		if ($chatId == _chatId) {
			if (!$temporaryChatEnabled) {
				chat = await updateChatById(localStorage.token, _chatId, {
					models: selectedModels,
					history: history,
					messages: createMessagesList(history.currentId),
					params: params,
					files: chatFiles
				});

				currentChatPage.set(1);
				await chats.set(await getChatList(localStorage.token, $currentChatPage));
			}
		}
	};
</script>
<svelte:head>
	<title>
		{$chatTitle
			? `${$chatTitle.length > 30 ? `${$chatTitle.slice(0, 30)}...` : $chatTitle} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

<audio id="audioElement" src="" style="display: none;" />

<EventConfirmDialog
	bind:show={showEventConfirmation}
	title={eventConfirmationTitle}
	message={eventConfirmationMessage}
	input={eventConfirmationInput}
	inputPlaceholder={eventConfirmationInputPlaceholder}
	inputValue={eventConfirmationInputValue}
	on:confirm={(e) => {
		if (e.detail) {
			eventCallback(e.detail);
		} else {
			eventCallback(true);
		}
	}}
	on:cancel={() => {
		eventCallback(false);
	}}
/>
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="h-screen max-h-[100dvh] {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		{#if $settings?.backgroundImageUrl ?? null}
			<div
				class="absolute {$showSidebar
					? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
					: ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
				style="background-image: url({$settings.backgroundImageUrl}) "
			/>

			<div
				class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
			/>
		{/if}

		<Navbar
			{chat}
			title={$chatTitle}
			bind:selectedModels
			shareEnabled={!!history.currentId}
			{initNewChat}
		/>

		<PaneGroup direction="horizontal" class="w-full h-full">
			<Pane defaultSize={50} class="h-full flex w-full relative">
				{#if $banners.length > 0 && !history.currentId && !$chatId && selectedModels.length <= 1}
					<div class="absolute top-12 left-0 right-0 w-full z-30">
						<div class=" flex flex-col gap-1 w-full">
							{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
								<Banner
									{banner}
									on:dismiss={(e) => {
										const bannerId = e.detail;

										localStorage.setItem(
											'dismissedBannerIds',
											JSON.stringify(
												[
													bannerId,
													...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
												].filter((id) => $banners.find((b) => b.id === id))
											)
										);
									}}
								/>
							{/each}
						</div>
					</div>
				{/if}
				<div class="flex flex-col flex-auto z-10 w-full">
					{#if $settings?.landingPageMode === 'chat' || createMessagesList(history.currentId).length > 0}
						<div
							class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10 scrollbar-hidden"
							id="messages-container"
							bind:this={messagesContainerElement}
							on:scroll={(e) => {
								autoScroll =
									messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
									messagesContainerElement.clientHeight + 5;
							}}
						>
							<div class=" h-full w-full flex flex-col">
								<Messages
									chatId={$chatId}
									bind:history
									bind:autoScroll
									bind:prompt
									{selectedModels}
									{sendPrompt}
									{showMessage}
									{continueResponse}
									{regenerateResponse}
									{mergeResponses}
									{chatActionHandler}
									bottomPadding={files.length > 0}
									on:submit={async (e) => {
										if (e.detail) {
											// New user message
											let userPrompt = e.detail.prompt;
											let userMessageId = uuidv4();

											let userMessage = {
												id: userMessageId,
												parentId: e.detail.parentId,
												childrenIds: [],
												role: 'user',
												content: userPrompt,
												models: selectedModels
											};

											let messageParentId = e.detail.parentId;

											if (messageParentId !== null) {
												history.messages[messageParentId].childrenIds = [
													...history.messages[messageParentId].childrenIds,
													userMessageId
												];
											}

											history.messages[userMessageId] = userMessage;
											history.currentId = userMessageId;

											await tick();
											await sendPrompt(userPrompt, userMessageId);
										}
									}}
								/>
							</div>
						</div>
						<div class=" pb-[1.6rem]">
							<MessageInput
								{history}
								{selectedModels}
								bind:files
								bind:prompt
								bind:autoScroll
								bind:selectedToolIds
								bind:webSearchEnabled
								bind:atSelectedModel
								availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
									const model = $models.find((m) => m.id === e);
									if (model?.info?.meta?.toolIds ?? false) {
										return [...new Set([...a, ...model.info.meta.toolIds])];
									}
									return a;
								}, [])}
								transparentBackground={$settings?.backgroundImageUrl ?? false}
								{stopResponse}
								{createMessagePair}
								on:submit={async (e) => {
									if (e.detail) {
										prompt = '';
										await tick();
										submitPrompt(e.detail);
									}
								}}
							/>

							<div
								class="absolute bottom-1.5 text-xs text-gray-500 text-center line-clamp-1 right-0 left-0"
							>
								{$i18n.t('LLMs can make mistakes. Verify important information.')}
							</div>
						</div>
					{:else}
						<Placeholder
							{history}
							{selectedModels}
							bind:files
							bind:prompt
							bind:autoScroll
							bind:selectedToolIds
							bind:webSearchEnabled
							bind:atSelectedModel
							availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
								const model = $models.find((m) => m.id === e);
								if (model?.info?.meta?.toolIds ?? false) {
									return [...new Set([...a, ...model.info.meta.toolIds])];
								}
								return a;
							}, [])}
							transparentBackground={$settings?.backgroundImageUrl ?? false}
							{stopResponse}
							{createMessagePair}
							on:submit={async (e) => {
								if (e.detail) {
									prompt = '';
									await tick();
									submitPrompt(e.detail);
								}
							}}
						/>
					{/if}
				</div>
			</Pane>

			<ChatControls
				bind:history
				bind:chatFiles
				bind:params
				bind:files
				bind:pane={controlPane}
				chatId={$chatId}
				modelId={selectedModelIds?.at(0) ?? null}
				models={selectedModelIds.reduce((a, e, i, arr) => {
					const model = $models.find((m) => m.id === e);
					if (model) {
						return [...a, model];
					}
					return a;
				}, [])}
				{submitPrompt}
				{stopResponse}
				{showMessage}
				{eventTarget}
			/>
		</PaneGroup>
	</div>
{/if}