Chat.svelte 38 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { getContext, onMount, tick } from 'svelte';
  6. import { goto } from '$app/navigation';
  7. import { page } from '$app/stores';
  8. import type { Writable } from 'svelte/store';
  9. import type { i18n as i18nType } from 'i18next';
  10. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  11. import {
  12. chatId,
  13. chats,
  14. config,
  15. type Model,
  16. models,
  17. settings,
  18. showSidebar,
  19. tags as _tags,
  20. WEBUI_NAME,
  21. banners,
  22. user,
  23. socket,
  24. showCallOverlay,
  25. tools
  26. } from '$lib/stores';
  27. import {
  28. convertMessagesToHistory,
  29. copyToClipboard,
  30. extractSentencesForAudio,
  31. getUserPosition,
  32. promptTemplate,
  33. splitStream
  34. } from '$lib/utils';
  35. import { generateChatCompletion } from '$lib/apis/ollama';
  36. import {
  37. addTagById,
  38. createNewChat,
  39. deleteTagById,
  40. getAllChatTags,
  41. getChatById,
  42. getChatList,
  43. getTagsById,
  44. updateChatById
  45. } from '$lib/apis/chats';
  46. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  47. import { runWebSearch } from '$lib/apis/rag';
  48. import { createOpenAITextStream } from '$lib/apis/streaming';
  49. import { queryMemory } from '$lib/apis/memories';
  50. import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
  51. import { chatCompleted, generateTitle, generateSearchQuery } from '$lib/apis';
  52. import Banner from '../common/Banner.svelte';
  53. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  54. import Messages from '$lib/components/chat/Messages.svelte';
  55. import Navbar from '$lib/components/layout/Navbar.svelte';
  56. import CallOverlay from './MessageInput/CallOverlay.svelte';
  57. import { error } from '@sveltejs/kit';
// i18next instance provided via Svelte context by an ancestor layout.
const i18n: Writable<i18nType> = getContext('i18n');
// Chat id from the route; empty string means a brand-new (unsaved) chat.
export let chatIdProp = '';
let loaded = false;
// Local event bus: 'chat:start' / 'chat' / 'chat:finish' events are dispatched
// here during streaming (consumed e.g. for audio playback of sentences).
const eventTarget = new EventTarget();
// Set by the stop button; the streaming loops check it to abort generation.
let stopResponseFlag = false;
// When true, the message list follows the stream to the bottom.
let autoScroll = true;
let processing = '';
let messagesContainerElement: HTMLDivElement;
let showModelSelector = true;
// Model ids used for the next submission; '' means "not selected yet".
let selectedModels = [''];
// Model picked inline in the prompt (presumably via '@' mention) — overrides
// the dropdown selection below. TODO confirm against MessageInput.
let atSelectedModel: Model | undefined;
let selectedModelIds = [];
$: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;
let selectedToolIds = [];
let webSearchEnabled = false;
// Currently loaded chat record as returned by the chats API; null until loaded.
let chat = null;
let tags = [];
let title = '';
// Bound to the chat textarea contents.
let prompt = '';
// Attachments queued for the next user message.
let files = [];
// Linear view of the active conversation branch, derived from `history`.
let messages = [];
// Message tree: every message by id, plus the id of the current leaf.
let history = {
	messages: {},
	currentId: null
};
// Rebuild the linear `messages` array whenever the active leaf changes:
// walk parent links from `history.currentId` up to the root, prepending a
// shallow copy of each message so the array is in root-first order.
$: if (history.currentId !== null) {
	let _messages = [];
	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}
// Load the chat whenever the route's chat id changes; redirect home when the
// chat cannot be loaded.
$: if (chatIdProp) {
	(async () => {
		console.log(chatIdProp);
		if (chatIdProp && (await loadChat())) {
			await tick();
			loaded = true;
			// setTimeout(…, 0) lets the message list render before scrolling.
			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}
  109. onMount(async () => {
  110. const onMessageHandler = async (event) => {
  111. if (event.origin === window.origin) {
  112. // Replace with your iframe's origin
  113. console.log('Message received from iframe:', event.data);
  114. if (event.data.type === 'input:prompt') {
  115. console.log(event.data.text);
  116. const inputElement = document.getElementById('chat-textarea');
  117. if (inputElement) {
  118. prompt = event.data.text;
  119. inputElement.focus();
  120. }
  121. }
  122. if (event.data.type === 'action:submit') {
  123. console.log(event.data.text);
  124. if (prompt !== '') {
  125. await tick();
  126. submitPrompt(prompt);
  127. }
  128. }
  129. if (event.data.type === 'input:prompt:submit') {
  130. console.log(event.data.text);
  131. if (prompt !== '') {
  132. await tick();
  133. submitPrompt(event.data.text);
  134. }
  135. }
  136. }
  137. };
  138. window.addEventListener('message', onMessageHandler);
  139. if (!$chatId) {
  140. chatId.subscribe(async (value) => {
  141. if (!value) {
  142. await initNewChat();
  143. }
  144. });
  145. } else {
  146. if (!($settings.saveChatHistory ?? true)) {
  147. await goto('/');
  148. }
  149. }
  150. return () => {
  151. window.removeEventListener('message', onMessageHandler);
  152. };
  153. });
  154. //////////////////////////
  155. // Web functions
  156. //////////////////////////
// Reset the URL and all per-chat state for a fresh conversation, pick the
// initial models, honor ?models= / ?q= URL parameters, and reload settings.
const initNewChat = async () => {
	window.history.replaceState(history.state, '', `/`);
	await chatId.set('');
	autoScroll = true;
	title = '';
	messages = [];
	history = {
		messages: {},
		currentId: null
	};
	// Model selection priority: URL ?models= > user settings > server default > none.
	if ($page.url.searchParams.get('models')) {
		selectedModels = $page.url.searchParams.get('models')?.split(',');
	} else if ($settings?.models) {
		selectedModels = $settings?.models;
	} else if ($config?.default_models) {
		console.log($config?.default_models.split(',') ?? '');
		selectedModels = $config?.default_models.split(',');
	} else {
		selectedModels = [''];
	}
	// Auto-submit a prompt passed via ?q=.
	// NOTE(review): this runs before the model-id sanitization below — confirm
	// that submitting with a not-yet-validated model id is intended.
	if ($page.url.searchParams.get('q')) {
		prompt = $page.url.searchParams.get('q') ?? '';
		if (prompt) {
			await tick();
			submitPrompt(prompt);
		}
	}
	// Replace any model id that no longer exists with '' (forces re-selection).
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);
	// Prefer server-stored UI settings; fall back to localStorage.
	const userSettings = await getUserSettings(localStorage.token);
	if (userSettings) {
		settings.set(userSettings.ui);
	} else {
		settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
	}
	const chatInput = document.getElementById('chat-textarea');
	setTimeout(() => chatInput?.focus(), 0);
};
  196. const loadChat = async () => {
  197. chatId.set(chatIdProp);
  198. chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
  199. await goto('/');
  200. return null;
  201. });
  202. if (chat) {
  203. tags = await getTags();
  204. const chatContent = chat.chat;
  205. if (chatContent) {
  206. console.log(chatContent);
  207. selectedModels =
  208. (chatContent?.models ?? undefined) !== undefined
  209. ? chatContent.models
  210. : [chatContent.models ?? ''];
  211. history =
  212. (chatContent?.history ?? undefined) !== undefined
  213. ? chatContent.history
  214. : convertMessagesToHistory(chatContent.messages);
  215. title = chatContent.title;
  216. const userSettings = await getUserSettings(localStorage.token);
  217. if (userSettings) {
  218. await settings.set(userSettings.ui);
  219. } else {
  220. await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  221. }
  222. await settings.set({
  223. ...$settings,
  224. system: chatContent.system ?? $settings.system,
  225. params: chatContent.options ?? $settings.params
  226. });
  227. autoScroll = true;
  228. await tick();
  229. if (messages.length > 0) {
  230. history.messages[messages.at(-1).id].done = true;
  231. }
  232. await tick();
  233. return true;
  234. } else {
  235. return null;
  236. }
  237. }
  238. };
  239. const scrollToBottom = async () => {
  240. await tick();
  241. if (messagesContainerElement) {
  242. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  243. }
  244. };
  245. const createMessagesList = (responseMessageId) => {
  246. const message = history.messages[responseMessageId];
  247. if (message.parentId) {
  248. return [...createMessagesList(message.parentId), message];
  249. } else {
  250. return [message];
  251. }
  252. };
  253. const chatCompletedHandler = async (modelId, messages) => {
  254. await mermaid.run({
  255. querySelector: '.mermaid'
  256. });
  257. const res = await chatCompleted(localStorage.token, {
  258. model: modelId,
  259. messages: messages.map((m) => ({
  260. id: m.id,
  261. role: m.role,
  262. content: m.content,
  263. info: m.info ? m.info : undefined,
  264. timestamp: m.timestamp
  265. })),
  266. chat_id: $chatId
  267. }).catch((error) => {
  268. toast.error(error);
  269. messages.at(-1).error = { content: error };
  270. return null;
  271. });
  272. if (res !== null) {
  273. // Update chat history with the new messages
  274. for (const message of res.messages) {
  275. history.messages[message.id] = {
  276. ...history.messages[message.id],
  277. ...(history.messages[message.id].content !== message.content
  278. ? { originalContent: history.messages[message.id].content }
  279. : {}),
  280. ...message
  281. };
  282. }
  283. }
  284. };
  285. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  286. return setInterval(() => {
  287. $socket?.emit('usage', {
  288. action: 'chat',
  289. model: modelId,
  290. chat_id: chatId
  291. });
  292. }, 1000);
  293. };
  294. //////////////////////////
  295. // Chat functions
  296. //////////////////////////
// Validate the current state, append the user's message to the history tree,
// and kick off generation via sendPrompt. Returns the model responses (empty
// when validation failed). `_raw` is currently unused — TODO confirm.
const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
	let _responses = [];
	console.log('submitPrompt', $chatId);
	// Drop any selected model ids that no longer exist.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);
	if (selectedModels.includes('')) {
		toast.error($i18n.t('Model not selected'));
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done
		console.log('wait');
	} else if (messages.length != 0 && messages.at(-1).error) {
		// Error in response
		toast.error(
			$i18n.t(
				`Oops! There was an error in the previous response. Please try again or contact admin.`
			)
		);
	} else if (
		files.length > 0 &&
		files.filter((file) => file.type !== 'image' && file.status !== 'processed').length > 0
	) {
		// Upload not done
		toast.error(
			$i18n.t(
				`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
			)
		);
	} else {
		// Reset chat input textarea
		const chatTextAreaElement = document.getElementById('chat-textarea');
		if (chatTextAreaElement) {
			chatTextAreaElement.value = '';
			chatTextAreaElement.style.height = '';
		}
		// Snapshot the attachments (deep copy) before clearing the input state.
		const _files = JSON.parse(JSON.stringify(files));
		files = [];
		prompt = '';
		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			content: userPrompt,
			files: _files.length > 0 ? _files : undefined,
			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
			// Deduplicated list of the models this message was sent to.
			models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
		};
		// Add message to history and Set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;
		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}
		// Wait until history/message have been updated
		await tick();
		_responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
	}
	return _responses;
};
// Fan a prompt out to every selected model: create one placeholder assistant
// message per model, persist the chat if it is brand new, then run all model
// completions in parallel. Returns the array of response texts.
const sendPrompt = async (prompt, parentId, { modelId = null, newChat = false } = {}) => {
	let _responses = [];
	// If modelId is provided, use it, else use selected model
	// (note: this local shadows the component-level `selectedModelIds`).
	let selectedModelIds = modelId
		? [modelId]
		: atSelectedModel !== undefined
			? [atSelectedModel.id]
			: selectedModels;
	// Create response messages for each selected model
	const responseMessageIds = {};
	for (const modelId of selectedModelIds) {
		const model = $models.filter((m) => m.id === modelId).at(0);
		if (model) {
			let responseMessageId = uuidv4();
			let responseMessage = {
				parentId: parentId,
				id: responseMessageId,
				childrenIds: [],
				role: 'assistant',
				content: '',
				model: model.id,
				modelName: model.name ?? model.id,
				userContext: null,
				timestamp: Math.floor(Date.now() / 1000) // Unix epoch
			};
			// Add message to history and Set currentId to messageId
			history.messages[responseMessageId] = responseMessage;
			history.currentId = responseMessageId;
			// Append messageId to childrenIds of parent message
			if (parentId !== null) {
				history.messages[parentId].childrenIds = [
					...history.messages[parentId].childrenIds,
					responseMessageId
				];
			}
			responseMessageIds[modelId] = responseMessageId;
		}
	}
	await tick();
	// Create new chat if only one message in messages
	// (user message + the placeholder response just added).
	if (newChat && messages.length == 2) {
		if ($settings.saveChatHistory ?? true) {
			chat = await createNewChat(localStorage.token, {
				id: $chatId,
				title: $i18n.t('New Chat'),
				models: selectedModels,
				system: $settings.system ?? undefined,
				options: {
					...($settings.params ?? {})
				},
				messages: messages,
				history: history,
				tags: [],
				timestamp: Date.now()
			});
			await chats.set(await getChatList(localStorage.token));
			await chatId.set(chat.id);
		} else {
			// Temporary-chat mode: keep everything client-side.
			await chatId.set('local');
		}
		await tick();
	}
	// Snapshot the chat id: the user may navigate away mid-stream.
	const _chatId = JSON.parse(JSON.stringify($chatId));
	await Promise.all(
		selectedModelIds.map(async (modelId) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);
			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);
				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					// Warn only — the request is still sent.
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}
				let responseMessageId = responseMessageIds[modelId];
				let responseMessage = history.messages[responseMessageId];
				// When the memory feature is on, fetch relevant stored memories and
				// attach them so the system prompt can include them as user context.
				let userContext = null;
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});
						if (res) {
							if (res.documents[0].length > 0) {
								userContext = res.documents.reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[index][0].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];
									acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
									return acc;
								}, []);
							}
							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;
				// Usage heartbeat for the duration of this completion.
				const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
				if (webSearchEnabled) {
					await getWebSearchResults(model.id, parentId, responseMessageId);
				}
				let _response = null;
				if (model?.owned_by === 'openai') {
					_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					_response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}
				_responses.push(_response);
				if (chatEventEmitter) clearInterval(chatEventEmitter);
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);
	await chats.set(await getChatList(localStorage.token));
	return _responses;
};
// Stream a completion from an Ollama-backed model into `responseMessage`,
// handling system-prompt templating, image attachments, knowledge files,
// NDJSON stream parsing, stop/abort, persistence, and title generation.
// Returns the final response text (or null on failure).
const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
	let _response = null;
	const responseMessage = history.messages[responseMessageId];
	// Wait until history/message have been updated
	await tick();
	// Scroll down
	scrollToBottom();
	// Build the request messages: optional templated system prompt (with user
	// context from memories appended), then the conversation so far.
	const messagesBody = [
		$settings.system || (responseMessage?.userContext ?? null)
			? {
					role: 'system',
					content: `${promptTemplate(
						$settings?.system ?? '',
						$user.name,
						$settings?.userLocation
							? await getAndUpdateUserLocation(localStorage.token)
							: undefined
					)}${
						responseMessage?.userContext ?? null
							? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
							: ''
					}`
				}
			: undefined,
		...messages
	]
		.filter((message) => message?.content?.trim())
		.map((message, idx, arr) => {
			// Prepare the base message object
			const baseMessage = {
				role: message.role,
				content: message.content
			};
			// Extract and format image URLs if any exist
			// (strip the data-URL prefix up to and including the comma).
			const imageUrls = message.files
				?.filter((file) => file.type === 'image')
				.map((file) => file.url.slice(file.url.indexOf(',') + 1));
			// Add images array only if it contains elements
			if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
				baseMessage.images = imageUrls;
			}
			return baseMessage;
		});
	let lastImageIndex = -1;
	// Find the index of the last object with images
	messagesBody.forEach((item, index) => {
		if (item.images) {
			lastImageIndex = index;
		}
	});
	// Remove images from all but the last one
	messagesBody.forEach((item, index) => {
		if (index !== lastImageIndex) {
			delete item.images;
		}
	});
	// Collect reference files: model knowledge plus doc/file/collection/
	// web-search attachments from the last user message and the response,
	// deduplicated by JSON equality. (Shadows the component-level `files`.)
	let files = [];
	if (model?.info?.meta?.knowledge ?? false) {
		files = model.info.meta.knowledge;
	}
	const lastUserMessage = messages.filter((message) => message.role === 'user').at(-1);
	files = [
		...files,
		...(lastUserMessage?.files?.filter((item) =>
			['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
		) ?? []),
		...(responseMessage?.files?.filter((item) =>
			['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
		) ?? [])
	].filter(
		// Remove duplicates
		(item, index, array) =>
			array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
	);
	eventTarget.dispatchEvent(
		new CustomEvent('chat:start', {
			detail: {
				id: responseMessageId
			}
		})
	);
	await tick();
	const [res, controller] = await generateChatCompletion(localStorage.token, {
		model: model.id,
		messages: messagesBody,
		options: {
			...($settings.params ?? {}),
			// Stop strings are stored URI-encoded; decode and unescape quotes.
			stop:
				$settings?.params?.stop ?? undefined
					? $settings.params.stop.map((str) =>
							decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
						)
					: undefined,
			num_predict: $settings?.params?.max_tokens ?? undefined,
			repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
		},
		format: $settings.requestFormat ?? undefined,
		keep_alive: $settings.keepAlive ?? undefined,
		tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
		files: files.length > 0 ? files : undefined,
		citations: files.length > 0 ? true : undefined,
		chat_id: $chatId
	});
	if (res && res.ok) {
		console.log('controller', controller);
		// Read the response as newline-delimited JSON chunks.
		const reader = res.body
			.pipeThrough(new TextDecoderStream())
			.pipeThrough(splitStream('\n'))
			.getReader();
		while (true) {
			const { value, done } = await reader.read();
			// Stop when the stream ends, the user hit stop, or the user
			// navigated to a different chat mid-stream.
			if (done || stopResponseFlag || _chatId !== $chatId) {
				responseMessage.done = true;
				messages = messages;
				if (stopResponseFlag) {
					controller.abort('User: Stop Response');
				} else {
					const messages = createMessagesList(responseMessageId);
					await chatCompletedHandler(model.id, messages);
				}
				_response = responseMessage.content;
				break;
			}
			try {
				let lines = value.split('\n');
				for (const line of lines) {
					if (line !== '') {
						console.log(line);
						let data = JSON.parse(line);
						if ('citations' in data) {
							responseMessage.citations = data.citations;
							continue;
						}
						if ('detail' in data) {
							// Backend error payload — handled by the catch below.
							throw data;
						}
						if (data.done == false) {
							// Skip a leading bare newline; otherwise append the token.
							if (responseMessage.content == '' && data.message.content == '\n') {
								continue;
							} else {
								responseMessage.content += data.message.content;
								const sentences = extractSentencesForAudio(responseMessage.content);
								sentences.pop();
								// dispatch only last sentence and make sure it hasn't been dispatched before
								if (
									sentences.length > 0 &&
									sentences[sentences.length - 1] !== responseMessage.lastSentence
								) {
									responseMessage.lastSentence = sentences[sentences.length - 1];
									eventTarget.dispatchEvent(
										new CustomEvent('chat', {
											detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
										})
									);
								}
								messages = messages;
							}
						} else {
							// Final chunk: mark done and record generation stats.
							responseMessage.done = true;
							if (responseMessage.content == '') {
								responseMessage.error = {
									code: 400,
									content: `Oops! No text generated from Ollama, Please try again.`
								};
							}
							responseMessage.context = data.context ?? null;
							responseMessage.info = {
								total_duration: data.total_duration,
								load_duration: data.load_duration,
								sample_count: data.sample_count,
								sample_duration: data.sample_duration,
								prompt_eval_count: data.prompt_eval_count,
								prompt_eval_duration: data.prompt_eval_duration,
								eval_count: data.eval_count,
								eval_duration: data.eval_duration
							};
							messages = messages;
							// Desktop notification when the tab is unfocused.
							if ($settings.notificationEnabled && !document.hasFocus()) {
								const notification = new Notification(`${model.id}`, {
									body: responseMessage.content,
									icon: `${WEBUI_BASE_URL}/static/favicon.png`
								});
							}
							if ($settings?.responseAutoCopy ?? false) {
								copyToClipboard(responseMessage.content);
							}
							if ($settings.responseAutoPlayback && !$showCallOverlay) {
								await tick();
								document.getElementById(`speak-button-${responseMessage.id}`)?.click();
							}
						}
					}
				}
			} catch (error) {
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
				}
				break;
			}
			if (autoScroll) {
				scrollToBottom();
			}
		}
		// Persist the updated conversation unless the user switched chats.
		if ($chatId == _chatId) {
			if ($settings.saveChatHistory ?? true) {
				chat = await updateChatById(localStorage.token, _chatId, {
					messages: messages,
					history: history,
					models: selectedModels
				});
				await chats.set(await getChatList(localStorage.token));
			}
		}
	} else {
		// Request failed: surface the backend error (or a connection error).
		if (res !== null) {
			const error = await res.json();
			console.log(error);
			if ('detail' in error) {
				toast.error(error.detail);
				responseMessage.error = { content: error.detail };
			} else {
				toast.error(error.error);
				responseMessage.error = { content: error.error };
			}
		} else {
			toast.error(
				$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
			);
			responseMessage.error = {
				content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
					provider: 'Ollama'
				})
			};
		}
		responseMessage.done = true;
		messages = messages;
	}
	stopResponseFlag = false;
	await tick();
	// Flush the final (possibly partial) sentence for audio playback.
	let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
	if (lastSentence) {
		eventTarget.dispatchEvent(
			new CustomEvent('chat', {
				detail: { id: responseMessageId, content: lastSentence }
			})
		);
	}
	eventTarget.dispatchEvent(
		new CustomEvent('chat:finish', {
			detail: {
				id: responseMessageId,
				content: responseMessage.content
			}
		})
	);
	if (autoScroll) {
		scrollToBottom();
	}
	// First exchange completed: move to the /c/<id> URL and generate a title.
	if (messages.length == 2 && messages.at(1).content !== '') {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);
		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}
	return _response;
};
	// Stream a chat completion for `userPrompt` from an OpenAI-compatible backend,
	// writing chunks into history.messages[responseMessageId] as they arrive.
	// Returns the final response content string, or null on error/early exit.
	const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
		let _response = null;
		const responseMessage = history.messages[responseMessageId];

		// Start from the model's attached knowledge collections, if any.
		let files = [];
		if (model?.info?.meta?.knowledge ?? false) {
			files = model.info.meta.knowledge;
		}
		const lastUserMessage = messages.filter((message) => message.role === 'user').at(-1);
		// Merge in doc/file/collection/web-search attachments from the latest user
		// message and the response message itself, then drop structural duplicates.
		files = [
			...files,
			...(lastUserMessage?.files?.filter((item) =>
				['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
			) ?? []),
			...(responseMessage?.files?.filter((item) =>
				['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
			) ?? [])
		].filter(
			// Remove duplicates
			(item, index, array) =>
				array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
		);

		scrollToBottom();

		eventTarget.dispatchEvent(
			new CustomEvent('chat:start', {
				detail: {
					id: responseMessageId
				}
			})
		);
		await tick();

		try {
			const [res, controller] = await generateOpenAIChatCompletion(
				localStorage.token,
				{
					model: model.id,
					stream: true,
					// Only request usage stats when the model advertises the capability.
					stream_options:
						model.info?.meta?.capabilities?.usage ?? false
							? {
									include_usage: true
								}
							: undefined,
					messages: [
						// Optional leading system message: settings-level system prompt
						// and/or per-message user context, rendered through promptTemplate.
						$settings.system || (responseMessage?.userContext ?? null)
							? {
									role: 'system',
									content: `${promptTemplate(
										$settings?.system ?? '',
										$user.name,
										$settings?.userLocation
											? await getAndUpdateUserLocation(localStorage.token)
											: undefined
									)}${
										responseMessage?.userContext ?? null
											? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
											: ''
									}`
								}
							: undefined,
						...messages
					]
						.filter((message) => message?.content?.trim())
						.map((message, idx, arr) => ({
							role: message.role,
							// User messages with image attachments are sent in the multi-part
							// content format; everything else as plain text. The last message
							// prefers raContent (retrieval-augmented content) when present.
							...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
							message.role === 'user'
								? {
										content: [
											{
												type: 'text',
												text:
													arr.length - 1 !== idx
														? message.content
														: message?.raContent ?? message.content
											},
											...message.files
												.filter((file) => file.type === 'image')
												.map((file) => ({
													type: 'image_url',
													image_url: {
														url: file.url
													}
												}))
										]
									}
								: {
										content:
											arr.length - 1 !== idx
												? message.content
												: message?.raContent ?? message.content
									})
						})),
					seed: $settings?.params?.seed ?? undefined,
					// Stop strings are stored URI-encoded with escaped quotes; decode each.
					stop:
						$settings?.params?.stop ?? undefined
							? $settings.params.stop.map((str) =>
									decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
								)
							: undefined,
					temperature: $settings?.params?.temperature ?? undefined,
					top_p: $settings?.params?.top_p ?? undefined,
					frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
					max_tokens: $settings?.params?.max_tokens ?? undefined,
					tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
					files: files.length > 0 ? files : undefined,
					citations: files.length > 0 ? true : undefined,
					chat_id: $chatId
				},
				`${WEBUI_BASE_URL}/api`
			);

			// Wait until history/message have been updated
			await tick();

			scrollToBottom();

			if (res && res.ok && res.body) {
				const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
				let lastUsage = null;

				for await (const update of textStream) {
					const { value, done, citations, error, usage } = update;
					if (error) {
						await handleOpenAIError(error, null, model, responseMessage);
						break;
					}
					// Stop when the backend finishes, the user aborts, or the user
					// has navigated away to a different chat.
					if (done || stopResponseFlag || _chatId !== $chatId) {
						responseMessage.done = true;
						// Self-assignment triggers Svelte reactivity after mutation.
						messages = messages;

						if (stopResponseFlag) {
							controller.abort('User: Stop Response');
						} else {
							const messages = createMessagesList(responseMessageId);
							await chatCompletedHandler(model.id, messages);
						}

						_response = responseMessage.content;
						break;
					}

					if (usage) {
						lastUsage = usage;
					}

					if (citations) {
						responseMessage.citations = citations;
						continue;
					}

					// Skip a leading bare newline; otherwise append the chunk.
					if (responseMessage.content == '' && value == '\n') {
						continue;
					} else {
						responseMessage.content += value;

						const sentences = extractSentencesForAudio(responseMessage.content);
						// Drop the trailing (possibly incomplete) sentence.
						sentences.pop();

						// dispatch only last sentence and make sure it hasn't been dispatched before
						if (
							sentences.length > 0 &&
							sentences[sentences.length - 1] !== responseMessage.lastSentence
						) {
							responseMessage.lastSentence = sentences[sentences.length - 1];
							eventTarget.dispatchEvent(
								new CustomEvent('chat', {
									detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
								})
							);
						}

						messages = messages;
					}

					if (autoScroll) {
						scrollToBottom();
					}
				}

				// Desktop notification when the tab is unfocused (if enabled).
				if ($settings.notificationEnabled && !document.hasFocus()) {
					const notification = new Notification(`${model.id}`, {
						body: responseMessage.content,
						icon: `${WEBUI_BASE_URL}/static/favicon.png`
					});
				}

				if ($settings.responseAutoCopy) {
					copyToClipboard(responseMessage.content);
				}

				if ($settings.responseAutoPlayback && !$showCallOverlay) {
					await tick();
					document.getElementById(`speak-button-${responseMessage.id}`)?.click();
				}

				if (lastUsage) {
					responseMessage.info = { ...lastUsage, openai: true };
				}

				// Persist the chat only if the user is still viewing it.
				if ($chatId == _chatId) {
					if ($settings.saveChatHistory ?? true) {
						chat = await updateChatById(localStorage.token, _chatId, {
							models: selectedModels,
							messages: messages,
							history: history
						});
						await chats.set(await getChatList(localStorage.token));
					}
				}
			} else {
				await handleOpenAIError(null, res, model, responseMessage);
			}
		} catch (error) {
			await handleOpenAIError(error, null, model, responseMessage);
		}
		messages = messages;

		stopResponseFlag = false;
		await tick();

		// Flush the final (possibly partial) sentence to audio listeners.
		let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
		if (lastSentence) {
			eventTarget.dispatchEvent(
				new CustomEvent('chat', {
					detail: { id: responseMessageId, content: lastSentence }
				})
			);
		}
		eventTarget.dispatchEvent(
			new CustomEvent('chat:finish', {
				detail: {
					id: responseMessageId,
					content: responseMessage.content
				}
			})
		);

		if (autoScroll) {
			scrollToBottom();
		}

		// First exchange in a new chat: rewrite the URL and auto-generate a title.
		if (messages.length == 2) {
			window.history.replaceState(history.state, '', `/c/${_chatId}`);

			const _title = await generateChatTitle(userPrompt);
			await setChatTitle(_chatId, _title);
		}

		return _response;
	};
  976. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  977. let errorMessage = '';
  978. let innerError;
  979. if (error) {
  980. innerError = error;
  981. } else if (res !== null) {
  982. innerError = await res.json();
  983. }
  984. console.error(innerError);
  985. if ('detail' in innerError) {
  986. toast.error(innerError.detail);
  987. errorMessage = innerError.detail;
  988. } else if ('error' in innerError) {
  989. if ('message' in innerError.error) {
  990. toast.error(innerError.error.message);
  991. errorMessage = innerError.error.message;
  992. } else {
  993. toast.error(innerError.error);
  994. errorMessage = innerError.error;
  995. }
  996. } else if ('message' in innerError) {
  997. toast.error(innerError.message);
  998. errorMessage = innerError.message;
  999. }
  1000. responseMessage.error = {
  1001. content:
  1002. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  1003. provider: model.name ?? model.id
  1004. }) +
  1005. '\n' +
  1006. errorMessage
  1007. };
  1008. responseMessage.done = true;
  1009. messages = messages;
  1010. };
  1011. const stopResponse = () => {
  1012. stopResponseFlag = true;
  1013. console.log('stopResponse');
  1014. };
  1015. const regenerateResponse = async (message) => {
  1016. console.log('regenerateResponse');
  1017. if (messages.length != 0) {
  1018. let userMessage = history.messages[message.parentId];
  1019. let userPrompt = userMessage.content;
  1020. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  1021. // If user message has only one model selected, sendPrompt automatically selects it for regeneration
  1022. await sendPrompt(userPrompt, userMessage.id);
  1023. } else {
  1024. // If there are multiple models selected, use the model of the response message for regeneration
  1025. // e.g. many model chat
  1026. await sendPrompt(userPrompt, userMessage.id, { modelId: message.model });
  1027. }
  1028. }
  1029. };
  1030. const continueGeneration = async () => {
  1031. console.log('continueGeneration');
  1032. const _chatId = JSON.parse(JSON.stringify($chatId));
  1033. if (messages.length != 0 && messages.at(-1).done == true) {
  1034. const responseMessage = history.messages[history.currentId];
  1035. responseMessage.done = false;
  1036. await tick();
  1037. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  1038. if (model) {
  1039. if (model?.owned_by === 'openai') {
  1040. await sendPromptOpenAI(
  1041. model,
  1042. history.messages[responseMessage.parentId].content,
  1043. responseMessage.id,
  1044. _chatId
  1045. );
  1046. } else
  1047. await sendPromptOllama(
  1048. model,
  1049. history.messages[responseMessage.parentId].content,
  1050. responseMessage.id,
  1051. _chatId
  1052. );
  1053. }
  1054. } else {
  1055. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  1056. }
  1057. };
  1058. const generateChatTitle = async (userPrompt) => {
  1059. if ($settings?.title?.auto ?? true) {
  1060. const title = await generateTitle(
  1061. localStorage.token,
  1062. selectedModels[0],
  1063. userPrompt,
  1064. $chatId
  1065. ).catch((error) => {
  1066. console.error(error);
  1067. return 'New Chat';
  1068. });
  1069. return title;
  1070. } else {
  1071. return `${userPrompt}`;
  1072. }
  1073. };
  1074. const setChatTitle = async (_chatId, _title) => {
  1075. if (_chatId === $chatId) {
  1076. title = _title;
  1077. }
  1078. if ($settings.saveChatHistory ?? true) {
  1079. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1080. await chats.set(await getChatList(localStorage.token));
  1081. }
  1082. };
  1083. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  1084. const responseMessage = history.messages[responseId];
  1085. const userMessage = history.messages[parentId];
  1086. responseMessage.statusHistory = [
  1087. {
  1088. done: false,
  1089. action: 'web_search',
  1090. description: $i18n.t('Generating search query')
  1091. }
  1092. ];
  1093. messages = messages;
  1094. const prompt = userMessage.content;
  1095. let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
  1096. (error) => {
  1097. console.log(error);
  1098. return prompt;
  1099. }
  1100. );
  1101. if (!searchQuery) {
  1102. toast.warning($i18n.t('No search query generated'));
  1103. responseMessage.statusHistory.push({
  1104. done: true,
  1105. error: true,
  1106. action: 'web_search',
  1107. description: 'No search query generated'
  1108. });
  1109. messages = messages;
  1110. }
  1111. responseMessage.statusHistory.push({
  1112. done: false,
  1113. action: 'web_search',
  1114. description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
  1115. });
  1116. messages = messages;
  1117. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  1118. console.log(error);
  1119. toast.error(error);
  1120. return null;
  1121. });
  1122. if (results) {
  1123. responseMessage.statusHistory.push({
  1124. done: true,
  1125. action: 'web_search',
  1126. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  1127. query: searchQuery,
  1128. urls: results.filenames
  1129. });
  1130. if (responseMessage?.files ?? undefined === undefined) {
  1131. responseMessage.files = [];
  1132. }
  1133. responseMessage.files.push({
  1134. collection_name: results.collection_name,
  1135. name: searchQuery,
  1136. type: 'web_search_results',
  1137. urls: results.filenames
  1138. });
  1139. messages = messages;
  1140. } else {
  1141. responseMessage.statusHistory.push({
  1142. done: true,
  1143. error: true,
  1144. action: 'web_search',
  1145. description: 'No search results found'
  1146. });
  1147. messages = messages;
  1148. }
  1149. };
  1150. const getTags = async () => {
  1151. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1152. return [];
  1153. });
  1154. };
  1155. </script>
<svelte:head>
	<!-- Browser tab title: chat title truncated to 30 chars, suffixed with the app name. -->
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

<!-- Hidden shared audio element — presumably the sink for TTS playback; confirm against the audio handlers elsewhere in this file. -->
<audio id="audioElement" src="" style="display: none;" />

{#if $showCallOverlay}
	<CallOverlay
		{submitPrompt}
		{stopResponse}
		bind:files
		modelId={selectedModelIds?.at(0) ?? null}
		chatId={$chatId}
		{eventTarget}
	/>
{/if}

<!-- Render the chat UI for a new chat immediately, or for an existing chat once it has loaded. -->
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="h-screen max-h-[100dvh] {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		{#if $settings?.backgroundImageUrl ?? null}
			<!-- User-configured background image plus a gradient overlay for readability. -->
			<div
				class="absolute {$showSidebar
					? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
					: ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
				style="background-image: url({$settings.backgroundImageUrl}) "
			/>

			<div
				class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
			/>
		{/if}

		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>

		<!-- Announcement banners: shown only on an empty new chat, hiding ones the user dismissed. -->
		{#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
			<div
				class="absolute top-[4.25rem] w-full {$showSidebar
					? 'md:max-w-[calc(100%-260px)]'
					: ''} z-20"
			>
				<div class=" flex flex-col gap-1 w-full">
					{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
						<Banner
							{banner}
							on:dismiss={(e) => {
								const bannerId = e.detail;

								// Persist the dismissal, pruning ids of banners that no longer exist.
								localStorage.setItem(
									'dismissedBannerIds',
									JSON.stringify(
										[
											bannerId,
											...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
										].filter((id) => $banners.find((b) => b.id === id))
									)
								);
							}}
						/>
					{/each}
				</div>
			</div>
		{/if}

		<div class="flex flex-col flex-auto z-10">
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10"
				id="messages-container"
				bind:this={messagesContainerElement}
				on:scroll={(e) => {
					// Keep auto-scrolling only while the user is within 5px of the bottom.
					autoScroll =
						messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
						messagesContainerElement.clientHeight + 5;
				}}
			>
				<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
					<Messages
						chatId={$chatId}
						{selectedModels}
						{processing}
						bind:history
						bind:messages
						bind:autoScroll
						bind:prompt
						bottomPadding={files.length > 0}
						{sendPrompt}
						{continueGeneration}
						{regenerateResponse}
					/>
				</div>
			</div>
			<MessageInput
				bind:files
				bind:prompt
				bind:autoScroll
				bind:selectedToolIds
				bind:webSearchEnabled
				bind:atSelectedModel
				availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
					// Union of the tool ids declared by each currently selected model.
					const model = $models.find((m) => m.id === e);
					if (model?.info?.meta?.toolIds ?? false) {
						return [...new Set([...a, ...model.info.meta.toolIds])];
					}
					return a;
				}, [])}
				transparentBackground={$settings?.backgroundImageUrl ?? false}
				{selectedModels}
				{messages}
				{submitPrompt}
				{stopResponse}
			/>
		</div>
	</div>
{/if}