Chat.svelte 37 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { getContext, onMount, tick } from 'svelte';
  6. import { goto } from '$app/navigation';
  7. import { page } from '$app/stores';
  8. import type { Writable } from 'svelte/store';
  9. import type { i18n as i18nType } from 'i18next';
  10. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  11. import {
  12. chatId,
  13. chats,
  14. config,
  15. type Model,
  16. models,
  17. settings,
  18. showSidebar,
  19. tags as _tags,
  20. WEBUI_NAME,
  21. banners,
  22. user,
  23. socket,
  24. showCallOverlay,
  25. tools
  26. } from '$lib/stores';
  27. import {
  28. convertMessagesToHistory,
  29. copyToClipboard,
  30. extractSentencesForAudio,
  31. getUserPosition,
  32. promptTemplate,
  33. splitStream
  34. } from '$lib/utils';
  35. import { generateChatCompletion } from '$lib/apis/ollama';
  36. import {
  37. addTagById,
  38. createNewChat,
  39. deleteTagById,
  40. getAllChatTags,
  41. getChatById,
  42. getChatList,
  43. getTagsById,
  44. updateChatById
  45. } from '$lib/apis/chats';
  46. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  47. import { runWebSearch } from '$lib/apis/rag';
  48. import { createOpenAITextStream } from '$lib/apis/streaming';
  49. import { queryMemory } from '$lib/apis/memories';
  50. import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
  51. import { chatCompleted, generateTitle, generateSearchQuery } from '$lib/apis';
  52. import Banner from '../common/Banner.svelte';
  53. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  54. import Messages from '$lib/components/chat/Messages.svelte';
  55. import Navbar from '$lib/components/layout/Navbar.svelte';
  56. import CallOverlay from './MessageInput/CallOverlay.svelte';
  57. import { error } from '@sveltejs/kit';
// i18n instance provided by the app layout via Svelte context.
const i18n: Writable<i18nType> = getContext('i18n');

// Chat id passed in from the route (`/c/[id]`); empty string means a new, unsaved chat.
export let chatIdProp = '';
let loaded = false;

// Local event bus used to notify CallOverlay of streaming progress
// ('chat:start', 'chat', 'chat:finish' CustomEvents).
const eventTarget = new EventTarget();

let stopResponseFlag = false; // set by the UI to abort an in-flight generation
let autoScroll = true; // follow the stream to the bottom unless the user scrolls up
let processing = '';
let messagesContainerElement: HTMLDivElement;
let showModelSelector = true;

let selectedModels = [''];
let atSelectedModel: Model | undefined; // model picked via the "@" mention, overrides selectedModels
let selectedModelIds = [];
// Effective model ids: the "@"-mentioned model wins over the dropdown selection.
$: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;

let selectedToolIds = [];
let webSearchEnabled = false;

let chat = null;
let tags = [];
let title = '';
let prompt = '';
let files = [];

let messages = [];
// Conversation tree: messages keyed by id (each with parentId/childrenIds),
// currentId points at the leaf of the active branch.
// NOTE(review): this local `history` shadows `window.history` inside the
// <script> scope — `history.state` below refers to this object, not the
// browser history state. Verify that is intended.
let history = {
	messages: {},
	currentId: null
};

// Derive the flat `messages` list by walking parent links from the current
// leaf up to the root, then reversing (unshift) into chronological order.
// NOTE(review): assumes every parentId in the chain resolves to an existing
// entry; a broken link would make `currentMessage` undefined and throw here.
$: if (history.currentId !== null) {
	let _messages = [];
	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}

// Load the chat whenever the route's chat id changes; bail out to the home
// page when it cannot be loaded. The async IIFE is needed because `$:` blocks
// cannot be async themselves.
$: if (chatIdProp) {
	(async () => {
		console.log(chatIdProp);
		if (chatIdProp && (await loadChat())) {
			await tick();
			loaded = true;
			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}
onMount(async () => {
	if (!$chatId) {
		// No active chat: initialize a fresh one now, and re-initialize whenever
		// the chat id store is cleared again (e.g. user clicks "New Chat").
		// NOTE(review): this subscription is never unsubscribed, so it outlives
		// the component; returning a cleanup is not possible here because the
		// onMount callback is async (Svelte ignores Promise-wrapped cleanups).
		chatId.subscribe(async (value) => {
			if (!value) {
				await initNewChat();
			}
		});
	} else {
		// An existing chat id with history saving disabled cannot be resumed;
		// send the user back to the home page.
		if (!($settings.saveChatHistory ?? true)) {
			await goto('/');
		}
	}
});
  122. //////////////////////////
  123. // Web functions
  124. //////////////////////////
/**
 * Reset all chat state for a brand-new conversation.
 *
 * Clears the URL back to `/`, empties the message tree, chooses the initial
 * model selection (URL `models` param > user settings > server default > none),
 * optionally auto-submits a prompt from the `q` URL param, then refreshes user
 * settings from the backend (falling back to localStorage) and focuses the input.
 */
const initNewChat = async () => {
	// NOTE(review): `history` here is the component's message-tree object, not
	// window.history, so `history.state` is undefined — confirm intended.
	window.history.replaceState(history.state, '', `/`);
	await chatId.set('');
	autoScroll = true;
	title = '';
	messages = [];
	history = {
		messages: {},
		currentId: null
	};
	// Model selection priority: URL query > per-user settings > server config default.
	if ($page.url.searchParams.get('models')) {
		selectedModels = $page.url.searchParams.get('models')?.split(',');
	} else if ($settings?.models) {
		selectedModels = $settings?.models;
	} else if ($config?.default_models) {
		console.log($config?.default_models.split(',') ?? '');
		selectedModels = $config?.default_models.split(',');
	} else {
		selectedModels = [''];
	}
	// Auto-submit a prompt passed via `?q=` (used by browser search shortcuts).
	if ($page.url.searchParams.get('q')) {
		prompt = $page.url.searchParams.get('q') ?? '';
		if (prompt) {
			await tick();
			submitPrompt(prompt);
		}
	}
	// Blank out any selected model id that is no longer available on the server.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);
	// Prefer server-side user settings; fall back to the locally cached copy.
	const userSettings = await getUserSettings(localStorage.token);
	if (userSettings) {
		settings.set(userSettings.ui);
	} else {
		settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
	}
	const chatInput = document.getElementById('chat-textarea');
	setTimeout(() => chatInput?.focus(), 0);
};
/**
 * Load an existing chat (id taken from `chatIdProp`) into component state.
 *
 * Fetches the chat from the backend, restores models/history/title, merges the
 * chat's per-conversation system prompt and options into settings, and marks
 * the last message as done.
 *
 * @returns true on success; null when the chat record has no content;
 *          undefined (falsy) when the chat could not be fetched — the caller
 *          treats any falsy value as failure and navigates away.
 */
const loadChat = async () => {
	chatId.set(chatIdProp);
	// On fetch failure, bounce to home and resolve null instead of rejecting.
	chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
		await goto('/');
		return null;
	});
	if (chat) {
		tags = await getTags();
		const chatContent = chat.chat;
		if (chatContent) {
			console.log(chatContent);
			// Older chat records stored a single model instead of a list; wrap it.
			selectedModels =
				(chatContent?.models ?? undefined) !== undefined
					? chatContent.models
					: [chatContent.models ?? ''];
			// Older records stored a flat `messages` array; convert it to the tree form.
			history =
				(chatContent?.history ?? undefined) !== undefined
					? chatContent.history
					: convertMessagesToHistory(chatContent.messages);
			title = chatContent.title;
			// Refresh base settings (server copy preferred), then layer this chat's
			// own system prompt / generation options on top.
			const userSettings = await getUserSettings(localStorage.token);
			if (userSettings) {
				await settings.set(userSettings.ui);
			} else {
				await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
			}
			await settings.set({
				...$settings,
				system: chatContent.system ?? $settings.system,
				params: chatContent.options ?? $settings.params
			});
			autoScroll = true;
			await tick();
			// A reloaded chat can never have a generation still in flight.
			if (messages.length > 0) {
				history.messages[messages.at(-1).id].done = true;
			}
			await tick();
			return true;
		} else {
			return null;
		}
	}
};
  207. const scrollToBottom = async () => {
  208. await tick();
  209. if (messagesContainerElement) {
  210. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  211. }
  212. };
  213. const createMessagesList = (responseMessageId) => {
  214. const message = history.messages[responseMessageId];
  215. if (message.parentId) {
  216. return [...createMessagesList(message.parentId), message];
  217. } else {
  218. return [message];
  219. }
  220. };
  221. const chatCompletedHandler = async (modelId, messages) => {
  222. await mermaid.run({
  223. querySelector: '.mermaid'
  224. });
  225. const res = await chatCompleted(localStorage.token, {
  226. model: modelId,
  227. messages: messages.map((m) => ({
  228. id: m.id,
  229. role: m.role,
  230. content: m.content,
  231. info: m.info ? m.info : undefined,
  232. timestamp: m.timestamp
  233. })),
  234. chat_id: $chatId
  235. }).catch((error) => {
  236. console.error(error);
  237. return null;
  238. });
  239. if (res !== null) {
  240. // Update chat history with the new messages
  241. for (const message of res.messages) {
  242. history.messages[message.id] = {
  243. ...history.messages[message.id],
  244. ...(history.messages[message.id].content !== message.content
  245. ? { originalContent: history.messages[message.id].content }
  246. : {}),
  247. ...message
  248. };
  249. }
  250. }
  251. };
  252. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  253. return setInterval(() => {
  254. $socket?.emit('usage', {
  255. action: 'chat',
  256. model: modelId,
  257. chat_id: chatId
  258. });
  259. }, 1000);
  260. };
  261. //////////////////////////
  262. // Chat functions
  263. //////////////////////////
/**
 * Handle a user submitting a prompt from the input box.
 *
 * Validates that models are selected, the previous response is finished, and
 * all file uploads are done; then clears the input, appends a user message to
 * the history tree, and kicks off generation via sendPrompt.
 *
 * @param userPrompt text the user typed
 * @param _raw currently unused flag (kept for caller compatibility)
 * @returns the per-model response array from sendPrompt, or [] when validation fails
 */
const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
	let _responses = [];
	console.log('submitPrompt', $chatId);
	// Blank out any selected model that no longer exists on the server.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);
	if (selectedModels.includes('')) {
		toast.error($i18n.t('Model not selected'));
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done
		console.log('wait');
	} else if (
		files.length > 0 &&
		files.filter((file) => file.upload_status === false).length > 0
	) {
		// Upload not done
		toast.error(
			$i18n.t(
				`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
			)
		);
	} else {
		// Reset chat input textarea
		const chatTextAreaElement = document.getElementById('chat-textarea');
		if (chatTextAreaElement) {
			chatTextAreaElement.value = '';
			chatTextAreaElement.style.height = '';
		}
		// Snapshot files before clearing the input state (deep copy so later
		// UI mutations can't touch the message's attachment list).
		const _files = JSON.parse(JSON.stringify(files));
		files = [];
		prompt = '';
		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			content: userPrompt,
			files: _files.length > 0 ? _files : undefined,
			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
			// Deduplicated list of the models this message was sent to.
			models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
		};
		// Add message to history and Set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;
		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}
		// Wait until history/message have been updated
		await tick();
		_responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
	}
	return _responses;
};
/**
 * Fan a prompt out to every selected model and collect the responses.
 *
 * Creates one placeholder assistant message per model, persists a new chat
 * record on the first exchange (when history saving is enabled), then runs all
 * model completions concurrently, optionally enriching each with memory
 * context and web-search results.
 *
 * @param prompt the user's prompt text
 * @param parentId id of the user message these responses answer
 * @param modelId when set, send to this single model instead of the selection
 * @param newChat true when this is the first turn of a brand-new chat
 * @returns array of response contents (one entry per model, order not guaranteed)
 */
const sendPrompt = async (prompt, parentId, { modelId = null, newChat = false } = {}) => {
	let _responses = [];
	// If modelId is provided, use it, else use selected model
	let selectedModelIds = modelId
		? [modelId]
		: atSelectedModel !== undefined
		? [atSelectedModel.id]
		: selectedModels;
	// Create response messages for each selected model
	const responseMessageIds = {};
	for (const modelId of selectedModelIds) {
		const model = $models.filter((m) => m.id === modelId).at(0);
		if (model) {
			let responseMessageId = uuidv4();
			let responseMessage = {
				parentId: parentId,
				id: responseMessageId,
				childrenIds: [],
				role: 'assistant',
				content: '',
				model: model.id,
				modelName: model.name ?? model.id,
				userContext: null,
				timestamp: Math.floor(Date.now() / 1000) // Unix epoch
			};
			// Add message to history and Set currentId to messageId
			history.messages[responseMessageId] = responseMessage;
			history.currentId = responseMessageId;
			// Append messageId to childrenIds of parent message
			if (parentId !== null) {
				history.messages[parentId].childrenIds = [
					...history.messages[parentId].childrenIds,
					responseMessageId
				];
			}
			responseMessageIds[modelId] = responseMessageId;
		}
	}
	await tick();
	// Create new chat if only one message in messages
	if (newChat && messages.length == 2) {
		if ($settings.saveChatHistory ?? true) {
			chat = await createNewChat(localStorage.token, {
				id: $chatId,
				title: $i18n.t('New Chat'),
				models: selectedModels,
				system: $settings.system ?? undefined,
				options: {
					...($settings.params ?? {})
				},
				messages: messages,
				history: history,
				tags: [],
				timestamp: Date.now()
			});
			await chats.set(await getChatList(localStorage.token));
			await chatId.set(chat.id);
		} else {
			// Unsaved chats use the sentinel id 'local'.
			await chatId.set('local');
		}
		await tick();
	}
	// Snapshot the chat id: the store may change (user navigates away) while
	// the completions below are still streaming.
	const _chatId = JSON.parse(JSON.stringify($chatId));
	await Promise.all(
		selectedModelIds.map(async (modelId) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);
			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);
				// Warn (but still proceed) when a non-vision model receives images.
				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}
				let responseMessageId = responseMessageIds[modelId];
				let responseMessage = history.messages[responseMessageId];
				// Optionally pull relevant memories to prepend as user context.
				let userContext = null;
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});
						if (res) {
							if (res.documents[0].length > 0) {
								// Format each memory as "N. [YYYY-MM-DD]. text".
								userContext = res.documents.reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[index][0].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];
									acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
									return acc;
								}, []);
							}
							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;
				// Heartbeat so the server can track concurrent model usage.
				const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
				if (webSearchEnabled) {
					await getWebSearchResults(model.id, parentId, responseMessageId);
				}
				let _response = null;
				// Route to the right backend by model ownership.
				if (model?.owned_by === 'openai') {
					_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					_response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}
				_responses.push(_response);
				if (chatEventEmitter) clearInterval(chatEventEmitter);
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);
	await chats.set(await getChatList(localStorage.token));
	return _responses;
};
/**
 * Stream a completion from the Ollama backend into `responseMessage`.
 *
 * Builds the message payload (optional system prompt with user context,
 * base64 images attached only to the final image-bearing message), streams the
 * newline-delimited JSON response, updates the message tree incrementally, and
 * persists the chat when done. Dispatches 'chat:start' / 'chat' /
 * 'chat:finish' events on `eventTarget` for the call overlay's TTS.
 *
 * @param model resolved model record from $models
 * @param userPrompt the prompt text (used for title generation on first turn)
 * @param responseMessageId id of the placeholder assistant message to fill
 * @param _chatId chat id snapshot; streaming stops if the active chat changes
 * @returns the final response content, or null on failure
 */
const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
	let _response = null;
	const responseMessage = history.messages[responseMessageId];
	// Wait until history/message have been updated
	await tick();
	// Scroll down
	scrollToBottom();
	const messagesBody = [
		// Optional system message: templated system prompt plus memory context.
		$settings.system || (responseMessage?.userContext ?? null)
			? {
					role: 'system',
					content: `${promptTemplate(
						$settings?.system ?? '',
						$user.name,
						$settings?.userLocation
							? await getAndUpdateUserLocation(localStorage.token)
							: undefined
					)}${
						responseMessage?.userContext ?? null
							? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
							: ''
					}`
			  }
			: undefined,
		...messages
	]
		.filter((message) => message?.content?.trim())
		.map((message, idx, arr) => {
			// Prepare the base message object
			const baseMessage = {
				role: message.role,
				content: message.content
			};
			// Extract and format image URLs if any exist
			// (strip the data-URL prefix up to the comma, keeping raw base64).
			const imageUrls = message.files
				?.filter((file) => file.type === 'image')
				.map((file) => file.url.slice(file.url.indexOf(',') + 1));
			// Add images array only if it contains elements
			if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
				baseMessage.images = imageUrls;
			}
			return baseMessage;
		});
	let lastImageIndex = -1;
	// Find the index of the last object with images
	messagesBody.forEach((item, index) => {
		if (item.images) {
			lastImageIndex = index;
		}
	});
	// Remove images from all but the last one
	messagesBody.forEach((item, index) => {
		if (index !== lastImageIndex) {
			delete item.images;
		}
	});
	// Collect RAG files: model knowledge plus message attachments, deduplicated
	// by deep (JSON) equality. Shadows the component-level `files` on purpose.
	let files = [];
	if (model?.info?.meta?.knowledge ?? false) {
		files = model.info.meta.knowledge;
	}
	files = [
		...files,
		...messages
			.filter((message) => message?.files ?? null)
			.map((message) =>
				message.files.filter((item) =>
					['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
				)
			)
			.flat(1)
	].filter(
		(item, index, array) =>
			array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
	);
	eventTarget.dispatchEvent(
		new CustomEvent('chat:start', {
			detail: {
				id: responseMessageId
			}
		})
	);
	await tick();
	const [res, controller] = await generateChatCompletion(localStorage.token, {
		model: model.id,
		messages: messagesBody,
		options: {
			...($settings.params ?? {}),
			// Stop sequences are stored URI-encoded with escapes; decode them here.
			stop:
				$settings?.params?.stop ?? undefined
					? $settings.params.stop.map((str) =>
							decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
					  )
					: undefined,
			num_predict: $settings?.params?.max_tokens ?? undefined,
			repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
		},
		format: $settings.requestFormat ?? undefined,
		keep_alive: $settings.keepAlive ?? undefined,
		tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
		files: files.length > 0 ? files : undefined,
		citations: files.length > 0,
		chat_id: $chatId
	});
	if (res && res.ok) {
		console.log('controller', controller);
		// The response body is NDJSON; split on newlines before parsing.
		const reader = res.body
			.pipeThrough(new TextDecoderStream())
			.pipeThrough(splitStream('\n'))
			.getReader();
		while (true) {
			const { value, done } = await reader.read();
			// Stop on stream end, explicit user stop, or if the user switched chats.
			if (done || stopResponseFlag || _chatId !== $chatId) {
				responseMessage.done = true;
				messages = messages; // self-assignment triggers Svelte reactivity
				if (stopResponseFlag) {
					controller.abort('User: Stop Response');
				} else {
					const messages = createMessagesList(responseMessageId);
					await chatCompletedHandler(model.id, messages);
				}
				_response = responseMessage.content;
				break;
			}
			try {
				let lines = value.split('\n');
				for (const line of lines) {
					if (line !== '') {
						console.log(line);
						let data = JSON.parse(line);
						if ('citations' in data) {
							responseMessage.citations = data.citations;
							continue;
						}
						// A 'detail' field means the server sent an error payload.
						if ('detail' in data) {
							throw data;
						}
						if (data.done == false) {
							// Skip a lone leading newline so responses don't start blank.
							if (responseMessage.content == '' && data.message.content == '\n') {
								continue;
							} else {
								responseMessage.content += data.message.content;
								const sentences = extractSentencesForAudio(responseMessage.content);
								sentences.pop();
								// dispatch only last sentence and make sure it hasn't been dispatched before
								if (
									sentences.length > 0 &&
									sentences[sentences.length - 1] !== responseMessage.lastSentence
								) {
									responseMessage.lastSentence = sentences[sentences.length - 1];
									eventTarget.dispatchEvent(
										new CustomEvent('chat', {
											detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
										})
									);
								}
								messages = messages; // trigger reactivity
							}
						} else {
							// Final chunk: mark done, record generation stats, and run
							// notification / auto-copy / auto-playback side effects.
							responseMessage.done = true;
							if (responseMessage.content == '') {
								responseMessage.error = {
									code: 400,
									content: `Oops! No text generated from Ollama, Please try again.`
								};
							}
							responseMessage.context = data.context ?? null;
							responseMessage.info = {
								total_duration: data.total_duration,
								load_duration: data.load_duration,
								sample_count: data.sample_count,
								sample_duration: data.sample_duration,
								prompt_eval_count: data.prompt_eval_count,
								prompt_eval_duration: data.prompt_eval_duration,
								eval_count: data.eval_count,
								eval_duration: data.eval_duration
							};
							messages = messages; // trigger reactivity
							if ($settings.notificationEnabled && !document.hasFocus()) {
								// Constructing the Notification displays it; the handle is unused.
								const notification = new Notification(`${model.id}`, {
									body: responseMessage.content,
									icon: `${WEBUI_BASE_URL}/static/favicon.png`
								});
							}
							if ($settings?.responseAutoCopy ?? false) {
								copyToClipboard(responseMessage.content);
							}
							if ($settings.responseAutoPlayback && !$showCallOverlay) {
								await tick();
								document.getElementById(`speak-button-${responseMessage.id}`)?.click();
							}
						}
					}
				}
			} catch (error) {
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
				}
				break;
			}
			if (autoScroll) {
				scrollToBottom();
			}
		}
		// Persist only if the user is still on this chat and saving is enabled.
		if ($chatId == _chatId) {
			if ($settings.saveChatHistory ?? true) {
				chat = await updateChatById(localStorage.token, _chatId, {
					messages: messages,
					history: history,
					models: selectedModels
				});
				await chats.set(await getChatList(localStorage.token));
			}
		}
	} else {
		// Request failed outright: surface the server's error detail if we got
		// a response body, otherwise a generic connection error.
		if (res !== null) {
			const error = await res.json();
			console.log(error);
			if ('detail' in error) {
				toast.error(error.detail);
				responseMessage.error = { content: error.detail };
			} else {
				toast.error(error.error);
				responseMessage.error = { content: error.error };
			}
		} else {
			toast.error(
				$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
			);
			responseMessage.error = {
				content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
					provider: 'Ollama'
				})
			};
		}
		responseMessage.done = true;
		messages = messages; // trigger reactivity
	}
	stopResponseFlag = false;
	await tick();
	// Flush the trailing sentence (the streaming loop only dispatches complete
	// sentences, so the last one may not have been sent yet).
	let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
	if (lastSentence) {
		eventTarget.dispatchEvent(
			new CustomEvent('chat', {
				detail: { id: responseMessageId, content: lastSentence }
			})
		);
	}
	eventTarget.dispatchEvent(
		new CustomEvent('chat:finish', {
			detail: {
				id: responseMessageId,
				content: responseMessage.content
			}
		})
	);
	if (autoScroll) {
		scrollToBottom();
	}
	// First exchange complete: move the URL to /c/<id> and auto-title the chat.
	// NOTE(review): `history.state` is the component's message tree, not
	// window.history state — confirm intended.
	if (messages.length == 2 && messages.at(1).content !== '') {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);
		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}
	return _response;
};
  710. const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
  711. let _response = null;
  712. const responseMessage = history.messages[responseMessageId];
  713. let files = [];
  714. if (model?.info?.meta?.knowledge ?? false) {
  715. files = model.info.meta.knowledge;
  716. }
  717. files = [
  718. ...files,
  719. ...messages
  720. .filter((message) => message?.files ?? null)
  721. .map((message) =>
  722. message.files.filter((item) =>
  723. ['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
  724. )
  725. )
  726. .flat(1)
  727. ].filter(
  728. (item, index, array) =>
  729. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  730. );
  731. scrollToBottom();
  732. eventTarget.dispatchEvent(
  733. new CustomEvent('chat:start', {
  734. detail: {
  735. id: responseMessageId
  736. }
  737. })
  738. );
  739. await tick();
  740. try {
  741. const [res, controller] = await generateOpenAIChatCompletion(
  742. localStorage.token,
  743. {
  744. model: model.id,
  745. stream: true,
  746. stream_options:
  747. model.info?.meta?.capabilities?.usage ?? false
  748. ? {
  749. include_usage: true
  750. }
  751. : undefined,
  752. messages: [
  753. $settings.system || (responseMessage?.userContext ?? null)
  754. ? {
  755. role: 'system',
  756. content: `${promptTemplate(
  757. $settings?.system ?? '',
  758. $user.name,
  759. $settings?.userLocation
  760. ? await getAndUpdateUserLocation(localStorage.token)
  761. : undefined
  762. )}${
  763. responseMessage?.userContext ?? null
  764. ? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
  765. : ''
  766. }`
  767. }
  768. : undefined,
  769. ...messages
  770. ]
  771. .filter((message) => message?.content?.trim())
  772. .map((message, idx, arr) => ({
  773. role: message.role,
  774. ...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
  775. message.role === 'user'
  776. ? {
  777. content: [
  778. {
  779. type: 'text',
  780. text:
  781. arr.length - 1 !== idx
  782. ? message.content
  783. : message?.raContent ?? message.content
  784. },
  785. ...message.files
  786. .filter((file) => file.type === 'image')
  787. .map((file) => ({
  788. type: 'image_url',
  789. image_url: {
  790. url: file.url
  791. }
  792. }))
  793. ]
  794. }
  795. : {
  796. content:
  797. arr.length - 1 !== idx
  798. ? message.content
  799. : message?.raContent ?? message.content
  800. })
  801. })),
  802. seed: $settings?.params?.seed ?? undefined,
  803. stop:
  804. $settings?.params?.stop ?? undefined
  805. ? $settings.params.stop.map((str) =>
  806. decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  807. )
  808. : undefined,
  809. temperature: $settings?.params?.temperature ?? undefined,
  810. top_p: $settings?.params?.top_p ?? undefined,
  811. frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
  812. max_tokens: $settings?.params?.max_tokens ?? undefined,
  813. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  814. files: files.length > 0 ? files : undefined,
  815. citations: files.length > 0,
  816. chat_id: $chatId
  817. },
  818. `${OPENAI_API_BASE_URL}`
  819. );
  820. // Wait until history/message have been updated
  821. await tick();
  822. scrollToBottom();
  823. if (res && res.ok && res.body) {
  824. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  825. let lastUsage = null;
  826. for await (const update of textStream) {
  827. const { value, done, citations, error, usage } = update;
  828. if (error) {
  829. await handleOpenAIError(error, null, model, responseMessage);
  830. break;
  831. }
  832. if (done || stopResponseFlag || _chatId !== $chatId) {
  833. responseMessage.done = true;
  834. messages = messages;
  835. if (stopResponseFlag) {
  836. controller.abort('User: Stop Response');
  837. } else {
  838. const messages = createMessagesList(responseMessageId);
  839. await chatCompletedHandler(model.id, messages);
  840. }
  841. _response = responseMessage.content;
  842. break;
  843. }
  844. if (usage) {
  845. lastUsage = usage;
  846. }
  847. if (citations) {
  848. responseMessage.citations = citations;
  849. continue;
  850. }
  851. if (responseMessage.content == '' && value == '\n') {
  852. continue;
  853. } else {
  854. responseMessage.content += value;
  855. const sentences = extractSentencesForAudio(responseMessage.content);
  856. sentences.pop();
  857. // dispatch only last sentence and make sure it hasn't been dispatched before
  858. if (
  859. sentences.length > 0 &&
  860. sentences[sentences.length - 1] !== responseMessage.lastSentence
  861. ) {
  862. responseMessage.lastSentence = sentences[sentences.length - 1];
  863. eventTarget.dispatchEvent(
  864. new CustomEvent('chat', {
  865. detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
  866. })
  867. );
  868. }
  869. messages = messages;
  870. }
  871. if (autoScroll) {
  872. scrollToBottom();
  873. }
  874. }
  875. if ($settings.notificationEnabled && !document.hasFocus()) {
  876. const notification = new Notification(`${model.id}`, {
  877. body: responseMessage.content,
  878. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  879. });
  880. }
  881. if ($settings.responseAutoCopy) {
  882. copyToClipboard(responseMessage.content);
  883. }
  884. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  885. await tick();
  886. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  887. }
  888. if (lastUsage) {
  889. responseMessage.info = { ...lastUsage, openai: true };
  890. }
  891. if ($chatId == _chatId) {
  892. if ($settings.saveChatHistory ?? true) {
  893. chat = await updateChatById(localStorage.token, _chatId, {
  894. models: selectedModels,
  895. messages: messages,
  896. history: history
  897. });
  898. await chats.set(await getChatList(localStorage.token));
  899. }
  900. }
  901. } else {
  902. await handleOpenAIError(null, res, model, responseMessage);
  903. }
  904. } catch (error) {
  905. await handleOpenAIError(error, null, model, responseMessage);
  906. }
  907. messages = messages;
  908. stopResponseFlag = false;
  909. await tick();
  910. let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
  911. if (lastSentence) {
  912. eventTarget.dispatchEvent(
  913. new CustomEvent('chat', {
  914. detail: { id: responseMessageId, content: lastSentence }
  915. })
  916. );
  917. }
  918. eventTarget.dispatchEvent(
  919. new CustomEvent('chat:finish', {
  920. detail: {
  921. id: responseMessageId,
  922. content: responseMessage.content
  923. }
  924. })
  925. );
  926. if (autoScroll) {
  927. scrollToBottom();
  928. }
  929. if (messages.length == 2) {
  930. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  931. const _title = await generateChatTitle(userPrompt);
  932. await setChatTitle(_chatId, _title);
  933. }
  934. return _response;
  935. };
  936. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  937. let errorMessage = '';
  938. let innerError;
  939. if (error) {
  940. innerError = error;
  941. } else if (res !== null) {
  942. innerError = await res.json();
  943. }
  944. console.error(innerError);
  945. if ('detail' in innerError) {
  946. toast.error(innerError.detail);
  947. errorMessage = innerError.detail;
  948. } else if ('error' in innerError) {
  949. if ('message' in innerError.error) {
  950. toast.error(innerError.error.message);
  951. errorMessage = innerError.error.message;
  952. } else {
  953. toast.error(innerError.error);
  954. errorMessage = innerError.error;
  955. }
  956. } else if ('message' in innerError) {
  957. toast.error(innerError.message);
  958. errorMessage = innerError.message;
  959. }
  960. responseMessage.error = {
  961. content:
  962. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  963. provider: model.name ?? model.id
  964. }) +
  965. '\n' +
  966. errorMessage
  967. };
  968. responseMessage.done = true;
  969. messages = messages;
  970. };
	// Requests cancellation of the in-flight generation: the streaming loop
	// checks `stopResponseFlag` on each chunk and aborts its controller when set
	// (see the `stopResponseFlag` branch in the OpenAI stream handler above).
	const stopResponse = () => {
		stopResponseFlag = true;
		console.log('stopResponse');
	};
  975. const regenerateResponse = async (message) => {
  976. console.log('regenerateResponse');
  977. if (messages.length != 0) {
  978. let userMessage = history.messages[message.parentId];
  979. let userPrompt = userMessage.content;
  980. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  981. // If user message has only one model selected, sendPrompt automatically selects it for regeneration
  982. await sendPrompt(userPrompt, userMessage.id);
  983. } else {
  984. // If there are multiple models selected, use the model of the response message for regeneration
  985. // e.g. many model chat
  986. await sendPrompt(userPrompt, userMessage.id, { modelId: message.model });
  987. }
  988. }
  989. };
  990. const continueGeneration = async () => {
  991. console.log('continueGeneration');
  992. const _chatId = JSON.parse(JSON.stringify($chatId));
  993. if (messages.length != 0 && messages.at(-1).done == true) {
  994. const responseMessage = history.messages[history.currentId];
  995. responseMessage.done = false;
  996. await tick();
  997. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  998. if (model) {
  999. if (model?.owned_by === 'openai') {
  1000. await sendPromptOpenAI(
  1001. model,
  1002. history.messages[responseMessage.parentId].content,
  1003. responseMessage.id,
  1004. _chatId
  1005. );
  1006. } else
  1007. await sendPromptOllama(
  1008. model,
  1009. history.messages[responseMessage.parentId].content,
  1010. responseMessage.id,
  1011. _chatId
  1012. );
  1013. }
  1014. } else {
  1015. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  1016. }
  1017. };
  1018. const generateChatTitle = async (userPrompt) => {
  1019. if ($settings?.title?.auto ?? true) {
  1020. const title = await generateTitle(
  1021. localStorage.token,
  1022. selectedModels[0],
  1023. userPrompt,
  1024. $chatId
  1025. ).catch((error) => {
  1026. console.error(error);
  1027. return 'New Chat';
  1028. });
  1029. return title;
  1030. } else {
  1031. return `${userPrompt}`;
  1032. }
  1033. };
  1034. const setChatTitle = async (_chatId, _title) => {
  1035. if (_chatId === $chatId) {
  1036. title = _title;
  1037. }
  1038. if ($settings.saveChatHistory ?? true) {
  1039. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1040. await chats.set(await getChatList(localStorage.token));
  1041. }
  1042. };
  1043. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  1044. const responseMessage = history.messages[responseId];
  1045. responseMessage.statusHistory = [
  1046. {
  1047. done: false,
  1048. action: 'web_search',
  1049. description: $i18n.t('Generating search query')
  1050. }
  1051. ];
  1052. messages = messages;
  1053. const prompt = history.messages[parentId].content;
  1054. let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
  1055. (error) => {
  1056. console.log(error);
  1057. return prompt;
  1058. }
  1059. );
  1060. if (!searchQuery) {
  1061. toast.warning($i18n.t('No search query generated'));
  1062. responseMessage.statusHistory.push({
  1063. done: true,
  1064. error: true,
  1065. action: 'web_search',
  1066. description: 'No search query generated'
  1067. });
  1068. messages = messages;
  1069. }
  1070. responseMessage.statusHistory.push({
  1071. done: false,
  1072. action: 'web_search',
  1073. description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
  1074. });
  1075. messages = messages;
  1076. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  1077. console.log(error);
  1078. toast.error(error);
  1079. return null;
  1080. });
  1081. if (results) {
  1082. responseMessage.statusHistory.push({
  1083. done: true,
  1084. action: 'web_search',
  1085. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  1086. query: searchQuery,
  1087. urls: results.filenames
  1088. });
  1089. if (responseMessage?.files ?? undefined === undefined) {
  1090. responseMessage.files = [];
  1091. }
  1092. responseMessage.files.push({
  1093. collection_name: results.collection_name,
  1094. name: searchQuery,
  1095. type: 'web_search_results',
  1096. urls: results.filenames
  1097. });
  1098. messages = messages;
  1099. } else {
  1100. responseMessage.statusHistory.push({
  1101. done: true,
  1102. error: true,
  1103. action: 'web_search',
  1104. description: 'No search results found'
  1105. });
  1106. messages = messages;
  1107. }
  1108. };
  1109. const getTags = async () => {
  1110. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1111. return [];
  1112. });
  1113. };
  1114. </script>
<svelte:head>
	<!-- Browser tab title: chat title truncated to 30 chars, suffixed with the app name -->
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

<!-- Hidden audio element (id "audioElement"); NOTE(review): its consumer is not
     visible in this chunk — presumably used for response audio playback -->
<audio id="audioElement" src="" style="display: none;" />

{#if $showCallOverlay}
	<!-- Full-screen voice-call UI; shares prompt submission/stop with the chat view -->
	<CallOverlay
		{submitPrompt}
		{stopResponse}
		bind:files
		modelId={selectedModelIds?.at(0) ?? null}
		chatId={$chatId}
		{eventTarget}
	/>
{/if}

<!-- Render the chat only for a new chat, or once an existing chat has loaded -->
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="h-screen max-h-[100dvh] {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		{#if $settings?.backgroundImageUrl ?? null}
			<!-- User-configured background image plus a fade overlay for readability -->
			<div
				class="absolute {$showSidebar
					? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
					: ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
				style="background-image: url({$settings.backgroundImageUrl}) "
			/>
			<div
				class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
			/>
		{/if}

		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>

		<!-- Admin banners: shown only on an empty, unsaved chat with at most one model -->
		{#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
			<div
				class="absolute top-[4.25rem] w-full {$showSidebar
					? 'md:max-w-[calc(100%-260px)]'
					: ''} z-0"
			>
				<div class=" flex flex-col gap-1 w-full">
					<!-- Dismissed banner ids are persisted in localStorage and filtered out here -->
					{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
						<Banner
							{banner}
							on:dismiss={(e) => {
								const bannerId = e.detail;

								<!-- svelte-ignore --> localStorage.setItem(
									'dismissedBannerIds',
									JSON.stringify(
										[
											bannerId,
											...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
										].filter((id) => $banners.find((b) => b.id === id))
									)
								);
							}}
						/>
					{/each}
				</div>
			</div>
		{/if}

		<div class="flex flex-col flex-auto z-10">
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10"
				id="messages-container"
				bind:this={messagesContainerElement}
				on:scroll={(e) => {
					autoScroll =
						messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
						messagesContainerElement.clientHeight + 5;
				}}
			>
				<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
					<Messages
						chatId={$chatId}
						{selectedModels}
						{processing}
						bind:history
						bind:messages
						bind:autoScroll
						bind:prompt
						bottomPadding={files.length > 0}
						{sendPrompt}
						{continueGeneration}
						{regenerateResponse}
					/>
				</div>
			</div>

			<MessageInput
				bind:files
				bind:prompt
				bind:autoScroll
				bind:selectedToolIds
				bind:webSearchEnabled
				bind:atSelectedModel
				availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
					const model = $models.find((m) => m.id === e);
					if (model?.info?.meta?.toolIds ?? false) {
						return [...new Set([...a, ...model.info.meta.toolIds])];
					}
					return a;
				}, [])}
				transparentBackground={$settings?.backgroundImageUrl ?? false}
				{selectedModels}
				{messages}
				{submitPrompt}
				{stopResponse}
			/>
		</div>
	</div>
{/if}