Chat.svelte 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { getContext, onMount, tick } from 'svelte';
  6. import { goto } from '$app/navigation';
  7. import { page } from '$app/stores';
  8. import type { Writable } from 'svelte/store';
  9. import type { i18n as i18nType } from 'i18next';
  10. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  11. import {
  12. chatId,
  13. chats,
  14. config,
  15. type Model,
  16. models,
  17. settings,
  18. showSidebar,
  19. tags as _tags,
  20. WEBUI_NAME,
  21. banners,
  22. user,
  23. socket,
  24. showCallOverlay,
  25. tools
  26. } from '$lib/stores';
  27. import {
  28. convertMessagesToHistory,
  29. copyToClipboard,
  30. promptTemplate,
  31. splitStream
  32. } from '$lib/utils';
  33. import { generateChatCompletion } from '$lib/apis/ollama';
  34. import {
  35. addTagById,
  36. createNewChat,
  37. deleteTagById,
  38. getAllChatTags,
  39. getChatById,
  40. getChatList,
  41. getTagsById,
  42. updateChatById
  43. } from '$lib/apis/chats';
  44. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  45. import { runWebSearch } from '$lib/apis/rag';
  46. import { createOpenAITextStream } from '$lib/apis/streaming';
  47. import { queryMemory } from '$lib/apis/memories';
  48. import { getUserSettings } from '$lib/apis/users';
  49. import { chatCompleted, generateTitle, generateSearchQuery } from '$lib/apis';
  50. import Banner from '../common/Banner.svelte';
  51. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  52. import Messages from '$lib/components/chat/Messages.svelte';
  53. import Navbar from '$lib/components/layout/Navbar.svelte';
  54. import CallOverlay from './MessageInput/CallOverlay.svelte';
  55. import { error } from '@sveltejs/kit';
// i18n instance provided by the root layout via Svelte context.
const i18n: Writable<i18nType> = getContext('i18n');

// Chat id passed in by the route; empty string for a brand-new chat.
export let chatIdProp = '';

let loaded = false;

// Set to request that the in-flight response stream be aborted.
let stopResponseFlag = false;
// Whether the message list should keep following the bottom as tokens arrive.
let autoScroll = true;
let processing = '';
let messagesContainerElement: HTMLDivElement;
let showModelSelector = true;

let selectedModels = [''];
// Model chosen inline via the "@" mention; when set it overrides `selectedModels`.
let atSelectedModel: Model | undefined;

let selectedModelIds = [];
$: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;

let selectedToolIds = [];
let webSearchEnabled = false;

// Chat record as returned by the backend (null until created or loaded).
let chat = null;
let tags = [];

let title = '';
let prompt = '';
let files = [];
// Linear view of the active branch of the conversation (derived reactively below).
let messages = [];
// Branching conversation tree: message id -> message, plus the id of the
// currently focused leaf message.
let history = {
	messages: {},
	currentId: null
};

// Rebuild the flat `messages` list by walking parent links from the current
// leaf up to the root whenever the tree or the focused leaf changes.
$: if (history.currentId !== null) {
	let _messages = [];

	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}

// Load the chat whenever the route's chat id changes; bounce back to the
// home page if it cannot be loaded.
$: if (chatIdProp) {
	(async () => {
		if (await loadChat()) {
			await tick();
			loaded = true;

			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}

onMount(async () => {
	if (!$chatId) {
		await initNewChat();
	} else {
		// Saved chats cannot be revisited when history saving is disabled.
		if (!($settings.saveChatHistory ?? true)) {
			await goto('/');
		}
	}
});
  114. //////////////////////////
  115. // Web functions
  116. //////////////////////////
  117. const initNewChat = async () => {
  118. window.history.replaceState(history.state, '', `/`);
  119. await chatId.set('');
  120. autoScroll = true;
  121. title = '';
  122. messages = [];
  123. history = {
  124. messages: {},
  125. currentId: null
  126. };
  127. if ($page.url.searchParams.get('models')) {
  128. selectedModels = $page.url.searchParams.get('models')?.split(',');
  129. } else if ($settings?.models) {
  130. selectedModels = $settings?.models;
  131. } else if ($config?.default_models) {
  132. console.log($config?.default_models.split(',') ?? '');
  133. selectedModels = $config?.default_models.split(',');
  134. } else {
  135. selectedModels = [''];
  136. }
  137. if ($page.url.searchParams.get('q')) {
  138. prompt = $page.url.searchParams.get('q') ?? '';
  139. if (prompt) {
  140. await tick();
  141. submitPrompt(prompt);
  142. }
  143. }
  144. selectedModels = selectedModels.map((modelId) =>
  145. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  146. );
  147. const userSettings = await getUserSettings(localStorage.token);
  148. if (userSettings) {
  149. settings.set(userSettings.ui);
  150. } else {
  151. settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  152. }
  153. const chatInput = document.getElementById('chat-textarea');
  154. setTimeout(() => chatInput?.focus(), 0);
  155. };
/**
 * Load the chat referenced by `chatIdProp` from the backend and hydrate the
 * component state (models, history tree, title, settings) from it.
 * Returns true on success; null/undefined otherwise so the reactive caller
 * can redirect to the home page.
 */
const loadChat = async () => {
	chatId.set(chatIdProp);
	chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
		await goto('/');
		return null;
	});

	if (chat) {
		tags = await getTags();
		const chatContent = chat.chat;

		if (chatContent) {
			console.log(chatContent);

			// Older chats stored a single model value; normalize to an array.
			selectedModels =
				(chatContent?.models ?? undefined) !== undefined
					? chatContent.models
					: [chatContent.models ?? ''];
			// Older chats stored a flat message list; convert it to the
			// branching history structure when `history` is absent.
			history =
				(chatContent?.history ?? undefined) !== undefined
					? chatContent.history
					: convertMessagesToHistory(chatContent.messages);
			title = chatContent.title;

			// Load saved settings (server first, then localStorage fallback)...
			const userSettings = await getUserSettings(localStorage.token);

			if (userSettings) {
				await settings.set(userSettings.ui);
			} else {
				await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
			}

			// ...then overlay this chat's own system prompt and parameters.
			await settings.set({
				...$settings,
				system: chatContent.system ?? $settings.system,
				params: chatContent.options ?? $settings.params
			});
			autoScroll = true;
			await tick();

			// Mark the newest message done so the UI doesn't render it as
			// still streaming after a reload.
			if (messages.length > 0) {
				history.messages[messages.at(-1).id].done = true;
			}
			await tick();

			return true;
		} else {
			return null;
		}
	}
};
  199. const scrollToBottom = async () => {
  200. await tick();
  201. if (messagesContainerElement) {
  202. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  203. }
  204. };
  205. const createMessagesList = (responseMessageId) => {
  206. const message = history.messages[responseMessageId];
  207. if (message.parentId) {
  208. return [...createMessagesList(message.parentId), message];
  209. } else {
  210. return [message];
  211. }
  212. };
  213. const chatCompletedHandler = async (modelId, messages) => {
  214. await mermaid.run({
  215. querySelector: '.mermaid'
  216. });
  217. const res = await chatCompleted(localStorage.token, {
  218. model: modelId,
  219. messages: messages.map((m) => ({
  220. id: m.id,
  221. role: m.role,
  222. content: m.content,
  223. timestamp: m.timestamp
  224. })),
  225. chat_id: $chatId
  226. }).catch((error) => {
  227. console.error(error);
  228. return null;
  229. });
  230. if (res !== null) {
  231. // Update chat history with the new messages
  232. for (const message of res.messages) {
  233. history.messages[message.id] = {
  234. ...history.messages[message.id],
  235. ...(history.messages[message.id].content !== message.content
  236. ? { originalContent: history.messages[message.id].content }
  237. : {}),
  238. ...message
  239. };
  240. }
  241. }
  242. };
  243. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  244. return setInterval(() => {
  245. $socket?.emit('usage', {
  246. action: 'chat',
  247. model: modelId,
  248. chat_id: chatId
  249. });
  250. }, 1000);
  251. };
  252. //////////////////////////
  253. // Chat functions
  254. //////////////////////////
  255. const submitPrompt = async (userPrompt, _user = null) => {
  256. let _responses = [];
  257. console.log('submitPrompt', $chatId);
  258. selectedModels = selectedModels.map((modelId) =>
  259. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  260. );
  261. if (selectedModels.includes('')) {
  262. toast.error($i18n.t('Model not selected'));
  263. } else if (messages.length != 0 && messages.at(-1).done != true) {
  264. // Response not done
  265. console.log('wait');
  266. } else if (
  267. files.length > 0 &&
  268. files.filter((file) => file.upload_status === false).length > 0
  269. ) {
  270. // Upload not done
  271. toast.error(
  272. $i18n.t(
  273. `Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
  274. )
  275. );
  276. } else {
  277. // Reset chat input textarea
  278. const chatTextAreaElement = document.getElementById('chat-textarea');
  279. if (chatTextAreaElement) {
  280. chatTextAreaElement.value = '';
  281. chatTextAreaElement.style.height = '';
  282. }
  283. const _files = JSON.parse(JSON.stringify(files));
  284. files = [];
  285. prompt = '';
  286. // Create user message
  287. let userMessageId = uuidv4();
  288. let userMessage = {
  289. id: userMessageId,
  290. parentId: messages.length !== 0 ? messages.at(-1).id : null,
  291. childrenIds: [],
  292. role: 'user',
  293. user: _user ?? undefined,
  294. content: userPrompt,
  295. files: _files.length > 0 ? _files : undefined,
  296. timestamp: Math.floor(Date.now() / 1000), // Unix epoch
  297. models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
  298. };
  299. // Add message to history and Set currentId to messageId
  300. history.messages[userMessageId] = userMessage;
  301. history.currentId = userMessageId;
  302. // Append messageId to childrenIds of parent message
  303. if (messages.length !== 0) {
  304. history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
  305. }
  306. // Wait until history/message have been updated
  307. await tick();
  308. // Send prompt
  309. _responses = await sendPrompt(userPrompt, userMessageId);
  310. }
  311. return _responses;
  312. };
/**
 * Fan a user prompt out to every selected model: create one placeholder
 * assistant message per model, create/persist the chat record on the first
 * exchange, then stream each model's completion concurrently.
 *
 * @param prompt   The user prompt text.
 * @param parentId Id of the user message these responses branch from.
 * @param modelId  When set, forces a single target model (e.g. regeneration).
 * @param newChat  Whether a chat record may be created for a first exchange.
 * @returns Array of response contents, one per model that produced one.
 */
const sendPrompt = async (prompt, parentId, modelId = null, newChat = true) => {
	let _responses = [];

	// If modelId is provided, use it, else use selected model
	let selectedModelIds = modelId
		? [modelId]
		: atSelectedModel !== undefined
		? [atSelectedModel.id]
		: selectedModels;

	// Create response messages for each selected model
	const responseMessageIds = {};
	for (const modelId of selectedModelIds) {
		const model = $models.filter((m) => m.id === modelId).at(0);

		if (model) {
			let responseMessageId = uuidv4();
			let responseMessage = {
				parentId: parentId,
				id: responseMessageId,
				childrenIds: [],
				role: 'assistant',
				content: '',
				model: model.id,
				modelName: model.name ?? model.id,
				userContext: null,
				timestamp: Math.floor(Date.now() / 1000) // Unix epoch
			};

			// Add message to history and Set currentId to messageId
			history.messages[responseMessageId] = responseMessage;
			history.currentId = responseMessageId;

			// Append messageId to childrenIds of parent message
			if (parentId !== null) {
				history.messages[parentId].childrenIds = [
					...history.messages[parentId].childrenIds,
					responseMessageId
				];
			}

			responseMessageIds[modelId] = responseMessageId;
		}
	}
	await tick();

	// Create new chat if only one message in messages
	// (one user message + one placeholder response == first exchange).
	if (newChat && messages.length == 2) {
		if ($settings.saveChatHistory ?? true) {
			chat = await createNewChat(localStorage.token, {
				id: $chatId,
				title: $i18n.t('New Chat'),
				models: selectedModels,
				system: $settings.system ?? undefined,
				options: {
					...($settings.params ?? {})
				},
				messages: messages,
				history: history,
				tags: [],
				timestamp: Date.now()
			});
			await chats.set(await getChatList(localStorage.token));
			await chatId.set(chat.id);
		} else {
			// History saving disabled: keep the chat client-side only.
			await chatId.set('local');
		}
		await tick();
	}

	// Snapshot the chat id so concurrent streams can detect navigation away.
	const _chatId = JSON.parse(JSON.stringify($chatId));

	// Stream all model responses concurrently.
	await Promise.all(
		selectedModelIds.map(async (modelId) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);

			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);

				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}

				let responseMessageId = responseMessageIds[modelId];
				let responseMessage = history.messages[responseMessageId];

				// Optionally prepend stored memories as user context.
				let userContext = null;
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});

						if (res) {
							if (res.documents[0].length > 0) {
								// Format each memory as "N. [YYYY-MM-DD]. text".
								userContext = res.documents.reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[index][0].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];
									acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
									return acc;
								}, []);
							}
							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;

				// Per-second usage heartbeat while this response streams.
				const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);

				if (webSearchEnabled) {
					await getWebSearchResults(model.id, parentId, responseMessageId);
				}

				let _response = null;
				if (model?.owned_by === 'openai') {
					_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					_response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}
				_responses.push(_response);

				console.log('chatEventEmitter', chatEventEmitter);
				if (chatEventEmitter) clearInterval(chatEventEmitter);
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);

	await chats.set(await getChatList(localStorage.token));

	return _responses;
};
  438. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  439. const responseMessage = history.messages[responseId];
  440. responseMessage.statusHistory = [
  441. {
  442. done: false,
  443. action: 'web_search',
  444. description: $i18n.t('Generating search query')
  445. }
  446. ];
  447. messages = messages;
  448. const prompt = history.messages[parentId].content;
  449. let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
  450. (error) => {
  451. console.log(error);
  452. return prompt;
  453. }
  454. );
  455. if (!searchQuery) {
  456. toast.warning($i18n.t('No search query generated'));
  457. responseMessage.statusHistory.push({
  458. done: true,
  459. error: true,
  460. action: 'web_search',
  461. description: 'No search query generated'
  462. });
  463. messages = messages;
  464. }
  465. responseMessage.statusHistory.push({
  466. done: false,
  467. action: 'web_search',
  468. description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
  469. });
  470. messages = messages;
  471. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  472. console.log(error);
  473. toast.error(error);
  474. return null;
  475. });
  476. if (results) {
  477. responseMessage.statusHistory.push({
  478. done: true,
  479. action: 'web_search',
  480. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  481. query: searchQuery,
  482. urls: results.filenames
  483. });
  484. if (responseMessage?.files ?? undefined === undefined) {
  485. responseMessage.files = [];
  486. }
  487. responseMessage.files.push({
  488. collection_name: results.collection_name,
  489. name: searchQuery,
  490. type: 'web_search_results',
  491. urls: results.filenames
  492. });
  493. messages = messages;
  494. } else {
  495. responseMessage.statusHistory.push({
  496. done: true,
  497. error: true,
  498. action: 'web_search',
  499. description: 'No search results found'
  500. });
  501. messages = messages;
  502. }
  503. };
  504. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  505. let _response = null;
  506. const responseMessage = history.messages[responseMessageId];
  507. // Wait until history/message have been updated
  508. await tick();
  509. // Scroll down
  510. scrollToBottom();
  511. const messagesBody = [
  512. $settings.system || (responseMessage?.userContext ?? null)
  513. ? {
  514. role: 'system',
  515. content: `${promptTemplate($settings?.system ?? '', $user.name)}${
  516. responseMessage?.userContext ?? null
  517. ? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
  518. : ''
  519. }`
  520. }
  521. : undefined,
  522. ...messages
  523. ]
  524. .filter((message) => message?.content?.trim())
  525. .map((message, idx, arr) => {
  526. // Prepare the base message object
  527. const baseMessage = {
  528. role: message.role,
  529. content: message.content
  530. };
  531. // Extract and format image URLs if any exist
  532. const imageUrls = message.files
  533. ?.filter((file) => file.type === 'image')
  534. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  535. // Add images array only if it contains elements
  536. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  537. baseMessage.images = imageUrls;
  538. }
  539. return baseMessage;
  540. });
  541. let lastImageIndex = -1;
  542. // Find the index of the last object with images
  543. messagesBody.forEach((item, index) => {
  544. if (item.images) {
  545. lastImageIndex = index;
  546. }
  547. });
  548. // Remove images from all but the last one
  549. messagesBody.forEach((item, index) => {
  550. if (index !== lastImageIndex) {
  551. delete item.images;
  552. }
  553. });
  554. let docs = [];
  555. if (model?.info?.meta?.knowledge ?? false) {
  556. docs = model.info.meta.knowledge;
  557. }
  558. docs = [
  559. ...docs,
  560. ...messages
  561. .filter((message) => message?.files ?? null)
  562. .map((message) =>
  563. message.files.filter((item) =>
  564. ['doc', 'collection', 'web_search_results'].includes(item.type)
  565. )
  566. )
  567. .flat(1)
  568. ].filter(
  569. (item, index, array) =>
  570. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  571. );
  572. const [res, controller] = await generateChatCompletion(localStorage.token, {
  573. model: model.id,
  574. messages: messagesBody,
  575. options: {
  576. ...($settings.params ?? {}),
  577. stop:
  578. $settings?.params?.stop ?? undefined
  579. ? $settings.params.stop.map((str) =>
  580. decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  581. )
  582. : undefined,
  583. num_predict: $settings?.params?.max_tokens ?? undefined,
  584. repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
  585. },
  586. format: $settings.requestFormat ?? undefined,
  587. keep_alive: $settings.keepAlive ?? undefined,
  588. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  589. docs: docs.length > 0 ? docs : undefined,
  590. citations: docs.length > 0,
  591. chat_id: $chatId
  592. });
  593. if (res && res.ok) {
  594. console.log('controller', controller);
  595. const reader = res.body
  596. .pipeThrough(new TextDecoderStream())
  597. .pipeThrough(splitStream('\n'))
  598. .getReader();
  599. while (true) {
  600. const { value, done } = await reader.read();
  601. if (done || stopResponseFlag || _chatId !== $chatId) {
  602. responseMessage.done = true;
  603. messages = messages;
  604. if (stopResponseFlag) {
  605. controller.abort('User: Stop Response');
  606. } else {
  607. const messages = createMessagesList(responseMessageId);
  608. await chatCompletedHandler(model.id, messages);
  609. }
  610. _response = responseMessage.content;
  611. break;
  612. }
  613. try {
  614. let lines = value.split('\n');
  615. for (const line of lines) {
  616. if (line !== '') {
  617. console.log(line);
  618. let data = JSON.parse(line);
  619. if ('citations' in data) {
  620. responseMessage.citations = data.citations;
  621. continue;
  622. }
  623. if ('detail' in data) {
  624. throw data;
  625. }
  626. if (data.done == false) {
  627. if (responseMessage.content == '' && data.message.content == '\n') {
  628. continue;
  629. } else {
  630. responseMessage.content += data.message.content;
  631. messages = messages;
  632. }
  633. } else {
  634. responseMessage.done = true;
  635. if (responseMessage.content == '') {
  636. responseMessage.error = {
  637. code: 400,
  638. content: `Oops! No text generated from Ollama, Please try again.`
  639. };
  640. }
  641. responseMessage.context = data.context ?? null;
  642. responseMessage.info = {
  643. total_duration: data.total_duration,
  644. load_duration: data.load_duration,
  645. sample_count: data.sample_count,
  646. sample_duration: data.sample_duration,
  647. prompt_eval_count: data.prompt_eval_count,
  648. prompt_eval_duration: data.prompt_eval_duration,
  649. eval_count: data.eval_count,
  650. eval_duration: data.eval_duration
  651. };
  652. messages = messages;
  653. if ($settings.notificationEnabled && !document.hasFocus()) {
  654. const notification = new Notification(
  655. selectedModelfile
  656. ? `${
  657. selectedModelfile.title.charAt(0).toUpperCase() +
  658. selectedModelfile.title.slice(1)
  659. }`
  660. : `${model.id}`,
  661. {
  662. body: responseMessage.content,
  663. icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
  664. }
  665. );
  666. }
  667. if ($settings.responseAutoCopy) {
  668. copyToClipboard(responseMessage.content);
  669. }
  670. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  671. await tick();
  672. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  673. }
  674. }
  675. }
  676. }
  677. } catch (error) {
  678. console.log(error);
  679. if ('detail' in error) {
  680. toast.error(error.detail);
  681. }
  682. break;
  683. }
  684. if (autoScroll) {
  685. scrollToBottom();
  686. }
  687. }
  688. if ($chatId == _chatId) {
  689. if ($settings.saveChatHistory ?? true) {
  690. chat = await updateChatById(localStorage.token, _chatId, {
  691. messages: messages,
  692. history: history,
  693. models: selectedModels
  694. });
  695. await chats.set(await getChatList(localStorage.token));
  696. }
  697. }
  698. } else {
  699. if (res !== null) {
  700. const error = await res.json();
  701. console.log(error);
  702. if ('detail' in error) {
  703. toast.error(error.detail);
  704. responseMessage.error = { content: error.detail };
  705. } else {
  706. toast.error(error.error);
  707. responseMessage.error = { content: error.error };
  708. }
  709. } else {
  710. toast.error(
  711. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  712. );
  713. responseMessage.error = {
  714. content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  715. provider: 'Ollama'
  716. })
  717. };
  718. }
  719. responseMessage.done = true;
  720. messages = messages;
  721. }
  722. stopResponseFlag = false;
  723. await tick();
  724. if (autoScroll) {
  725. scrollToBottom();
  726. }
  727. if (messages.length == 2 && messages.at(1).content !== '') {
  728. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  729. const _title = await generateChatTitle(userPrompt);
  730. await setChatTitle(_chatId, _title);
  731. }
  732. return _response;
  733. };
  734. const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
  735. let _response = null;
  736. const responseMessage = history.messages[responseMessageId];
  737. let docs = [];
  738. if (model?.info?.meta?.knowledge ?? false) {
  739. docs = model.info.meta.knowledge;
  740. }
  741. docs = [
  742. ...docs,
  743. ...messages
  744. .filter((message) => message?.files ?? null)
  745. .map((message) =>
  746. message.files.filter((item) =>
  747. ['doc', 'collection', 'web_search_results'].includes(item.type)
  748. )
  749. )
  750. .flat(1)
  751. ].filter(
  752. (item, index, array) =>
  753. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  754. );
  755. scrollToBottom();
  756. try {
  757. const [res, controller] = await generateOpenAIChatCompletion(
  758. localStorage.token,
  759. {
  760. model: model.id,
  761. stream: true,
  762. stream_options:
  763. model.info?.meta?.capabilities?.usage ?? false
  764. ? {
  765. include_usage: true
  766. }
  767. : undefined,
  768. messages: [
  769. $settings.system || (responseMessage?.userContext ?? null)
  770. ? {
  771. role: 'system',
  772. content: `${promptTemplate($settings?.system ?? '', $user.name)}${
  773. responseMessage?.userContext ?? null
  774. ? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
  775. : ''
  776. }`
  777. }
  778. : undefined,
  779. ...messages
  780. ]
  781. .filter((message) => message?.content?.trim())
  782. .map((message, idx, arr) => ({
  783. role: message.role,
  784. ...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
  785. message.role === 'user'
  786. ? {
  787. content: [
  788. {
  789. type: 'text',
  790. text:
  791. arr.length - 1 !== idx
  792. ? message.content
  793. : message?.raContent ?? message.content
  794. },
  795. ...message.files
  796. .filter((file) => file.type === 'image')
  797. .map((file) => ({
  798. type: 'image_url',
  799. image_url: {
  800. url: file.url
  801. }
  802. }))
  803. ]
  804. }
  805. : {
  806. content:
  807. arr.length - 1 !== idx
  808. ? message.content
  809. : message?.raContent ?? message.content
  810. })
  811. })),
  812. seed: $settings?.params?.seed ?? undefined,
  813. stop:
  814. $settings?.params?.stop ?? undefined
  815. ? $settings.params.stop.map((str) =>
  816. decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  817. )
  818. : undefined,
  819. temperature: $settings?.params?.temperature ?? undefined,
  820. top_p: $settings?.params?.top_p ?? undefined,
  821. frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
  822. max_tokens: $settings?.params?.max_tokens ?? undefined,
  823. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  824. docs: docs.length > 0 ? docs : undefined,
  825. citations: docs.length > 0,
  826. chat_id: $chatId
  827. },
  828. `${OPENAI_API_BASE_URL}`
  829. );
  830. // Wait until history/message have been updated
  831. await tick();
  832. scrollToBottom();
  833. if (res && res.ok && res.body) {
  834. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  835. let lastUsage = null;
  836. for await (const update of textStream) {
  837. const { value, done, citations, error, usage } = update;
  838. if (error) {
  839. await handleOpenAIError(error, null, model, responseMessage);
  840. break;
  841. }
  842. if (done || stopResponseFlag || _chatId !== $chatId) {
  843. responseMessage.done = true;
  844. messages = messages;
  845. if (stopResponseFlag) {
  846. controller.abort('User: Stop Response');
  847. } else {
  848. const messages = createMessagesList(responseMessageId);
  849. await chatCompletedHandler(model.id, messages);
  850. }
  851. _response = responseMessage.content;
  852. break;
  853. }
  854. if (usage) {
  855. lastUsage = usage;
  856. }
  857. if (citations) {
  858. responseMessage.citations = citations;
  859. continue;
  860. }
  861. if (responseMessage.content == '' && value == '\n') {
  862. continue;
  863. } else {
  864. responseMessage.content += value;
  865. messages = messages;
  866. }
  867. if (autoScroll) {
  868. scrollToBottom();
  869. }
  870. }
  871. if ($settings.notificationEnabled && !document.hasFocus()) {
  872. const notification = new Notification(`${model.id}`, {
  873. body: responseMessage.content,
  874. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  875. });
  876. }
  877. if ($settings.responseAutoCopy) {
  878. copyToClipboard(responseMessage.content);
  879. }
  880. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  881. await tick();
  882. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  883. }
  884. if (lastUsage) {
  885. responseMessage.info = { ...lastUsage, openai: true };
  886. }
  887. if ($chatId == _chatId) {
  888. if ($settings.saveChatHistory ?? true) {
  889. chat = await updateChatById(localStorage.token, _chatId, {
  890. models: selectedModels,
  891. messages: messages,
  892. history: history
  893. });
  894. await chats.set(await getChatList(localStorage.token));
  895. }
  896. }
  897. } else {
  898. await handleOpenAIError(null, res, model, responseMessage);
  899. }
  900. } catch (error) {
  901. await handleOpenAIError(error, null, model, responseMessage);
  902. }
  903. messages = messages;
  904. stopResponseFlag = false;
  905. await tick();
  906. if (autoScroll) {
  907. scrollToBottom();
  908. }
  909. if (messages.length == 2) {
  910. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  911. const _title = await generateChatTitle(userPrompt);
  912. await setChatTitle(_chatId, _title);
  913. }
  914. return _response;
  915. };
  916. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  917. let errorMessage = '';
  918. let innerError;
  919. if (error) {
  920. innerError = error;
  921. } else if (res !== null) {
  922. innerError = await res.json();
  923. }
  924. console.error(innerError);
  925. if ('detail' in innerError) {
  926. toast.error(innerError.detail);
  927. errorMessage = innerError.detail;
  928. } else if ('error' in innerError) {
  929. if ('message' in innerError.error) {
  930. toast.error(innerError.error.message);
  931. errorMessage = innerError.error.message;
  932. } else {
  933. toast.error(innerError.error);
  934. errorMessage = innerError.error;
  935. }
  936. } else if ('message' in innerError) {
  937. toast.error(innerError.message);
  938. errorMessage = innerError.message;
  939. }
  940. responseMessage.error = {
  941. content:
  942. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  943. provider: model.name ?? model.id
  944. }) +
  945. '\n' +
  946. errorMessage
  947. };
  948. responseMessage.done = true;
  949. messages = messages;
  950. };
  951. const stopResponse = () => {
  952. stopResponseFlag = true;
  953. console.log('stopResponse');
  954. };
  955. const regenerateResponse = async (message) => {
  956. console.log('regenerateResponse');
  957. if (messages.length != 0) {
  958. let userMessage = history.messages[message.parentId];
  959. let userPrompt = userMessage.content;
  960. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  961. await sendPrompt(userPrompt, userMessage.id, undefined, false);
  962. } else {
  963. await sendPrompt(userPrompt, userMessage.id, message.model, false);
  964. }
  965. }
  966. };
  967. const continueGeneration = async () => {
  968. console.log('continueGeneration');
  969. const _chatId = JSON.parse(JSON.stringify($chatId));
  970. if (messages.length != 0 && messages.at(-1).done == true) {
  971. const responseMessage = history.messages[history.currentId];
  972. responseMessage.done = false;
  973. await tick();
  974. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  975. if (model) {
  976. if (model?.owned_by === 'openai') {
  977. await sendPromptOpenAI(
  978. model,
  979. history.messages[responseMessage.parentId].content,
  980. responseMessage.id,
  981. _chatId
  982. );
  983. } else
  984. await sendPromptOllama(
  985. model,
  986. history.messages[responseMessage.parentId].content,
  987. responseMessage.id,
  988. _chatId
  989. );
  990. }
  991. } else {
  992. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  993. }
  994. };
  995. const generateChatTitle = async (userPrompt) => {
  996. if ($settings?.title?.auto ?? true) {
  997. const title = await generateTitle(
  998. localStorage.token,
  999. selectedModels[0],
  1000. userPrompt,
  1001. $chatId
  1002. ).catch((error) => {
  1003. console.error(error);
  1004. return 'New Chat';
  1005. });
  1006. return title;
  1007. } else {
  1008. return `${userPrompt}`;
  1009. }
  1010. };
  1011. const setChatTitle = async (_chatId, _title) => {
  1012. if (_chatId === $chatId) {
  1013. title = _title;
  1014. }
  1015. if ($settings.saveChatHistory ?? true) {
  1016. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1017. await chats.set(await getChatList(localStorage.token));
  1018. }
  1019. };
  1020. const getTags = async () => {
  1021. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1022. return [];
  1023. });
  1024. };
  1025. </script>
<!-- Browser tab title: chat title (truncated to 30 chars) + app name -->
<svelte:head>
<title>
{title
? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
: `${$WEBUI_NAME}`}
</title>
</svelte:head>
<!-- Voice-call overlay; shares submitPrompt and the attached-files list -->
<CallOverlay {submitPrompt} bind:files />
<!-- Render for a brand-new chat immediately, or an existing chat once loaded -->
{#if !chatIdProp || (loaded && chatIdProp)}
<div
class="h-screen max-h-[100dvh] {$showSidebar
? 'md:max-w-[calc(100%-260px)]'
: ''} w-full max-w-full flex flex-col"
>
<!-- Top bar: title, model selection, share (enabled once messages exist) -->
<Navbar
{title}
bind:selectedModels
bind:showModelSelector
shareEnabled={messages.length > 0}
{chat}
{initNewChat}
/>
<!-- Announcement banners: only on an empty, unsaved chat with at most one model selected -->
{#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
<div
class="absolute top-[4.25rem] w-full {$showSidebar ? 'md:max-w-[calc(100%-260px)]' : ''}"
>
<div class=" flex flex-col gap-1 w-full">
<!-- Dismissible banners are hidden once their id appears in localStorage's dismissedBannerIds -->
{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
<!-- On dismiss: prepend the id, pruning ids of banners that no longer exist -->
<Banner
{banner}
on:dismiss={(e) => {
const bannerId = e.detail;
localStorage.setItem(
'dismissedBannerIds',
JSON.stringify(
[
bannerId,
...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
].filter((id) => $banners.find((b) => b.id === id))
)
);
}}
/>
{/each}
</div>
</div>
{/if}
<!-- Scrollable message area; autoScroll stays true only while the user is within 5px of the bottom -->
<div class="flex flex-col flex-auto">
<div
class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
id="messages-container"
bind:this={messagesContainerElement}
on:scroll={(e) => {
autoScroll =
messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
messagesContainerElement.clientHeight + 5;
}}
>
<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
<Messages
chatId={$chatId}
{selectedModels}
{processing}
bind:history
bind:messages
bind:autoScroll
bind:prompt
bottomPadding={files.length > 0}
{sendPrompt}
{continueGeneration}
{regenerateResponse}
/>
</div>
</div>
<!-- Prompt input; availableToolIds is the de-duplicated union of toolIds across the selected models -->
<MessageInput
bind:files
bind:prompt
bind:autoScroll
bind:selectedToolIds
bind:webSearchEnabled
bind:atSelectedModel
availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
const model = $models.find((m) => m.id === e);
if (model?.info?.meta?.toolIds ?? false) {
return [...new Set([...a, ...model.info.meta.toolIds])];
}
return a;
}, [])}
{selectedModels}
{messages}
{submitPrompt}
{stopResponse}
/>
</div>
</div>
{/if}