Chat.svelte 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { getContext, onMount, tick } from 'svelte';
  6. import { goto } from '$app/navigation';
  7. import { page } from '$app/stores';
  8. import {
  9. chatId,
  10. chats,
  11. config,
  12. type Model,
  13. models,
  14. settings,
  15. showSidebar,
  16. tags as _tags,
  17. WEBUI_NAME,
  18. banners,
  19. user,
  20. socket,
  21. showCallOverlay
  22. } from '$lib/stores';
  23. import {
  24. convertMessagesToHistory,
  25. copyToClipboard,
  26. promptTemplate,
  27. splitStream
  28. } from '$lib/utils';
  29. import { generateChatCompletion } from '$lib/apis/ollama';
  30. import {
  31. addTagById,
  32. createNewChat,
  33. deleteTagById,
  34. getAllChatTags,
  35. getChatById,
  36. getChatList,
  37. getTagsById,
  38. updateChatById
  39. } from '$lib/apis/chats';
  40. import {
  41. generateOpenAIChatCompletion,
  42. generateSearchQuery,
  43. generateTitle
  44. } from '$lib/apis/openai';
  45. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  46. import Messages from '$lib/components/chat/Messages.svelte';
  47. import Navbar from '$lib/components/layout/Navbar.svelte';
  48. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  49. import { createOpenAITextStream } from '$lib/apis/streaming';
  50. import { queryMemory } from '$lib/apis/memories';
  51. import type { Writable } from 'svelte/store';
  52. import type { i18n as i18nType } from 'i18next';
  53. import { runWebSearch } from '$lib/apis/rag';
  54. import Banner from '../common/Banner.svelte';
  55. import { getUserSettings } from '$lib/apis/users';
  56. import { chatCompleted } from '$lib/apis';
  57. import CallOverlay from './MessageInput/CallOverlay.svelte';
  58. const i18n: Writable<i18nType> = getContext('i18n');
  59. export let chatIdProp = '';
  60. let loaded = false;
  61. let stopResponseFlag = false;
  62. let autoScroll = true;
  63. let processing = '';
  64. let messagesContainerElement: HTMLDivElement;
  65. let showModelSelector = true;
  66. let selectedModels = [''];
  67. let atSelectedModel: Model | undefined;
  68. let webSearchEnabled = false;
  69. let chat = null;
  70. let tags = [];
  71. let title = '';
  72. let prompt = '';
  73. let files = [];
  74. let messages = [];
  75. let history = {
  76. messages: {},
  77. currentId: null
  78. };
  79. $: if (history.currentId !== null) {
  80. let _messages = [];
  81. let currentMessage = history.messages[history.currentId];
  82. while (currentMessage !== null) {
  83. _messages.unshift({ ...currentMessage });
  84. currentMessage =
  85. currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
  86. }
  87. messages = _messages;
  88. } else {
  89. messages = [];
  90. }
  91. $: if (chatIdProp) {
  92. (async () => {
  93. if (await loadChat()) {
  94. await tick();
  95. loaded = true;
  96. window.setTimeout(() => scrollToBottom(), 0);
  97. const chatInput = document.getElementById('chat-textarea');
  98. chatInput?.focus();
  99. } else {
  100. await goto('/');
  101. }
  102. })();
  103. }
  104. onMount(async () => {
  105. if (!$chatId) {
  106. await initNewChat();
  107. } else {
  108. if (!($settings.saveChatHistory ?? true)) {
  109. await goto('/');
  110. }
  111. }
  112. });
  113. //////////////////////////
  114. // Web functions
  115. //////////////////////////
  116. const initNewChat = async () => {
  117. window.history.replaceState(history.state, '', `/`);
  118. await chatId.set('');
  119. autoScroll = true;
  120. title = '';
  121. messages = [];
  122. history = {
  123. messages: {},
  124. currentId: null
  125. };
  126. if ($page.url.searchParams.get('models')) {
  127. selectedModels = $page.url.searchParams.get('models')?.split(',');
  128. } else if ($settings?.models) {
  129. selectedModels = $settings?.models;
  130. } else if ($config?.default_models) {
  131. console.log($config?.default_models.split(',') ?? '');
  132. selectedModels = $config?.default_models.split(',');
  133. } else {
  134. selectedModels = [''];
  135. }
  136. if ($page.url.searchParams.get('q')) {
  137. prompt = $page.url.searchParams.get('q') ?? '';
  138. if (prompt) {
  139. await tick();
  140. submitPrompt(prompt);
  141. }
  142. }
  143. selectedModels = selectedModels.map((modelId) =>
  144. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  145. );
  146. const userSettings = await getUserSettings(localStorage.token);
  147. if (userSettings) {
  148. settings.set(userSettings.ui);
  149. } else {
  150. settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  151. }
  152. const chatInput = document.getElementById('chat-textarea');
  153. setTimeout(() => chatInput?.focus(), 0);
  154. };
  155. const loadChat = async () => {
  156. chatId.set(chatIdProp);
  157. chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
  158. await goto('/');
  159. return null;
  160. });
  161. if (chat) {
  162. tags = await getTags();
  163. const chatContent = chat.chat;
  164. if (chatContent) {
  165. console.log(chatContent);
  166. selectedModels =
  167. (chatContent?.models ?? undefined) !== undefined
  168. ? chatContent.models
  169. : [chatContent.models ?? ''];
  170. history =
  171. (chatContent?.history ?? undefined) !== undefined
  172. ? chatContent.history
  173. : convertMessagesToHistory(chatContent.messages);
  174. title = chatContent.title;
  175. const userSettings = await getUserSettings(localStorage.token);
  176. if (userSettings) {
  177. await settings.set(userSettings.ui);
  178. } else {
  179. await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  180. }
  181. await settings.set({
  182. ...$settings,
  183. system: chatContent.system ?? $settings.system,
  184. params: chatContent.options ?? $settings.params
  185. });
  186. autoScroll = true;
  187. await tick();
  188. if (messages.length > 0) {
  189. history.messages[messages.at(-1).id].done = true;
  190. }
  191. await tick();
  192. return true;
  193. } else {
  194. return null;
  195. }
  196. }
  197. };
  198. const scrollToBottom = async () => {
  199. await tick();
  200. if (messagesContainerElement) {
  201. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  202. }
  203. };
  204. const createMessagesList = (responseMessageId) => {
  205. const message = history.messages[responseMessageId];
  206. if (message.parentId) {
  207. return [...createMessagesList(message.parentId), message];
  208. } else {
  209. return [message];
  210. }
  211. };
  212. const chatCompletedHandler = async (modelId, messages) => {
  213. await mermaid.run({
  214. querySelector: '.mermaid'
  215. });
  216. const res = await chatCompleted(localStorage.token, {
  217. model: modelId,
  218. messages: messages.map((m) => ({
  219. id: m.id,
  220. role: m.role,
  221. content: m.content,
  222. timestamp: m.timestamp
  223. })),
  224. chat_id: $chatId
  225. }).catch((error) => {
  226. console.error(error);
  227. return null;
  228. });
  229. if (res !== null) {
  230. // Update chat history with the new messages
  231. for (const message of res.messages) {
  232. history.messages[message.id] = {
  233. ...history.messages[message.id],
  234. ...(history.messages[message.id].content !== message.content
  235. ? { originalContent: history.messages[message.id].content }
  236. : {}),
  237. ...message
  238. };
  239. }
  240. }
  241. };
  242. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  243. return setInterval(() => {
  244. $socket?.emit('usage', {
  245. action: 'chat',
  246. model: modelId,
  247. chat_id: chatId
  248. });
  249. }, 1000);
  250. };
  251. //////////////////////////
  252. // Ollama functions
  253. //////////////////////////
  254. const submitPrompt = async (userPrompt, _user = null) => {
  255. let _responses = [];
  256. console.log('submitPrompt', $chatId);
  257. selectedModels = selectedModels.map((modelId) =>
  258. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  259. );
  260. if (selectedModels.includes('')) {
  261. toast.error($i18n.t('Model not selected'));
  262. } else if (messages.length != 0 && messages.at(-1).done != true) {
  263. // Response not done
  264. console.log('wait');
  265. } else if (
  266. files.length > 0 &&
  267. files.filter((file) => file.upload_status === false).length > 0
  268. ) {
  269. // Upload not done
  270. toast.error(
  271. $i18n.t(
  272. `Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
  273. )
  274. );
  275. } else {
  276. // Reset chat message textarea height
  277. document.getElementById('chat-textarea').style.height = '';
  278. // Create user message
  279. let userMessageId = uuidv4();
  280. let userMessage = {
  281. id: userMessageId,
  282. parentId: messages.length !== 0 ? messages.at(-1).id : null,
  283. childrenIds: [],
  284. role: 'user',
  285. user: _user ?? undefined,
  286. content: userPrompt,
  287. files: files.length > 0 ? files : undefined,
  288. timestamp: Math.floor(Date.now() / 1000), // Unix epoch
  289. models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
  290. };
  291. // Add message to history and Set currentId to messageId
  292. history.messages[userMessageId] = userMessage;
  293. history.currentId = userMessageId;
  294. // Append messageId to childrenIds of parent message
  295. if (messages.length !== 0) {
  296. history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
  297. }
  298. // Wait until history/message have been updated
  299. await tick();
  300. // Create new chat if only one message in messages
  301. if (messages.length == 1) {
  302. if ($settings.saveChatHistory ?? true) {
  303. chat = await createNewChat(localStorage.token, {
  304. id: $chatId,
  305. title: $i18n.t('New Chat'),
  306. models: selectedModels,
  307. system: $settings.system ?? undefined,
  308. options: {
  309. ...($settings.params ?? {})
  310. },
  311. messages: messages,
  312. history: history,
  313. tags: [],
  314. timestamp: Date.now()
  315. });
  316. await chats.set(await getChatList(localStorage.token));
  317. await chatId.set(chat.id);
  318. } else {
  319. await chatId.set('local');
  320. }
  321. await tick();
  322. }
  323. // Reset chat input textarea
  324. prompt = '';
  325. document.getElementById('chat-textarea').style.height = '';
  326. files = [];
  327. // Send prompt
  328. _responses = await sendPrompt(userPrompt, userMessageId);
  329. }
  330. return _responses;
  331. };
  332. const sendPrompt = async (prompt, parentId, modelId = null) => {
  333. let _responses = [];
  334. const _chatId = JSON.parse(JSON.stringify($chatId));
  335. await Promise.all(
  336. (modelId
  337. ? [modelId]
  338. : atSelectedModel !== undefined
  339. ? [atSelectedModel.id]
  340. : selectedModels
  341. ).map(async (modelId) => {
  342. console.log('modelId', modelId);
  343. const model = $models.filter((m) => m.id === modelId).at(0);
  344. if (model) {
  345. // If there are image files, check if model is vision capable
  346. const hasImages = messages.some((message) =>
  347. message.files?.some((file) => file.type === 'image')
  348. );
  349. if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
  350. toast.error(
  351. $i18n.t('Model {{modelName}} is not vision capable', {
  352. modelName: model.name ?? model.id
  353. })
  354. );
  355. }
  356. // Create response message
  357. let responseMessageId = uuidv4();
  358. let responseMessage = {
  359. parentId: parentId,
  360. id: responseMessageId,
  361. childrenIds: [],
  362. role: 'assistant',
  363. content: '',
  364. model: model.id,
  365. modelName: model.name ?? model.id,
  366. userContext: null,
  367. timestamp: Math.floor(Date.now() / 1000) // Unix epoch
  368. };
  369. // Add message to history and Set currentId to messageId
  370. history.messages[responseMessageId] = responseMessage;
  371. history.currentId = responseMessageId;
  372. // Append messageId to childrenIds of parent message
  373. if (parentId !== null) {
  374. history.messages[parentId].childrenIds = [
  375. ...history.messages[parentId].childrenIds,
  376. responseMessageId
  377. ];
  378. }
  379. await tick();
  380. let userContext = null;
  381. if ($settings?.memory ?? false) {
  382. if (userContext === null) {
  383. const res = await queryMemory(localStorage.token, prompt).catch((error) => {
  384. toast.error(error);
  385. return null;
  386. });
  387. if (res) {
  388. if (res.documents[0].length > 0) {
  389. userContext = res.documents.reduce((acc, doc, index) => {
  390. const createdAtTimestamp = res.metadatas[index][0].created_at;
  391. const createdAtDate = new Date(createdAtTimestamp * 1000)
  392. .toISOString()
  393. .split('T')[0];
  394. acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
  395. return acc;
  396. }, []);
  397. }
  398. console.log(userContext);
  399. }
  400. }
  401. }
  402. responseMessage.userContext = userContext;
  403. const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
  404. if (webSearchEnabled) {
  405. await getWebSearchResults(model.id, parentId, responseMessageId);
  406. }
  407. let _response = null;
  408. if (model?.owned_by === 'openai') {
  409. _response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
  410. } else if (model) {
  411. _response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
  412. }
  413. _responses.push(_response);
  414. console.log('chatEventEmitter', chatEventEmitter);
  415. if (chatEventEmitter) clearInterval(chatEventEmitter);
  416. } else {
  417. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  418. }
  419. })
  420. );
  421. await chats.set(await getChatList(localStorage.token));
  422. return _responses;
  423. };
  424. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  425. const responseMessage = history.messages[responseId];
  426. responseMessage.status = {
  427. done: false,
  428. action: 'web_search',
  429. description: $i18n.t('Generating search query')
  430. };
  431. messages = messages;
  432. const prompt = history.messages[parentId].content;
  433. let searchQuery = prompt;
  434. if (prompt.length > 100) {
  435. searchQuery = await generateChatSearchQuery(model, prompt);
  436. if (!searchQuery) {
  437. toast.warning($i18n.t('No search query generated'));
  438. responseMessage.status = {
  439. ...responseMessage.status,
  440. done: true,
  441. error: true,
  442. description: 'No search query generated'
  443. };
  444. messages = messages;
  445. return;
  446. }
  447. }
  448. responseMessage.status = {
  449. ...responseMessage.status,
  450. description: $i18n.t("Searching the web for '{{searchQuery}}'", { searchQuery })
  451. };
  452. messages = messages;
  453. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  454. console.log(error);
  455. toast.error(error);
  456. return null;
  457. });
  458. if (results) {
  459. responseMessage.status = {
  460. ...responseMessage.status,
  461. done: true,
  462. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  463. urls: results.filenames
  464. };
  465. if (responseMessage?.files ?? undefined === undefined) {
  466. responseMessage.files = [];
  467. }
  468. responseMessage.files.push({
  469. collection_name: results.collection_name,
  470. name: searchQuery,
  471. type: 'web_search_results',
  472. urls: results.filenames
  473. });
  474. messages = messages;
  475. } else {
  476. responseMessage.status = {
  477. ...responseMessage.status,
  478. done: true,
  479. error: true,
  480. description: 'No search results found'
  481. };
  482. messages = messages;
  483. }
  484. };
  485. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  486. let _response = null;
  487. model = model.id;
  488. const responseMessage = history.messages[responseMessageId];
  489. // Wait until history/message have been updated
  490. await tick();
  491. // Scroll down
  492. scrollToBottom();
  493. const messagesBody = [
  494. $settings.system || (responseMessage?.userContext ?? null)
  495. ? {
  496. role: 'system',
  497. content: `${promptTemplate($settings?.system ?? '', $user.name)}${
  498. responseMessage?.userContext ?? null
  499. ? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
  500. : ''
  501. }`
  502. }
  503. : undefined,
  504. ...messages
  505. ]
  506. .filter((message) => message?.content?.trim())
  507. .map((message, idx, arr) => {
  508. // Prepare the base message object
  509. const baseMessage = {
  510. role: message.role,
  511. content: message.content
  512. };
  513. // Extract and format image URLs if any exist
  514. const imageUrls = message.files
  515. ?.filter((file) => file.type === 'image')
  516. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  517. // Add images array only if it contains elements
  518. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  519. baseMessage.images = imageUrls;
  520. }
  521. return baseMessage;
  522. });
  523. let lastImageIndex = -1;
  524. // Find the index of the last object with images
  525. messagesBody.forEach((item, index) => {
  526. if (item.images) {
  527. lastImageIndex = index;
  528. }
  529. });
  530. // Remove images from all but the last one
  531. messagesBody.forEach((item, index) => {
  532. if (index !== lastImageIndex) {
  533. delete item.images;
  534. }
  535. });
  536. const docs = messages
  537. .filter((message) => message?.files ?? null)
  538. .map((message) =>
  539. message.files.filter((item) =>
  540. ['doc', 'collection', 'web_search_results'].includes(item.type)
  541. )
  542. )
  543. .flat(1);
  544. const [res, controller] = await generateChatCompletion(localStorage.token, {
  545. model: model,
  546. messages: messagesBody,
  547. options: {
  548. ...($settings.params ?? {}),
  549. stop:
  550. $settings?.params?.stop ?? undefined
  551. ? $settings.params.stop.map((str) =>
  552. decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  553. )
  554. : undefined,
  555. num_predict: $settings?.params?.max_tokens ?? undefined,
  556. repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
  557. },
  558. format: $settings.requestFormat ?? undefined,
  559. keep_alive: $settings.keepAlive ?? undefined,
  560. docs: docs.length > 0 ? docs : undefined,
  561. citations: docs.length > 0,
  562. chat_id: $chatId
  563. });
  564. if (res && res.ok) {
  565. console.log('controller', controller);
  566. const reader = res.body
  567. .pipeThrough(new TextDecoderStream())
  568. .pipeThrough(splitStream('\n'))
  569. .getReader();
  570. while (true) {
  571. const { value, done } = await reader.read();
  572. if (done || stopResponseFlag || _chatId !== $chatId) {
  573. responseMessage.done = true;
  574. messages = messages;
  575. if (stopResponseFlag) {
  576. controller.abort('User: Stop Response');
  577. } else {
  578. const messages = createMessagesList(responseMessageId);
  579. await chatCompletedHandler(model, messages);
  580. }
  581. _response = responseMessage.content;
  582. break;
  583. }
  584. try {
  585. let lines = value.split('\n');
  586. for (const line of lines) {
  587. if (line !== '') {
  588. console.log(line);
  589. let data = JSON.parse(line);
  590. if ('citations' in data) {
  591. responseMessage.citations = data.citations;
  592. continue;
  593. }
  594. if ('detail' in data) {
  595. throw data;
  596. }
  597. if (data.done == false) {
  598. if (responseMessage.content == '' && data.message.content == '\n') {
  599. continue;
  600. } else {
  601. responseMessage.content += data.message.content;
  602. messages = messages;
  603. }
  604. } else {
  605. responseMessage.done = true;
  606. if (responseMessage.content == '') {
  607. responseMessage.error = {
  608. code: 400,
  609. content: `Oops! No text generated from Ollama, Please try again.`
  610. };
  611. }
  612. responseMessage.context = data.context ?? null;
  613. responseMessage.info = {
  614. total_duration: data.total_duration,
  615. load_duration: data.load_duration,
  616. sample_count: data.sample_count,
  617. sample_duration: data.sample_duration,
  618. prompt_eval_count: data.prompt_eval_count,
  619. prompt_eval_duration: data.prompt_eval_duration,
  620. eval_count: data.eval_count,
  621. eval_duration: data.eval_duration
  622. };
  623. messages = messages;
  624. if ($settings.notificationEnabled && !document.hasFocus()) {
  625. const notification = new Notification(
  626. selectedModelfile
  627. ? `${
  628. selectedModelfile.title.charAt(0).toUpperCase() +
  629. selectedModelfile.title.slice(1)
  630. }`
  631. : `${model}`,
  632. {
  633. body: responseMessage.content,
  634. icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
  635. }
  636. );
  637. }
  638. if ($settings.responseAutoCopy) {
  639. copyToClipboard(responseMessage.content);
  640. }
  641. if ($settings.responseAutoPlayback) {
  642. await tick();
  643. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  644. }
  645. }
  646. }
  647. }
  648. } catch (error) {
  649. console.log(error);
  650. if ('detail' in error) {
  651. toast.error(error.detail);
  652. }
  653. break;
  654. }
  655. if (autoScroll) {
  656. scrollToBottom();
  657. }
  658. }
  659. if ($chatId == _chatId) {
  660. if ($settings.saveChatHistory ?? true) {
  661. chat = await updateChatById(localStorage.token, _chatId, {
  662. messages: messages,
  663. history: history,
  664. models: selectedModels
  665. });
  666. await chats.set(await getChatList(localStorage.token));
  667. }
  668. }
  669. } else {
  670. if (res !== null) {
  671. const error = await res.json();
  672. console.log(error);
  673. if ('detail' in error) {
  674. toast.error(error.detail);
  675. responseMessage.error = { content: error.detail };
  676. } else {
  677. toast.error(error.error);
  678. responseMessage.error = { content: error.error };
  679. }
  680. } else {
  681. toast.error(
  682. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  683. );
  684. responseMessage.error = {
  685. content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  686. provider: 'Ollama'
  687. })
  688. };
  689. }
  690. responseMessage.done = true;
  691. messages = messages;
  692. }
  693. stopResponseFlag = false;
  694. await tick();
  695. if (autoScroll) {
  696. scrollToBottom();
  697. }
  698. if (messages.length == 2 && messages.at(1).content !== '') {
  699. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  700. const _title = await generateChatTitle(userPrompt);
  701. await setChatTitle(_chatId, _title);
  702. }
  703. return _response;
  704. };
  705. const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
  706. let _response = null;
  707. const responseMessage = history.messages[responseMessageId];
  708. const docs = messages
  709. .filter((message) => message?.files ?? null)
  710. .map((message) =>
  711. message.files.filter((item) =>
  712. ['doc', 'collection', 'web_search_results'].includes(item.type)
  713. )
  714. )
  715. .flat(1);
  716. console.log(docs);
  717. scrollToBottom();
  718. try {
  719. const [res, controller] = await generateOpenAIChatCompletion(
  720. localStorage.token,
  721. {
  722. model: model.id,
  723. stream: true,
  724. stream_options:
  725. model.info?.meta?.capabilities?.usage ?? false
  726. ? {
  727. include_usage: true
  728. }
  729. : undefined,
  730. messages: [
  731. $settings.system || (responseMessage?.userContext ?? null)
  732. ? {
  733. role: 'system',
  734. content: `${promptTemplate($settings?.system ?? '', $user.name)}${
  735. responseMessage?.userContext ?? null
  736. ? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
  737. : ''
  738. }`
  739. }
  740. : undefined,
  741. ...messages
  742. ]
  743. .filter((message) => message?.content?.trim())
  744. .map((message, idx, arr) => ({
  745. role: message.role,
  746. ...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
  747. message.role === 'user'
  748. ? {
  749. content: [
  750. {
  751. type: 'text',
  752. text:
  753. arr.length - 1 !== idx
  754. ? message.content
  755. : message?.raContent ?? message.content
  756. },
  757. ...message.files
  758. .filter((file) => file.type === 'image')
  759. .map((file) => ({
  760. type: 'image_url',
  761. image_url: {
  762. url: file.url
  763. }
  764. }))
  765. ]
  766. }
  767. : {
  768. content:
  769. arr.length - 1 !== idx
  770. ? message.content
  771. : message?.raContent ?? message.content
  772. })
  773. })),
  774. seed: $settings?.params?.seed ?? undefined,
  775. stop:
  776. $settings?.params?.stop ?? undefined
  777. ? $settings.params.stop.map((str) =>
  778. decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  779. )
  780. : undefined,
  781. temperature: $settings?.params?.temperature ?? undefined,
  782. top_p: $settings?.params?.top_p ?? undefined,
  783. frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
  784. max_tokens: $settings?.params?.max_tokens ?? undefined,
  785. docs: docs.length > 0 ? docs : undefined,
  786. citations: docs.length > 0,
  787. chat_id: $chatId
  788. },
  789. `${OPENAI_API_BASE_URL}`
  790. );
  791. // Wait until history/message have been updated
  792. await tick();
  793. scrollToBottom();
  794. if (res && res.ok && res.body) {
  795. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  796. let lastUsage = null;
  797. for await (const update of textStream) {
  798. const { value, done, citations, error, usage } = update;
  799. if (error) {
  800. await handleOpenAIError(error, null, model, responseMessage);
  801. break;
  802. }
  803. if (done || stopResponseFlag || _chatId !== $chatId) {
  804. responseMessage.done = true;
  805. messages = messages;
  806. if (stopResponseFlag) {
  807. controller.abort('User: Stop Response');
  808. } else {
  809. const messages = createMessagesList(responseMessageId);
  810. await chatCompletedHandler(model.id, messages);
  811. }
  812. _response = responseMessage.content;
  813. break;
  814. }
  815. if (usage) {
  816. lastUsage = usage;
  817. }
  818. if (citations) {
  819. responseMessage.citations = citations;
  820. continue;
  821. }
  822. if (responseMessage.content == '' && value == '\n') {
  823. continue;
  824. } else {
  825. responseMessage.content += value;
  826. messages = messages;
  827. }
  828. if (autoScroll) {
  829. scrollToBottom();
  830. }
  831. }
  832. if ($settings.notificationEnabled && !document.hasFocus()) {
  833. const notification = new Notification(`OpenAI ${model}`, {
  834. body: responseMessage.content,
  835. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  836. });
  837. }
  838. if ($settings.responseAutoCopy) {
  839. copyToClipboard(responseMessage.content);
  840. }
  841. if ($settings.responseAutoPlayback) {
  842. await tick();
  843. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  844. }
  845. if (lastUsage) {
  846. responseMessage.info = { ...lastUsage, openai: true };
  847. }
  848. if ($chatId == _chatId) {
  849. if ($settings.saveChatHistory ?? true) {
  850. chat = await updateChatById(localStorage.token, _chatId, {
  851. models: selectedModels,
  852. messages: messages,
  853. history: history
  854. });
  855. await chats.set(await getChatList(localStorage.token));
  856. }
  857. }
  858. } else {
  859. await handleOpenAIError(null, res, model, responseMessage);
  860. }
  861. } catch (error) {
  862. await handleOpenAIError(error, null, model, responseMessage);
  863. }
  864. messages = messages;
  865. stopResponseFlag = false;
  866. await tick();
  867. if (autoScroll) {
  868. scrollToBottom();
  869. }
  870. if (messages.length == 2) {
  871. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  872. const _title = await generateChatTitle(userPrompt);
  873. await setChatTitle(_chatId, _title);
  874. }
  875. return _response;
  876. };
  877. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  878. let errorMessage = '';
  879. let innerError;
  880. if (error) {
  881. innerError = error;
  882. } else if (res !== null) {
  883. innerError = await res.json();
  884. }
  885. console.error(innerError);
  886. if ('detail' in innerError) {
  887. toast.error(innerError.detail);
  888. errorMessage = innerError.detail;
  889. } else if ('error' in innerError) {
  890. if ('message' in innerError.error) {
  891. toast.error(innerError.error.message);
  892. errorMessage = innerError.error.message;
  893. } else {
  894. toast.error(innerError.error);
  895. errorMessage = innerError.error;
  896. }
  897. } else if ('message' in innerError) {
  898. toast.error(innerError.message);
  899. errorMessage = innerError.message;
  900. }
  901. responseMessage.error = {
  902. content:
  903. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  904. provider: model.name ?? model.id
  905. }) +
  906. '\n' +
  907. errorMessage
  908. };
  909. responseMessage.done = true;
  910. messages = messages;
  911. };
  912. const stopResponse = () => {
  913. stopResponseFlag = true;
  914. console.log('stopResponse');
  915. };
  916. const regenerateResponse = async (message) => {
  917. console.log('regenerateResponse');
  918. if (messages.length != 0) {
  919. let userMessage = history.messages[message.parentId];
  920. let userPrompt = userMessage.content;
  921. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  922. await sendPrompt(userPrompt, userMessage.id);
  923. } else {
  924. await sendPrompt(userPrompt, userMessage.id, message.model);
  925. }
  926. }
  927. };
  928. const continueGeneration = async () => {
  929. console.log('continueGeneration');
  930. const _chatId = JSON.parse(JSON.stringify($chatId));
  931. if (messages.length != 0 && messages.at(-1).done == true) {
  932. const responseMessage = history.messages[history.currentId];
  933. responseMessage.done = false;
  934. await tick();
  935. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  936. if (model) {
  937. if (model?.owned_by === 'openai') {
  938. await sendPromptOpenAI(
  939. model,
  940. history.messages[responseMessage.parentId].content,
  941. responseMessage.id,
  942. _chatId
  943. );
  944. } else
  945. await sendPromptOllama(
  946. model,
  947. history.messages[responseMessage.parentId].content,
  948. responseMessage.id,
  949. _chatId
  950. );
  951. }
  952. } else {
  953. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  954. }
  955. };
  956. const generateChatTitle = async (userPrompt) => {
  957. if ($settings?.title?.auto ?? true) {
  958. const model = $models.find((model) => model.id === selectedModels[0]);
  959. const titleModelId =
  960. model?.owned_by === 'openai' ?? false
  961. ? $settings?.title?.modelExternal ?? selectedModels[0]
  962. : $settings?.title?.model ?? selectedModels[0];
  963. const titleModel = $models.find((model) => model.id === titleModelId);
  964. console.log(titleModel);
  965. const title = await generateTitle(
  966. localStorage.token,
  967. $settings?.title?.prompt ??
  968. $i18n.t(
  969. "Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
  970. ) + ' {{prompt}}',
  971. titleModelId,
  972. userPrompt,
  973. $chatId,
  974. titleModel?.owned_by === 'openai' ?? false
  975. ? `${OPENAI_API_BASE_URL}`
  976. : `${OLLAMA_API_BASE_URL}/v1`
  977. );
  978. return title;
  979. } else {
  980. return `${userPrompt}`;
  981. }
  982. };
  983. const generateChatSearchQuery = async (modelId: string, prompt: string) => {
  984. const model = $models.find((model) => model.id === modelId);
  985. const taskModelId =
  986. model?.owned_by === 'openai' ?? false
  987. ? $settings?.title?.modelExternal ?? modelId
  988. : $settings?.title?.model ?? modelId;
  989. const taskModel = $models.find((model) => model.id === taskModelId);
  990. const previousMessages = messages
  991. .filter((message) => message.role === 'user')
  992. .map((message) => message.content);
  993. return await generateSearchQuery(
  994. localStorage.token,
  995. taskModelId,
  996. previousMessages,
  997. prompt,
  998. taskModel?.owned_by === 'openai' ?? false
  999. ? `${OPENAI_API_BASE_URL}`
  1000. : `${OLLAMA_API_BASE_URL}/v1`
  1001. );
  1002. };
  1003. const setChatTitle = async (_chatId, _title) => {
  1004. if (_chatId === $chatId) {
  1005. title = _title;
  1006. }
  1007. if ($settings.saveChatHistory ?? true) {
  1008. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1009. await chats.set(await getChatList(localStorage.token));
  1010. }
  1011. };
  1012. const getTags = async () => {
  1013. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1014. return [];
  1015. });
  1016. };
  1017. const addTag = async (tagName) => {
  1018. const res = await addTagById(localStorage.token, $chatId, tagName);
  1019. tags = await getTags();
  1020. chat = await updateChatById(localStorage.token, $chatId, {
  1021. tags: tags
  1022. });
  1023. _tags.set(await getAllChatTags(localStorage.token));
  1024. };
  1025. const deleteTag = async (tagName) => {
  1026. const res = await deleteTagById(localStorage.token, $chatId, tagName);
  1027. tags = await getTags();
  1028. chat = await updateChatById(localStorage.token, $chatId, {
  1029. tags: tags
  1030. });
  1031. _tags.set(await getAllChatTags(localStorage.token));
  1032. };
  1033. </script>
<!-- Document title: truncate long chat titles to 30 chars, always suffix the app name. -->
<svelte:head>
<title>
{title
? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
: `${$WEBUI_NAME}`}
</title>
</svelte:head>
<!-- Voice-call overlay; shares the prompt submitter and attached files with this page. -->
<CallOverlay {submitPrompt} bind:files />
<!-- Render once a chat id is loaded (or immediately for a fresh, id-less chat). -->
{#if !chatIdProp || (loaded && chatIdProp)}
<div
class="h-screen max-h-[100dvh] {$showSidebar
? 'md:max-w-[calc(100%-260px)]'
: ''} w-full max-w-full flex flex-col"
>
<Navbar
{title}
bind:selectedModels
bind:showModelSelector
shareEnabled={messages.length > 0}
{chat}
{initNewChat}
/>
<!-- Admin banners: shown only on an empty, unsaved chat with at most one model selected.
     Dismissed banner ids persist in localStorage, pruned to banners that still exist. -->
{#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
<div
class="absolute top-[4.25rem] w-full {$showSidebar ? 'md:max-w-[calc(100%-260px)]' : ''}"
>
<div class=" flex flex-col gap-1 w-full">
{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
<Banner
{banner}
on:dismiss={(e) => {
const bannerId = e.detail;
localStorage.setItem(
'dismissedBannerIds',
JSON.stringify(
[
bannerId,
...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
].filter((id) => $banners.find((b) => b.id === id))
)
);
}}
/>
{/each}
</div>
</div>
{/if}
<div class="flex flex-col flex-auto">
<!-- Scroll container: autoScroll stays on only while the user is within ~5px of the bottom. -->
<div
class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
id="messages-container"
bind:this={messagesContainerElement}
on:scroll={(e) => {
autoScroll =
messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
messagesContainerElement.clientHeight + 5;
}}
>
<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
<Messages
chatId={$chatId}
{selectedModels}
{processing}
bind:history
bind:messages
bind:autoScroll
bind:prompt
bottomPadding={files.length > 0}
{sendPrompt}
{continueGeneration}
{regenerateResponse}
/>
</div>
</div>
<!-- Prompt composer; extra bottom padding is added above when files are attached. -->
<MessageInput
bind:files
bind:prompt
bind:autoScroll
bind:webSearchEnabled
bind:atSelectedModel
{selectedModels}
{messages}
{submitPrompt}
{stopResponse}
/>
</div>
</div>
{/if}