<!-- Chat.svelte -->
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import { getContext, onMount, tick } from 'svelte';
  5. import { goto } from '$app/navigation';
  6. import { page } from '$app/stores';
  7. import {
  8. chatId,
  9. chats,
  10. config,
  11. type Model,
  12. models,
  13. settings,
  14. showSidebar,
  15. tags as _tags,
  16. WEBUI_NAME,
  17. banners
  18. } from '$lib/stores';
  19. import { convertMessagesToHistory, copyToClipboard, splitStream } from '$lib/utils';
  20. import { cancelOllamaRequest, generateChatCompletion } from '$lib/apis/ollama';
  21. import {
  22. addTagById,
  23. createNewChat,
  24. deleteTagById,
  25. getAllChatTags,
  26. getChatById,
  27. getChatList,
  28. getTagsById,
  29. updateChatById
  30. } from '$lib/apis/chats';
  31. import { generateOpenAIChatCompletion, generateTitle } from '$lib/apis/openai';
  32. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  33. import Messages from '$lib/components/chat/Messages.svelte';
  34. import Navbar from '$lib/components/layout/Navbar.svelte';
  35. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  36. import { createOpenAITextStream } from '$lib/apis/streaming';
  37. import { queryMemory } from '$lib/apis/memories';
  38. import type { Writable } from 'svelte/store';
  39. import type { i18n as i18nType } from 'i18next';
  40. import Banner from '../common/Banner.svelte';
  41. import { getUserSettings } from '$lib/apis/users';
  42. const i18n: Writable<i18nType> = getContext('i18n');
  43. export let chatIdProp = '';
  44. let loaded = false;
  45. let stopResponseFlag = false;
  46. let autoScroll = true;
  47. let processing = '';
  48. let messagesContainerElement: HTMLDivElement;
  49. let currentRequestId = null;
  50. let showModelSelector = true;
  51. let selectedModels = [''];
  52. let atSelectedModel: Model | undefined;
  53. let chat = null;
  54. let tags = [];
  55. let title = '';
  56. let prompt = '';
  57. let files = [];
  58. let messages = [];
  59. let history = {
  60. messages: {},
  61. currentId: null
  62. };
  63. $: if (history.currentId !== null) {
  64. let _messages = [];
  65. let currentMessage = history.messages[history.currentId];
  66. while (currentMessage !== null) {
  67. _messages.unshift({ ...currentMessage });
  68. currentMessage =
  69. currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
  70. }
  71. messages = _messages;
  72. } else {
  73. messages = [];
  74. }
  75. $: if (chatIdProp) {
  76. (async () => {
  77. if (await loadChat()) {
  78. await tick();
  79. loaded = true;
  80. window.setTimeout(() => scrollToBottom(), 0);
  81. const chatInput = document.getElementById('chat-textarea');
  82. chatInput?.focus();
  83. } else {
  84. await goto('/');
  85. }
  86. })();
  87. }
  88. onMount(async () => {
  89. if (!$chatId) {
  90. await initNewChat();
  91. } else {
  92. if (!($settings.saveChatHistory ?? true)) {
  93. await goto('/');
  94. }
  95. }
  96. });
  97. //////////////////////////
  98. // Web functions
  99. //////////////////////////
  100. const initNewChat = async () => {
  101. if (currentRequestId !== null) {
  102. await cancelOllamaRequest(localStorage.token, currentRequestId);
  103. currentRequestId = null;
  104. }
  105. window.history.replaceState(history.state, '', `/`);
  106. await chatId.set('');
  107. autoScroll = true;
  108. title = '';
  109. messages = [];
  110. history = {
  111. messages: {},
  112. currentId: null
  113. };
  114. if ($page.url.searchParams.get('models')) {
  115. selectedModels = $page.url.searchParams.get('models')?.split(',');
  116. } else if ($settings?.models) {
  117. selectedModels = $settings?.models;
  118. } else if ($config?.default_models) {
  119. selectedModels = $config?.default_models.split(',');
  120. } else {
  121. selectedModels = [''];
  122. }
  123. if ($page.url.searchParams.get('q')) {
  124. prompt = $page.url.searchParams.get('q') ?? '';
  125. if (prompt) {
  126. await tick();
  127. submitPrompt(prompt);
  128. }
  129. }
  130. selectedModels = selectedModels.map((modelId) =>
  131. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  132. );
  133. const userSettings = await getUserSettings(localStorage.token);
  134. if (userSettings) {
  135. settings.set(userSettings.ui);
  136. } else {
  137. settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  138. }
  139. const chatInput = document.getElementById('chat-textarea');
  140. setTimeout(() => chatInput?.focus(), 0);
  141. };
  142. const loadChat = async () => {
  143. chatId.set(chatIdProp);
  144. chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
  145. await goto('/');
  146. return null;
  147. });
  148. if (chat) {
  149. tags = await getTags();
  150. const chatContent = chat.chat;
  151. if (chatContent) {
  152. console.log(chatContent);
  153. selectedModels =
  154. (chatContent?.models ?? undefined) !== undefined
  155. ? chatContent.models
  156. : [chatContent.models ?? ''];
  157. history =
  158. (chatContent?.history ?? undefined) !== undefined
  159. ? chatContent.history
  160. : convertMessagesToHistory(chatContent.messages);
  161. title = chatContent.title;
  162. const userSettings = await getUserSettings(localStorage.token);
  163. if (userSettings) {
  164. await settings.set(userSettings.ui);
  165. } else {
  166. await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  167. }
  168. await settings.set({
  169. ...$settings,
  170. system: chatContent.system ?? $settings.system,
  171. params: chatContent.options ?? $settings.params
  172. });
  173. autoScroll = true;
  174. await tick();
  175. if (messages.length > 0) {
  176. history.messages[messages.at(-1).id].done = true;
  177. }
  178. await tick();
  179. return true;
  180. } else {
  181. return null;
  182. }
  183. }
  184. };
  185. const scrollToBottom = async () => {
  186. await tick();
  187. if (messagesContainerElement) {
  188. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  189. }
  190. };
  191. //////////////////////////
  192. // Ollama functions
  193. //////////////////////////
  194. const submitPrompt = async (userPrompt, _user = null) => {
  195. console.log('submitPrompt', $chatId);
  196. selectedModels = selectedModels.map((modelId) =>
  197. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  198. );
  199. if (selectedModels.includes('')) {
  200. toast.error($i18n.t('Model not selected'));
  201. } else if (messages.length != 0 && messages.at(-1).done != true) {
  202. // Response not done
  203. console.log('wait');
  204. } else if (
  205. files.length > 0 &&
  206. files.filter((file) => file.upload_status === false).length > 0
  207. ) {
  208. // Upload not done
  209. toast.error(
  210. $i18n.t(
  211. `Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
  212. )
  213. );
  214. } else {
  215. // Reset chat message textarea height
  216. document.getElementById('chat-textarea').style.height = '';
  217. // Create user message
  218. let userMessageId = uuidv4();
  219. let userMessage = {
  220. id: userMessageId,
  221. parentId: messages.length !== 0 ? messages.at(-1).id : null,
  222. childrenIds: [],
  223. role: 'user',
  224. user: _user ?? undefined,
  225. content: userPrompt,
  226. files: files.length > 0 ? files : undefined,
  227. timestamp: Math.floor(Date.now() / 1000), // Unix epoch
  228. models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
  229. };
  230. // Add message to history and Set currentId to messageId
  231. history.messages[userMessageId] = userMessage;
  232. history.currentId = userMessageId;
  233. // Append messageId to childrenIds of parent message
  234. if (messages.length !== 0) {
  235. history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
  236. }
  237. // Wait until history/message have been updated
  238. await tick();
  239. // Create new chat if only one message in messages
  240. if (messages.length == 1) {
  241. if ($settings.saveChatHistory ?? true) {
  242. chat = await createNewChat(localStorage.token, {
  243. id: $chatId,
  244. title: $i18n.t('New Chat'),
  245. models: selectedModels,
  246. system: $settings.system ?? undefined,
  247. options: {
  248. ...($settings.params ?? {})
  249. },
  250. messages: messages,
  251. history: history,
  252. tags: [],
  253. timestamp: Date.now()
  254. });
  255. await chats.set(await getChatList(localStorage.token));
  256. await chatId.set(chat.id);
  257. } else {
  258. await chatId.set('local');
  259. }
  260. await tick();
  261. }
  262. // Reset chat input textarea
  263. prompt = '';
  264. document.getElementById('chat-textarea').style.height = '';
  265. files = [];
  266. // Send prompt
  267. await sendPrompt(userPrompt, userMessageId);
  268. }
  269. };
  270. const sendPrompt = async (prompt, parentId, modelId = null) => {
  271. const _chatId = JSON.parse(JSON.stringify($chatId));
  272. await Promise.all(
  273. (modelId
  274. ? [modelId]
  275. : atSelectedModel !== undefined
  276. ? [atSelectedModel.id]
  277. : selectedModels
  278. ).map(async (modelId) => {
  279. console.log('modelId', modelId);
  280. const model = $models.filter((m) => m.id === modelId).at(0);
  281. if (model) {
  282. // If there are image files, check if model is vision capable
  283. const hasImages = messages.some((message) =>
  284. message.files?.some((file) => file.type === 'image')
  285. );
  286. if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
  287. toast.error(
  288. $i18n.t('Model {{modelName}} is not vision capable', {
  289. modelName: model.name ?? model.id
  290. })
  291. );
  292. }
  293. // Create response message
  294. let responseMessageId = uuidv4();
  295. let responseMessage = {
  296. parentId: parentId,
  297. id: responseMessageId,
  298. childrenIds: [],
  299. role: 'assistant',
  300. content: '',
  301. model: model.id,
  302. modelName: model.name ?? model.id,
  303. userContext: null,
  304. timestamp: Math.floor(Date.now() / 1000) // Unix epoch
  305. };
  306. // Add message to history and Set currentId to messageId
  307. history.messages[responseMessageId] = responseMessage;
  308. history.currentId = responseMessageId;
  309. // Append messageId to childrenIds of parent message
  310. if (parentId !== null) {
  311. history.messages[parentId].childrenIds = [
  312. ...history.messages[parentId].childrenIds,
  313. responseMessageId
  314. ];
  315. }
  316. await tick();
  317. let userContext = null;
  318. if ($settings?.memory ?? false) {
  319. if (userContext === null) {
  320. const res = await queryMemory(localStorage.token, prompt).catch((error) => {
  321. toast.error(error);
  322. return null;
  323. });
  324. if (res) {
  325. if (res.documents[0].length > 0) {
  326. userContext = res.documents.reduce((acc, doc, index) => {
  327. const createdAtTimestamp = res.metadatas[index][0].created_at;
  328. const createdAtDate = new Date(createdAtTimestamp * 1000)
  329. .toISOString()
  330. .split('T')[0];
  331. acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
  332. return acc;
  333. }, []);
  334. }
  335. console.log(userContext);
  336. }
  337. }
  338. }
  339. responseMessage.userContext = userContext;
  340. if (model?.owned_by === 'openai') {
  341. await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
  342. } else if (model) {
  343. await sendPromptOllama(model, prompt, responseMessageId, _chatId);
  344. }
  345. } else {
  346. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  347. }
  348. })
  349. );
  350. await chats.set(await getChatList(localStorage.token));
  351. };
  352. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  353. model = model.id;
  354. const responseMessage = history.messages[responseMessageId];
  355. // Wait until history/message have been updated
  356. await tick();
  357. // Scroll down
  358. scrollToBottom();
  359. const messagesBody = [
  360. $settings.system || (responseMessage?.userContext ?? null)
  361. ? {
  362. role: 'system',
  363. content: `${$settings?.system ?? ''}${
  364. responseMessage?.userContext ?? null
  365. ? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
  366. : ''
  367. }`
  368. }
  369. : undefined,
  370. ...messages
  371. ]
  372. .filter((message) => message?.content?.trim())
  373. .map((message, idx, arr) => {
  374. // Prepare the base message object
  375. const baseMessage = {
  376. role: message.role,
  377. content: message.content
  378. };
  379. // Extract and format image URLs if any exist
  380. const imageUrls = message.files
  381. ?.filter((file) => file.type === 'image')
  382. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  383. // Add images array only if it contains elements
  384. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  385. baseMessage.images = imageUrls;
  386. }
  387. return baseMessage;
  388. });
  389. let lastImageIndex = -1;
  390. // Find the index of the last object with images
  391. messagesBody.forEach((item, index) => {
  392. if (item.images) {
  393. lastImageIndex = index;
  394. }
  395. });
  396. // Remove images from all but the last one
  397. messagesBody.forEach((item, index) => {
  398. if (index !== lastImageIndex) {
  399. delete item.images;
  400. }
  401. });
  402. const docs = messages
  403. .filter((message) => message?.files ?? null)
  404. .map((message) =>
  405. message.files.filter((item) => item.type === 'doc' || item.type === 'collection')
  406. )
  407. .flat(1);
  408. const [res, controller] = await generateChatCompletion(localStorage.token, {
  409. model: model,
  410. messages: messagesBody,
  411. options: {
  412. ...($settings.params ?? {}),
  413. stop:
  414. $settings?.params?.stop ?? undefined
  415. ? $settings.params.stop.map((str) =>
  416. decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  417. )
  418. : undefined,
  419. num_predict: $settings?.params?.max_tokens ?? undefined,
  420. repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
  421. },
  422. format: $settings.requestFormat ?? undefined,
  423. keep_alive: $settings.keepAlive ?? undefined,
  424. docs: docs.length > 0 ? docs : undefined,
  425. citations: docs.length > 0
  426. });
  427. if (res && res.ok) {
  428. console.log('controller', controller);
  429. const reader = res.body
  430. .pipeThrough(new TextDecoderStream())
  431. .pipeThrough(splitStream('\n'))
  432. .getReader();
  433. while (true) {
  434. const { value, done } = await reader.read();
  435. if (done || stopResponseFlag || _chatId !== $chatId) {
  436. responseMessage.done = true;
  437. messages = messages;
  438. if (stopResponseFlag) {
  439. controller.abort('User: Stop Response');
  440. await cancelOllamaRequest(localStorage.token, currentRequestId);
  441. }
  442. currentRequestId = null;
  443. break;
  444. }
  445. try {
  446. let lines = value.split('\n');
  447. for (const line of lines) {
  448. if (line !== '') {
  449. console.log(line);
  450. let data = JSON.parse(line);
  451. if ('citations' in data) {
  452. responseMessage.citations = data.citations;
  453. continue;
  454. }
  455. if ('detail' in data) {
  456. throw data;
  457. }
  458. if ('id' in data) {
  459. console.log(data);
  460. currentRequestId = data.id;
  461. } else {
  462. if (data.done == false) {
  463. if (responseMessage.content == '' && data.message.content == '\n') {
  464. continue;
  465. } else {
  466. responseMessage.content += data.message.content;
  467. messages = messages;
  468. }
  469. } else {
  470. responseMessage.done = true;
  471. if (responseMessage.content == '') {
  472. responseMessage.error = true;
  473. responseMessage.content =
  474. 'Oops! No text generated from Ollama, Please try again.';
  475. }
  476. responseMessage.context = data.context ?? null;
  477. responseMessage.info = {
  478. total_duration: data.total_duration,
  479. load_duration: data.load_duration,
  480. sample_count: data.sample_count,
  481. sample_duration: data.sample_duration,
  482. prompt_eval_count: data.prompt_eval_count,
  483. prompt_eval_duration: data.prompt_eval_duration,
  484. eval_count: data.eval_count,
  485. eval_duration: data.eval_duration
  486. };
  487. messages = messages;
  488. if ($settings.notificationEnabled && !document.hasFocus()) {
  489. const notification = new Notification(
  490. selectedModelfile
  491. ? `${
  492. selectedModelfile.title.charAt(0).toUpperCase() +
  493. selectedModelfile.title.slice(1)
  494. }`
  495. : `${model}`,
  496. {
  497. body: responseMessage.content,
  498. icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
  499. }
  500. );
  501. }
  502. if ($settings.responseAutoCopy) {
  503. copyToClipboard(responseMessage.content);
  504. }
  505. if ($settings.responseAutoPlayback) {
  506. await tick();
  507. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  508. }
  509. }
  510. }
  511. }
  512. }
  513. } catch (error) {
  514. console.log(error);
  515. if ('detail' in error) {
  516. toast.error(error.detail);
  517. }
  518. break;
  519. }
  520. if (autoScroll) {
  521. scrollToBottom();
  522. }
  523. }
  524. if ($chatId == _chatId) {
  525. if ($settings.saveChatHistory ?? true) {
  526. chat = await updateChatById(localStorage.token, _chatId, {
  527. messages: messages,
  528. history: history,
  529. models: selectedModels
  530. });
  531. await chats.set(await getChatList(localStorage.token));
  532. }
  533. }
  534. } else {
  535. if (res !== null) {
  536. const error = await res.json();
  537. console.log(error);
  538. if ('detail' in error) {
  539. toast.error(error.detail);
  540. responseMessage.content = error.detail;
  541. } else {
  542. toast.error(error.error);
  543. responseMessage.content = error.error;
  544. }
  545. } else {
  546. toast.error(
  547. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  548. );
  549. responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  550. provider: 'Ollama'
  551. });
  552. }
  553. responseMessage.error = true;
  554. responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  555. provider: 'Ollama'
  556. });
  557. responseMessage.done = true;
  558. messages = messages;
  559. }
  560. stopResponseFlag = false;
  561. await tick();
  562. if (autoScroll) {
  563. scrollToBottom();
  564. }
  565. if (messages.length == 2 && messages.at(1).content !== '') {
  566. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  567. const _title = await generateChatTitle(userPrompt);
  568. await setChatTitle(_chatId, _title);
  569. }
  570. };
  571. const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
  572. const responseMessage = history.messages[responseMessageId];
  573. const docs = messages
  574. .filter((message) => message?.files ?? null)
  575. .map((message) =>
  576. message.files.filter((item) => item.type === 'doc' || item.type === 'collection')
  577. )
  578. .flat(1);
  579. console.log(docs);
  580. scrollToBottom();
  581. try {
  582. const [res, controller] = await generateOpenAIChatCompletion(
  583. localStorage.token,
  584. {
  585. model: model.id,
  586. stream: true,
  587. messages: [
  588. $settings.system || (responseMessage?.userContext ?? null)
  589. ? {
  590. role: 'system',
  591. content: `${$settings?.system ?? ''}${
  592. responseMessage?.userContext ?? null
  593. ? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
  594. : ''
  595. }`
  596. }
  597. : undefined,
  598. ...messages
  599. ]
  600. .filter((message) => message?.content?.trim())
  601. .map((message, idx, arr) => ({
  602. role: message.role,
  603. ...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
  604. message.role === 'user'
  605. ? {
  606. content: [
  607. {
  608. type: 'text',
  609. text:
  610. arr.length - 1 !== idx
  611. ? message.content
  612. : message?.raContent ?? message.content
  613. },
  614. ...message.files
  615. .filter((file) => file.type === 'image')
  616. .map((file) => ({
  617. type: 'image_url',
  618. image_url: {
  619. url: file.url
  620. }
  621. }))
  622. ]
  623. }
  624. : {
  625. content:
  626. arr.length - 1 !== idx
  627. ? message.content
  628. : message?.raContent ?? message.content
  629. })
  630. })),
  631. seed: $settings?.params?.seed ?? undefined,
  632. stop:
  633. $settings?.params?.stop ?? undefined
  634. ? $settings.params.stop.map((str) =>
  635. decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  636. )
  637. : undefined,
  638. temperature: $settings?.params?.temperature ?? undefined,
  639. top_p: $settings?.params?.top_p ?? undefined,
  640. frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
  641. max_tokens: $settings?.params?.max_tokens ?? undefined,
  642. docs: docs.length > 0 ? docs : undefined,
  643. citations: docs.length > 0
  644. },
  645. `${OPENAI_API_BASE_URL}`
  646. );
  647. // Wait until history/message have been updated
  648. await tick();
  649. scrollToBottom();
  650. if (res && res.ok && res.body) {
  651. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  652. for await (const update of textStream) {
  653. const { value, done, citations, error } = update;
  654. if (error) {
  655. await handleOpenAIError(error, null, model, responseMessage);
  656. break;
  657. }
  658. if (done || stopResponseFlag || _chatId !== $chatId) {
  659. responseMessage.done = true;
  660. messages = messages;
  661. if (stopResponseFlag) {
  662. controller.abort('User: Stop Response');
  663. }
  664. break;
  665. }
  666. if (citations) {
  667. responseMessage.citations = citations;
  668. continue;
  669. }
  670. if (responseMessage.content == '' && value == '\n') {
  671. continue;
  672. } else {
  673. responseMessage.content += value;
  674. messages = messages;
  675. }
  676. if ($settings.notificationEnabled && !document.hasFocus()) {
  677. const notification = new Notification(`OpenAI ${model}`, {
  678. body: responseMessage.content,
  679. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  680. });
  681. }
  682. if ($settings.responseAutoCopy) {
  683. copyToClipboard(responseMessage.content);
  684. }
  685. if ($settings.responseAutoPlayback) {
  686. await tick();
  687. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  688. }
  689. if (autoScroll) {
  690. scrollToBottom();
  691. }
  692. }
  693. if ($chatId == _chatId) {
  694. if ($settings.saveChatHistory ?? true) {
  695. chat = await updateChatById(localStorage.token, _chatId, {
  696. models: selectedModels,
  697. messages: messages,
  698. history: history
  699. });
  700. await chats.set(await getChatList(localStorage.token));
  701. }
  702. }
  703. } else {
  704. await handleOpenAIError(null, res, model, responseMessage);
  705. }
  706. } catch (error) {
  707. await handleOpenAIError(error, null, model, responseMessage);
  708. }
  709. messages = messages;
  710. stopResponseFlag = false;
  711. await tick();
  712. if (autoScroll) {
  713. scrollToBottom();
  714. }
  715. if (messages.length == 2) {
  716. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  717. const _title = await generateChatTitle(userPrompt);
  718. await setChatTitle(_chatId, _title);
  719. }
  720. };
  721. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  722. let errorMessage = '';
  723. let innerError;
  724. if (error) {
  725. innerError = error;
  726. } else if (res !== null) {
  727. innerError = await res.json();
  728. }
  729. console.error(innerError);
  730. if ('detail' in innerError) {
  731. toast.error(innerError.detail);
  732. errorMessage = innerError.detail;
  733. } else if ('error' in innerError) {
  734. if ('message' in innerError.error) {
  735. toast.error(innerError.error.message);
  736. errorMessage = innerError.error.message;
  737. } else {
  738. toast.error(innerError.error);
  739. errorMessage = innerError.error;
  740. }
  741. } else if ('message' in innerError) {
  742. toast.error(innerError.message);
  743. errorMessage = innerError.message;
  744. }
  745. responseMessage.error = true;
  746. responseMessage.content =
  747. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  748. provider: model.name ?? model.id
  749. }) +
  750. '\n' +
  751. errorMessage;
  752. responseMessage.done = true;
  753. messages = messages;
  754. };
  755. const stopResponse = () => {
  756. stopResponseFlag = true;
  757. console.log('stopResponse');
  758. };
  759. const regenerateResponse = async (message) => {
  760. console.log('regenerateResponse');
  761. if (messages.length != 0) {
  762. let userMessage = history.messages[message.parentId];
  763. let userPrompt = userMessage.content;
  764. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  765. await sendPrompt(userPrompt, userMessage.id);
  766. } else {
  767. await sendPrompt(userPrompt, userMessage.id, message.model);
  768. }
  769. }
  770. };
  771. const continueGeneration = async () => {
  772. console.log('continueGeneration');
  773. const _chatId = JSON.parse(JSON.stringify($chatId));
  774. if (messages.length != 0 && messages.at(-1).done == true) {
  775. const responseMessage = history.messages[history.currentId];
  776. responseMessage.done = false;
  777. await tick();
  778. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  779. if (model) {
  780. if (model?.external) {
  781. await sendPromptOpenAI(
  782. model,
  783. history.messages[responseMessage.parentId].content,
  784. responseMessage.id,
  785. _chatId
  786. );
  787. } else
  788. await sendPromptOllama(
  789. model,
  790. history.messages[responseMessage.parentId].content,
  791. responseMessage.id,
  792. _chatId
  793. );
  794. }
  795. } else {
  796. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  797. }
  798. };
  799. const generateChatTitle = async (userPrompt) => {
  800. if ($settings?.title?.auto ?? true) {
  801. const model = $models.find((model) => model.id === selectedModels[0]);
  802. const titleModelId =
  803. model?.external ?? false
  804. ? $settings?.title?.modelExternal ?? selectedModels[0]
  805. : $settings?.title?.model ?? selectedModels[0];
  806. const titleModel = $models.find((model) => model.id === titleModelId);
  807. console.log(titleModel);
  808. const title = await generateTitle(
  809. localStorage.token,
  810. $settings?.title?.prompt ??
  811. $i18n.t(
  812. "Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
  813. ) + ' {{prompt}}',
  814. titleModelId,
  815. userPrompt,
  816. titleModel?.owned_by === 'openai' ?? false
  817. ? `${OPENAI_API_BASE_URL}`
  818. : `${OLLAMA_API_BASE_URL}/v1`
  819. );
  820. return title;
  821. } else {
  822. return `${userPrompt}`;
  823. }
  824. };
  825. const setChatTitle = async (_chatId, _title) => {
  826. if (_chatId === $chatId) {
  827. title = _title;
  828. }
  829. if ($settings.saveChatHistory ?? true) {
  830. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  831. await chats.set(await getChatList(localStorage.token));
  832. }
  833. };
  834. const getTags = async () => {
  835. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  836. return [];
  837. });
  838. };
  839. const addTag = async (tagName) => {
  840. const res = await addTagById(localStorage.token, $chatId, tagName);
  841. tags = await getTags();
  842. chat = await updateChatById(localStorage.token, $chatId, {
  843. tags: tags
  844. });
  845. _tags.set(await getAllChatTags(localStorage.token));
  846. };
  847. const deleteTag = async (tagName) => {
  848. const res = await deleteTagById(localStorage.token, $chatId, tagName);
  849. tags = await getTags();
  850. chat = await updateChatById(localStorage.token, $chatId, {
  851. tags: tags
  852. });
  853. _tags.set(await getAllChatTags(localStorage.token));
  854. };
  855. </script>
  856. <svelte:head>
  857. <title>
  858. {title
  859. ? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
  860. : `${$WEBUI_NAME}`}
  861. </title>
  862. </svelte:head>
  863. {#if !chatIdProp || (loaded && chatIdProp)}
  864. <div
  865. class="min-h-screen max-h-screen {$showSidebar
  866. ? 'md:max-w-[calc(100%-260px)]'
  867. : ''} w-full max-w-full flex flex-col"
  868. >
  869. <Navbar
  870. {title}
  871. bind:selectedModels
  872. bind:showModelSelector
  873. shareEnabled={messages.length > 0}
  874. {chat}
  875. {initNewChat}
  876. />
  877. {#if $banners.length > 0 && !$chatId && selectedModels.length <= 1}
  878. <div
  879. class="absolute top-[4.25rem] w-full {$showSidebar ? 'md:max-w-[calc(100%-260px)]' : ''}"
  880. >
  881. <div class=" flex flex-col gap-1 w-full">
  882. {#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
  883. <Banner
  884. {banner}
  885. on:dismiss={(e) => {
  886. const bannerId = e.detail;
  887. localStorage.setItem(
  888. 'dismissedBannerIds',
  889. JSON.stringify(
  890. [
  891. bannerId,
  892. ...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
  893. ].filter((id) => $banners.find((b) => b.id === id))
  894. )
  895. );
  896. }}
  897. />
  898. {/each}
  899. </div>
  900. </div>
  901. {/if}
  902. <div class="flex flex-col flex-auto">
  903. <div
  904. class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
  905. id="messages-container"
  906. bind:this={messagesContainerElement}
  907. on:scroll={(e) => {
  908. autoScroll =
  909. messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
  910. messagesContainerElement.clientHeight + 5;
  911. }}
  912. >
  913. <div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
  914. <Messages
  915. chatId={$chatId}
  916. {selectedModels}
  917. {processing}
  918. bind:history
  919. bind:messages
  920. bind:autoScroll
  921. bind:prompt
  922. bottomPadding={files.length > 0}
  923. {sendPrompt}
  924. {continueGeneration}
  925. {regenerateResponse}
  926. />
  927. </div>
  928. </div>
  929. </div>
  930. </div>
  931. <MessageInput
  932. bind:files
  933. bind:prompt
  934. bind:autoScroll
  935. bind:atSelectedModel
  936. {selectedModels}
  937. {messages}
  938. {submitPrompt}
  939. {stopResponse}
  940. />
  941. {/if}