Chat.svelte 29 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import { getContext, onMount, tick } from 'svelte';
  5. import { goto } from '$app/navigation';
  6. import { page } from '$app/stores';
  7. import {
  8. chatId,
  9. chats,
  10. config,
  11. type Model,
  12. models,
  13. settings,
  14. showSidebar,
  15. tags as _tags,
  16. WEBUI_NAME,
  17. banners
  18. } from '$lib/stores';
  19. import { convertMessagesToHistory, copyToClipboard, splitStream } from '$lib/utils';
  20. import { cancelOllamaRequest, generateChatCompletion } from '$lib/apis/ollama';
  21. import {
  22. addTagById,
  23. createNewChat,
  24. deleteTagById,
  25. getAllChatTags,
  26. getChatById,
  27. getChatList,
  28. getTagsById,
  29. updateChatById
  30. } from '$lib/apis/chats';
  31. import { generateOpenAIChatCompletion, generateTitle } from '$lib/apis/openai';
  32. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  33. import Messages from '$lib/components/chat/Messages.svelte';
  34. import Navbar from '$lib/components/layout/Navbar.svelte';
  35. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  36. import { createOpenAITextStream } from '$lib/apis/streaming';
  37. import { queryMemory } from '$lib/apis/memories';
  38. import type { Writable } from 'svelte/store';
  39. import type { i18n as i18nType } from 'i18next';
  40. import Banner from '../common/Banner.svelte';
  41. import { getUserSettings } from '$lib/apis/users';
// i18next instance provided by the root layout (exposed as a store so language
// changes propagate reactively).
const i18n: Writable<i18nType> = getContext('i18n');

// Route param: non-empty when an existing chat (/c/<id>) is being viewed.
export let chatIdProp = '';
let loaded = false;

let stopResponseFlag = false; // set by stopResponse(); polled by the streaming loops
let autoScroll = true; // keep the view pinned to the bottom unless the user scrolls up
let processing = '';
let messagesContainerElement: HTMLDivElement;
let currentRequestId = null; // in-flight Ollama request id, used for cancellation

let showModelSelector = true;
let selectedModels = ['']; // '' acts as the "no model selected" sentinel
let atSelectedModel: Model | undefined; // model targeted via '@' mention in the input

let chat = null;
let tags = [];

let title = '';
let prompt = '';
let files = [];

let messages = [];
// Branching conversation tree: messages keyed by id; currentId marks the leaf
// of the branch currently being displayed.
let history = {
	messages: {},
	currentId: null
};
// Derive the flat `messages` list for the active branch by walking parent links
// from the current leaf up to the root (unshift keeps chronological order).
$: if (history.currentId !== null) {
	let _messages = [];

	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}
// When routed to an existing chat, load it; fall back to the home page if the
// chat cannot be fetched (deleted, not owned, etc.).
$: if (chatIdProp) {
	(async () => {
		if (await loadChat()) {
			await tick();
			loaded = true;

			// Defer scrolling/focusing until the messages have rendered.
			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}
onMount(async () => {
	if (!$chatId) {
		await initNewChat();
	} else {
		// A 'local' (unsaved) chat cannot be revisited once history saving is off.
		if (!($settings.saveChatHistory ?? true)) {
			await goto('/');
		}
	}
});
  97. //////////////////////////
  98. // Web functions
  99. //////////////////////////
// Reset all chat state for a brand-new conversation and apply model/prompt
// presets passed via the query string ('models', 'q') or user settings.
const initNewChat = async () => {
	// Abort any response still streaming from the previous chat.
	if (currentRequestId !== null) {
		await cancelOllamaRequest(localStorage.token, currentRequestId);
		currentRequestId = null;
	}
	window.history.replaceState(history.state, '', `/`);
	await chatId.set('');

	autoScroll = true;

	title = '';
	messages = [];
	history = {
		messages: {},
		currentId: null
	};

	// Model preselection priority: URL query > user settings > server default.
	if ($page.url.searchParams.get('models')) {
		selectedModels = $page.url.searchParams.get('models')?.split(',');
	} else if ($settings?.models) {
		selectedModels = $settings?.models;
	} else if ($config?.default_models) {
		console.log($config?.default_models.split(',') ?? '');
		selectedModels = $config?.default_models.split(',');
	} else {
		selectedModels = [''];
	}

	// Optional prefilled prompt (?q=...) is submitted immediately.
	// NOTE(review): this runs before the model-availability filter below — a
	// '?q=' with an unavailable preselected model may submit early; confirm intended.
	if ($page.url.searchParams.get('q')) {
		prompt = $page.url.searchParams.get('q') ?? '';

		if (prompt) {
			await tick();
			submitPrompt(prompt);
		}
	}

	// Blank out preselected models that are no longer available.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	// Prefer server-stored settings; fall back to the localStorage copy.
	const userSettings = await getUserSettings(localStorage.token);

	if (userSettings) {
		settings.set(userSettings.ui);
	} else {
		settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
	}

	const chatInput = document.getElementById('chat-textarea');
	setTimeout(() => chatInput?.focus(), 0);
};
// Fetch the chat referenced by `chatIdProp` and hydrate component state from it.
// Returns true on success; null/undefined on failure (the caller redirects home).
const loadChat = async () => {
	chatId.set(chatIdProp);
	chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
		await goto('/');
		return null;
	});

	if (chat) {
		tags = await getTags();
		const chatContent = chat.chat;

		if (chatContent) {
			console.log(chatContent);

			// Older chats stored a single model string; normalize to an array.
			selectedModels =
				(chatContent?.models ?? undefined) !== undefined
					? chatContent.models
					: [chatContent.models ?? ''];
			// Older chats stored a flat message list; convert to the tree format.
			history =
				(chatContent?.history ?? undefined) !== undefined
					? chatContent.history
					: convertMessagesToHistory(chatContent.messages);
			title = chatContent.title;

			const userSettings = await getUserSettings(localStorage.token);

			if (userSettings) {
				await settings.set(userSettings.ui);
			} else {
				await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
			}

			// Per-chat system prompt / params override the just-loaded globals.
			await settings.set({
				...$settings,
				system: chatContent.system ?? $settings.system,
				params: chatContent.options ?? $settings.params
			});
			autoScroll = true;
			await tick();

			// Ensure the last message renders as finished even if the chat was
			// saved mid-stream.
			if (messages.length > 0) {
				history.messages[messages.at(-1).id].done = true;
			}
			await tick();

			return true;
		} else {
			return null;
		}
	}
};
  186. const scrollToBottom = async () => {
  187. await tick();
  188. if (messagesContainerElement) {
  189. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  190. }
  191. };
  192. //////////////////////////
  193. // Ollama functions
  194. //////////////////////////
// Validate state and, if OK, append the user's message to the history tree,
// create a server-side chat on the first turn, then kick off generation.
const submitPrompt = async (userPrompt, _user = null) => {
	console.log('submitPrompt', $chatId);

	// Blank out selections that no longer resolve to an available model.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	if (selectedModels.includes('')) {
		toast.error($i18n.t('Model not selected'));
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done
		console.log('wait');
	} else if (
		files.length > 0 &&
		files.filter((file) => file.upload_status === false).length > 0
	) {
		// Upload not done
		toast.error(
			$i18n.t(
				`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
			)
		);
	} else {
		// Reset chat message textarea height
		document.getElementById('chat-textarea').style.height = '';

		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			user: _user ?? undefined,
			content: userPrompt,
			files: files.length > 0 ? files : undefined,
			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
			// Deduplicated copy of the selected model ids for this turn.
			models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
		};

		// Add message to history and Set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;

		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}

		// Wait until history/message have been updated
		await tick();

		// Create new chat if only one message in messages
		if (messages.length == 1) {
			if ($settings.saveChatHistory ?? true) {
				chat = await createNewChat(localStorage.token, {
					id: $chatId,
					title: $i18n.t('New Chat'),
					models: selectedModels,
					system: $settings.system ?? undefined,
					options: {
						...($settings.params ?? {})
					},
					messages: messages,
					history: history,
					tags: [],
					timestamp: Date.now()
				});
				await chats.set(await getChatList(localStorage.token));
				await chatId.set(chat.id);
			} else {
				// History saving disabled: keep the chat client-side only.
				await chatId.set('local');
			}
			await tick();
		}

		// Reset chat input textarea
		prompt = '';
		document.getElementById('chat-textarea').style.height = '';
		files = [];

		// Send prompt
		await sendPrompt(userPrompt, userMessageId);
	}
};
// Create one assistant placeholder per target model and run the completions in
// parallel. `modelId` forces a single model (regeneration); otherwise an
// '@'-mentioned model, or the full selection, is used.
const sendPrompt = async (prompt, parentId, modelId = null) => {
	// Snapshot the chat id so late stream chunks can detect navigation away.
	const _chatId = JSON.parse(JSON.stringify($chatId));
	await Promise.all(
		(modelId
			? [modelId]
			: atSelectedModel !== undefined
			? [atSelectedModel.id]
			: selectedModels
		).map(async (modelId) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);

			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);

				// Warn-only: generation still proceeds for non-vision models.
				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}

				// Create response message
				let responseMessageId = uuidv4();
				let responseMessage = {
					parentId: parentId,
					id: responseMessageId,
					childrenIds: [],
					role: 'assistant',
					content: '',
					model: model.id,
					modelName: model.name ?? model.id,
					userContext: null,
					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
				};

				// Add message to history and Set currentId to messageId
				history.messages[responseMessageId] = responseMessage;
				history.currentId = responseMessageId;

				// Append messageId to childrenIds of parent message
				if (parentId !== null) {
					history.messages[parentId].childrenIds = [
						...history.messages[parentId].childrenIds,
						responseMessageId
					];
				}

				await tick();

				// Query stored memories and format hits as dated context lines.
				let userContext = null;
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});

						if (res) {
							if (res.documents[0].length > 0) {
								userContext = res.documents.reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[index][0].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];
									acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
									return acc;
								}, []);
							}

							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;

				// Dispatch to the backend matching the model's provider.
				if (model?.owned_by === 'openai') {
					await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);

	await chats.set(await getChatList(localStorage.token));
};
  353. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  354. model = model.id;
  355. const responseMessage = history.messages[responseMessageId];
  356. // Wait until history/message have been updated
  357. await tick();
  358. // Scroll down
  359. scrollToBottom();
  360. const messagesBody = [
  361. $settings.system || (responseMessage?.userContext ?? null)
  362. ? {
  363. role: 'system',
  364. content: `${$settings?.system ?? ''}${
  365. responseMessage?.userContext ?? null
  366. ? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
  367. : ''
  368. }`
  369. }
  370. : undefined,
  371. ...messages
  372. ]
  373. .filter((message) => message?.content?.trim())
  374. .map((message, idx, arr) => {
  375. // Prepare the base message object
  376. const baseMessage = {
  377. role: message.role,
  378. content: message.content
  379. };
  380. // Extract and format image URLs if any exist
  381. const imageUrls = message.files
  382. ?.filter((file) => file.type === 'image')
  383. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  384. // Add images array only if it contains elements
  385. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  386. baseMessage.images = imageUrls;
  387. }
  388. return baseMessage;
  389. });
  390. let lastImageIndex = -1;
  391. // Find the index of the last object with images
  392. messagesBody.forEach((item, index) => {
  393. if (item.images) {
  394. lastImageIndex = index;
  395. }
  396. });
  397. // Remove images from all but the last one
  398. messagesBody.forEach((item, index) => {
  399. if (index !== lastImageIndex) {
  400. delete item.images;
  401. }
  402. });
  403. const docs = messages
  404. .filter((message) => message?.files ?? null)
  405. .map((message) =>
  406. message.files.filter((item) => item.type === 'doc' || item.type === 'collection')
  407. )
  408. .flat(1);
  409. const [res, controller] = await generateChatCompletion(localStorage.token, {
  410. model: model,
  411. messages: messagesBody,
  412. options: {
  413. ...($settings.params ?? {}),
  414. stop:
  415. $settings?.params?.stop ?? undefined
  416. ? $settings.params.stop.map((str) =>
  417. decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  418. )
  419. : undefined,
  420. num_predict: $settings?.params?.max_tokens ?? undefined,
  421. repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
  422. },
  423. format: $settings.requestFormat ?? undefined,
  424. keep_alive: $settings.keepAlive ?? undefined,
  425. docs: docs.length > 0 ? docs : undefined,
  426. citations: docs.length > 0
  427. });
  428. if (res && res.ok) {
  429. console.log('controller', controller);
  430. const reader = res.body
  431. .pipeThrough(new TextDecoderStream())
  432. .pipeThrough(splitStream('\n'))
  433. .getReader();
  434. while (true) {
  435. const { value, done } = await reader.read();
  436. if (done || stopResponseFlag || _chatId !== $chatId) {
  437. responseMessage.done = true;
  438. messages = messages;
  439. if (stopResponseFlag) {
  440. controller.abort('User: Stop Response');
  441. await cancelOllamaRequest(localStorage.token, currentRequestId);
  442. }
  443. currentRequestId = null;
  444. break;
  445. }
  446. try {
  447. let lines = value.split('\n');
  448. for (const line of lines) {
  449. if (line !== '') {
  450. console.log(line);
  451. let data = JSON.parse(line);
  452. if ('citations' in data) {
  453. responseMessage.citations = data.citations;
  454. continue;
  455. }
  456. if ('detail' in data) {
  457. throw data;
  458. }
  459. if ('id' in data) {
  460. console.log(data);
  461. currentRequestId = data.id;
  462. } else {
  463. if (data.done == false) {
  464. if (responseMessage.content == '' && data.message.content == '\n') {
  465. continue;
  466. } else {
  467. responseMessage.content += data.message.content;
  468. messages = messages;
  469. }
  470. } else {
  471. responseMessage.done = true;
  472. if (responseMessage.content == '') {
  473. responseMessage.error = true;
  474. responseMessage.content =
  475. 'Oops! No text generated from Ollama, Please try again.';
  476. }
  477. responseMessage.context = data.context ?? null;
  478. responseMessage.info = {
  479. total_duration: data.total_duration,
  480. load_duration: data.load_duration,
  481. sample_count: data.sample_count,
  482. sample_duration: data.sample_duration,
  483. prompt_eval_count: data.prompt_eval_count,
  484. prompt_eval_duration: data.prompt_eval_duration,
  485. eval_count: data.eval_count,
  486. eval_duration: data.eval_duration
  487. };
  488. messages = messages;
  489. if ($settings.notificationEnabled && !document.hasFocus()) {
  490. const notification = new Notification(
  491. selectedModelfile
  492. ? `${
  493. selectedModelfile.title.charAt(0).toUpperCase() +
  494. selectedModelfile.title.slice(1)
  495. }`
  496. : `${model}`,
  497. {
  498. body: responseMessage.content,
  499. icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
  500. }
  501. );
  502. }
  503. if ($settings.responseAutoCopy) {
  504. copyToClipboard(responseMessage.content);
  505. }
  506. if ($settings.responseAutoPlayback) {
  507. await tick();
  508. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  509. }
  510. }
  511. }
  512. }
  513. }
  514. } catch (error) {
  515. console.log(error);
  516. if ('detail' in error) {
  517. toast.error(error.detail);
  518. }
  519. break;
  520. }
  521. if (autoScroll) {
  522. scrollToBottom();
  523. }
  524. }
  525. if ($chatId == _chatId) {
  526. if ($settings.saveChatHistory ?? true) {
  527. chat = await updateChatById(localStorage.token, _chatId, {
  528. messages: messages,
  529. history: history,
  530. models: selectedModels
  531. });
  532. await chats.set(await getChatList(localStorage.token));
  533. }
  534. }
  535. } else {
  536. if (res !== null) {
  537. const error = await res.json();
  538. console.log(error);
  539. if ('detail' in error) {
  540. toast.error(error.detail);
  541. responseMessage.content = error.detail;
  542. } else {
  543. toast.error(error.error);
  544. responseMessage.content = error.error;
  545. }
  546. } else {
  547. toast.error(
  548. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  549. );
  550. responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  551. provider: 'Ollama'
  552. });
  553. }
  554. responseMessage.error = true;
  555. responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  556. provider: 'Ollama'
  557. });
  558. responseMessage.done = true;
  559. messages = messages;
  560. }
  561. stopResponseFlag = false;
  562. await tick();
  563. if (autoScroll) {
  564. scrollToBottom();
  565. }
  566. if (messages.length == 2 && messages.at(1).content !== '') {
  567. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  568. const _title = await generateChatTitle(userPrompt);
  569. await setChatTitle(_chatId, _title);
  570. }
  571. };
// Stream a chat completion from an OpenAI-compatible backend into the response
// message, persisting the chat and generating a title on the first exchange.
const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
	const responseMessage = history.messages[responseMessageId];

	// Doc/collection attachments for retrieval-augmented generation.
	const docs = messages
		.filter((message) => message?.files ?? null)
		.map((message) =>
			message.files.filter((item) => item.type === 'doc' || item.type === 'collection')
		)
		.flat(1);
	console.log(docs);

	scrollToBottom();

	try {
		const [res, controller] = await generateOpenAIChatCompletion(
			localStorage.token,
			{
				model: model.id,
				stream: true,
				messages: [
					// Optional system message: global system prompt + memory context.
					$settings.system || (responseMessage?.userContext ?? null)
						? {
								role: 'system',
								content: `${$settings?.system ?? ''}${
									responseMessage?.userContext ?? null
										? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
										: ''
								}`
						  }
						: undefined,
					...messages
				]
					.filter((message) => message?.content?.trim())
					.map((message, idx, arr) => ({
						role: message.role,
						// User messages with images use the multi-part content format;
						// the final message prefers its RAG-augmented `raContent`.
						...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
						message.role === 'user'
							? {
									content: [
										{
											type: 'text',
											text:
												arr.length - 1 !== idx
													? message.content
													: message?.raContent ?? message.content
										},
										...message.files
											.filter((file) => file.type === 'image')
											.map((file) => ({
												type: 'image_url',
												image_url: {
													url: file.url
												}
											}))
									]
							  }
							: {
									content:
										arr.length - 1 !== idx
											? message.content
											: message?.raContent ?? message.content
							  })
					})),
				seed: $settings?.params?.seed ?? undefined,
				// Stop sequences are stored URI-encoded with escapes; decode for the API.
				stop:
					$settings?.params?.stop ?? undefined
						? $settings.params.stop.map((str) =>
								decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
						  )
						: undefined,
				temperature: $settings?.params?.temperature ?? undefined,
				top_p: $settings?.params?.top_p ?? undefined,
				frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
				max_tokens: $settings?.params?.max_tokens ?? undefined,
				docs: docs.length > 0 ? docs : undefined,
				citations: docs.length > 0
			},
			`${OPENAI_API_BASE_URL}`
		);

		// Wait until history/message have been updated
		await tick();

		scrollToBottom();

		if (res && res.ok && res.body) {
			const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);

			for await (const update of textStream) {
				const { value, done, citations, error } = update;
				if (error) {
					await handleOpenAIError(error, null, model, responseMessage);
					break;
				}
				// Stop when the stream ends, the user aborts, or the user
				// navigated to a different chat.
				if (done || stopResponseFlag || _chatId !== $chatId) {
					responseMessage.done = true;
					messages = messages;

					if (stopResponseFlag) {
						controller.abort('User: Stop Response');
					}

					break;
				}

				if (citations) {
					responseMessage.citations = citations;
					continue;
				}

				// Skip a leading bare newline; otherwise append the chunk.
				if (responseMessage.content == '' && value == '\n') {
					continue;
				} else {
					responseMessage.content += value;
					messages = messages;
				}

				if ($settings.notificationEnabled && !document.hasFocus()) {
					const notification = new Notification(`OpenAI ${model}`, {
						body: responseMessage.content,
						icon: `${WEBUI_BASE_URL}/static/favicon.png`
					});
				}

				if ($settings.responseAutoCopy) {
					copyToClipboard(responseMessage.content);
				}

				if ($settings.responseAutoPlayback) {
					await tick();
					document.getElementById(`speak-button-${responseMessage.id}`)?.click();
				}

				if (autoScroll) {
					scrollToBottom();
				}
			}

			// Persist only if the user is still viewing this chat.
			if ($chatId == _chatId) {
				if ($settings.saveChatHistory ?? true) {
					chat = await updateChatById(localStorage.token, _chatId, {
						models: selectedModels,
						messages: messages,
						history: history
					});
					await chats.set(await getChatList(localStorage.token));
				}
			}
		} else {
			await handleOpenAIError(null, res, model, responseMessage);
		}
	} catch (error) {
		await handleOpenAIError(error, null, model, responseMessage);
	}
	messages = messages;

	stopResponseFlag = false;
	await tick();

	if (autoScroll) {
		scrollToBottom();
	}

	// First exchange (user + assistant): move the URL to /c/<id> and auto-title.
	if (messages.length == 2) {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);

		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}
};
  722. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  723. let errorMessage = '';
  724. let innerError;
  725. if (error) {
  726. innerError = error;
  727. } else if (res !== null) {
  728. innerError = await res.json();
  729. }
  730. console.error(innerError);
  731. if ('detail' in innerError) {
  732. toast.error(innerError.detail);
  733. errorMessage = innerError.detail;
  734. } else if ('error' in innerError) {
  735. if ('message' in innerError.error) {
  736. toast.error(innerError.error.message);
  737. errorMessage = innerError.error.message;
  738. } else {
  739. toast.error(innerError.error);
  740. errorMessage = innerError.error;
  741. }
  742. } else if ('message' in innerError) {
  743. toast.error(innerError.message);
  744. errorMessage = innerError.message;
  745. }
  746. responseMessage.error = true;
  747. responseMessage.content =
  748. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  749. provider: model.name ?? model.id
  750. }) +
  751. '\n' +
  752. errorMessage;
  753. responseMessage.done = true;
  754. messages = messages;
  755. };
  756. const stopResponse = () => {
  757. stopResponseFlag = true;
  758. console.log('stopResponse');
  759. };
  760. const regenerateResponse = async (message) => {
  761. console.log('regenerateResponse');
  762. if (messages.length != 0) {
  763. let userMessage = history.messages[message.parentId];
  764. let userPrompt = userMessage.content;
  765. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  766. await sendPrompt(userPrompt, userMessage.id);
  767. } else {
  768. await sendPrompt(userPrompt, userMessage.id, message.model);
  769. }
  770. }
  771. };
  772. const continueGeneration = async () => {
  773. console.log('continueGeneration');
  774. const _chatId = JSON.parse(JSON.stringify($chatId));
  775. if (messages.length != 0 && messages.at(-1).done == true) {
  776. const responseMessage = history.messages[history.currentId];
  777. responseMessage.done = false;
  778. await tick();
  779. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  780. if (model) {
  781. if (model?.external) {
  782. await sendPromptOpenAI(
  783. model,
  784. history.messages[responseMessage.parentId].content,
  785. responseMessage.id,
  786. _chatId
  787. );
  788. } else
  789. await sendPromptOllama(
  790. model,
  791. history.messages[responseMessage.parentId].content,
  792. responseMessage.id,
  793. _chatId
  794. );
  795. }
  796. } else {
  797. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  798. }
  799. };
  800. const generateChatTitle = async (userPrompt) => {
  801. if ($settings?.title?.auto ?? true) {
  802. const model = $models.find((model) => model.id === selectedModels[0]);
  803. const titleModelId =
  804. model?.external ?? false
  805. ? $settings?.title?.modelExternal ?? selectedModels[0]
  806. : $settings?.title?.model ?? selectedModels[0];
  807. const titleModel = $models.find((model) => model.id === titleModelId);
  808. console.log(titleModel);
  809. const title = await generateTitle(
  810. localStorage.token,
  811. $settings?.title?.prompt ??
  812. $i18n.t(
  813. "Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
  814. ) + ' {{prompt}}',
  815. titleModelId,
  816. userPrompt,
  817. titleModel?.owned_by === 'openai' ?? false
  818. ? `${OPENAI_API_BASE_URL}`
  819. : `${OLLAMA_API_BASE_URL}/v1`
  820. );
  821. return title;
  822. } else {
  823. return `${userPrompt}`;
  824. }
  825. };
  826. const setChatTitle = async (_chatId, _title) => {
  827. if (_chatId === $chatId) {
  828. title = _title;
  829. }
  830. if ($settings.saveChatHistory ?? true) {
  831. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  832. await chats.set(await getChatList(localStorage.token));
  833. }
  834. };
  835. const getTags = async () => {
  836. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  837. return [];
  838. });
  839. };
  840. const addTag = async (tagName) => {
  841. const res = await addTagById(localStorage.token, $chatId, tagName);
  842. tags = await getTags();
  843. chat = await updateChatById(localStorage.token, $chatId, {
  844. tags: tags
  845. });
  846. _tags.set(await getAllChatTags(localStorage.token));
  847. };
  848. const deleteTag = async (tagName) => {
  849. const res = await deleteTagById(localStorage.token, $chatId, tagName);
  850. tags = await getTags();
  851. chat = await updateChatById(localStorage.token, $chatId, {
  852. tags: tags
  853. });
  854. _tags.set(await getAllChatTags(localStorage.token));
  855. };
  856. </script>
<!-- Browser tab title: chat title truncated to 30 chars, suffixed with the app name -->
<svelte:head>
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>
<!-- Render immediately for a new chat; for an existing chat wait for loadChat() -->
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="min-h-screen max-h-screen {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>

		<!-- Admin banners: only on the new-chat screen; dismissed ids are kept in
		     localStorage and pruned to banners that still exist -->
		{#if $banners.length > 0 && !$chatId && selectedModels.length <= 1}
			<div
				class="absolute top-[4.25rem] w-full {$showSidebar ? 'md:max-w-[calc(100%-260px)]' : ''}"
			>
				<div class=" flex flex-col gap-1 w-full">
					{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
						<Banner
							{banner}
							on:dismiss={(e) => {
								const bannerId = e.detail;

								localStorage.setItem(
									'dismissedBannerIds',
									JSON.stringify(
										[
											bannerId,
											...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
										].filter((id) => $banners.find((b) => b.id === id))
									)
								);
							}}
						/>
					{/each}
				</div>
			</div>
		{/if}

		<div class="flex flex-col flex-auto">
			<!-- Scroll container: scrolling away from the bottom (5px tolerance)
			     disables auto-scroll until the user returns to the bottom -->
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
				id="messages-container"
				bind:this={messagesContainerElement}
				on:scroll={(e) => {
					autoScroll =
						messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
						messagesContainerElement.clientHeight + 5;
				}}
			>
				<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
					<Messages
						chatId={$chatId}
						{selectedModels}
						{processing}
						bind:history
						bind:messages
						bind:autoScroll
						bind:prompt
						bottomPadding={files.length > 0}
						{sendPrompt}
						{continueGeneration}
						{regenerateResponse}
					/>
				</div>
			</div>
		</div>
	</div>

	<MessageInput
		bind:files
		bind:prompt
		bind:autoScroll
		bind:atSelectedModel
		{selectedModels}
		{messages}
		{submitPrompt}
		{stopResponse}
	/>
{/if}