Chat.svelte 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import { getContext, onMount, tick } from 'svelte';
  5. import { goto } from '$app/navigation';
  6. import { page } from '$app/stores';
  7. import {
  8. chatId,
  9. chats,
  10. config,
  11. type Model,
  12. models,
  13. settings,
  14. showSidebar,
  15. tags as _tags,
  16. WEBUI_NAME,
  17. banners,
  18. user
  19. } from '$lib/stores';
  20. import {
  21. convertMessagesToHistory,
  22. copyToClipboard,
  23. promptTemplate,
  24. splitStream
  25. } from '$lib/utils';
  26. import { cancelOllamaRequest, generateChatCompletion } from '$lib/apis/ollama';
  27. import {
  28. addTagById,
  29. createNewChat,
  30. deleteTagById,
  31. getAllChatTags,
  32. getChatById,
  33. getChatList,
  34. getTagsById,
  35. updateChatById
  36. } from '$lib/apis/chats';
  37. import {
  38. generateOpenAIChatCompletion,
  39. generateSearchQuery,
  40. generateTitle
  41. } from '$lib/apis/openai';
  42. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  43. import Messages from '$lib/components/chat/Messages.svelte';
  44. import Navbar from '$lib/components/layout/Navbar.svelte';
  45. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  46. import { createOpenAITextStream } from '$lib/apis/streaming';
  47. import { queryMemory } from '$lib/apis/memories';
  48. import type { Writable } from 'svelte/store';
  49. import type { i18n as i18nType } from 'i18next';
  50. import { runWebSearch } from '$lib/apis/rag';
  51. import Banner from '../common/Banner.svelte';
  52. import { getUserSettings } from '$lib/apis/users';
  53. import { chatCompleted } from '$lib/apis';
  54. const i18n: Writable<i18nType> = getContext('i18n');
// Chat id supplied by the route (/c/[id]); empty string means a brand-new chat.
export let chatIdProp = '';
let loaded = false; // true once the chat has been fetched and first rendered
let stopResponseFlag = false; // set by stopResponse() to abort an in-flight stream
let autoScroll = true; // keep the message list pinned to the bottom while streaming
let processing = '';
let messagesContainerElement: HTMLDivElement;
let currentRequestId = null; // Ollama request id of the active generation (for cancellation)
let showModelSelector = true;
let selectedModels = ['']; // model ids chosen in the selector; '' means "not selected"
let atSelectedModel: Model | undefined; // model targeted via '@' mention; overrides selectedModels
let webSearchEnabled = false;
let chat = null; // chat record as returned by the backend
let tags = [];
let title = '';
let prompt = ''; // current contents of the chat textarea
let files = []; // attachments pending for the next user message
// `history` is the canonical message tree (messages keyed by id, `currentId`
// marks the leaf of the active branch); `messages` is the derived linear view.
let messages = [];
let history = {
	messages: {},
	currentId: null
};
// Rebuild the linear `messages` array whenever the active branch changes:
// walk parent links from the current leaf up to the root, prepending each
// message (shallow-copied) along the way.
$: if (history.currentId !== null) {
	let _messages = [];
	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}
// When the route supplies a chat id, load that chat, scroll to the bottom,
// and focus the input; on load failure fall back to the home route.
$: if (chatIdProp) {
	(async () => {
		if (await loadChat()) {
			await tick();
			loaded = true;
			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}
  101. onMount(async () => {
  102. if (!$chatId) {
  103. await initNewChat();
  104. } else {
  105. if (!($settings.saveChatHistory ?? true)) {
  106. await goto('/');
  107. }
  108. }
  109. });
  110. //////////////////////////
  111. // Web functions
  112. //////////////////////////
// Reset all chat state for a brand-new conversation: cancel any in-flight
// Ollama request, clear the URL / chatId / history, pick the initial models
// (URL param > user settings > server defaults), optionally auto-submit a
// prompt passed via ?q=, then refresh user settings and focus the input.
const initNewChat = async () => {
	if (currentRequestId !== null) {
		await cancelOllamaRequest(localStorage.token, currentRequestId);
		currentRequestId = null;
	}
	window.history.replaceState(history.state, '', `/`);
	await chatId.set('');
	autoScroll = true;
	title = '';
	messages = [];
	history = {
		messages: {},
		currentId: null
	};
	// Model selection priority: ?models= URL param, then user settings,
	// then server-configured defaults, else a single empty slot.
	if ($page.url.searchParams.get('models')) {
		selectedModels = $page.url.searchParams.get('models')?.split(',');
	} else if ($settings?.models) {
		selectedModels = $settings?.models;
	} else if ($config?.default_models) {
		console.log($config?.default_models.split(',') ?? '');
		selectedModels = $config?.default_models.split(',');
	} else {
		selectedModels = [''];
	}
	// Auto-submit a prompt supplied via the ?q= query parameter.
	// NOTE(review): this fires before the model-id validation below, so an
	// unavailable model id could reach submitPrompt — confirm this is intended.
	if ($page.url.searchParams.get('q')) {
		prompt = $page.url.searchParams.get('q') ?? '';
		if (prompt) {
			await tick();
			submitPrompt(prompt);
		}
	}
	// Blank out any selected model id that is no longer available.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);
	const userSettings = await getUserSettings(localStorage.token);
	if (userSettings) {
		settings.set(userSettings.ui);
	} else {
		settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
	}
	const chatInput = document.getElementById('chat-textarea');
	setTimeout(() => chatInput?.focus(), 0);
};
// Load the chat identified by `chatIdProp` from the backend and hydrate
// component state (models, history tree, title, per-chat settings).
// Returns true on success; null/undefined on failure (the reactive caller
// then redirects to '/').
const loadChat = async () => {
	chatId.set(chatIdProp);
	chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
		await goto('/');
		return null;
	});
	if (chat) {
		tags = await getTags();
		const chatContent = chat.chat;
		if (chatContent) {
			console.log(chatContent);
			// Older records stored a single model value rather than an array.
			selectedModels =
				(chatContent?.models ?? undefined) !== undefined
					? chatContent.models
					: [chatContent.models ?? ''];
			// Older records stored a flat `messages` list instead of a history tree.
			history =
				(chatContent?.history ?? undefined) !== undefined
					? chatContent.history
					: convertMessagesToHistory(chatContent.messages);
			title = chatContent.title;
			const userSettings = await getUserSettings(localStorage.token);
			if (userSettings) {
				await settings.set(userSettings.ui);
			} else {
				await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
			}
			// Layer this chat's own system prompt / options over the user settings.
			await settings.set({
				...$settings,
				system: chatContent.system ?? $settings.system,
				params: chatContent.options ?? $settings.params
			});
			autoScroll = true;
			await tick();
			// Mark the last message done in case a previous stream was interrupted.
			if (messages.length > 0) {
				history.messages[messages.at(-1).id].done = true;
			}
			await tick();
			return true;
		} else {
			return null;
		}
	}
};
  199. const scrollToBottom = async () => {
  200. await tick();
  201. if (messagesContainerElement) {
  202. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  203. }
  204. };
  205. const createMessagesList = (responseMessageId) => {
  206. const message = history.messages[responseMessageId];
  207. if (message.parentId) {
  208. return [...createMessagesList(message.parentId), message];
  209. } else {
  210. return [message];
  211. }
  212. };
  213. //////////////////////////
  214. // Ollama functions
  215. //////////////////////////
// Validate and submit the user's prompt: ensure a model is selected, the
// previous response has finished, and all file uploads are complete; then
// create the user message in the history tree, create the chat record on the
// first message, clear the input, and hand off to sendPrompt().
const submitPrompt = async (userPrompt, _user = null) => {
	console.log('submitPrompt', $chatId);
	// Blank out any selected model id that is no longer available.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);
	if (selectedModels.includes('')) {
		toast.error($i18n.t('Model not selected'));
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done
		console.log('wait');
	} else if (
		files.length > 0 &&
		files.filter((file) => file.upload_status === false).length > 0
	) {
		// Upload not done
		toast.error(
			$i18n.t(
				`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
			)
		);
	} else {
		// Reset chat message textarea height
		document.getElementById('chat-textarea').style.height = '';
		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			user: _user ?? undefined,
			content: userPrompt,
			files: files.length > 0 ? files : undefined,
			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
			// De-duplicated list of target models for this message.
			models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
		};
		// Add message to history and Set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;
		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}
		// Wait until history/message have been updated
		await tick();
		// Create new chat if only one message in messages
		if (messages.length == 1) {
			if ($settings.saveChatHistory ?? true) {
				chat = await createNewChat(localStorage.token, {
					id: $chatId,
					title: $i18n.t('New Chat'),
					models: selectedModels,
					system: $settings.system ?? undefined,
					options: {
						...($settings.params ?? {})
					},
					messages: messages,
					history: history,
					tags: [],
					timestamp: Date.now()
				});
				await chats.set(await getChatList(localStorage.token));
				await chatId.set(chat.id);
			} else {
				// History saving disabled: keep the chat purely client-side.
				await chatId.set('local');
			}
			await tick();
		}
		// Reset chat input textarea
		prompt = '';
		document.getElementById('chat-textarea').style.height = '';
		files = [];
		// Send prompt
		await sendPrompt(userPrompt, userMessageId);
	}
};
// Fan the prompt out to every target model in parallel (an explicit modelId,
// the '@'-mentioned model, or all selected models): create an assistant
// placeholder message per model, optionally gather memory context and web
// search results, then dispatch to the Ollama or OpenAI sender.
const sendPrompt = async (prompt, parentId, modelId = null) => {
	// Snapshot the chat id so navigation mid-stream can be detected by senders.
	const _chatId = JSON.parse(JSON.stringify($chatId));
	await Promise.all(
		(modelId
			? [modelId]
			: atSelectedModel !== undefined
			? [atSelectedModel.id]
			: selectedModels
		).map(async (modelId) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);
			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);
				// Warn (but still proceed) when images are present and the model
				// is explicitly flagged as not vision capable.
				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}
				// Create response message
				let responseMessageId = uuidv4();
				let responseMessage = {
					parentId: parentId,
					id: responseMessageId,
					childrenIds: [],
					role: 'assistant',
					content: '',
					model: model.id,
					modelName: model.name ?? model.id,
					userContext: null,
					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
				};
				// Add message to history and Set currentId to messageId
				history.messages[responseMessageId] = responseMessage;
				history.currentId = responseMessageId;
				// Append messageId to childrenIds of parent message
				if (parentId !== null) {
					history.messages[parentId].childrenIds = [
						...history.messages[parentId].childrenIds,
						responseMessageId
					];
				}
				await tick();
				// When the memory feature is enabled, pull memories relevant to
				// this prompt and format them as "N. [YYYY-MM-DD]. text" lines.
				let userContext = null;
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});
						if (res) {
							if (res.documents[0].length > 0) {
								userContext = res.documents.reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[index][0].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];
									acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
									return acc;
								}, []);
							}
							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;
				if (webSearchEnabled) {
					await getWebSearchResults(model.id, parentId, responseMessageId);
				}
				// Route to the proper backend based on the model's provider.
				if (model?.owned_by === 'openai') {
					await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);
	await chats.set(await getChatList(localStorage.token));
};
  377. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  378. const responseMessage = history.messages[responseId];
  379. responseMessage.status = {
  380. done: false,
  381. action: 'web_search',
  382. description: $i18n.t('Generating search query')
  383. };
  384. messages = messages;
  385. const prompt = history.messages[parentId].content;
  386. let searchQuery = prompt;
  387. if (prompt.length > 100) {
  388. searchQuery = await generateChatSearchQuery(model, prompt);
  389. if (!searchQuery) {
  390. toast.warning($i18n.t('No search query generated'));
  391. responseMessage.status = {
  392. ...responseMessage.status,
  393. done: true,
  394. error: true,
  395. description: 'No search query generated'
  396. };
  397. messages = messages;
  398. return;
  399. }
  400. }
  401. responseMessage.status = {
  402. ...responseMessage.status,
  403. description: $i18n.t("Searching the web for '{{searchQuery}}'", { searchQuery })
  404. };
  405. messages = messages;
  406. const results = await runWebSearch(localStorage.token, searchQuery);
  407. if (results === undefined) {
  408. toast.warning($i18n.t('No search results found'));
  409. responseMessage.status = {
  410. ...responseMessage.status,
  411. done: true,
  412. error: true,
  413. description: 'No search results found'
  414. };
  415. messages = messages;
  416. return;
  417. }
  418. responseMessage.status = {
  419. ...responseMessage.status,
  420. done: true,
  421. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  422. urls: results.filenames
  423. };
  424. if (responseMessage?.files ?? undefined === undefined) {
  425. responseMessage.files = [];
  426. }
  427. responseMessage.files.push({
  428. collection_name: results.collection_name,
  429. name: searchQuery,
  430. type: 'web_search_results',
  431. urls: results.filenames
  432. });
  433. messages = messages;
  434. };
  435. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  436. model = model.id;
  437. const responseMessage = history.messages[responseMessageId];
  438. // Wait until history/message have been updated
  439. await tick();
  440. // Scroll down
  441. scrollToBottom();
  442. const messagesBody = [
  443. $settings.system || (responseMessage?.userContext ?? null)
  444. ? {
  445. role: 'system',
  446. content: `${promptTemplate($settings?.system ?? '', $user.name)}${
  447. responseMessage?.userContext ?? null
  448. ? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
  449. : ''
  450. }`
  451. }
  452. : undefined,
  453. ...messages
  454. ]
  455. .filter((message) => message?.content?.trim())
  456. .map((message, idx, arr) => {
  457. // Prepare the base message object
  458. const baseMessage = {
  459. role: message.role,
  460. content: message.content
  461. };
  462. // Extract and format image URLs if any exist
  463. const imageUrls = message.files
  464. ?.filter((file) => file.type === 'image')
  465. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  466. // Add images array only if it contains elements
  467. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  468. baseMessage.images = imageUrls;
  469. }
  470. return baseMessage;
  471. });
  472. let lastImageIndex = -1;
  473. // Find the index of the last object with images
  474. messagesBody.forEach((item, index) => {
  475. if (item.images) {
  476. lastImageIndex = index;
  477. }
  478. });
  479. // Remove images from all but the last one
  480. messagesBody.forEach((item, index) => {
  481. if (index !== lastImageIndex) {
  482. delete item.images;
  483. }
  484. });
  485. const docs = messages
  486. .filter((message) => message?.files ?? null)
  487. .map((message) =>
  488. message.files.filter((item) =>
  489. ['doc', 'collection', 'web_search_results'].includes(item.type)
  490. )
  491. )
  492. .flat(1);
  493. const [res, controller] = await generateChatCompletion(localStorage.token, {
  494. model: model,
  495. messages: messagesBody,
  496. options: {
  497. ...($settings.params ?? {}),
  498. stop:
  499. $settings?.params?.stop ?? undefined
  500. ? $settings.params.stop.map((str) =>
  501. decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  502. )
  503. : undefined,
  504. num_predict: $settings?.params?.max_tokens ?? undefined,
  505. repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
  506. },
  507. format: $settings.requestFormat ?? undefined,
  508. keep_alive: $settings.keepAlive ?? undefined,
  509. docs: docs.length > 0 ? docs : undefined,
  510. citations: docs.length > 0,
  511. chat_id: $chatId
  512. });
  513. if (res && res.ok) {
  514. console.log('controller', controller);
  515. const reader = res.body
  516. .pipeThrough(new TextDecoderStream())
  517. .pipeThrough(splitStream('\n'))
  518. .getReader();
  519. while (true) {
  520. const { value, done } = await reader.read();
  521. if (done || stopResponseFlag || _chatId !== $chatId) {
  522. responseMessage.done = true;
  523. messages = messages;
  524. if (stopResponseFlag) {
  525. controller.abort('User: Stop Response');
  526. await cancelOllamaRequest(localStorage.token, currentRequestId);
  527. } else {
  528. const messages = createMessagesList(responseMessageId);
  529. const res = await chatCompleted(localStorage.token, {
  530. model: model,
  531. messages: messages.map((m) => ({
  532. id: m.id,
  533. role: m.role,
  534. content: m.content,
  535. timestamp: m.timestamp
  536. })),
  537. chat_id: $chatId
  538. }).catch((error) => {
  539. console.error(error);
  540. return null;
  541. });
  542. if (res !== null) {
  543. // Update chat history with the new messages
  544. for (const message of res.messages) {
  545. history.messages[message.id] = {
  546. ...history.messages[message.id],
  547. ...(history.messages[message.id].content !== message.content
  548. ? { originalContent: history.messages[message.id].content }
  549. : {}),
  550. ...message
  551. };
  552. }
  553. }
  554. }
  555. currentRequestId = null;
  556. break;
  557. }
  558. try {
  559. let lines = value.split('\n');
  560. for (const line of lines) {
  561. if (line !== '') {
  562. console.log(line);
  563. let data = JSON.parse(line);
  564. if ('citations' in data) {
  565. responseMessage.citations = data.citations;
  566. continue;
  567. }
  568. if ('detail' in data) {
  569. throw data;
  570. }
  571. if ('id' in data) {
  572. console.log(data);
  573. currentRequestId = data.id;
  574. } else {
  575. if (data.done == false) {
  576. if (responseMessage.content == '' && data.message.content == '\n') {
  577. continue;
  578. } else {
  579. responseMessage.content += data.message.content;
  580. messages = messages;
  581. }
  582. } else {
  583. responseMessage.done = true;
  584. if (responseMessage.content == '') {
  585. responseMessage.error = {
  586. code: 400,
  587. content: `Oops! No text generated from Ollama, Please try again.`
  588. };
  589. }
  590. responseMessage.context = data.context ?? null;
  591. responseMessage.info = {
  592. total_duration: data.total_duration,
  593. load_duration: data.load_duration,
  594. sample_count: data.sample_count,
  595. sample_duration: data.sample_duration,
  596. prompt_eval_count: data.prompt_eval_count,
  597. prompt_eval_duration: data.prompt_eval_duration,
  598. eval_count: data.eval_count,
  599. eval_duration: data.eval_duration
  600. };
  601. messages = messages;
  602. if ($settings.notificationEnabled && !document.hasFocus()) {
  603. const notification = new Notification(
  604. selectedModelfile
  605. ? `${
  606. selectedModelfile.title.charAt(0).toUpperCase() +
  607. selectedModelfile.title.slice(1)
  608. }`
  609. : `${model}`,
  610. {
  611. body: responseMessage.content,
  612. icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
  613. }
  614. );
  615. }
  616. if ($settings.responseAutoCopy) {
  617. copyToClipboard(responseMessage.content);
  618. }
  619. if ($settings.responseAutoPlayback) {
  620. await tick();
  621. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  622. }
  623. }
  624. }
  625. }
  626. }
  627. } catch (error) {
  628. console.log(error);
  629. if ('detail' in error) {
  630. toast.error(error.detail);
  631. }
  632. break;
  633. }
  634. if (autoScroll) {
  635. scrollToBottom();
  636. }
  637. }
  638. if ($chatId == _chatId) {
  639. if ($settings.saveChatHistory ?? true) {
  640. chat = await updateChatById(localStorage.token, _chatId, {
  641. messages: messages,
  642. history: history,
  643. models: selectedModels
  644. });
  645. await chats.set(await getChatList(localStorage.token));
  646. }
  647. }
  648. } else {
  649. if (res !== null) {
  650. const error = await res.json();
  651. console.log(error);
  652. if ('detail' in error) {
  653. toast.error(error.detail);
  654. responseMessage.error = { content: error.detail };
  655. } else {
  656. toast.error(error.error);
  657. responseMessage.error = { content: error.error };
  658. }
  659. } else {
  660. toast.error(
  661. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  662. );
  663. responseMessage.error = {
  664. content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  665. provider: 'Ollama'
  666. })
  667. };
  668. }
  669. responseMessage.done = true;
  670. messages = messages;
  671. }
  672. stopResponseFlag = false;
  673. await tick();
  674. if (autoScroll) {
  675. scrollToBottom();
  676. }
  677. if (messages.length == 2 && messages.at(1).content !== '') {
  678. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  679. const _title = await generateChatTitle(userPrompt);
  680. await setChatTitle(_chatId, _title);
  681. }
  682. };
  683. const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
  684. const responseMessage = history.messages[responseMessageId];
  685. const docs = messages
  686. .filter((message) => message?.files ?? null)
  687. .map((message) =>
  688. message.files.filter((item) =>
  689. ['doc', 'collection', 'web_search_results'].includes(item.type)
  690. )
  691. )
  692. .flat(1);
  693. console.log(docs);
  694. scrollToBottom();
  695. try {
  696. const [res, controller] = await generateOpenAIChatCompletion(
  697. localStorage.token,
  698. {
  699. model: model.id,
  700. stream: true,
  701. stream_options:
  702. model.info?.meta?.capabilities?.usage ?? false
  703. ? {
  704. include_usage: true
  705. }
  706. : undefined,
  707. messages: [
  708. $settings.system || (responseMessage?.userContext ?? null)
  709. ? {
  710. role: 'system',
  711. content: `${promptTemplate($settings?.system ?? '', $user.name)}${
  712. responseMessage?.userContext ?? null
  713. ? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
  714. : ''
  715. }`
  716. }
  717. : undefined,
  718. ...messages
  719. ]
  720. .filter((message) => message?.content?.trim())
  721. .map((message, idx, arr) => ({
  722. role: message.role,
  723. ...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
  724. message.role === 'user'
  725. ? {
  726. content: [
  727. {
  728. type: 'text',
  729. text:
  730. arr.length - 1 !== idx
  731. ? message.content
  732. : message?.raContent ?? message.content
  733. },
  734. ...message.files
  735. .filter((file) => file.type === 'image')
  736. .map((file) => ({
  737. type: 'image_url',
  738. image_url: {
  739. url: file.url
  740. }
  741. }))
  742. ]
  743. }
  744. : {
  745. content:
  746. arr.length - 1 !== idx
  747. ? message.content
  748. : message?.raContent ?? message.content
  749. })
  750. })),
  751. seed: $settings?.params?.seed ?? undefined,
  752. stop:
  753. $settings?.params?.stop ?? undefined
  754. ? $settings.params.stop.map((str) =>
  755. decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  756. )
  757. : undefined,
  758. temperature: $settings?.params?.temperature ?? undefined,
  759. top_p: $settings?.params?.top_p ?? undefined,
  760. frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
  761. max_tokens: $settings?.params?.max_tokens ?? undefined,
  762. docs: docs.length > 0 ? docs : undefined,
  763. citations: docs.length > 0,
  764. chat_id: $chatId
  765. },
  766. `${OPENAI_API_BASE_URL}`
  767. );
  768. // Wait until history/message have been updated
  769. await tick();
  770. scrollToBottom();
  771. if (res && res.ok && res.body) {
  772. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  773. let lastUsage = null;
  774. for await (const update of textStream) {
  775. const { value, done, citations, error, usage } = update;
  776. if (error) {
  777. await handleOpenAIError(error, null, model, responseMessage);
  778. break;
  779. }
  780. if (done || stopResponseFlag || _chatId !== $chatId) {
  781. responseMessage.done = true;
  782. messages = messages;
  783. if (stopResponseFlag) {
  784. controller.abort('User: Stop Response');
  785. } else {
  786. const messages = createMessagesList(responseMessageId);
  787. const res = await chatCompleted(localStorage.token, {
  788. model: model.id,
  789. messages: messages.map((m) => ({
  790. id: m.id,
  791. role: m.role,
  792. content: m.content,
  793. timestamp: m.timestamp
  794. })),
  795. chat_id: $chatId
  796. }).catch((error) => {
  797. console.error(error);
  798. return null;
  799. });
  800. if (res !== null) {
  801. // Update chat history with the new messages
  802. for (const message of res.messages) {
  803. history.messages[message.id] = {
  804. ...history.messages[message.id],
  805. ...(history.messages[message.id].content !== message.content
  806. ? { originalContent: history.messages[message.id].content }
  807. : {}),
  808. ...message
  809. };
  810. }
  811. }
  812. }
  813. break;
  814. }
  815. if (usage) {
  816. lastUsage = usage;
  817. }
  818. if (citations) {
  819. responseMessage.citations = citations;
  820. continue;
  821. }
  822. if (responseMessage.content == '' && value == '\n') {
  823. continue;
  824. } else {
  825. responseMessage.content += value;
  826. messages = messages;
  827. }
  828. if (autoScroll) {
  829. scrollToBottom();
  830. }
  831. }
  832. if ($settings.notificationEnabled && !document.hasFocus()) {
  833. const notification = new Notification(`OpenAI ${model}`, {
  834. body: responseMessage.content,
  835. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  836. });
  837. }
  838. if ($settings.responseAutoCopy) {
  839. copyToClipboard(responseMessage.content);
  840. }
  841. if ($settings.responseAutoPlayback) {
  842. await tick();
  843. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  844. }
  845. if (lastUsage) {
  846. responseMessage.info = { ...lastUsage, openai: true };
  847. }
  848. if ($chatId == _chatId) {
  849. if ($settings.saveChatHistory ?? true) {
  850. chat = await updateChatById(localStorage.token, _chatId, {
  851. models: selectedModels,
  852. messages: messages,
  853. history: history
  854. });
  855. await chats.set(await getChatList(localStorage.token));
  856. }
  857. }
  858. } else {
  859. await handleOpenAIError(null, res, model, responseMessage);
  860. }
  861. } catch (error) {
  862. await handleOpenAIError(error, null, model, responseMessage);
  863. }
  864. messages = messages;
  865. stopResponseFlag = false;
  866. await tick();
  867. if (autoScroll) {
  868. scrollToBottom();
  869. }
  870. if (messages.length == 2) {
  871. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  872. const _title = await generateChatTitle(userPrompt);
  873. await setChatTitle(_chatId, _title);
  874. }
  875. };
  876. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  877. let errorMessage = '';
  878. let innerError;
  879. if (error) {
  880. innerError = error;
  881. } else if (res !== null) {
  882. innerError = await res.json();
  883. }
  884. console.error(innerError);
  885. if ('detail' in innerError) {
  886. toast.error(innerError.detail);
  887. errorMessage = innerError.detail;
  888. } else if ('error' in innerError) {
  889. if ('message' in innerError.error) {
  890. toast.error(innerError.error.message);
  891. errorMessage = innerError.error.message;
  892. } else {
  893. toast.error(innerError.error);
  894. errorMessage = innerError.error;
  895. }
  896. } else if ('message' in innerError) {
  897. toast.error(innerError.message);
  898. errorMessage = innerError.message;
  899. }
  900. responseMessage.error = {
  901. content:
  902. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  903. provider: model.name ?? model.id
  904. }) +
  905. '\n' +
  906. errorMessage
  907. };
  908. responseMessage.done = true;
  909. messages = messages;
  910. };
  911. const stopResponse = () => {
  912. stopResponseFlag = true;
  913. console.log('stopResponse');
  914. };
  915. const regenerateResponse = async (message) => {
  916. console.log('regenerateResponse');
  917. if (messages.length != 0) {
  918. let userMessage = history.messages[message.parentId];
  919. let userPrompt = userMessage.content;
  920. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  921. await sendPrompt(userPrompt, userMessage.id);
  922. } else {
  923. await sendPrompt(userPrompt, userMessage.id, message.model);
  924. }
  925. }
  926. };
  927. const continueGeneration = async () => {
  928. console.log('continueGeneration');
  929. const _chatId = JSON.parse(JSON.stringify($chatId));
  930. if (messages.length != 0 && messages.at(-1).done == true) {
  931. const responseMessage = history.messages[history.currentId];
  932. responseMessage.done = false;
  933. await tick();
  934. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  935. if (model) {
  936. if (model?.owned_by === 'openai') {
  937. await sendPromptOpenAI(
  938. model,
  939. history.messages[responseMessage.parentId].content,
  940. responseMessage.id,
  941. _chatId
  942. );
  943. } else
  944. await sendPromptOllama(
  945. model,
  946. history.messages[responseMessage.parentId].content,
  947. responseMessage.id,
  948. _chatId
  949. );
  950. }
  951. } else {
  952. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  953. }
  954. };
  955. const generateChatTitle = async (userPrompt) => {
  956. if ($settings?.title?.auto ?? true) {
  957. const model = $models.find((model) => model.id === selectedModels[0]);
  958. const titleModelId =
  959. model?.owned_by === 'openai' ?? false
  960. ? $settings?.title?.modelExternal ?? selectedModels[0]
  961. : $settings?.title?.model ?? selectedModels[0];
  962. const titleModel = $models.find((model) => model.id === titleModelId);
  963. console.log(titleModel);
  964. const title = await generateTitle(
  965. localStorage.token,
  966. $settings?.title?.prompt ??
  967. $i18n.t(
  968. "Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
  969. ) + ' {{prompt}}',
  970. titleModelId,
  971. userPrompt,
  972. $chatId,
  973. titleModel?.owned_by === 'openai' ?? false
  974. ? `${OPENAI_API_BASE_URL}`
  975. : `${OLLAMA_API_BASE_URL}/v1`
  976. );
  977. return title;
  978. } else {
  979. return `${userPrompt}`;
  980. }
  981. };
  982. const generateChatSearchQuery = async (modelId: string, prompt: string) => {
  983. const model = $models.find((model) => model.id === modelId);
  984. const taskModelId =
  985. model?.owned_by === 'openai' ?? false
  986. ? $settings?.title?.modelExternal ?? modelId
  987. : $settings?.title?.model ?? modelId;
  988. const taskModel = $models.find((model) => model.id === taskModelId);
  989. const previousMessages = messages
  990. .filter((message) => message.role === 'user')
  991. .map((message) => message.content);
  992. return await generateSearchQuery(
  993. localStorage.token,
  994. taskModelId,
  995. previousMessages,
  996. prompt,
  997. taskModel?.owned_by === 'openai' ?? false
  998. ? `${OPENAI_API_BASE_URL}`
  999. : `${OLLAMA_API_BASE_URL}/v1`
  1000. );
  1001. };
  1002. const setChatTitle = async (_chatId, _title) => {
  1003. if (_chatId === $chatId) {
  1004. title = _title;
  1005. }
  1006. if ($settings.saveChatHistory ?? true) {
  1007. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1008. await chats.set(await getChatList(localStorage.token));
  1009. }
  1010. };
  1011. const getTags = async () => {
  1012. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1013. return [];
  1014. });
  1015. };
  1016. const addTag = async (tagName) => {
  1017. const res = await addTagById(localStorage.token, $chatId, tagName);
  1018. tags = await getTags();
  1019. chat = await updateChatById(localStorage.token, $chatId, {
  1020. tags: tags
  1021. });
  1022. _tags.set(await getAllChatTags(localStorage.token));
  1023. };
  1024. const deleteTag = async (tagName) => {
  1025. const res = await deleteTagById(localStorage.token, $chatId, tagName);
  1026. tags = await getTags();
  1027. chat = await updateChatById(localStorage.token, $chatId, {
  1028. tags: tags
  1029. });
  1030. _tags.set(await getAllChatTags(localStorage.token));
  1031. };
  1032. </script>
<svelte:head>
	<!-- Browser tab title: truncate long chat titles to 30 chars; fall back to the app name. -->
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

<!-- Render once loading completes for an existing chat, or immediately for a new chat
     (no chatIdProp). The 260px offset accounts for the sidebar width when it is open. -->
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="min-h-screen max-h-screen {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>

		<!-- Banners are only shown on a fresh chat (no $chatId) with at most one model selected. -->
		{#if $banners.length > 0 && !$chatId && selectedModels.length <= 1}
			<div
				class="absolute top-[4.25rem] w-full {$showSidebar ? 'md:max-w-[calc(100%-260px)]' : ''}"
			>
				<div class=" flex flex-col gap-1 w-full">
					<!-- Dismissible banners whose ids were stored in localStorage stay hidden. -->
					{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
						<Banner
							{banner}
							on:dismiss={(e) => {
								const bannerId = e.detail;

								<!-- Persist the dismissal; prune ids of banners that no longer exist. -->
								localStorage.setItem(
									'dismissedBannerIds',
									JSON.stringify(
										[
											bannerId,
											...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
										].filter((id) => $banners.find((b) => b.id === id))
									)
								);
							}}
						/>
					{/each}
				</div>
			</div>
		{/if}

		<div class="flex flex-col flex-auto">
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
				id="messages-container"
				bind:this={messagesContainerElement}
				on:scroll={(e) => {
					<!-- Keep auto-scroll on only while the user is within ~5px of the bottom. -->
					autoScroll =
						messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
						messagesContainerElement.clientHeight + 5;
				}}
			>
				<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
					<Messages
						chatId={$chatId}
						{selectedModels}
						{processing}
						bind:history
						bind:messages
						bind:autoScroll
						bind:prompt
						bottomPadding={files.length > 0}
						{sendPrompt}
						{continueGeneration}
						{regenerateResponse}
					/>
				</div>
			</div>

			<MessageInput
				bind:files
				bind:prompt
				bind:autoScroll
				bind:webSearchEnabled
				bind:atSelectedModel
				{selectedModels}
				{messages}
				{submitPrompt}
				{stopResponse}
			/>
		</div>
	</div>
{/if}