Chat.svelte

<script lang="ts">
	import { v4 as uuidv4 } from 'uuid';
	import { toast } from 'svelte-sonner';

	import { getContext, onMount, tick } from 'svelte';
	import { goto } from '$app/navigation';
	import { page } from '$app/stores';

	import {
		chatId,
		chats,
		config,
		type Model,
		models,
		settings,
		showSidebar,
		tags as _tags,
		WEBUI_NAME,
		banners
	} from '$lib/stores';
	import { convertMessagesToHistory, copyToClipboard, splitStream } from '$lib/utils';

	import { cancelOllamaRequest, generateChatCompletion } from '$lib/apis/ollama';
	import {
		addTagById,
		createNewChat,
		deleteTagById,
		getAllChatTags,
		getChatById,
		getChatList,
		getTagsById,
		updateChatById
	} from '$lib/apis/chats';
	import {
		generateOpenAIChatCompletion,
		generateSearchQuery,
		generateTitle
	} from '$lib/apis/openai';

	import MessageInput from '$lib/components/chat/MessageInput.svelte';
	import Messages from '$lib/components/chat/Messages.svelte';
	import Navbar from '$lib/components/layout/Navbar.svelte';
	import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
	import { createOpenAITextStream } from '$lib/apis/streaming';
	import { queryMemory } from '$lib/apis/memories';
	import type { Writable } from 'svelte/store';
	import type { i18n as i18nType } from 'i18next';
	import { runWebSearch } from '$lib/apis/rag';
	import Banner from '../common/Banner.svelte';
	import { getUserSettings } from '$lib/apis/users';

	const i18n: Writable<i18nType> = getContext('i18n');
	export let chatIdProp = '';
	let loaded = false;

	let stopResponseFlag = false;
	let autoScroll = true;
	let processing = '';
	let messagesContainerElement: HTMLDivElement;
	let currentRequestId = null;

	let showModelSelector = true;

	let selectedModels = [''];
	let atSelectedModel: Model | undefined;
	let useWebSearch = false;

	let chat = null;
	let tags = [];

	let title = '';
	let prompt = '';
	let files = [];

	let messages = [];
	let history = {
		messages: {},
		currentId: null
	};
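
	// Rebuild the flat `messages` array by walking parentId links back from history.currentId.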
	$: if (history.currentId !== null) {
		let _messages = [];

		let currentMessage = history.messages[history.currentId];
		while (currentMessage !== null) {
			_messages.unshift({ ...currentMessage });
			currentMessage =
				currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
		}
		messages = _messages;
	} else {
		messages = [];
	}

	$: if (chatIdProp) {
		(async () => {
			if (await loadChat()) {
				await tick();
				loaded = true;

				window.setTimeout(() => scrollToBottom(), 0);
				const chatInput = document.getElementById('chat-textarea');
				chatInput?.focus();
			} else {
				await goto('/');
			}
		})();
	}
	onMount(async () => {
		if (!$chatId) {
			await initNewChat();
		} else {
			if (!($settings.saveChatHistory ?? true)) {
				await goto('/');
			}
		}
	});
	//////////////////////////
	// Web functions
	//////////////////////////

	const initNewChat = async () => {
		if (currentRequestId !== null) {
			await cancelOllamaRequest(localStorage.token, currentRequestId);
			currentRequestId = null;
		}
		window.history.replaceState(history.state, '', `/`);
		await chatId.set('');

		autoScroll = true;

		title = '';
		messages = [];
		history = {
			messages: {},
			currentId: null
		};

		if ($page.url.searchParams.get('models')) {
			selectedModels = $page.url.searchParams.get('models')?.split(',');
		} else if ($settings?.models) {
			selectedModels = $settings?.models;
		} else if ($config?.default_models) {
			console.log($config?.default_models.split(',') ?? '');
			selectedModels = $config?.default_models.split(',');
		} else {
			selectedModels = [''];
		}

		if ($page.url.searchParams.get('q')) {
			prompt = $page.url.searchParams.get('q') ?? '';
			if (prompt) {
				await tick();
				submitPrompt(prompt);
			}
		}

		selectedModels = selectedModels.map((modelId) =>
			$models.map((m) => m.id).includes(modelId) ? modelId : ''
		);

		const userSettings = await getUserSettings(localStorage.token);
		if (userSettings) {
			settings.set(userSettings.ui);
		} else {
			settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
		}

		const chatInput = document.getElementById('chat-textarea');
		setTimeout(() => chatInput?.focus(), 0);
	};
	const loadChat = async () => {
		chatId.set(chatIdProp);
		chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
			await goto('/');
			return null;
		});

		if (chat) {
			tags = await getTags();

			const chatContent = chat.chat;
			if (chatContent) {
				console.log(chatContent);

				selectedModels =
					(chatContent?.models ?? undefined) !== undefined
						? chatContent.models
						: [chatContent.models ?? ''];
				history =
					(chatContent?.history ?? undefined) !== undefined
						? chatContent.history
						: convertMessagesToHistory(chatContent.messages);
				title = chatContent.title;

				const userSettings = await getUserSettings(localStorage.token);
				if (userSettings) {
					await settings.set(userSettings.ui);
				} else {
					await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
				}
				await settings.set({
					...$settings,
					system: chatContent.system ?? $settings.system,
					params: chatContent.options ?? $settings.params
				});

				autoScroll = true;
				await tick();

				if (messages.length > 0) {
					history.messages[messages.at(-1).id].done = true;
				}
				await tick();

				return true;
			} else {
				return null;
			}
		}
	};
	const scrollToBottom = async () => {
		await tick();
		if (messagesContainerElement) {
			messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
		}
	};
	//////////////////////////
	// Ollama functions
	//////////////////////////

	const submitPrompt = async (userPrompt, _user = null) => {
		console.log('submitPrompt', $chatId);
		selectedModels = selectedModels.map((modelId) =>
			$models.map((m) => m.id).includes(modelId) ? modelId : ''
		);

		if (selectedModels.includes('')) {
			toast.error($i18n.t('Model not selected'));
		} else if (messages.length != 0 && messages.at(-1).done != true) {
			// Response not done
			console.log('wait');
		} else if (
			files.length > 0 &&
			files.filter((file) => file.upload_status === false).length > 0
		) {
			// Upload not done
			toast.error(
				$i18n.t(
					`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
				)
			);
		} else {
			// Reset chat message textarea height
			document.getElementById('chat-textarea').style.height = '';

			// Create user message
			let userMessageId = uuidv4();
			let userMessage = {
				id: userMessageId,
				parentId: messages.length !== 0 ? messages.at(-1).id : null,
				childrenIds: [],
				role: 'user',
				user: _user ?? undefined,
				content: userPrompt,
				files: files.length > 0 ? files : undefined,
				timestamp: Math.floor(Date.now() / 1000), // Unix epoch
				models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
			};

			// Add message to history and Set currentId to messageId
			history.messages[userMessageId] = userMessage;
			history.currentId = userMessageId;

			// Append messageId to childrenIds of parent message
			if (messages.length !== 0) {
				history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
			}

			// Wait until history/message have been updated
			await tick();

			// Create new chat if only one message in messages
			if (messages.length == 1) {
				if ($settings.saveChatHistory ?? true) {
					chat = await createNewChat(localStorage.token, {
						id: $chatId,
						title: $i18n.t('New Chat'),
						models: selectedModels,
						system: $settings.system ?? undefined,
						options: {
							...($settings.params ?? {})
						},
						messages: messages,
						history: history,
						tags: [],
						timestamp: Date.now()
					});
					await chats.set(await getChatList(localStorage.token));
					await chatId.set(chat.id);
				} else {
					await chatId.set('local');
				}
				await tick();
			}

			// Reset chat input textarea
			prompt = '';
			document.getElementById('chat-textarea').style.height = '';
			files = [];

			// Send prompt
			await sendPrompt(userPrompt, userMessageId);
		}
	};
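
	// Send the prompt to each selected model in parallel: create one assistant response
	// message per model, attach memory context and optional web search results, then route
	// to the Ollama or OpenAI handler.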
	const sendPrompt = async (prompt, parentId, modelId = null) => {
		const _chatId = JSON.parse(JSON.stringify($chatId));

		await Promise.all(
			(modelId
				? [modelId]
				: atSelectedModel !== undefined
				? [atSelectedModel.id]
				: selectedModels
			).map(async (modelId) => {
				console.log('modelId', modelId);
				const model = $models.filter((m) => m.id === modelId).at(0);

				if (model) {
					// If there are image files, check if model is vision capable
					const hasImages = messages.some((message) =>
						message.files?.some((file) => file.type === 'image')
					);
					if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
						toast.error(
							$i18n.t('Model {{modelName}} is not vision capable', {
								modelName: model.name ?? model.id
							})
						);
					}

					// Create response message
					let responseMessageId = uuidv4();
					let responseMessage = {
						parentId: parentId,
						id: responseMessageId,
						childrenIds: [],
						role: 'assistant',
						content: '',
						model: model.id,
						modelName: model.name ?? model.id,
						userContext: null,
						timestamp: Math.floor(Date.now() / 1000) // Unix epoch
					};

					// Add message to history and Set currentId to messageId
					history.messages[responseMessageId] = responseMessage;
					history.currentId = responseMessageId;

					// Append messageId to childrenIds of parent message
					if (parentId !== null) {
						history.messages[parentId].childrenIds = [
							...history.messages[parentId].childrenIds,
							responseMessageId
						];
					}

					await tick();

					let userContext = null;
					if ($settings?.memory ?? false) {
						if (userContext === null) {
							const res = await queryMemory(localStorage.token, prompt).catch((error) => {
								toast.error(error);
								return null;
							});

							if (res) {
								if (res.documents[0].length > 0) {
									userContext = res.documents.reduce((acc, doc, index) => {
										const createdAtTimestamp = res.metadatas[index][0].created_at;
										const createdAtDate = new Date(createdAtTimestamp * 1000)
											.toISOString()
											.split('T')[0];
										acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
										return acc;
									}, []);
								}
								console.log(userContext);
							}
						}
					}
					responseMessage.userContext = userContext;

					if (useWebSearch) {
						await runWebSearchForPrompt(model.id, parentId, responseMessageId);
					}

					if (model?.owned_by === 'openai') {
						await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
					} else if (model) {
						await sendPromptOllama(model, prompt, responseMessageId, _chatId);
					}
				} else {
					toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
				}
			})
		);

		await chats.set(await getChatList(localStorage.token));
	};
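
	// Generate a search query for the pending response, run the web search, and attach the
	// resulting collection to the response message as a 'websearch' file.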
	const runWebSearchForPrompt = async (model: string, parentId: string, responseId: string) => {
		const responseMessage = history.messages[responseId];
		responseMessage.progress = $i18n.t('Generating search query');
		messages = messages;

		const searchQuery = await generateChatSearchQuery(model, parentId);
		if (!searchQuery) {
			toast.warning($i18n.t('No search query generated'));
			responseMessage.progress = undefined;
			messages = messages;
			return;
		}

		responseMessage.progress = $i18n.t("Searching the web for '{{searchQuery}}'", { searchQuery });
		messages = messages;

		const searchDocument = await runWebSearch(localStorage.token, searchQuery);
		if (searchDocument === undefined) {
			toast.warning($i18n.t('No search results found'));
			responseMessage.progress = undefined;
			messages = messages;
			return;
		}

		if (!responseMessage.files) {
			responseMessage.files = [];
		}
		responseMessage.files.push({
			collection_name: searchDocument.collection_name,
			name: searchQuery,
			type: 'websearch',
			upload_status: true,
			error: '',
			urls: searchDocument.filenames
		});
		responseMessage.progress = undefined;
		messages = messages;
	};
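
	// Build the Ollama request body (system/memory context as a system message, image
	// attachments kept only on the last image-bearing message) and stream the
	// newline-delimited JSON response into the response message.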
	const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
		model = model.id;
		const responseMessage = history.messages[responseMessageId];

		// Wait until history/message have been updated
		await tick();

		// Scroll down
		scrollToBottom();

		const messagesBody = [
			$settings.system || (responseMessage?.userContext ?? null)
				? {
						role: 'system',
						content: `${$settings?.system ?? ''}${
							responseMessage?.userContext ?? null
								? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
								: ''
						}`
				  }
				: undefined,
			...messages
		]
			.filter((message) => message?.content?.trim())
			.map((message, idx, arr) => {
				// Prepare the base message object
				const baseMessage = {
					role: message.role,
					content: message.content
				};

				// Extract and format image URLs if any exist
				const imageUrls = message.files
					?.filter((file) => file.type === 'image')
					.map((file) => file.url.slice(file.url.indexOf(',') + 1));

				// Add images array only if it contains elements
				if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
					baseMessage.images = imageUrls;
				}
				return baseMessage;
			});

		let lastImageIndex = -1;

		// Find the index of the last object with images
		messagesBody.forEach((item, index) => {
			if (item.images) {
				lastImageIndex = index;
			}
		});

		// Remove images from all but the last one
		messagesBody.forEach((item, index) => {
			if (index !== lastImageIndex) {
				delete item.images;
			}
		});

		const docs = messages
			.filter((message) => message?.files ?? null)
			.map((message) =>
				message.files.filter((item) => ['doc', 'collection', 'websearch'].includes(item.type))
			)
			.flat(1);

		const [res, controller] = await generateChatCompletion(localStorage.token, {
			model: model,
			messages: messagesBody,
			options: {
				...($settings.params ?? {}),
				stop:
					$settings?.params?.stop ?? undefined
						? $settings.params.stop.map((str) =>
								decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
						  )
						: undefined,
				num_predict: $settings?.params?.max_tokens ?? undefined,
				repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
			},
			format: $settings.requestFormat ?? undefined,
			keep_alive: $settings.keepAlive ?? undefined,
			docs: docs.length > 0 ? docs : undefined,
			citations: docs.length > 0
		});

		if (res && res.ok) {
			console.log('controller', controller);
			const reader = res.body
				.pipeThrough(new TextDecoderStream())
				.pipeThrough(splitStream('\n'))
				.getReader();

			while (true) {
				const { value, done } = await reader.read();
				if (done || stopResponseFlag || _chatId !== $chatId) {
					responseMessage.done = true;
					messages = messages;

					if (stopResponseFlag) {
						controller.abort('User: Stop Response');
						await cancelOllamaRequest(localStorage.token, currentRequestId);
					}

					currentRequestId = null;
					break;
				}

				try {
					let lines = value.split('\n');

					for (const line of lines) {
						if (line !== '') {
							console.log(line);
							let data = JSON.parse(line);

							if ('citations' in data) {
								responseMessage.citations = data.citations;
								continue;
							}

							if ('detail' in data) {
								throw data;
							}

							if ('id' in data) {
								console.log(data);
								currentRequestId = data.id;
							} else {
								if (data.done == false) {
									if (responseMessage.content == '' && data.message.content == '\n') {
										continue;
									} else {
										responseMessage.content += data.message.content;
										messages = messages;
									}
								} else {
									responseMessage.done = true;

									if (responseMessage.content == '') {
										responseMessage.error = true;
										responseMessage.content =
											'Oops! No text generated from Ollama, Please try again.';
									}

									responseMessage.context = data.context ?? null;
									responseMessage.info = {
										total_duration: data.total_duration,
										load_duration: data.load_duration,
										sample_count: data.sample_count,
										sample_duration: data.sample_duration,
										prompt_eval_count: data.prompt_eval_count,
										prompt_eval_duration: data.prompt_eval_duration,
										eval_count: data.eval_count,
										eval_duration: data.eval_duration
									};
									messages = messages;
									if ($settings.notificationEnabled && !document.hasFocus()) {
										// `selectedModelfile` is not declared in this component, so fall back to the
										// model id for the notification title.
										const notification = new Notification(`${model}`, {
											body: responseMessage.content,
											icon: `${WEBUI_BASE_URL}/static/favicon.png`
										});
									}
									if ($settings.responseAutoCopy) {
										copyToClipboard(responseMessage.content);
									}

									if ($settings.responseAutoPlayback) {
										await tick();
										document.getElementById(`speak-button-${responseMessage.id}`)?.click();
									}
								}
							}
						}
					}
				} catch (error) {
					console.log(error);
					if ('detail' in error) {
						toast.error(error.detail);
					}
					break;
				}

				if (autoScroll) {
					scrollToBottom();
				}
			}

			if ($chatId == _chatId) {
				if ($settings.saveChatHistory ?? true) {
					chat = await updateChatById(localStorage.token, _chatId, {
						messages: messages,
						history: history,
						models: selectedModels
					});
					await chats.set(await getChatList(localStorage.token));
				}
			}
		} else {
			if (res !== null) {
				const error = await res.json();
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
					responseMessage.content = error.detail;
				} else {
					toast.error(error.error);
					responseMessage.content = error.error;
				}
			} else {
				toast.error(
					$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
				);
				responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
					provider: 'Ollama'
				});
			}

			responseMessage.error = true;
			responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
				provider: 'Ollama'
			});
			responseMessage.done = true;
			messages = messages;
		}

		stopResponseFlag = false;
		await tick();

		if (autoScroll) {
			scrollToBottom();
		}

		if (messages.length == 2 && messages.at(1).content !== '') {
			window.history.replaceState(history.state, '', `/c/${_chatId}`);
			const _title = await generateChatTitle(userPrompt);
			await setChatTitle(_chatId, _title);
		}
	};
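
	// Build the OpenAI-compatible request (system/memory context, image_url content parts
	// for user messages with images) and consume the streamed completion.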
	const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
		const responseMessage = history.messages[responseMessageId];

		const docs = messages
			.filter((message) => message?.files ?? null)
			.map((message) =>
				message.files.filter((item) => ['doc', 'collection', 'websearch'].includes(item.type))
			)
			.flat(1);
		console.log(docs);

		scrollToBottom();

		try {
			const [res, controller] = await generateOpenAIChatCompletion(
				localStorage.token,
				{
					model: model.id,
					stream: true,
					messages: [
						$settings.system || (responseMessage?.userContext ?? null)
							? {
									role: 'system',
									content: `${$settings?.system ?? ''}${
										responseMessage?.userContext ?? null
											? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
											: ''
									}`
							  }
							: undefined,
						...messages
					]
						.filter((message) => message?.content?.trim())
						.map((message, idx, arr) => ({
							role: message.role,
							...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
							message.role === 'user'
								? {
										content: [
											{
												type: 'text',
												text:
													arr.length - 1 !== idx
														? message.content
														: message?.raContent ?? message.content
											},
											...message.files
												.filter((file) => file.type === 'image')
												.map((file) => ({
													type: 'image_url',
													image_url: {
														url: file.url
													}
												}))
										]
								  }
								: {
										content:
											arr.length - 1 !== idx
												? message.content
												: message?.raContent ?? message.content
								  })
						})),
					seed: $settings?.params?.seed ?? undefined,
					stop:
						$settings?.params?.stop ?? undefined
							? $settings.params.stop.map((str) =>
									decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
							  )
							: undefined,
					temperature: $settings?.params?.temperature ?? undefined,
					top_p: $settings?.params?.top_p ?? undefined,
					frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
					max_tokens: $settings?.params?.max_tokens ?? undefined,
					docs: docs.length > 0 ? docs : undefined,
					citations: docs.length > 0
				},
				`${OPENAI_API_BASE_URL}`
			);

			// Wait until history/message have been updated
			await tick();

			scrollToBottom();

			if (res && res.ok && res.body) {
				const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);

				for await (const update of textStream) {
					const { value, done, citations, error } = update;
					if (error) {
						await handleOpenAIError(error, null, model, responseMessage);
						break;
					}
					if (done || stopResponseFlag || _chatId !== $chatId) {
						responseMessage.done = true;
						messages = messages;

						if (stopResponseFlag) {
							controller.abort('User: Stop Response');
						}

						break;
					}

					if (citations) {
						responseMessage.citations = citations;
						continue;
					}

					if (responseMessage.content == '' && value == '\n') {
						continue;
					} else {
						responseMessage.content += value;
						messages = messages;
					}
					if ($settings.notificationEnabled && !document.hasFocus()) {
						// `model` is the model object here; use its id so the title isn't "[object Object]".
						const notification = new Notification(`OpenAI ${model.id}`, {
							body: responseMessage.content,
							icon: `${WEBUI_BASE_URL}/static/favicon.png`
						});
					}
					if ($settings.responseAutoCopy) {
						copyToClipboard(responseMessage.content);
					}

					if ($settings.responseAutoPlayback) {
						await tick();
						document.getElementById(`speak-button-${responseMessage.id}`)?.click();
					}

					if (autoScroll) {
						scrollToBottom();
					}
				}

				if ($chatId == _chatId) {
					if ($settings.saveChatHistory ?? true) {
						chat = await updateChatById(localStorage.token, _chatId, {
							models: selectedModels,
							messages: messages,
							history: history
						});
						await chats.set(await getChatList(localStorage.token));
					}
				}
			} else {
				await handleOpenAIError(null, res, model, responseMessage);
			}
		} catch (error) {
			await handleOpenAIError(error, null, model, responseMessage);
		}
		messages = messages;

		stopResponseFlag = false;
		await tick();

		if (autoScroll) {
			scrollToBottom();
		}

		if (messages.length == 2) {
			window.history.replaceState(history.state, '', `/c/${_chatId}`);
			const _title = await generateChatTitle(userPrompt);
			await setChatTitle(_chatId, _title);
		}
	};
	const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
		let errorMessage = '';
		let innerError;

		if (error) {
			innerError = error;
		} else if (res !== null) {
			innerError = await res.json();
		}
		console.error(innerError);
		if ('detail' in innerError) {
			toast.error(innerError.detail);
			errorMessage = innerError.detail;
		} else if ('error' in innerError) {
			if ('message' in innerError.error) {
				toast.error(innerError.error.message);
				errorMessage = innerError.error.message;
			} else {
				toast.error(innerError.error);
				errorMessage = innerError.error;
			}
		} else if ('message' in innerError) {
			toast.error(innerError.message);
			errorMessage = innerError.message;
		}

		responseMessage.error = true;
		responseMessage.content =
			$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
				provider: model.name ?? model.id
			}) +
			'\n' +
			errorMessage;
		responseMessage.done = true;

		messages = messages;
	};
	const stopResponse = () => {
		stopResponseFlag = true;
		console.log('stopResponse');
	};
	const regenerateResponse = async (message) => {
		console.log('regenerateResponse');

		if (messages.length != 0) {
			let userMessage = history.messages[message.parentId];
			let userPrompt = userMessage.content;

			if ((userMessage?.models ?? [...selectedModels]).length == 1) {
				await sendPrompt(userPrompt, userMessage.id);
			} else {
				await sendPrompt(userPrompt, userMessage.id, message.model);
			}
		}
	};
	const continueGeneration = async () => {
		console.log('continueGeneration');
		const _chatId = JSON.parse(JSON.stringify($chatId));

		if (messages.length != 0 && messages.at(-1).done == true) {
			const responseMessage = history.messages[history.currentId];
			responseMessage.done = false;
			await tick();

			const model = $models.filter((m) => m.id === responseMessage.model).at(0);

			if (model) {
				if (model?.external) {
					await sendPromptOpenAI(
						model,
						history.messages[responseMessage.parentId].content,
						responseMessage.id,
						_chatId
					);
				} else {
					await sendPromptOllama(
						model,
						history.messages[responseMessage.parentId].content,
						responseMessage.id,
						_chatId
					);
				}
			} else {
				// The last response references a model that no longer exists.
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId: responseMessage.model }));
			}
		}
	};
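
	// Generate a short chat title with the configured title model; falls back to the raw
	// prompt when auto-titling is disabled.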
	const generateChatTitle = async (userPrompt) => {
		if ($settings?.title?.auto ?? true) {
			const model = $models.find((model) => model.id === selectedModels[0]);

			const titleModelId =
				model?.external ?? false
					? $settings?.title?.modelExternal ?? selectedModels[0]
					: $settings?.title?.model ?? selectedModels[0];
			const titleModel = $models.find((model) => model.id === titleModelId);

			console.log(titleModel);
			const title = await generateTitle(
				localStorage.token,
				$settings?.title?.prompt ??
					$i18n.t(
						"Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
					) + ' {{prompt}}',
				titleModelId,
				userPrompt,
				titleModel?.owned_by === 'openai' ?? false
					? `${OPENAI_API_BASE_URL}`
					: `${OLLAMA_API_BASE_URL}/v1`
			);

			return title;
		} else {
			return `${userPrompt}`;
		}
	};
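
	// Pick the configured task model and generate a web search query from the user's messages.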
	const generateChatSearchQuery = async (modelId: string, messageId: string) => {
		const model = $models.find((model) => model.id === modelId);
		const taskModelId =
			model?.external ?? false
				? $settings?.title?.modelExternal ?? modelId
				: $settings?.title?.model ?? modelId;
		const taskModel = $models.find((model) => model.id === taskModelId);

		const userMessage = history.messages[messageId];
		const userPrompt = userMessage.content;

		const previousMessages = messages
			.filter((message) => message.role === 'user')
			.map((message) => message.content);

		return await generateSearchQuery(
			localStorage.token,
			taskModelId,
			previousMessages,
			userPrompt,
			taskModel?.owned_by === 'openai' ?? false
				? `${OPENAI_API_BASE_URL}`
				: `${OLLAMA_API_BASE_URL}/v1`
		);
	};
	const setChatTitle = async (_chatId, _title) => {
		if (_chatId === $chatId) {
			title = _title;
		}

		if ($settings.saveChatHistory ?? true) {
			chat = await updateChatById(localStorage.token, _chatId, { title: _title });
			await chats.set(await getChatList(localStorage.token));
		}
	};
	const getTags = async () => {
		return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
			return [];
		});
	};

	const addTag = async (tagName) => {
		const res = await addTagById(localStorage.token, $chatId, tagName);
		tags = await getTags();

		chat = await updateChatById(localStorage.token, $chatId, {
			tags: tags
		});

		_tags.set(await getAllChatTags(localStorage.token));
	};

	const deleteTag = async (tagName) => {
		const res = await deleteTagById(localStorage.token, $chatId, tagName);
		tags = await getTags();

		chat = await updateChatById(localStorage.token, $chatId, {
			tags: tags
		});

		_tags.set(await getAllChatTags(localStorage.token));
	};
</script>

<svelte:head>
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="min-h-screen max-h-screen {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>

		{#if $banners.length > 0 && !$chatId && selectedModels.length <= 1}
			<div
				class="absolute top-[4.25rem] w-full {$showSidebar ? 'md:max-w-[calc(100%-260px)]' : ''}"
			>
				<div class=" flex flex-col gap-1 w-full">
					{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
						<Banner
							{banner}
							on:dismiss={(e) => {
								const bannerId = e.detail;

								localStorage.setItem(
									'dismissedBannerIds',
									JSON.stringify(
										[
											bannerId,
											...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
										].filter((id) => $banners.find((b) => b.id === id))
									)
								);
							}}
						/>
					{/each}
				</div>
			</div>
		{/if}

		<div class="flex flex-col flex-auto">
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
				id="messages-container"
				bind:this={messagesContainerElement}
				on:scroll={(e) => {
					autoScroll =
						messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
						messagesContainerElement.clientHeight + 5;
				}}
			>
				<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
					<Messages
						chatId={$chatId}
						{selectedModels}
						{processing}
						bind:history
						bind:messages
						bind:autoScroll
						bind:prompt
						bottomPadding={files.length > 0}
						{sendPrompt}
						{continueGeneration}
						{regenerateResponse}
					/>
				</div>
			</div>
		</div>
	</div>

	<MessageInput
		bind:files
		bind:prompt
		bind:autoScroll
		bind:useWebSearch
		bind:atSelectedModel
		{selectedModels}
		{messages}
		{submitPrompt}
		{stopResponse}
		webSearchAvailable={$config.enable_websearch ?? false}
	/>
{/if}