<!-- Chat.svelte -->
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import { getContext, onMount, tick } from 'svelte';
  5. import { goto } from '$app/navigation';
  6. import { page } from '$app/stores';
  7. import {
  8. chatId,
  9. chats,
  10. config,
  11. type Model,
  12. models,
  13. settings,
  14. showSidebar,
  15. tags as _tags,
  16. WEBUI_NAME,
  17. banners
  18. } from '$lib/stores';
  19. import { convertMessagesToHistory, copyToClipboard, splitStream } from '$lib/utils';
  20. import { cancelOllamaRequest, generateChatCompletion } from '$lib/apis/ollama';
  21. import {
  22. addTagById,
  23. createNewChat,
  24. deleteTagById,
  25. getAllChatTags,
  26. getChatById,
  27. getChatList,
  28. getTagsById,
  29. updateChatById
  30. } from '$lib/apis/chats';
  31. import {
  32. generateOpenAIChatCompletion,
  33. generateSearchQuery,
  34. generateTitle
  35. } from '$lib/apis/openai';
  36. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  37. import Messages from '$lib/components/chat/Messages.svelte';
  38. import Navbar from '$lib/components/layout/Navbar.svelte';
  39. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  40. import { createOpenAITextStream } from '$lib/apis/streaming';
  41. import { queryMemory } from '$lib/apis/memories';
  42. import type { Writable } from 'svelte/store';
  43. import type { i18n as i18nType } from 'i18next';
  44. import { runWebSearch } from '$lib/apis/rag';
  45. import Banner from '../common/Banner.svelte';
  46. import { getUserSettings } from '$lib/apis/users';
// i18n translator instance provided by an ancestor component via context.
const i18n: Writable<i18nType> = getContext('i18n');

// Chat id passed in by the route; empty string means a brand-new chat.
export let chatIdProp = '';
let loaded = false;

// Set by stopResponse(); polled inside the streaming loops to abort generation.
let stopResponseFlag = false;
let autoScroll = true;
let processing = '';
let messagesContainerElement: HTMLDivElement;
// Server-assigned id of the in-flight Ollama request, kept for cancellation.
let currentRequestId = null;

let showModelSelector = true;
let selectedModels = [''];
// Model temporarily targeted via the "@model" mention syntax, if any.
let atSelectedModel: Model | undefined;
let useWebSearch = false;

let chat = null;
let tags = [];
let title = '';
let prompt = '';
let files = [];

// Linear view of the active branch; derived reactively from `history` below.
let messages = [];
// Branching conversation tree: flat map of id -> message plus the id of the
// leaf currently displayed.
let history = {
	messages: {},
	currentId: null
};
// Rebuild the linear `messages` array whenever the active leaf changes:
// walk parent links from the current leaf back to the root.
$: if (history.currentId !== null) {
	let _messages = [];

	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}

// Load the chat whenever the route-provided id changes; redirect home when
// the chat cannot be loaded.
$: if (chatIdProp) {
	(async () => {
		if (await loadChat()) {
			await tick();
			loaded = true;

			// Defer the scroll until the DOM has rendered the loaded messages.
			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}
  94. onMount(async () => {
  95. if (!$chatId) {
  96. await initNewChat();
  97. } else {
  98. if (!($settings.saveChatHistory ?? true)) {
  99. await goto('/');
  100. }
  101. }
  102. });
  103. //////////////////////////
  104. // Web functions
  105. //////////////////////////
// Reset all per-chat state for a brand-new conversation: cancel any in-flight
// Ollama request, seed the model selection (URL param > saved user settings >
// server default), optionally auto-submit a `?q=` prompt, and refresh settings.
const initNewChat = async () => {
	if (currentRequestId !== null) {
		await cancelOllamaRequest(localStorage.token, currentRequestId);
		currentRequestId = null;
	}
	// NOTE(review): `history` here is the component's local chat-history object
	// (it shadows window.history), so `history.state` is undefined — confirm
	// that passing undefined state is intended.
	window.history.replaceState(history.state, '', `/`);
	await chatId.set('');

	autoScroll = true;

	title = '';
	messages = [];
	history = {
		messages: {},
		currentId: null
	};

	// Model selection priority: URL `models` param > user settings > server default.
	if ($page.url.searchParams.get('models')) {
		selectedModels = $page.url.searchParams.get('models')?.split(',');
	} else if ($settings?.models) {
		selectedModels = $settings?.models;
	} else if ($config?.default_models) {
		console.log($config?.default_models.split(',') ?? '');
		selectedModels = $config?.default_models.split(',');
	} else {
		selectedModels = [''];
	}

	// Auto-submit a prompt supplied via the `q` query parameter.
	if ($page.url.searchParams.get('q')) {
		prompt = $page.url.searchParams.get('q') ?? '';

		if (prompt) {
			await tick();
			submitPrompt(prompt);
		}
	}

	// Blank out any selected model ids that are no longer available.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	const userSettings = await getUserSettings(localStorage.token);

	if (userSettings) {
		settings.set(userSettings.ui);
	} else {
		// Fall back to locally persisted settings when none exist server-side.
		settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
	}

	const chatInput = document.getElementById('chat-textarea');
	setTimeout(() => chatInput?.focus(), 0);
};
// Fetch the chat identified by `chatIdProp` and hydrate component state from
// it. Returns true on success; returns null/undefined (falsy) when the chat
// cannot be loaded, which the reactive caller treats as failure.
const loadChat = async () => {
	chatId.set(chatIdProp);
	chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
		await goto('/');
		return null;
	});

	if (chat) {
		tags = await getTags();

		const chatContent = chat.chat;

		if (chatContent) {
			console.log(chatContent);

			// Older chats stored a single model value; normalize to an array.
			selectedModels =
				(chatContent?.models ?? undefined) !== undefined
					? chatContent.models
					: [chatContent.models ?? ''];
			// Older chats stored a flat message list; convert to the tree shape.
			history =
				(chatContent?.history ?? undefined) !== undefined
					? chatContent.history
					: convertMessagesToHistory(chatContent.messages);
			title = chatContent.title;

			const userSettings = await getUserSettings(localStorage.token);

			if (userSettings) {
				await settings.set(userSettings.ui);
			} else {
				await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
			}

			// Per-chat system prompt / params override the global settings.
			await settings.set({
				...$settings,
				system: chatContent.system ?? $settings.system,
				params: chatContent.options ?? $settings.params
			});
			autoScroll = true;
			await tick();

			// Mark the last message done so a previously interrupted stream does
			// not leave the UI stuck in a "generating" state.
			if (messages.length > 0) {
				history.messages[messages.at(-1).id].done = true;
			}
			await tick();

			return true;
		} else {
			return null;
		}
	}
};
  192. const scrollToBottom = async () => {
  193. await tick();
  194. if (messagesContainerElement) {
  195. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  196. }
  197. };
  198. //////////////////////////
  199. // Ollama functions
  200. //////////////////////////
// Validate the current input state and, when ready, append the user's message
// to the history tree, create (and optionally persist) the chat on the very
// first message, then hand off to sendPrompt() to generate responses.
const submitPrompt = async (userPrompt, _user = null) => {
	console.log('submitPrompt', $chatId);

	// Blank out selections whose model no longer exists so the guard below fires.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	if (selectedModels.includes('')) {
		toast.error($i18n.t('Model not selected'));
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done
		console.log('wait');
	} else if (
		files.length > 0 &&
		files.filter((file) => file.upload_status === false).length > 0
	) {
		// Upload not done
		toast.error(
			$i18n.t(
				`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
			)
		);
	} else {
		// Reset chat message textarea height
		document.getElementById('chat-textarea').style.height = '';

		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			user: _user ?? undefined,
			content: userPrompt,
			files: files.length > 0 ? files : undefined,
			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
			// De-duplicate the model ids while preserving order.
			models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
		};

		// Add message to history and Set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;

		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}

		// Wait until history/message have been updated
		await tick();

		// Create new chat if only one message in messages
		if (messages.length == 1) {
			if ($settings.saveChatHistory ?? true) {
				chat = await createNewChat(localStorage.token, {
					id: $chatId,
					title: $i18n.t('New Chat'),
					models: selectedModels,
					system: $settings.system ?? undefined,
					options: {
						...($settings.params ?? {})
					},
					messages: messages,
					history: history,
					tags: [],
					timestamp: Date.now()
				});
				await chats.set(await getChatList(localStorage.token));
				await chatId.set(chat.id);
			} else {
				// History saving disabled: keep the chat purely client-side.
				await chatId.set('local');
			}
			await tick();
		}

		// Reset chat input textarea
		prompt = '';
		document.getElementById('chat-textarea').style.height = '';
		files = [];

		// Send prompt
		await sendPrompt(userPrompt, userMessageId);
	}
};
// Fan the prompt out to every target model in parallel. For each model a
// placeholder assistant message is appended to the history tree, optional
// memory context and web-search results are attached, then the provider-
// specific sender (OpenAI-compatible vs Ollama) streams the completion into it.
const sendPrompt = async (prompt, parentId, modelId = null) => {
	// Snapshot the chat id: the user may navigate away mid-stream.
	const _chatId = JSON.parse(JSON.stringify($chatId));
	await Promise.all(
		// Target priority: explicit modelId > "@model" mention > selected models.
		(modelId
			? [modelId]
			: atSelectedModel !== undefined
			? [atSelectedModel.id]
			: selectedModels
		).map(async (modelId) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);

			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);

				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}

				// Create response message
				let responseMessageId = uuidv4();
				let responseMessage = {
					parentId: parentId,
					id: responseMessageId,
					childrenIds: [],
					role: 'assistant',
					content: '',
					model: model.id,
					modelName: model.name ?? model.id,
					userContext: null,
					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
				};

				// Add message to history and Set currentId to messageId
				history.messages[responseMessageId] = responseMessage;
				history.currentId = responseMessageId;

				// Append messageId to childrenIds of parent message
				if (parentId !== null) {
					history.messages[parentId].childrenIds = [
						...history.messages[parentId].childrenIds,
						responseMessageId
					];
				}

				await tick();

				// Optional long-term memory lookup: matching memories become a
				// dated "user context" list prepended to the system prompt later.
				let userContext = null;
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});

						if (res) {
							if (res.documents[0].length > 0) {
								userContext = res.documents.reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[index][0].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];
									acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
									return acc;
								}, []);
							}

							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;

				if (useWebSearch) {
					await getWebSearchResultsAsFiles(model.id, parentId, responseMessageId);
				}

				if (model?.owned_by === 'openai') {
					await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);

	// Refresh the sidebar chat list (titles / ordering may have changed).
	await chats.set(await getChatList(localStorage.token));
};
// Run a web search for the conversation and attach the results to the pending
// response message as a 'websearch' file, updating the inline progress label
// at each stage. Bails out (with a warning toast) when no query can be
// generated or no results are found.
const getWebSearchResultsAsFiles = async (
	model: string,
	parentId: string,
	responseId: string
) => {
	const responseMessage = history.messages[responseId];

	responseMessage.progress = $i18n.t('Generating search query');
	// Self-assignment triggers Svelte reactivity so the progress label renders.
	messages = messages;

	const searchQuery = await generateChatSearchQuery(model, parentId);
	if (!searchQuery) {
		toast.warning($i18n.t('No search query generated'));
		responseMessage.progress = undefined;
		messages = messages;
		return;
	}

	responseMessage.progress = $i18n.t("Searching the web for '{{searchQuery}}'", { searchQuery });
	messages = messages;

	const searchDocument = await runWebSearch(localStorage.token, searchQuery);
	if (searchDocument === undefined) {
		toast.warning($i18n.t('No search results found'));
		responseMessage.progress = undefined;
		messages = messages;
		return;
	}

	if (!responseMessage.files) {
		responseMessage.files = [];
	}
	// Attach the indexed results so the RAG pipeline can cite them.
	responseMessage.files.push({
		collection_name: searchDocument.collection_name,
		name: searchQuery,
		type: 'websearch',
		upload_status: true,
		error: '',
		urls: searchDocument.filenames
	});
	responseMessage.progress = undefined;
	messages = messages;
};
// Stream a completion from the Ollama backend into `responseMessageId`.
// Builds the messages payload (system prompt + memory context, image
// attachments kept only on the latest image-bearing message), consumes the
// newline-delimited JSON stream, persists the chat, and auto-titles the chat
// after the first exchange.
const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
	// Only the model id is needed from here on.
	model = model.id;
	const responseMessage = history.messages[responseMessageId];

	// Wait until history/message have been updated
	await tick();

	// Scroll down
	scrollToBottom();

	const messagesBody = [
		$settings.system || (responseMessage?.userContext ?? null)
			? {
					role: 'system',
					content: `${$settings?.system ?? ''}${
						responseMessage?.userContext ?? null
							? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
							: ''
					}`
			  }
			: undefined,
		...messages
	]
		.filter((message) => message?.content?.trim())
		.map((message, idx, arr) => {
			// Prepare the base message object
			const baseMessage = {
				role: message.role,
				content: message.content
			};

			// Extract and format image URLs if any exist
			// (strips the data-URI prefix up to and including the comma).
			const imageUrls = message.files
				?.filter((file) => file.type === 'image')
				.map((file) => file.url.slice(file.url.indexOf(',') + 1));

			// Add images array only if it contains elements
			if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
				baseMessage.images = imageUrls;
			}

			return baseMessage;
		});

	let lastImageIndex = -1;

	// Find the index of the last object with images
	messagesBody.forEach((item, index) => {
		if (item.images) {
			lastImageIndex = index;
		}
	});

	// Remove images from all but the last one
	messagesBody.forEach((item, index) => {
		if (index !== lastImageIndex) {
			delete item.images;
		}
	});

	// Collect document/collection/web-search attachments for RAG citations.
	const docs = messages
		.filter((message) => message?.files ?? null)
		.map((message) =>
			message.files.filter((item) => ['doc', 'collection', 'websearch'].includes(item.type))
		)
		.flat(1);

	const [res, controller] = await generateChatCompletion(localStorage.token, {
		model: model,
		messages: messagesBody,
		options: {
			...($settings.params ?? {}),
			// Stop strings are stored escaped/URI-encoded; decode before sending.
			stop:
				$settings?.params?.stop ?? undefined
					? $settings.params.stop.map((str) =>
							decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
					  )
					: undefined,
			num_predict: $settings?.params?.max_tokens ?? undefined,
			repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
		},
		format: $settings.requestFormat ?? undefined,
		keep_alive: $settings.keepAlive ?? undefined,
		docs: docs.length > 0 ? docs : undefined,
		citations: docs.length > 0
	});

	if (res && res.ok) {
		console.log('controller', controller);

		// Ollama streams newline-delimited JSON objects.
		const reader = res.body
			.pipeThrough(new TextDecoderStream())
			.pipeThrough(splitStream('\n'))
			.getReader();

		while (true) {
			const { value, done } = await reader.read();
			// Stop when the stream ends, the user aborts, or the user switched chats.
			if (done || stopResponseFlag || _chatId !== $chatId) {
				responseMessage.done = true;
				messages = messages;

				if (stopResponseFlag) {
					controller.abort('User: Stop Response');
					await cancelOllamaRequest(localStorage.token, currentRequestId);
				}

				currentRequestId = null;

				break;
			}

			try {
				let lines = value.split('\n');

				for (const line of lines) {
					if (line !== '') {
						console.log(line);
						let data = JSON.parse(line);

						if ('citations' in data) {
							responseMessage.citations = data.citations;
							continue;
						}

						if ('detail' in data) {
							throw data;
						}

						if ('id' in data) {
							console.log(data);
							// Server-assigned request id, kept for cancellation.
							currentRequestId = data.id;
						} else {
							if (data.done == false) {
								// Skip a leading bare newline so responses don't start blank.
								if (responseMessage.content == '' && data.message.content == '\n') {
									continue;
								} else {
									responseMessage.content += data.message.content;
									messages = messages;
								}
							} else {
								// Final chunk: record context and timing statistics.
								responseMessage.done = true;

								if (responseMessage.content == '') {
									responseMessage.error = true;
									responseMessage.content =
										'Oops! No text generated from Ollama, Please try again.';
								}

								responseMessage.context = data.context ?? null;
								responseMessage.info = {
									total_duration: data.total_duration,
									load_duration: data.load_duration,
									sample_count: data.sample_count,
									sample_duration: data.sample_duration,
									prompt_eval_count: data.prompt_eval_count,
									prompt_eval_duration: data.prompt_eval_duration,
									eval_count: data.eval_count,
									eval_duration: data.eval_duration
								};
								messages = messages;

								if ($settings.notificationEnabled && !document.hasFocus()) {
									// NOTE(review): `selectedModelfile` is not declared anywhere in
									// this component's visible scope — confirm it still exists, or
									// this branch throws a ReferenceError at runtime.
									const notification = new Notification(
										selectedModelfile
											? `${
													selectedModelfile.title.charAt(0).toUpperCase() +
													selectedModelfile.title.slice(1)
											  }`
											: `${model}`,
										{
											body: responseMessage.content,
											icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
										}
									);
								}

								if ($settings.responseAutoCopy) {
									copyToClipboard(responseMessage.content);
								}

								if ($settings.responseAutoPlayback) {
									await tick();
									document.getElementById(`speak-button-${responseMessage.id}`)?.click();
								}
							}
						}
					}
				}
			} catch (error) {
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
				}
				break;
			}

			if (autoScroll) {
				scrollToBottom();
			}
		}

		// Persist the finished exchange (only if the user is still on this chat).
		if ($chatId == _chatId) {
			if ($settings.saveChatHistory ?? true) {
				chat = await updateChatById(localStorage.token, _chatId, {
					messages: messages,
					history: history,
					models: selectedModels
				});
				await chats.set(await getChatList(localStorage.token));
			}
		}
	} else {
		if (res !== null) {
			const error = await res.json();
			console.log(error);
			if ('detail' in error) {
				toast.error(error.detail);
				responseMessage.content = error.detail;
			} else {
				toast.error(error.error);
				responseMessage.content = error.error;
			}
		} else {
			toast.error(
				$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
			);
			responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
				provider: 'Ollama'
			});
		}

		responseMessage.error = true;
		// NOTE(review): this overwrites the more specific content set just above —
		// confirm whether the detailed error message was meant to be kept.
		responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
			provider: 'Ollama'
		});
		responseMessage.done = true;
		messages = messages;
	}

	stopResponseFlag = false;
	await tick();

	if (autoScroll) {
		scrollToBottom();
	}

	// Auto-generate a title once the first exchange (user + assistant) completes.
	if (messages.length == 2 && messages.at(1).content !== '') {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);
		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}
};
// Stream a completion from an OpenAI-compatible backend into
// `responseMessageId`. Builds the payload (system prompt + memory context,
// multi-part content for image attachments), consumes the parsed text stream,
// persists the chat, and auto-titles the chat after the first exchange.
const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
	const responseMessage = history.messages[responseMessageId];

	// Collect document/collection/web-search attachments for RAG citations.
	const docs = messages
		.filter((message) => message?.files ?? null)
		.map((message) =>
			message.files.filter((item) => ['doc', 'collection', 'websearch'].includes(item.type))
		)
		.flat(1);
	console.log(docs);

	scrollToBottom();

	try {
		const [res, controller] = await generateOpenAIChatCompletion(
			localStorage.token,
			{
				model: model.id,
				stream: true,
				messages: [
					$settings.system || (responseMessage?.userContext ?? null)
						? {
								role: 'system',
								content: `${$settings?.system ?? ''}${
									responseMessage?.userContext ?? null
										? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
										: ''
								}`
						  }
						: undefined,
					...messages
				]
					.filter((message) => message?.content?.trim())
					.map((message, idx, arr) => ({
						role: message.role,
						// User messages with images use OpenAI's multi-part content format;
						// only the final message substitutes RAG-augmented content (raContent).
						...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
						message.role === 'user'
							? {
									content: [
										{
											type: 'text',
											text:
												arr.length - 1 !== idx
													? message.content
													: message?.raContent ?? message.content
										},
										...message.files
											.filter((file) => file.type === 'image')
											.map((file) => ({
												type: 'image_url',
												image_url: {
													url: file.url
												}
											}))
									]
							  }
							: {
									content:
										arr.length - 1 !== idx
											? message.content
											: message?.raContent ?? message.content
							  })
					})),
				seed: $settings?.params?.seed ?? undefined,
				// Stop strings are stored escaped/URI-encoded; decode before sending.
				stop:
					$settings?.params?.stop ?? undefined
						? $settings.params.stop.map((str) =>
								decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
						  )
						: undefined,
				temperature: $settings?.params?.temperature ?? undefined,
				top_p: $settings?.params?.top_p ?? undefined,
				frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
				max_tokens: $settings?.params?.max_tokens ?? undefined,
				docs: docs.length > 0 ? docs : undefined,
				citations: docs.length > 0
			},
			`${OPENAI_API_BASE_URL}`
		);

		// Wait until history/message have been updated
		await tick();

		scrollToBottom();

		if (res && res.ok && res.body) {
			const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);

			for await (const update of textStream) {
				const { value, done, citations, error } = update;
				if (error) {
					await handleOpenAIError(error, null, model, responseMessage);
					break;
				}
				// Stop when the stream ends, the user aborts, or the user switched chats.
				if (done || stopResponseFlag || _chatId !== $chatId) {
					responseMessage.done = true;
					messages = messages;

					if (stopResponseFlag) {
						controller.abort('User: Stop Response');
					}

					break;
				}

				if (citations) {
					responseMessage.citations = citations;
					continue;
				}

				// Skip a leading bare newline so responses don't start blank.
				if (responseMessage.content == '' && value == '\n') {
					continue;
				} else {
					responseMessage.content += value;
					messages = messages;
				}

				if ($settings.notificationEnabled && !document.hasFocus()) {
					const notification = new Notification(`OpenAI ${model}`, {
						body: responseMessage.content,
						icon: `${WEBUI_BASE_URL}/static/favicon.png`
					});
				}

				if ($settings.responseAutoCopy) {
					copyToClipboard(responseMessage.content);
				}

				if ($settings.responseAutoPlayback) {
					await tick();
					document.getElementById(`speak-button-${responseMessage.id}`)?.click();
				}

				if (autoScroll) {
					scrollToBottom();
				}
			}

			// Persist the finished exchange (only if the user is still on this chat).
			if ($chatId == _chatId) {
				if ($settings.saveChatHistory ?? true) {
					chat = await updateChatById(localStorage.token, _chatId, {
						models: selectedModels,
						messages: messages,
						history: history
					});
					await chats.set(await getChatList(localStorage.token));
				}
			}
		} else {
			await handleOpenAIError(null, res, model, responseMessage);
		}
	} catch (error) {
		await handleOpenAIError(error, null, model, responseMessage);
	}
	messages = messages;

	stopResponseFlag = false;
	await tick();

	if (autoScroll) {
		scrollToBottom();
	}

	// Auto-generate a title once the first exchange (user + assistant) completes.
	if (messages.length == 2) {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);
		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}
};
  769. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  770. let errorMessage = '';
  771. let innerError;
  772. if (error) {
  773. innerError = error;
  774. } else if (res !== null) {
  775. innerError = await res.json();
  776. }
  777. console.error(innerError);
  778. if ('detail' in innerError) {
  779. toast.error(innerError.detail);
  780. errorMessage = innerError.detail;
  781. } else if ('error' in innerError) {
  782. if ('message' in innerError.error) {
  783. toast.error(innerError.error.message);
  784. errorMessage = innerError.error.message;
  785. } else {
  786. toast.error(innerError.error);
  787. errorMessage = innerError.error;
  788. }
  789. } else if ('message' in innerError) {
  790. toast.error(innerError.message);
  791. errorMessage = innerError.message;
  792. }
  793. responseMessage.error = true;
  794. responseMessage.content =
  795. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  796. provider: model.name ?? model.id
  797. }) +
  798. '\n' +
  799. errorMessage;
  800. responseMessage.done = true;
  801. messages = messages;
  802. };
  803. const stopResponse = () => {
  804. stopResponseFlag = true;
  805. console.log('stopResponse');
  806. };
  807. const regenerateResponse = async (message) => {
  808. console.log('regenerateResponse');
  809. if (messages.length != 0) {
  810. let userMessage = history.messages[message.parentId];
  811. let userPrompt = userMessage.content;
  812. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  813. await sendPrompt(userPrompt, userMessage.id);
  814. } else {
  815. await sendPrompt(userPrompt, userMessage.id, message.model);
  816. }
  817. }
  818. };
  819. const continueGeneration = async () => {
  820. console.log('continueGeneration');
  821. const _chatId = JSON.parse(JSON.stringify($chatId));
  822. if (messages.length != 0 && messages.at(-1).done == true) {
  823. const responseMessage = history.messages[history.currentId];
  824. responseMessage.done = false;
  825. await tick();
  826. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  827. if (model) {
  828. if (model?.external) {
  829. await sendPromptOpenAI(
  830. model,
  831. history.messages[responseMessage.parentId].content,
  832. responseMessage.id,
  833. _chatId
  834. );
  835. } else
  836. await sendPromptOllama(
  837. model,
  838. history.messages[responseMessage.parentId].content,
  839. responseMessage.id,
  840. _chatId
  841. );
  842. }
  843. } else {
  844. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  845. }
  846. };
  847. const generateChatTitle = async (userPrompt) => {
  848. if ($settings?.title?.auto ?? true) {
  849. const model = $models.find((model) => model.id === selectedModels[0]);
  850. const titleModelId =
  851. model?.external ?? false
  852. ? $settings?.title?.modelExternal ?? selectedModels[0]
  853. : $settings?.title?.model ?? selectedModels[0];
  854. const titleModel = $models.find((model) => model.id === titleModelId);
  855. console.log(titleModel);
  856. const title = await generateTitle(
  857. localStorage.token,
  858. $settings?.title?.prompt ??
  859. $i18n.t(
  860. "Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
  861. ) + ' {{prompt}}',
  862. titleModelId,
  863. userPrompt,
  864. titleModel?.owned_by === 'openai' ?? false
  865. ? `${OPENAI_API_BASE_URL}`
  866. : `${OLLAMA_API_BASE_URL}/v1`
  867. );
  868. return title;
  869. } else {
  870. return `${userPrompt}`;
  871. }
  872. };
  873. const generateChatSearchQuery = async (modelId: string, messageId: string) => {
  874. const model = $models.find((model) => model.id === modelId);
  875. const taskModelId =
  876. model?.external ?? false
  877. ? $settings?.title?.modelExternal ?? modelId
  878. : $settings?.title?.model ?? modelId;
  879. const taskModel = $models.find((model) => model.id === taskModelId);
  880. const userMessage = history.messages[messageId];
  881. const userPrompt = userMessage.content;
  882. const previousMessages = messages
  883. .filter((message) => message.role === 'user')
  884. .map((message) => message.content);
  885. return await generateSearchQuery(
  886. localStorage.token,
  887. taskModelId,
  888. previousMessages,
  889. userPrompt,
  890. taskModel?.owned_by === 'openai' ?? false
  891. ? `${OPENAI_API_BASE_URL}`
  892. : `${OLLAMA_API_BASE_URL}/v1`
  893. );
  894. };
  895. const setChatTitle = async (_chatId, _title) => {
  896. if (_chatId === $chatId) {
  897. title = _title;
  898. }
  899. if ($settings.saveChatHistory ?? true) {
  900. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  901. await chats.set(await getChatList(localStorage.token));
  902. }
  903. };
  904. const getTags = async () => {
  905. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  906. return [];
  907. });
  908. };
  909. const addTag = async (tagName) => {
  910. const res = await addTagById(localStorage.token, $chatId, tagName);
  911. tags = await getTags();
  912. chat = await updateChatById(localStorage.token, $chatId, {
  913. tags: tags
  914. });
  915. _tags.set(await getAllChatTags(localStorage.token));
  916. };
  917. const deleteTag = async (tagName) => {
  918. const res = await deleteTagById(localStorage.token, $chatId, tagName);
  919. tags = await getTags();
  920. chat = await updateChatById(localStorage.token, $chatId, {
  921. tags: tags
  922. });
  923. _tags.set(await getAllChatTags(localStorage.token));
  924. };
  925. </script>
<!-- Document title: truncated chat title (30 chars max) + app name, or just the app name. -->
<svelte:head>
<title>
{title
? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
: `${$WEBUI_NAME}`}
</title>
</svelte:head>
<!-- Render immediately for a new chat (no chatIdProp); for an existing chat, wait until it has loaded. -->
{#if !chatIdProp || (loaded && chatIdProp)}
<div
class="min-h-screen max-h-screen {$showSidebar
? 'md:max-w-[calc(100%-260px)]'
: ''} w-full max-w-full flex flex-col"
>
<Navbar
{title}
bind:selectedModels
bind:showModelSelector
shareEnabled={messages.length > 0}
{chat}
{initNewChat}
/>
<!-- Admin banners: shown only on a fresh chat with at most one model selected.
     Dismissible banners are filtered against ids persisted in localStorage. -->
{#if $banners.length > 0 && !$chatId && selectedModels.length <= 1}
<div
class="absolute top-[4.25rem] w-full {$showSidebar ? 'md:max-w-[calc(100%-260px)]' : ''}"
>
<div class=" flex flex-col gap-1 w-full">
{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
<Banner
{banner}
on:dismiss={(e) => {
const bannerId = e.detail;
<!-- Persist the dismissal, pruning ids for banners that no longer exist. -->
localStorage.setItem(
'dismissedBannerIds',
JSON.stringify(
[
bannerId,
...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
].filter((id) => $banners.find((b) => b.id === id))
)
);
}}
/>
{/each}
</div>
</div>
{/if}
<!-- Scrollable message area; auto-scroll stays on only while the user is
     within ~5px of the bottom. -->
<div class="flex flex-col flex-auto">
<div
class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
id="messages-container"
bind:this={messagesContainerElement}
on:scroll={(e) => {
autoScroll =
messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
messagesContainerElement.clientHeight + 5;
}}
>
<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
<Messages
chatId={$chatId}
{selectedModels}
{processing}
bind:history
bind:messages
bind:autoScroll
bind:prompt
bottomPadding={files.length > 0}
{sendPrompt}
{continueGeneration}
{regenerateResponse}
/>
</div>
</div>
</div>
</div>
<!-- Prompt composer: file attachments, web-search toggle, and @-model override. -->
<MessageInput
bind:files
bind:prompt
bind:autoScroll
bind:useWebSearch
bind:atSelectedModel
{selectedModels}
{messages}
{submitPrompt}
{stopResponse}
webSearchAvailable={$config?.features.enable_web_search ?? false}
/>
{/if}