Chat.svelte

<script lang="ts">
	import { v4 as uuidv4 } from 'uuid';
	import { toast } from 'svelte-sonner';
	import mermaid from 'mermaid';

	import { getContext, onMount, tick } from 'svelte';
	import { goto } from '$app/navigation';
	import { page } from '$app/stores';

	import type { Writable } from 'svelte/store';
	import type { i18n as i18nType } from 'i18next';

	import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
	import {
		chatId,
		chats,
		config,
		type Model,
		models,
		settings,
		showSidebar,
		tags as _tags,
		WEBUI_NAME,
		banners,
		user,
		socket,
		showCallOverlay,
		tools
	} from '$lib/stores';
	import {
		convertMessagesToHistory,
		copyToClipboard,
		extractSentencesForAudio,
		getUserPosition,
		promptTemplate,
		splitStream
	} from '$lib/utils';

	import { generateChatCompletion } from '$lib/apis/ollama';
	import {
		addTagById,
		createNewChat,
		deleteTagById,
		getAllChatTags,
		getChatById,
		getChatList,
		getTagsById,
		updateChatById
	} from '$lib/apis/chats';
	import { generateOpenAIChatCompletion } from '$lib/apis/openai';
	import { runWebSearch } from '$lib/apis/rag';
	import { createOpenAITextStream } from '$lib/apis/streaming';
	import { queryMemory } from '$lib/apis/memories';
	import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
	import { chatCompleted, generateTitle, generateSearchQuery } from '$lib/apis';

	import Banner from '../common/Banner.svelte';
	import MessageInput from '$lib/components/chat/MessageInput.svelte';
	import Messages from '$lib/components/chat/Messages.svelte';
	import Navbar from '$lib/components/layout/Navbar.svelte';
	import CallOverlay from './MessageInput/CallOverlay.svelte';
	import { error } from '@sveltejs/kit';
	import ChatControls from './ChatControls.svelte';
	import EventConfirmDialog from '../common/ConfirmDialog.svelte';

	const i18n: Writable<i18nType> = getContext('i18n');

	export let chatIdProp = '';
	let loaded = false;

	const eventTarget = new EventTarget();

	let showControls = false;
	let stopResponseFlag = false;
	let autoScroll = true;
	let processing = '';
	let messagesContainerElement: HTMLDivElement;

	let showEventConfirmation = false;
	let eventConfirmationTitle = '';
	let eventConfirmationMessage = '';
	let eventCallback = null;

	let showModelSelector = true;
	let selectedModels = [''];
	let atSelectedModel: Model | undefined;
	let selectedModelIds = [];
	$: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;

	let selectedToolIds = [];
	let webSearchEnabled = false;

	let chat = null;
	let tags = [];
	let title = '';

	let prompt = '';
	let files = [];
	let messages = [];
	let history = {
		messages: {},
		currentId: null
	};
	let params = {};
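
	// Derive the linear `messages` array by walking the `history` tree from the
	// current leaf back to the root via `parentId` links.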
	$: if (history.currentId !== null) {
		let _messages = [];
		let currentMessage = history.messages[history.currentId];
		while (currentMessage !== null) {
			_messages.unshift({ ...currentMessage });
			currentMessage =
				currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
		}
		messages = _messages;
	} else {
		messages = [];
	}
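
	// When a chat id is passed in as a prop, load that chat from the backend;
	// if it cannot be loaded, fall back to the home route.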
	$: if (chatIdProp) {
		(async () => {
			console.log(chatIdProp);
			if (chatIdProp && (await loadChat())) {
				await tick();
				loaded = true;

				window.setTimeout(() => scrollToBottom(), 0);
				const chatInput = document.getElementById('chat-textarea');
				chatInput?.focus();
			} else {
				await goto('/');
			}
		})();
	}
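
	// Handle server-pushed socket events for the active chat: status updates,
	// citations, and confirmation requests emitted by tools/pipelines.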
	const chatEventHandler = async (event, cb) => {
		if (event.chat_id === $chatId) {
			await tick();
			console.log(event);
			let message = history.messages[event.message_id];

			const type = event?.data?.type ?? null;
			const data = event?.data?.data ?? null;

			if (type === 'status') {
				if (message?.statusHistory) {
					message.statusHistory.push(data);
				} else {
					message.statusHistory = [data];
				}
			} else if (type === 'citation') {
				if (message?.citations) {
					message.citations.push(data);
				} else {
					message.citations = [data];
				}
			} else if (type === 'confirmation') {
				eventCallback = cb;

				showEventConfirmation = true;
				eventConfirmationTitle = data.title;
				eventConfirmationMessage = data.message;
			} else {
				console.log('Unknown message type', data);
			}

			messages = messages;
		}
	};
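
	// On mount: listen for postMessage commands from an embedding iframe,
	// subscribe to chat socket events, and initialise a new chat when no id is set.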
	onMount(async () => {
		const onMessageHandler = async (event) => {
			if (event.origin === window.origin) {
				// Replace with your iframe's origin
				console.log('Message received from iframe:', event.data);
				if (event.data.type === 'input:prompt') {
					console.log(event.data.text);

					const inputElement = document.getElementById('chat-textarea');

					if (inputElement) {
						prompt = event.data.text;
						inputElement.focus();
					}
				}

				if (event.data.type === 'action:submit') {
					console.log(event.data.text);

					if (prompt !== '') {
						await tick();
						submitPrompt(prompt);
					}
				}

				if (event.data.type === 'input:prompt:submit') {
					console.log(event.data.text);

					if (prompt !== '') {
						await tick();
						submitPrompt(event.data.text);
					}
				}
			}
		};
		window.addEventListener('message', onMessageHandler);

		$socket.on('chat-events', chatEventHandler);

		if (!$chatId) {
			chatId.subscribe(async (value) => {
				if (!value) {
					await initNewChat();
				}
			});
		} else {
			if (!($settings.saveChatHistory ?? true)) {
				await goto('/');
			}
		}

		return () => {
			window.removeEventListener('message', onMessageHandler);
			$socket.off('chat-events');
		};
	});

	//////////////////////////
	// Web functions
	//////////////////////////

	const initNewChat = async () => {
		window.history.replaceState(history.state, '', `/`);
		await chatId.set('');

		autoScroll = true;

		title = '';
		messages = [];
		history = {
			messages: {},
			currentId: null
		};

		params = {};

		if ($page.url.searchParams.get('models')) {
			selectedModels = $page.url.searchParams.get('models')?.split(',');
		} else if ($settings?.models) {
			selectedModels = $settings?.models;
		} else if ($config?.default_models) {
			console.log($config?.default_models.split(',') ?? '');
			selectedModels = $config?.default_models.split(',');
		} else {
			selectedModels = [''];
		}

		if ($page.url.searchParams.get('q')) {
			prompt = $page.url.searchParams.get('q') ?? '';

			if (prompt) {
				await tick();
				submitPrompt(prompt);
			}
		}

		selectedModels = selectedModels.map((modelId) =>
			$models.map((m) => m.id).includes(modelId) ? modelId : ''
		);

		const userSettings = await getUserSettings(localStorage.token);

		if (userSettings) {
			settings.set(userSettings.ui);
		} else {
			settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
		}

		const chatInput = document.getElementById('chat-textarea');
		setTimeout(() => chatInput?.focus(), 0);
	};

	const loadChat = async () => {
		chatId.set(chatIdProp);
		chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
			await goto('/');
			return null;
		});

		if (chat) {
			tags = await getTags();

			const chatContent = chat.chat;

			if (chatContent) {
				console.log(chatContent);

				selectedModels =
					(chatContent?.models ?? undefined) !== undefined
						? chatContent.models
						: [chatContent.models ?? ''];
				history =
					(chatContent?.history ?? undefined) !== undefined
						? chatContent.history
						: convertMessagesToHistory(chatContent.messages);
				title = chatContent.title;

				const userSettings = await getUserSettings(localStorage.token);

				if (userSettings) {
					await settings.set(userSettings.ui);
				} else {
					await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
				}

				params = chatContent?.params ?? {};

				autoScroll = true;
				await tick();

				if (messages.length > 0) {
					history.messages[messages.at(-1).id].done = true;
				}
				await tick();

				return true;
			} else {
				return null;
			}
		}
	};

	const scrollToBottom = async () => {
		await tick();
		if (messagesContainerElement) {
			messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
		}
	};
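
	// Build the list of messages on the path from the root to the given response
	// message by following `parentId` links upwards.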
	const createMessagesList = (responseMessageId) => {
		const message = history.messages[responseMessageId];
		if (message.parentId) {
			return [...createMessagesList(message.parentId), message];
		} else {
			return [message];
		}
	};
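
	// Notify the backend that a completion has finished (rendering any mermaid
	// diagrams first) and merge the messages it returns back into the history.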
	const chatCompletedHandler = async (modelId, responseMessageId, messages) => {
		await mermaid.run({
			querySelector: '.mermaid'
		});

		const res = await chatCompleted(localStorage.token, {
			model: modelId,
			messages: messages.map((m) => ({
				id: m.id,
				role: m.role,
				content: m.content,
				info: m.info ? m.info : undefined,
				timestamp: m.timestamp
			})),
			chat_id: $chatId,
			session_id: $socket?.id,
			id: responseMessageId
		}).catch((error) => {
			toast.error(error);
			messages.at(-1).error = { content: error };
			return null;
		});

		if (res !== null) {
			// Update chat history with the new messages
			for (const message of res.messages) {
				history.messages[message.id] = {
					...history.messages[message.id],
					...(history.messages[message.id].content !== message.content
						? { originalContent: history.messages[message.id].content }
						: {}),
					...message
				};
			}
		}
	};

	const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
		return setInterval(() => {
			$socket?.emit('usage', {
				action: 'chat',
				model: modelId,
				chat_id: chatId
			});
		}, 1000);
	};

	//////////////////////////
	// Chat functions
	//////////////////////////
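
	// Entry point for a user submission: validates the selected models and any
	// pending file uploads, appends the user message to the history tree, then
	// delegates to sendPrompt to generate the model responses.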
	const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
		let _responses = [];
		console.log('submitPrompt', $chatId);

		selectedModels = selectedModels.map((modelId) =>
			$models.map((m) => m.id).includes(modelId) ? modelId : ''
		);

		if (selectedModels.includes('')) {
			toast.error($i18n.t('Model not selected'));
		} else if (messages.length != 0 && messages.at(-1).done != true) {
			// Response not done
			console.log('wait');
		} else if (messages.length != 0 && messages.at(-1).error) {
			// Error in response
			toast.error(
				$i18n.t(
					`Oops! There was an error in the previous response. Please try again or contact admin.`
				)
			);
		} else if (
			files.length > 0 &&
			files.filter((file) => file.type !== 'image' && file.status !== 'processed').length > 0
		) {
			// Upload not done
			toast.error(
				$i18n.t(
					`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
				)
			);
		} else {
			// Reset chat input textarea
			const chatTextAreaElement = document.getElementById('chat-textarea');

			if (chatTextAreaElement) {
				chatTextAreaElement.value = '';
				chatTextAreaElement.style.height = '';
			}

			const _files = JSON.parse(JSON.stringify(files));
			files = [];

			prompt = '';

			// Create user message
			let userMessageId = uuidv4();
			let userMessage = {
				id: userMessageId,
				parentId: messages.length !== 0 ? messages.at(-1).id : null,
				childrenIds: [],
				role: 'user',
				content: userPrompt,
				files: _files.length > 0 ? _files : undefined,
				timestamp: Math.floor(Date.now() / 1000), // Unix epoch
				models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
			};

			// Add message to history and Set currentId to messageId
			history.messages[userMessageId] = userMessage;
			history.currentId = userMessageId;

			// Append messageId to childrenIds of parent message
			if (messages.length !== 0) {
				history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
			}

			// Wait until history/message have been updated
			await tick();

			_responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
		}

		return _responses;
	};
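
	// Create one placeholder assistant message per selected model, persist the
	// chat if this is its first exchange, then fan out the prompt to the Ollama
	// or OpenAI-compatible backend for each model in parallel.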
	const sendPrompt = async (prompt, parentId, { modelId = null, newChat = false } = {}) => {
		let _responses = [];

		// If modelId is provided, use it, else use selected model
		let selectedModelIds = modelId
			? [modelId]
			: atSelectedModel !== undefined
				? [atSelectedModel.id]
				: selectedModels;

		// Create response messages for each selected model
		const responseMessageIds = {};
		for (const modelId of selectedModelIds) {
			const model = $models.filter((m) => m.id === modelId).at(0);

			if (model) {
				let responseMessageId = uuidv4();
				let responseMessage = {
					parentId: parentId,
					id: responseMessageId,
					childrenIds: [],
					role: 'assistant',
					content: '',
					model: model.id,
					modelName: model.name ?? model.id,
					userContext: null,
					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
				};

				// Add message to history and Set currentId to messageId
				history.messages[responseMessageId] = responseMessage;
				history.currentId = responseMessageId;

				// Append messageId to childrenIds of parent message
				if (parentId !== null) {
					history.messages[parentId].childrenIds = [
						...history.messages[parentId].childrenIds,
						responseMessageId
					];
				}

				responseMessageIds[modelId] = responseMessageId;
			}
		}
		await tick();

		// Create new chat if only one message in messages
		if (newChat && messages.length == 2) {
			if ($settings.saveChatHistory ?? true) {
				chat = await createNewChat(localStorage.token, {
					id: $chatId,
					title: $i18n.t('New Chat'),
					models: selectedModels,
					system: $settings.system ?? undefined,
					params: params,
					messages: messages,
					history: history,
					tags: [],
					timestamp: Date.now()
				});
				await chats.set(await getChatList(localStorage.token));
				await chatId.set(chat.id);
			} else {
				await chatId.set('local');
			}
			await tick();
		}

		const _chatId = JSON.parse(JSON.stringify($chatId));

		await Promise.all(
			selectedModelIds.map(async (modelId) => {
				console.log('modelId', modelId);
				const model = $models.filter((m) => m.id === modelId).at(0);

				if (model) {
					// If there are image files, check if model is vision capable
					const hasImages = messages.some((message) =>
						message.files?.some((file) => file.type === 'image')
					);

					if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
						toast.error(
							$i18n.t('Model {{modelName}} is not vision capable', {
								modelName: model.name ?? model.id
							})
						);
					}

					let responseMessageId = responseMessageIds[modelId];
					let responseMessage = history.messages[responseMessageId];

					let userContext = null;
					if ($settings?.memory ?? false) {
						if (userContext === null) {
							const res = await queryMemory(localStorage.token, prompt).catch((error) => {
								toast.error(error);
								return null;
							});

							if (res) {
								if (res.documents[0].length > 0) {
									userContext = res.documents[0].reduce((acc, doc, index) => {
										const createdAtTimestamp = res.metadatas[0][index].created_at;
										const createdAtDate = new Date(createdAtTimestamp * 1000)
											.toISOString()
											.split('T')[0];
										return `${acc}${index + 1}. [${createdAtDate}]. ${doc}\n`;
									}, '');
								}

								console.log(userContext);
							}
						}
					}
					responseMessage.userContext = userContext;

					const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);

					if (webSearchEnabled) {
						await getWebSearchResults(model.id, parentId, responseMessageId);
					}

					let _response = null;
					if (model?.owned_by === 'openai') {
						_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
					} else if (model) {
						_response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
					}

					_responses.push(_response);

					if (chatEventEmitter) clearInterval(chatEventEmitter);
				} else {
					toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
				}
			})
		);

		await chats.set(await getChatList(localStorage.token));

		return _responses;
	};
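
	// Stream a completion from the Ollama backend: builds the messages body
	// (system prompt, user context, inline images), reads the newline-delimited
	// JSON stream chunk by chunk, and updates the response message as tokens arrive.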
	const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
		let _response = null;

		const responseMessage = history.messages[responseMessageId];

		// Wait until history/message have been updated
		await tick();

		// Scroll down
		scrollToBottom();

		const messagesBody = [
			params?.system || $settings.system || (responseMessage?.userContext ?? null)
				? {
						role: 'system',
						content: `${promptTemplate(
							params?.system ?? $settings?.system ?? '',
							$user.name,
							$settings?.userLocation
								? await getAndUpdateUserLocation(localStorage.token)
								: undefined
						)}${
							responseMessage?.userContext ?? null
								? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
								: ''
						}`
					}
				: undefined,
			...messages
		]
			.filter((message) => message?.content?.trim())
			.map((message, idx, arr) => {
				// Prepare the base message object
				const baseMessage = {
					role: message.role,
					content: message.content
				};

				// Extract and format image URLs if any exist
				const imageUrls = message.files
					?.filter((file) => file.type === 'image')
					.map((file) => file.url.slice(file.url.indexOf(',') + 1));

				// Add images array only if it contains elements
				if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
					baseMessage.images = imageUrls;
				}

				return baseMessage;
			});

		let lastImageIndex = -1;

		// Find the index of the last object with images
		messagesBody.forEach((item, index) => {
			if (item.images) {
				lastImageIndex = index;
			}
		});

		// Remove images from all but the last one
		messagesBody.forEach((item, index) => {
			if (index !== lastImageIndex) {
				delete item.images;
			}
		});

		let files = [];
		if (model?.info?.meta?.knowledge ?? false) {
			files = model.info.meta.knowledge;
		}

		const lastUserMessage = messages.filter((message) => message.role === 'user').at(-1);
		files = [
			...files,
			...(lastUserMessage?.files?.filter((item) =>
				['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
			) ?? []),
			...(responseMessage?.files?.filter((item) =>
				['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
			) ?? [])
		].filter(
			// Remove duplicates
			(item, index, array) =>
				array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
		);

		eventTarget.dispatchEvent(
			new CustomEvent('chat:start', {
				detail: {
					id: responseMessageId
				}
			})
		);
		await tick();

		const [res, controller] = await generateChatCompletion(localStorage.token, {
			stream: true,
			model: model.id,
			messages: messagesBody,
			options: {
				...(params ?? $settings.params ?? {}),
				stop:
					params?.stop ?? $settings?.params?.stop ?? undefined
						? (params?.stop ?? $settings.params.stop).map((str) =>
								decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
							)
						: undefined,
				num_predict: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
				repeat_penalty:
					params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined
			},
			format: $settings.requestFormat ?? undefined,
			keep_alive: $settings.keepAlive ?? undefined,
			tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
			files: files.length > 0 ? files : undefined,
			session_id: $socket?.id,
			chat_id: $chatId,
			id: responseMessageId
		});

		if (res && res.ok) {
			console.log('controller', controller);

			const reader = res.body
				.pipeThrough(new TextDecoderStream())
				.pipeThrough(splitStream('\n'))
				.getReader();

			while (true) {
				const { value, done } = await reader.read();
				if (done || stopResponseFlag || _chatId !== $chatId) {
					responseMessage.done = true;
					messages = messages;

					if (stopResponseFlag) {
						controller.abort('User: Stop Response');
					} else {
						const messages = createMessagesList(responseMessageId);
						await chatCompletedHandler(model.id, responseMessageId, messages);
					}

					_response = responseMessage.content;
					break;
				}

				try {
					let lines = value.split('\n');

					for (const line of lines) {
						if (line !== '') {
							console.log(line);
							let data = JSON.parse(line);

							if ('citations' in data) {
								responseMessage.citations = data.citations;
								continue;
							}

							if ('detail' in data) {
								throw data;
							}

							if (data.done == false) {
								if (responseMessage.content == '' && data.message.content == '\n') {
									continue;
								} else {
									responseMessage.content += data.message.content;

									const sentences = extractSentencesForAudio(responseMessage.content);
									sentences.pop();

									// dispatch only last sentence and make sure it hasn't been dispatched before
									if (
										sentences.length > 0 &&
										sentences[sentences.length - 1] !== responseMessage.lastSentence
									) {
										responseMessage.lastSentence = sentences[sentences.length - 1];
										eventTarget.dispatchEvent(
											new CustomEvent('chat', {
												detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
											})
										);
									}

									messages = messages;
								}
							} else {
								responseMessage.done = true;

								if (responseMessage.content == '') {
									responseMessage.error = {
										code: 400,
										content: `Oops! No text generated from Ollama, Please try again.`
									};
								}

								responseMessage.context = data.context ?? null;
								responseMessage.info = {
									total_duration: data.total_duration,
									load_duration: data.load_duration,
									sample_count: data.sample_count,
									sample_duration: data.sample_duration,
									prompt_eval_count: data.prompt_eval_count,
									prompt_eval_duration: data.prompt_eval_duration,
									eval_count: data.eval_count,
									eval_duration: data.eval_duration
								};
								messages = messages;

								if ($settings.notificationEnabled && !document.hasFocus()) {
									const notification = new Notification(`${model.id}`, {
										body: responseMessage.content,
										icon: `${WEBUI_BASE_URL}/static/favicon.png`
									});
								}

								if ($settings?.responseAutoCopy ?? false) {
									copyToClipboard(responseMessage.content);
								}

								if ($settings.responseAutoPlayback && !$showCallOverlay) {
									await tick();
									document.getElementById(`speak-button-${responseMessage.id}`)?.click();
								}
							}
						}
					}
				} catch (error) {
					console.log(error);
					if ('detail' in error) {
						toast.error(error.detail);
					}
					break;
				}

				if (autoScroll) {
					scrollToBottom();
				}
			}

			if ($chatId == _chatId) {
				if ($settings.saveChatHistory ?? true) {
					chat = await updateChatById(localStorage.token, _chatId, {
						messages: messages,
						history: history,
						models: selectedModels,
						params: params
					});
					await chats.set(await getChatList(localStorage.token));
				}
			}
		} else {
			if (res !== null) {
				const error = await res.json();
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
					responseMessage.error = { content: error.detail };
				} else {
					toast.error(error.error);
					responseMessage.error = { content: error.error };
				}
			} else {
				toast.error(
					$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
				);
				responseMessage.error = {
					content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
						provider: 'Ollama'
					})
				};
			}
			responseMessage.done = true;
			messages = messages;
		}

		stopResponseFlag = false;
		await tick();

		let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
		if (lastSentence) {
			eventTarget.dispatchEvent(
				new CustomEvent('chat', {
					detail: { id: responseMessageId, content: lastSentence }
				})
			);
		}

		eventTarget.dispatchEvent(
			new CustomEvent('chat:finish', {
				detail: {
					id: responseMessageId,
					content: responseMessage.content
				}
			})
		);

		if (autoScroll) {
			scrollToBottom();
		}

		if (messages.length == 2 && messages.at(1).content !== '') {
			window.history.replaceState(history.state, '', `/c/${_chatId}`);
			const _title = await generateChatTitle(userPrompt);
			await setChatTitle(_chatId, _title);
		}

		return _response;
	};
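
	// Stream a completion from an OpenAI-compatible backend: builds chat
	// messages (including image_url parts for vision input), consumes the
	// text stream, and records usage info when the backend reports it.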
	const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
		let _response = null;
		const responseMessage = history.messages[responseMessageId];

		let files = [];
		if (model?.info?.meta?.knowledge ?? false) {
			files = model.info.meta.knowledge;
		}

		const lastUserMessage = messages.filter((message) => message.role === 'user').at(-1);
		files = [
			...files,
			...(lastUserMessage?.files?.filter((item) =>
				['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
			) ?? []),
			...(responseMessage?.files?.filter((item) =>
				['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
			) ?? [])
		].filter(
			// Remove duplicates
			(item, index, array) =>
				array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
		);

		scrollToBottom();

		eventTarget.dispatchEvent(
			new CustomEvent('chat:start', {
				detail: {
					id: responseMessageId
				}
			})
		);
		await tick();

		try {
			const [res, controller] = await generateOpenAIChatCompletion(
				localStorage.token,
				{
					stream: true,
					model: model.id,
					stream_options:
						model.info?.meta?.capabilities?.usage ?? false
							? {
									include_usage: true
								}
							: undefined,
					messages: [
						params?.system || $settings.system || (responseMessage?.userContext ?? null)
							? {
									role: 'system',
									content: `${promptTemplate(
										params?.system ?? $settings?.system ?? '',
										$user.name,
										$settings?.userLocation
											? await getAndUpdateUserLocation(localStorage.token)
											: undefined
									)}${
										responseMessage?.userContext ?? null
											? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
											: ''
									}`
								}
							: undefined,
						...messages
					]
						.filter((message) => message?.content?.trim())
						.map((message, idx, arr) => ({
							role: message.role,
							...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
							message.role === 'user'
								? {
										content: [
											{
												type: 'text',
												text:
													arr.length - 1 !== idx
														? message.content
														: message?.raContent ?? message.content
											},
											...message.files
												.filter((file) => file.type === 'image')
												.map((file) => ({
													type: 'image_url',
													image_url: {
														url: file.url
													}
												}))
										]
									}
								: {
										content:
											arr.length - 1 !== idx
												? message.content
												: message?.raContent ?? message.content
									})
						})),
					seed: params?.seed ?? $settings?.params?.seed ?? undefined,
					stop:
						params?.stop ?? $settings?.params?.stop ?? undefined
							? (params?.stop ?? $settings.params.stop).map((str) =>
									decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
								)
							: undefined,
					temperature: params?.temperature ?? $settings?.params?.temperature ?? undefined,
					top_p: params?.top_p ?? $settings?.params?.top_p ?? undefined,
					frequency_penalty:
						params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined,
					max_tokens: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
					tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
					files: files.length > 0 ? files : undefined,
					session_id: $socket?.id,
					chat_id: $chatId,
					id: responseMessageId
				},
				`${WEBUI_BASE_URL}/api`
			);

			// Wait until history/message have been updated
			await tick();

			scrollToBottom();

			if (res && res.ok && res.body) {
				const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
				let lastUsage = null;

				for await (const update of textStream) {
					const { value, done, citations, error, usage } = update;
					if (error) {
						await handleOpenAIError(error, null, model, responseMessage);
						break;
					}
					if (done || stopResponseFlag || _chatId !== $chatId) {
						responseMessage.done = true;
						messages = messages;

						if (stopResponseFlag) {
							controller.abort('User: Stop Response');
						} else {
							const messages = createMessagesList(responseMessageId);
							await chatCompletedHandler(model.id, responseMessageId, messages);
						}

						_response = responseMessage.content;
						break;
					}

					if (usage) {
						lastUsage = usage;
					}

					if (citations) {
						responseMessage.citations = citations;
						continue;
					}

					if (responseMessage.content == '' && value == '\n') {
						continue;
					} else {
						responseMessage.content += value;

						const sentences = extractSentencesForAudio(responseMessage.content);
						sentences.pop();

						// dispatch only last sentence and make sure it hasn't been dispatched before
						if (
							sentences.length > 0 &&
							sentences[sentences.length - 1] !== responseMessage.lastSentence
						) {
							responseMessage.lastSentence = sentences[sentences.length - 1];
							eventTarget.dispatchEvent(
								new CustomEvent('chat', {
									detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
								})
							);
						}

						messages = messages;
					}

					if (autoScroll) {
						scrollToBottom();
					}
				}

				if ($settings.notificationEnabled && !document.hasFocus()) {
					const notification = new Notification(`${model.id}`, {
						body: responseMessage.content,
						icon: `${WEBUI_BASE_URL}/static/favicon.png`
					});
				}

				if ($settings.responseAutoCopy) {
					copyToClipboard(responseMessage.content);
				}

				if ($settings.responseAutoPlayback && !$showCallOverlay) {
					await tick();
					document.getElementById(`speak-button-${responseMessage.id}`)?.click();
				}

				if (lastUsage) {
					responseMessage.info = { ...lastUsage, openai: true };
				}

				if ($chatId == _chatId) {
					if ($settings.saveChatHistory ?? true) {
						chat = await updateChatById(localStorage.token, _chatId, {
							models: selectedModels,
							messages: messages,
							history: history,
							params: params
						});
						await chats.set(await getChatList(localStorage.token));
					}
				}
			} else {
				await handleOpenAIError(null, res, model, responseMessage);
			}
		} catch (error) {
			await handleOpenAIError(error, null, model, responseMessage);
		}
		messages = messages;

		stopResponseFlag = false;
		await tick();

		let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
		if (lastSentence) {
			eventTarget.dispatchEvent(
				new CustomEvent('chat', {
					detail: { id: responseMessageId, content: lastSentence }
				})
			);
		}

		eventTarget.dispatchEvent(
			new CustomEvent('chat:finish', {
				detail: {
					id: responseMessageId,
					content: responseMessage.content
				}
			})
		);

		if (autoScroll) {
			scrollToBottom();
		}

		if (messages.length == 2) {
			window.history.replaceState(history.state, '', `/c/${_chatId}`);
			const _title = await generateChatTitle(userPrompt);
			await setChatTitle(_chatId, _title);
		}

		return _response;
	};
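
	// Normalise the error shapes an OpenAI-compatible backend can return
	// (detail / error.message / message), show a toast, and mark the response
	// message as failed.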
	const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
		let errorMessage = '';
		let innerError;

		if (error) {
			innerError = error;
		} else if (res !== null) {
			innerError = await res.json();
		}
		console.error(innerError);
		if ('detail' in innerError) {
			toast.error(innerError.detail);
			errorMessage = innerError.detail;
		} else if ('error' in innerError) {
			if ('message' in innerError.error) {
				toast.error(innerError.error.message);
				errorMessage = innerError.error.message;
			} else {
				toast.error(innerError.error);
				errorMessage = innerError.error;
			}
		} else if ('message' in innerError) {
			toast.error(innerError.message);
			errorMessage = innerError.message;
		}

		responseMessage.error = {
			content:
				$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
					provider: model.name ?? model.id
				}) +
				'\n' +
				errorMessage
		};
		responseMessage.done = true;

		messages = messages;
	};

	const stopResponse = () => {
		stopResponseFlag = true;
		console.log('stopResponse');
	};

	const regenerateResponse = async (message) => {
		console.log('regenerateResponse');

		if (messages.length != 0) {
			let userMessage = history.messages[message.parentId];
			let userPrompt = userMessage.content;

			if ((userMessage?.models ?? [...selectedModels]).length == 1) {
				// If user message has only one model selected, sendPrompt automatically selects it for regeneration
				await sendPrompt(userPrompt, userMessage.id);
			} else {
				// If there are multiple models selected, use the model of the response message for regeneration
				// e.g. many model chat
				await sendPrompt(userPrompt, userMessage.id, { modelId: message.model });
			}
		}
	};

	const continueGeneration = async () => {
		console.log('continueGeneration');
		const _chatId = JSON.parse(JSON.stringify($chatId));

		if (messages.length != 0 && messages.at(-1).done == true) {
			const responseMessage = history.messages[history.currentId];
			responseMessage.done = false;
			await tick();

			const model = $models.filter((m) => m.id === responseMessage.model).at(0);

			if (model) {
				if (model?.owned_by === 'openai') {
					await sendPromptOpenAI(
						model,
						history.messages[responseMessage.parentId].content,
						responseMessage.id,
						_chatId
					);
				} else
					await sendPromptOllama(
						model,
						history.messages[responseMessage.parentId].content,
						responseMessage.id,
						_chatId
					);
			}
		} else {
			toast.error(
				$i18n.t(`Model {{modelId}} not found`, {
					modelId: history.messages[history.currentId]?.model ?? ''
				})
			);
		}
	};

	const generateChatTitle = async (userPrompt) => {
		if ($settings?.title?.auto ?? true) {
			const title = await generateTitle(
				localStorage.token,
				selectedModels[0],
				userPrompt,
				$chatId
			).catch((error) => {
				console.error(error);
				return 'New Chat';
			});

			return title;
		} else {
			return `${userPrompt}`;
		}
	};

	const setChatTitle = async (_chatId, _title) => {
		if (_chatId === $chatId) {
			title = _title;
		}

		if ($settings.saveChatHistory ?? true) {
			chat = await updateChatById(localStorage.token, _chatId, { title: _title });
			await chats.set(await getChatList(localStorage.token));
		}
	};
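
	// Run the web-search pipeline for a response: generate a search query from
	// the conversation, execute the search, and attach the resulting collection
	// to the response message so it can be used as retrieval context.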
	const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
		const responseMessage = history.messages[responseId];
		const userMessage = history.messages[parentId];

		responseMessage.statusHistory = [
			{
				done: false,
				action: 'web_search',
				description: $i18n.t('Generating search query')
			}
		];
		messages = messages;

		const prompt = userMessage.content;

		let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
			(error) => {
				console.log(error);
				return prompt;
			}
		);

		if (!searchQuery) {
			toast.warning($i18n.t('No search query generated'));
			responseMessage.statusHistory.push({
				done: true,
				error: true,
				action: 'web_search',
				description: 'No search query generated'
			});
			messages = messages;
		}

		responseMessage.statusHistory.push({
			done: false,
			action: 'web_search',
			description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
		});
		messages = messages;

		const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
			console.log(error);
			toast.error(error);
			return null;
		});

		if (results) {
			responseMessage.statusHistory.push({
				done: true,
				action: 'web_search',
				description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
				query: searchQuery,
				urls: results.filenames
			});
			if (responseMessage?.files === undefined) {
				responseMessage.files = [];
			}

			responseMessage.files.push({
				collection_name: results.collection_name,
				name: searchQuery,
				type: 'web_search_results',
				urls: results.filenames
			});

			messages = messages;
		} else {
			responseMessage.statusHistory.push({
				done: true,
				error: true,
				action: 'web_search',
				description: 'No search results found'
			});

			messages = messages;
		}
	};

	const getTags = async () => {
		return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
			return [];
		});
	};
</script>

<svelte:head>
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

<audio id="audioElement" src="" style="display: none;" />

<EventConfirmDialog
	bind:show={showEventConfirmation}
	title={eventConfirmationTitle}
	message={eventConfirmationMessage}
	on:confirm={(e) => {
		eventCallback(true);
	}}
	on:cancel={() => {
		eventCallback(false);
	}}
/>

{#if $showCallOverlay}
	<CallOverlay
		{submitPrompt}
		{stopResponse}
		bind:files
		modelId={selectedModelIds?.at(0) ?? null}
		chatId={$chatId}
		{eventTarget}
	/>
{/if}
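
<!-- Main chat layout: navbar, dismissible banners, message list, input box and chat controls -->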
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="h-screen max-h-[100dvh] {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		{#if $settings?.backgroundImageUrl ?? null}
			<div
				class="absolute {$showSidebar
					? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
					: ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
				style="background-image: url({$settings.backgroundImageUrl}) "
			/>

			<div
				class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
			/>
		{/if}

		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			bind:showControls
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>

		{#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
			<div
				class="absolute top-[4.25rem] w-full {$showSidebar
					? 'md:max-w-[calc(100%-260px)]'
					: ''} z-20"
			>
				<div class=" flex flex-col gap-1 w-full">
					{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
						<Banner
							{banner}
							on:dismiss={(e) => {
								const bannerId = e.detail;

								localStorage.setItem(
									'dismissedBannerIds',
									JSON.stringify(
										[
											bannerId,
											...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
										].filter((id) => $banners.find((b) => b.id === id))
									)
								);
							}}
						/>
					{/each}
				</div>
			</div>
		{/if}

		<div class="flex flex-col flex-auto z-10">
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10 scrollbar-hidden {showControls
					? 'lg:pr-[24rem]'
					: ''}"
				id="messages-container"
				bind:this={messagesContainerElement}
				on:scroll={(e) => {
					autoScroll =
						messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
						messagesContainerElement.clientHeight + 5;
				}}
			>
				<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
					<Messages
						chatId={$chatId}
						{selectedModels}
						{processing}
						bind:history
						bind:messages
						bind:autoScroll
						bind:prompt
						bottomPadding={files.length > 0}
						{sendPrompt}
						{continueGeneration}
						{regenerateResponse}
					/>
				</div>
			</div>

			<div class={showControls ? 'lg:pr-[24rem]' : ''}>
				<MessageInput
					bind:files
					bind:prompt
					bind:autoScroll
					bind:selectedToolIds
					bind:webSearchEnabled
					bind:atSelectedModel
					availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
						const model = $models.find((m) => m.id === e);
						if (model?.info?.meta?.toolIds ?? false) {
							return [...new Set([...a, ...model.info.meta.toolIds])];
						}
						return a;
					}, [])}
					transparentBackground={$settings?.backgroundImageUrl ?? false}
					{selectedModels}
					{messages}
					{submitPrompt}
					{stopResponse}
				/>
			</div>
		</div>

		<ChatControls bind:show={showControls} bind:params />
	</div>
{/if}