Chat.svelte

  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { PaneGroup, Pane, PaneResizer } from 'paneforge';
  6. import { getContext, onDestroy, onMount, tick } from 'svelte';
  7. const i18n: Writable<i18nType> = getContext('i18n');
  8. import { goto } from '$app/navigation';
  9. import { page } from '$app/stores';
  10. import { get, type Unsubscriber, type Writable } from 'svelte/store';
  11. import type { i18n as i18nType } from 'i18next';
  12. import { WEBUI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  13. import {
  14. chatId,
  15. chats,
  16. config,
  17. type Model,
  18. models,
  19. tags as allTags,
  20. settings,
  21. showSidebar,
  22. WEBUI_NAME,
  23. banners,
  24. user,
  25. socket,
  26. showControls,
  27. showCallOverlay,
  28. currentChatPage,
  29. temporaryChatEnabled,
  30. mobile,
  31. showOverview,
  32. chatTitle,
  33. showArtifacts
  34. } from '$lib/stores';
  35. import {
  36. convertMessagesToHistory,
  37. copyToClipboard,
  38. getMessageContentParts,
  39. extractSentencesForAudio,
  40. promptTemplate,
  41. splitStream
  42. } from '$lib/utils';
  43. import { generateChatCompletion } from '$lib/apis/ollama';
  44. import {
  45. addTagById,
  46. createNewChat,
  47. deleteTagById,
  48. deleteTagsById,
  49. getAllTags,
  50. getChatById,
  51. getChatList,
  52. getTagsById,
  53. updateChatById
  54. } from '$lib/apis/chats';
  55. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  56. import { processWeb, processWebSearch, processYoutubeVideo } from '$lib/apis/retrieval';
  57. import { createOpenAITextStream } from '$lib/apis/streaming';
  58. import { queryMemory } from '$lib/apis/memories';
  59. import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
import { uploadFile } from '$lib/apis/files';
// Assumed module path for the Google Drive picker helper used by handleGoogleDrivePicker below.
import { createPicker } from '$lib/utils/google-drive-picker';
  60. import {
  61. chatCompleted,
  62. generateTitle,
  63. generateSearchQuery,
  64. chatAction,
  65. generateMoACompletion,
  66. generateTags
  67. } from '$lib/apis';
  68. import Banner from '../common/Banner.svelte';
  69. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  70. import Messages from '$lib/components/chat/Messages.svelte';
  71. import Navbar from '$lib/components/layout/Navbar.svelte';
  72. import ChatControls from './ChatControls.svelte';
  73. import EventConfirmDialog from '../common/ConfirmDialog.svelte';
  74. import Placeholder from './Placeholder.svelte';
  75. export let chatIdProp = '';
  76. let loaded = false;
  77. const eventTarget = new EventTarget();
  78. let controlPane;
  79. let controlPaneComponent;
  80. let stopResponseFlag = false;
  81. let autoScroll = true;
  82. let processing = '';
  83. let messagesContainerElement: HTMLDivElement;
  84. let navbarElement;
  85. let showEventConfirmation = false;
  86. let eventConfirmationTitle = '';
  87. let eventConfirmationMessage = '';
  88. let eventConfirmationInput = false;
  89. let eventConfirmationInputPlaceholder = '';
  90. let eventConfirmationInputValue = '';
  91. let eventCallback = null;
  92. let chatIdUnsubscriber: Unsubscriber | undefined;
  93. let selectedModels = [''];
  94. let atSelectedModel: Model | undefined;
  95. let selectedModelIds = [];
  96. $: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;
  97. let selectedToolIds = [];
  98. let webSearchEnabled = false;
  99. let chat = null;
  100. let tags = [];
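// Conversation state: `messages` maps message id -> message node; nodes are linked
// through parentId/childrenIds, so the chat forms a tree with multiple branches,
// and `currentId` points at the leaf of the branch currently being displayed.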
  101. let history = {
  102. messages: {},
  103. currentId: null
  104. };
  105. // Chat Input
  106. let prompt = '';
  107. let chatFiles = [];
  108. let files = [];
  109. let params = {};
  110. $: if (chatIdProp) {
  111. (async () => {
  112. console.log(chatIdProp);
  113. if (chatIdProp && (await loadChat())) {
  114. await tick();
  115. loaded = true;
  116. window.setTimeout(() => scrollToBottom(), 0);
  117. const chatInput = document.getElementById('chat-input');
  118. chatInput?.focus();
  119. } else {
  120. await goto('/');
  121. }
  122. })();
  123. }
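// Navigate to a message: from the given message, follow the newest child at each
// level down to the leaf of that branch, make it the current message, scroll the
// clicked message into view, and persist the updated history.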
  124. const showMessage = async (message) => {
  125. const _chatId = JSON.parse(JSON.stringify($chatId));
  126. let _messageId = JSON.parse(JSON.stringify(message.id));
  127. let messageChildrenIds = history.messages[_messageId].childrenIds;
  128. while (messageChildrenIds.length !== 0) {
  129. _messageId = messageChildrenIds.at(-1);
  130. messageChildrenIds = history.messages[_messageId].childrenIds;
  131. }
  132. history.currentId = _messageId;
  133. await tick();
  134. await tick();
  135. await tick();
  136. const messageElement = document.getElementById(`message-${message.id}`);
  137. if (messageElement) {
  138. messageElement.scrollIntoView({ behavior: 'smooth' });
  139. }
  140. await tick();
  141. saveChatHandler(_chatId);
  142. };
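// Handler for server-emitted 'chat-events' socket messages targeting the active chat.
// Supported event types: 'status' (append to the message's statusHistory), 'citation'
// (upsert code executions by id, otherwise append a citation), 'message' (append
// content), 'replace' (overwrite content), 'action' ('continue' clicks the
// continue-response button), 'confirmation' / 'input' (open the confirmation dialog,
// optionally with a text input), and 'execute' (run the provided code and hand the
// result back through the callback).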
  143. const chatEventHandler = async (event, cb) => {
  144. if (event.chat_id === $chatId) {
  145. await tick();
  146. console.log(event);
  147. let message = history.messages[event.message_id];
  148. const type = event?.data?.type ?? null;
  149. const data = event?.data?.data ?? null;
  150. if (type === 'status') {
  151. if (message?.statusHistory) {
  152. message.statusHistory.push(data);
  153. } else {
  154. message.statusHistory = [data];
  155. }
  156. } else if (type === 'citation') {
  157. if (data?.type === 'code_execution') {
  158. // Code execution; update existing code execution by ID, or add new one.
  159. if (!message?.code_executions) {
  160. message.code_executions = [];
  161. }
  162. const existingCodeExecutionIndex = message.code_executions.findIndex(
  163. (execution) => execution.id === data.id
  164. );
  165. if (existingCodeExecutionIndex !== -1) {
  166. message.code_executions[existingCodeExecutionIndex] = data;
  167. } else {
  168. message.code_executions.push(data);
  169. }
  170. message.code_executions = message.code_executions;
  171. } else {
  172. // Regular citation.
  173. if (message?.citations) {
  174. message.citations.push(data);
  175. } else {
  176. message.citations = [data];
  177. }
  178. }
  179. } else if (type === 'message') {
  180. message.content += data.content;
  181. } else if (type === 'replace') {
  182. message.content = data.content;
  183. } else if (type === 'action') {
  184. if (data.action === 'continue') {
  185. const continueButton = document.getElementById('continue-response-button');
  186. if (continueButton) {
  187. continueButton.click();
  188. }
  189. }
  190. } else if (type === 'confirmation') {
  191. eventCallback = cb;
  192. eventConfirmationInput = false;
  193. showEventConfirmation = true;
  194. eventConfirmationTitle = data.title;
  195. eventConfirmationMessage = data.message;
  196. } else if (type === 'execute') {
  197. eventCallback = cb;
  198. try {
  199. // Use Function constructor to evaluate code in a safer way
  200. const asyncFunction = new Function(`return (async () => { ${data.code} })()`);
  201. const result = await asyncFunction(); // Await the result of the async function
  202. if (cb) {
  203. cb(result);
  204. }
  205. } catch (error) {
  206. console.error('Error executing code:', error);
  207. }
  208. } else if (type === 'input') {
  209. eventCallback = cb;
  210. eventConfirmationInput = true;
  211. showEventConfirmation = true;
  212. eventConfirmationTitle = data.title;
  213. eventConfirmationMessage = data.message;
  214. eventConfirmationInputPlaceholder = data.placeholder;
  215. eventConfirmationInputValue = data?.value ?? '';
  216. } else {
  217. console.log('Unknown message type', data);
  218. }
  219. history.messages[event.message_id] = message;
  220. }
  221. };
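// window.postMessage bridge for same-origin embeds: 'input:prompt' fills the chat
// input, 'action:submit' submits the current prompt, and 'input:prompt:submit'
// submits the text carried by the event.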
  222. const onMessageHandler = async (event: {
  223. origin: string;
  224. data: { type: string; text: string };
  225. }) => {
  226. if (event.origin !== window.origin) {
  227. return;
  228. }
  229. // Replace with your iframe's origin
  230. if (event.data.type === 'input:prompt') {
  231. console.debug(event.data.text);
  232. const inputElement = document.getElementById('chat-input');
  233. if (inputElement) {
  234. prompt = event.data.text;
  235. inputElement.focus();
  236. }
  237. }
  238. if (event.data.type === 'action:submit') {
  239. console.debug(event.data.text);
  240. if (prompt !== '') {
  241. await tick();
  242. submitPrompt(prompt);
  243. }
  244. }
  245. if (event.data.type === 'input:prompt:submit') {
  246. console.debug(event.data.text);
  247. if (prompt !== '') {
  248. await tick();
  249. submitPrompt(event.data.text);
  250. }
  251. }
  252. };
  253. onMount(async () => {
  254. window.addEventListener('message', onMessageHandler);
  255. $socket?.on('chat-events', chatEventHandler);
  256. if (!$chatId) {
  257. chatIdUnsubscriber = chatId.subscribe(async (value) => {
  258. if (!value) {
  259. await initNewChat();
  260. }
  261. });
  262. } else {
  263. if ($temporaryChatEnabled) {
  264. await goto('/');
  265. }
  266. }
  267. showControls.subscribe(async (value) => {
  268. if (controlPane && !$mobile) {
  269. try {
  270. if (value) {
  271. controlPaneComponent.openPane();
  272. } else {
  273. controlPane.collapse();
  274. }
  275. } catch (e) {
  276. // ignore
  277. }
  278. }
  279. if (!value) {
  280. showCallOverlay.set(false);
  281. showOverview.set(false);
  282. showArtifacts.set(false);
  283. }
  284. });
  285. const chatInput = document.getElementById('chat-input');
  286. chatInput?.focus();
  287. chats.subscribe(() => {});
  288. });
  289. onDestroy(() => {
  290. chatIdUnsubscriber?.();
  291. window.removeEventListener('message', onMessageHandler);
  292. $socket?.off('chat-events', chatEventHandler);
  293. });
  294. // File upload functions
  295. const uploadGoogleDriveFile = async (fileData) => {
  296. console.log('Starting uploadGoogleDriveFile with:', {
  297. id: fileData.id,
  298. name: fileData.name,
  299. url: fileData.url,
  300. headers: {
  301. Authorization: fileData?.headers?.Authorization ? 'Bearer [redacted]' : undefined
  302. }
  303. });
  304. // Validate input
  305. if (!fileData?.id || !fileData?.name || !fileData?.url || !fileData?.headers?.Authorization) {
  306. throw new Error('Invalid file data provided');
  307. }
  308. const tempItemId = uuidv4();
  309. const fileItem = {
  310. type: 'file',
  311. file: '',
  312. id: null,
  313. url: fileData.url,
  314. name: fileData.name,
  315. collection_name: '',
  316. status: 'uploading',
  317. error: '',
  318. itemId: tempItemId,
  319. size: 0
  320. };
  321. try {
  322. files = [...files, fileItem];
  323. console.log('Processing web file with URL:', fileData.url);
  324. // Configure fetch options with proper headers
  325. const fetchOptions = {
  326. headers: {
  327. 'Authorization': fileData.headers.Authorization,
  328. 'Accept': '*/*'
  329. },
  330. method: 'GET'
  331. };
  332. // Attempt to fetch the file
  333. console.log('Fetching file content from Google Drive...');
  334. const fileResponse = await fetch(fileData.url, fetchOptions);
  335. if (!fileResponse.ok) {
  336. const errorText = await fileResponse.text();
  337. throw new Error(`Failed to fetch file (${fileResponse.status}): ${errorText}`);
  338. }
  339. // Get content type from response
  340. const contentType = fileResponse.headers.get('content-type') || 'application/octet-stream';
  341. console.log('Response received with content-type:', contentType);
  342. // Convert response to blob
  343. console.log('Converting response to blob...');
  344. const fileBlob = await fileResponse.blob();
  345. if (fileBlob.size === 0) {
  346. throw new Error('Retrieved file is empty');
  347. }
  348. console.log('Blob created:', {
  349. size: fileBlob.size,
  350. type: fileBlob.type || contentType
  351. });
  352. // Create File object with proper MIME type
  353. const file = new File([fileBlob], fileData.name, {
  354. type: fileBlob.type || contentType
  355. });
  356. console.log('File object created:', {
  357. name: file.name,
  358. size: file.size,
  359. type: file.type
  360. });
  361. if (file.size === 0) {
  362. throw new Error('Created file is empty');
  363. }
  364. // Upload file to server
  365. console.log('Uploading file to server...');
  366. const uploadedFile = await uploadFile(localStorage.token, file);
  367. if (!uploadedFile) {
  368. throw new Error('Server returned null response for file upload');
  369. }
  370. console.log('File uploaded successfully:', uploadedFile);
  371. // Update file item with upload results
  372. fileItem.status = 'uploaded';
  373. fileItem.file = uploadedFile;
  374. fileItem.id = uploadedFile.id;
  375. fileItem.size = file.size;
  376. fileItem.collection_name = uploadedFile?.meta?.collection_name;
  377. fileItem.url = `${WEBUI_API_BASE_URL}/files/${uploadedFile.id}`;
  378. files = files;
  379. toast.success($i18n.t('File uploaded successfully'));
  380. } catch (e) {
  381. console.error('Error uploading file:', e);
  382. files = files.filter((f) => f.itemId !== tempItemId);
  383. toast.error($i18n.t('Error uploading file: {{error}}', {
  384. error: e.message || 'Unknown error'
  385. }));
  386. }
  387. };
  388. const handleGoogleDrivePicker = async () => {
  389. try {
  390. const fileData = await createPicker();
  391. if (fileData) {
  392. await uploadGoogleDriveFile(fileData);
  393. }
  394. } catch (error) {
  395. toast.error('Error accessing Google Drive: ' + error.message);
  396. }
  397. };
  398. const uploadWeb = async (url) => {
  399. console.log(url);
  400. const fileItem = {
  401. type: 'doc',
  402. name: url,
  403. collection_name: '',
  404. status: 'uploading',
  405. url: url,
  406. error: ''
  407. };
  408. try {
  409. files = [...files, fileItem];
  410. const res = await processWeb(localStorage.token, '', url);
  411. if (res) {
  412. fileItem.status = 'uploaded';
  413. fileItem.collection_name = res.collection_name;
  414. fileItem.file = {
  415. ...res.file,
  416. ...fileItem.file
  417. };
  418. files = files;
  419. }
  420. } catch (e) {
  421. // Remove the failed doc from the files array
  422. files = files.filter((f) => f.name !== url);
  423. toast.error(JSON.stringify(e));
  424. }
  425. };
  426. const uploadYoutubeTranscription = async (url) => {
  427. console.log(url);
  428. const fileItem = {
  429. type: 'doc',
  430. name: url,
  431. collection_name: '',
  432. status: 'uploading',
  433. context: 'full',
  434. url: url,
  435. error: ''
  436. };
  437. try {
  438. files = [...files, fileItem];
  439. const res = await processYoutubeVideo(localStorage.token, url);
  440. if (res) {
  441. fileItem.status = 'uploaded';
  442. fileItem.collection_name = res.collection_name;
  443. fileItem.file = {
  444. ...res.file,
  445. ...fileItem.file
  446. };
  447. files = files;
  448. }
  449. } catch (e) {
  450. // Remove the failed doc from the files array
  451. files = files.filter((f) => f.name !== url);
  452. toast.error(e);
  453. }
  454. };
  455. //////////////////////////
  456. // Web functions
  457. //////////////////////////
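// Reset all per-chat state for a fresh conversation and resolve the initial model
// selection from the 'models'/'model' URL params, saved user settings, or the
// server's default models; also honors the 'youtube', 'web-search', 'tools',
// 'tool-ids', 'call' and 'q' query parameters.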
  458. const initNewChat = async () => {
  459. await showControls.set(false);
  460. await showCallOverlay.set(false);
  461. await showOverview.set(false);
  462. await showArtifacts.set(false);
  463. if ($page.url.pathname.includes('/c/')) {
  464. window.history.replaceState(window.history.state, '', `/`);
  465. }
  466. autoScroll = true;
  467. await chatId.set('');
  468. await chatTitle.set('');
  469. history = {
  470. messages: {},
  471. currentId: null
  472. };
  473. chatFiles = [];
  474. params = {};
  475. if ($page.url.searchParams.get('models')) {
  476. selectedModels = $page.url.searchParams.get('models')?.split(',');
  477. } else if ($page.url.searchParams.get('model')) {
  478. const urlModels = $page.url.searchParams.get('model')?.split(',');
  479. if (urlModels.length === 1) {
  480. const m = $models.find((m) => m.id === urlModels[0]);
  481. if (!m) {
  482. const modelSelectorButton = document.getElementById('model-selector-0-button');
  483. if (modelSelectorButton) {
  484. modelSelectorButton.click();
  485. await tick();
  486. const modelSelectorInput = document.getElementById('model-search-input');
  487. if (modelSelectorInput) {
  488. modelSelectorInput.focus();
  489. modelSelectorInput.value = urlModels[0];
  490. modelSelectorInput.dispatchEvent(new Event('input'));
  491. }
  492. }
  493. } else {
  494. selectedModels = urlModels;
  495. }
  496. } else {
  497. selectedModels = urlModels;
  498. }
  499. } else if ($settings?.models) {
  500. selectedModels = $settings?.models;
  501. } else if ($config?.default_models) {
  502. console.log($config?.default_models.split(',') ?? '');
  503. selectedModels = $config?.default_models.split(',');
  504. }
  505. selectedModels = selectedModels.filter((modelId) => $models.map((m) => m.id).includes(modelId));
  506. if (selectedModels.length === 0 || (selectedModels.length === 1 && selectedModels[0] === '')) {
  507. if ($models.length > 0) {
  508. selectedModels = [$models[0].id];
  509. } else {
  510. selectedModels = [''];
  511. }
  512. }
  513. console.log(selectedModels);
  514. if ($page.url.searchParams.get('youtube')) {
  515. uploadYoutubeTranscription(
  516. `https://www.youtube.com/watch?v=${$page.url.searchParams.get('youtube')}`
  517. );
  518. }
  519. if ($page.url.searchParams.get('web-search') === 'true') {
  520. webSearchEnabled = true;
  521. }
  522. if ($page.url.searchParams.get('tools')) {
  523. selectedToolIds = $page.url.searchParams
  524. .get('tools')
  525. ?.split(',')
  526. .map((id) => id.trim())
  527. .filter((id) => id);
  528. } else if ($page.url.searchParams.get('tool-ids')) {
  529. selectedToolIds = $page.url.searchParams
  530. .get('tool-ids')
  531. ?.split(',')
  532. .map((id) => id.trim())
  533. .filter((id) => id);
  534. }
  535. if ($page.url.searchParams.get('call') === 'true') {
  536. showCallOverlay.set(true);
  537. showControls.set(true);
  538. }
  539. if ($page.url.searchParams.get('q')) {
  540. prompt = $page.url.searchParams.get('q') ?? '';
  541. if (prompt) {
  542. await tick();
  543. submitPrompt(prompt);
  544. }
  545. }
  546. selectedModels = selectedModels.map((modelId) =>
  547. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  548. );
  549. const userSettings = await getUserSettings(localStorage.token);
  550. if (userSettings) {
  551. settings.set(userSettings.ui);
  552. } else {
  553. settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  554. }
  555. const chatInput = document.getElementById('chat-input');
  556. setTimeout(() => chatInput?.focus(), 0);
  557. };
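// Load an existing chat by id (redirecting home on failure): restores its tags,
// selected models, message history (converting legacy flat message lists via
// convertMessagesToHistory), title, params and files, and re-applies user settings.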
  558. const loadChat = async () => {
  559. chatId.set(chatIdProp);
  560. chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
  561. await goto('/');
  562. return null;
  563. });
  564. if (chat) {
  565. tags = await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  566. return [];
  567. });
  568. const chatContent = chat.chat;
  569. if (chatContent) {
  570. console.log(chatContent);
  571. selectedModels =
  572. (chatContent?.models ?? undefined) !== undefined
  573. ? chatContent.models
  574. : [chatContent.models ?? ''];
  575. history =
  576. (chatContent?.history ?? undefined) !== undefined
  577. ? chatContent.history
  578. : convertMessagesToHistory(chatContent.messages);
  579. chatTitle.set(chatContent.title);
  580. const userSettings = await getUserSettings(localStorage.token);
  581. if (userSettings) {
  582. await settings.set(userSettings.ui);
  583. } else {
  584. await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  585. }
  586. params = chatContent?.params ?? {};
  587. chatFiles = chatContent?.files ?? [];
  588. autoScroll = true;
  589. await tick();
  590. if (history.currentId) {
  591. history.messages[history.currentId].done = true;
  592. }
  593. await tick();
  594. return true;
  595. } else {
  596. return null;
  597. }
  598. }
  599. };
  600. const scrollToBottom = async () => {
  601. await tick();
  602. if (messagesContainerElement) {
  603. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  604. }
  605. };
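// Rebuild the linear message list for one branch by walking parentId links from the
// given message back to the root; returns the messages in chronological order.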
  606. const createMessagesList = (responseMessageId) => {
  607. if (responseMessageId === null) {
  608. return [];
  609. }
  610. const message = history.messages[responseMessageId];
  611. if (message?.parentId) {
  612. return [...createMessagesList(message.parentId), message];
  613. } else {
  614. return [message];
  615. }
  616. };
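// After a completion finishes: re-render mermaid diagrams, notify the backend via
// chatCompleted, merge any messages it returns back into history (keeping the local
// text under `originalContent` when it was changed), and persist the chat unless
// temporary chat mode is enabled.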
  617. const chatCompletedHandler = async (chatId, modelId, responseMessageId, messages) => {
  618. await mermaid.run({
  619. querySelector: '.mermaid'
  620. });
  621. const res = await chatCompleted(localStorage.token, {
  622. model: modelId,
  623. messages: messages.map((m) => ({
  624. id: m.id,
  625. role: m.role,
  626. content: m.content,
  627. info: m.info ? m.info : undefined,
  628. timestamp: m.timestamp
  629. })),
  630. chat_id: chatId,
  631. session_id: $socket?.id,
  632. id: responseMessageId
  633. }).catch((error) => {
  634. toast.error(error);
  635. messages.at(-1).error = { content: error };
  636. return null;
  637. });
  638. if (res !== null) {
  639. // Update chat history with the new messages
  640. for (const message of res.messages) {
  641. history.messages[message.id] = {
  642. ...history.messages[message.id],
  643. ...(history.messages[message.id].content !== message.content
  644. ? { originalContent: history.messages[message.id].content }
  645. : {}),
  646. ...message
  647. };
  648. }
  649. }
  650. await tick();
  651. if ($chatId == chatId) {
  652. if (!$temporaryChatEnabled) {
  653. chat = await updateChatById(localStorage.token, chatId, {
  654. models: selectedModels,
  655. messages: messages,
  656. history: history,
  657. params: params,
  658. files: chatFiles
  659. });
  660. currentChatPage.set(1);
  661. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  662. }
  663. }
  664. };
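// Invoke a server-side action (by actionId) against the current branch, merge the
// messages returned by the backend into history, then persist the chat.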
  665. const chatActionHandler = async (chatId, actionId, modelId, responseMessageId, event = null) => {
  666. const messages = createMessagesList(responseMessageId);
  667. const res = await chatAction(localStorage.token, actionId, {
  668. model: modelId,
  669. messages: messages.map((m) => ({
  670. id: m.id,
  671. role: m.role,
  672. content: m.content,
  673. info: m.info ? m.info : undefined,
  674. timestamp: m.timestamp
  675. })),
  676. ...(event ? { event: event } : {}),
  677. chat_id: chatId,
  678. session_id: $socket?.id,
  679. id: responseMessageId
  680. }).catch((error) => {
  681. toast.error(error);
  682. messages.at(-1).error = { content: error };
  683. return null;
  684. });
  685. if (res !== null) {
  686. // Update chat history with the new messages
  687. for (const message of res.messages) {
  688. history.messages[message.id] = {
  689. ...history.messages[message.id],
  690. ...(history.messages[message.id].content !== message.content
  691. ? { originalContent: history.messages[message.id].content }
  692. : {}),
  693. ...message
  694. };
  695. }
  696. }
  697. if ($chatId == chatId) {
  698. if (!$temporaryChatEnabled) {
  699. chat = await updateChatById(localStorage.token, chatId, {
  700. models: selectedModels,
  701. messages: messages,
  702. history: history,
  703. params: params,
  704. files: chatFiles
  705. });
  706. currentChatPage.set(1);
  707. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  708. }
  709. }
  710. };
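// Emit a 'usage' heartbeat over the socket every second for the given model/chat so
// the backend can track active usage; the caller clears the returned interval once
// the response has finished.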
  711. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  712. return setInterval(() => {
  713. $socket?.emit('usage', {
  714. action: 'chat',
  715. model: modelId,
  716. chat_id: chatId
  717. });
  718. }, 1000);
  719. };
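// Insert a user message plus an assistant placeholder at the end of the current
// branch (empty prompts get placeholder content and the assistant message is created
// already marked done), then create the chat on the first pair or save it otherwise.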
  720. const createMessagePair = async (userPrompt) => {
  721. prompt = '';
  722. if (selectedModels.length === 0) {
  723. toast.error($i18n.t('Model not selected'));
  724. } else {
  725. const modelId = selectedModels[0];
  726. const model = $models.filter((m) => m.id === modelId).at(0);
  727. const messages = createMessagesList(history.currentId);
  728. const parentMessage = messages.length !== 0 ? messages.at(-1) : null;
  729. const userMessageId = uuidv4();
  730. const responseMessageId = uuidv4();
  731. const userMessage = {
  732. id: userMessageId,
  733. parentId: parentMessage ? parentMessage.id : null,
  734. childrenIds: [responseMessageId],
  735. role: 'user',
  736. content: userPrompt ? userPrompt : `[PROMPT] ${userMessageId}`,
  737. timestamp: Math.floor(Date.now() / 1000)
  738. };
  739. const responseMessage = {
  740. id: responseMessageId,
  741. parentId: userMessageId,
  742. childrenIds: [],
  743. role: 'assistant',
  744. content: `[RESPONSE] ${responseMessageId}`,
  745. done: true,
  746. model: modelId,
  747. modelName: model.name ?? model.id,
  748. modelIdx: 0,
  749. timestamp: Math.floor(Date.now() / 1000)
  750. };
  751. if (parentMessage) {
  752. parentMessage.childrenIds.push(userMessageId);
  753. history.messages[parentMessage.id] = parentMessage;
  754. }
  755. history.messages[userMessageId] = userMessage;
  756. history.messages[responseMessageId] = responseMessage;
  757. history.currentId = responseMessageId;
  758. await tick();
  759. if (autoScroll) {
  760. scrollToBottom();
  761. }
  762. if (messages.length === 0) {
  763. await initChatHandler();
  764. } else {
  765. await saveChatHandler($chatId);
  766. }
  767. }
  768. };
  769. //////////////////////////
  770. // Chat functions
  771. //////////////////////////
  772. const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
  773. console.log('submitPrompt', userPrompt, $chatId);
  774. const messages = createMessagesList(history.currentId);
  775. selectedModels = selectedModels.map((modelId) =>
  776. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  777. );
  778. if (userPrompt === '') {
  779. toast.error($i18n.t('Please enter a prompt'));
  780. return;
  781. }
  782. if (selectedModels.includes('')) {
  783. toast.error($i18n.t('Model not selected'));
  784. return;
  785. }
  786. if (messages.length != 0 && messages.at(-1).done != true) {
  787. // Response not done
  788. return;
  789. }
  790. if (messages.length != 0 && messages.at(-1).error) {
  791. // Error in response
  792. toast.error($i18n.t(`Oops! There was an error in the previous response.`));
  793. return;
  794. }
  795. if (
  796. files.length > 0 &&
  797. files.filter((file) => file.type !== 'image' && file.status === 'uploading').length > 0
  798. ) {
  799. toast.error(
  800. $i18n.t(`Oops! There are files still uploading. Please wait for the upload to complete.`)
  801. );
  802. return;
  803. }
  804. if (
  805. ($config?.file?.max_count ?? null) !== null &&
  806. files.length + chatFiles.length > $config?.file?.max_count
  807. ) {
  808. toast.error(
  809. $i18n.t(`You can only chat with a maximum of {{maxCount}} file(s) at a time.`, {
  810. maxCount: $config?.file?.max_count
  811. })
  812. );
  813. return;
  814. }
  815. let _responses = [];
  816. prompt = '';
  817. await tick();
  818. // Reset chat input textarea
  819. const chatInputContainer = document.getElementById('chat-input-container');
  820. if (chatInputContainer) {
  821. chatInputContainer.value = '';
  822. chatInputContainer.style.height = '';
  823. }
  824. const _files = JSON.parse(JSON.stringify(files));
  825. chatFiles.push(..._files.filter((item) => ['doc', 'file', 'collection'].includes(item.type)));
  826. chatFiles = chatFiles.filter(
  827. // Remove duplicates
  828. (item, index, array) =>
  829. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  830. );
  831. files = [];
  832. prompt = '';
  833. // Create user message
  834. let userMessageId = uuidv4();
  835. let userMessage = {
  836. id: userMessageId,
  837. parentId: messages.length !== 0 ? messages.at(-1).id : null,
  838. childrenIds: [],
  839. role: 'user',
  840. content: userPrompt,
  841. files: _files.length > 0 ? _files : undefined,
  842. timestamp: Math.floor(Date.now() / 1000), // Unix epoch
  843. models: selectedModels
  844. };
  845. // Add message to history and Set currentId to messageId
  846. history.messages[userMessageId] = userMessage;
  847. history.currentId = userMessageId;
  848. // Append messageId to childrenIds of parent message
  849. if (messages.length !== 0) {
  850. history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
  851. }
  852. // Wait until history/message have been updated
  853. await tick();
  854. // focus on chat input
  855. const chatInput = document.getElementById('chat-input');
  856. chatInput?.focus();
  857. _responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
  858. return _responses;
  859. };
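// Fan a prompt out to every selected model (or the single @-mentioned / explicitly
// passed model): create one placeholder assistant message per model, optionally pull
// user memories and run a web search, then stream each completion through the Ollama
// or OpenAI pathway and refresh the chat list when done.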
  860. const sendPrompt = async (
  861. prompt: string,
  862. parentId: string,
  863. { modelId = null, modelIdx = null, newChat = false } = {}
  864. ) => {
  865. // Create new chat if newChat is true and first user message
  866. if (
  867. newChat &&
  868. history.messages[history.currentId].parentId === null &&
  869. history.messages[history.currentId].role === 'user'
  870. ) {
  871. await initChatHandler();
  872. }
  873. let _responses: string[] = [];
  874. // If modelId is provided, use it, else use selected model
  875. let selectedModelIds = modelId
  876. ? [modelId]
  877. : atSelectedModel !== undefined
  878. ? [atSelectedModel.id]
  879. : selectedModels;
  880. // Create response messages for each selected model
  881. const responseMessageIds: Record<PropertyKey, string> = {};
  882. for (const [_modelIdx, modelId] of selectedModelIds.entries()) {
  883. const model = $models.filter((m) => m.id === modelId).at(0);
  884. if (model) {
  885. let responseMessageId = uuidv4();
  886. let responseMessage = {
  887. parentId: parentId,
  888. id: responseMessageId,
  889. childrenIds: [],
  890. role: 'assistant',
  891. content: '',
  892. model: model.id,
  893. modelName: model.name ?? model.id,
  894. modelIdx: modelIdx ? modelIdx : _modelIdx,
  895. userContext: null,
  896. timestamp: Math.floor(Date.now() / 1000) // Unix epoch
  897. };
  898. // Add message to history and Set currentId to messageId
  899. history.messages[responseMessageId] = responseMessage;
  900. history.currentId = responseMessageId;
  901. // Append messageId to childrenIds of parent message
  902. if (parentId !== null) {
  903. history.messages[parentId].childrenIds = [
  904. ...history.messages[parentId].childrenIds,
  905. responseMessageId
  906. ];
  907. }
  908. responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`] = responseMessageId;
  909. }
  910. }
  911. await tick();
  912. const _chatId = JSON.parse(JSON.stringify($chatId));
  913. await Promise.all(
  914. selectedModelIds.map(async (modelId, _modelIdx) => {
  915. console.log('modelId', modelId);
  916. const model = $models.filter((m) => m.id === modelId).at(0);
  917. if (model) {
  918. const messages = createMessagesList(parentId);
  919. // If there are image files, check if model is vision capable
  920. const hasImages = messages.some((message) =>
  921. message.files?.some((file) => file.type === 'image')
  922. );
  923. if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
  924. toast.error(
  925. $i18n.t('Model {{modelName}} is not vision capable', {
  926. modelName: model.name ?? model.id
  927. })
  928. );
  929. }
  930. let responseMessageId =
  931. responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`];
  932. let responseMessage = history.messages[responseMessageId];
  933. let userContext = null;
  934. if ($settings?.memory ?? false) {
  935. if (userContext === null) {
  936. const res = await queryMemory(localStorage.token, prompt).catch((error) => {
  937. toast.error(error);
  938. return null;
  939. });
  940. if (res) {
  941. if (res.documents[0].length > 0) {
  942. userContext = res.documents[0].reduce((acc, doc, index) => {
  943. const createdAtTimestamp = res.metadatas[0][index].created_at;
  944. const createdAtDate = new Date(createdAtTimestamp * 1000)
  945. .toISOString()
  946. .split('T')[0];
  947. return `${acc}${index + 1}. [${createdAtDate}]. ${doc}\n`;
  948. }, '');
  949. }
  950. console.log(userContext);
  951. }
  952. }
  953. }
  954. responseMessage.userContext = userContext;
  955. const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
  956. scrollToBottom();
  957. if (webSearchEnabled) {
  958. await getWebSearchResults(model.id, parentId, responseMessageId);
  959. }
  960. let _response = null;
  961. if (model?.owned_by === 'ollama') {
  962. _response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
  963. } else if (model) {
  964. _response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
  965. }
  966. _responses.push(_response);
  967. if (chatEventEmitter) clearInterval(chatEventEmitter);
  968. } else {
  969. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  970. }
  971. })
  972. );
  973. currentChatPage.set(1);
  974. chats.set(await getChatList(localStorage.token, $currentChatPage));
  975. return _responses;
  976. };
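// Ollama pathway: build the chat request (system prompt, message branch, files and
// model knowledge, generation options) and either read a single JSON response or
// consume the newline-delimited JSON stream chunk by chunk, appending content,
// emitting TTS sentence events, and recording timing info; afterwards save the chat,
// fire 'chat:finish', and generate a title/tags for brand-new chats.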
  977. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  978. let _response: string | null = null;
  979. const responseMessage = history.messages[responseMessageId];
  980. const userMessage = history.messages[responseMessage.parentId];
  981. // Wait until history/message have been updated
  982. await tick();
  983. // Scroll down
  984. scrollToBottom();
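// Request body messages: an optional system message rendered through promptTemplate
// (injecting the user's name and, when enabled, their location) followed by the
// current branch; user images are sent as base64 strings in `images` (data-URL
// prefix stripped), and only the most recent message keeps its images.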
  985. const messagesBody = [
  986. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  987. ? {
  988. role: 'system',
  989. content: `${promptTemplate(
  990. params?.system ?? $settings?.system ?? '',
  991. $user.name,
  992. $settings?.userLocation
  993. ? await getAndUpdateUserLocation(localStorage.token)
  994. : undefined
  995. )}${
  996. (responseMessage?.userContext ?? null)
  997. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  998. : ''
  999. }`
  1000. }
  1001. : undefined,
  1002. ...createMessagesList(responseMessageId)
  1003. ]
  1004. .filter((message) => message?.content?.trim())
  1005. .map((message) => {
  1006. // Prepare the base message object
  1007. const baseMessage = {
  1008. role: message.role,
  1009. content: message?.merged?.content ?? message.content
  1010. };
  1011. // Extract and format image URLs if any exist
  1012. const imageUrls = message.files
  1013. ?.filter((file) => file.type === 'image')
  1014. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  1015. // Add images array only if it contains elements
  1016. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  1017. baseMessage.images = imageUrls;
  1018. }
  1019. return baseMessage;
  1020. });
  1021. let lastImageIndex = -1;
  1022. // Find the index of the last object with images
  1023. messagesBody.forEach((item, index) => {
  1024. if (item.images) {
  1025. lastImageIndex = index;
  1026. }
  1027. });
  1028. // Remove images from all but the last one
  1029. messagesBody.forEach((item, index) => {
  1030. if (index !== lastImageIndex) {
  1031. delete item.images;
  1032. }
  1033. });
  1034. let files = JSON.parse(JSON.stringify(chatFiles));
  1035. if (model?.info?.meta?.knowledge ?? false) {
  1036. // Only initialize and add status if knowledge exists
  1037. responseMessage.statusHistory = [
  1038. {
  1039. action: 'knowledge_search',
  1040. description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
  1041. searchQuery: userMessage.content
  1042. }),
  1043. done: false
  1044. }
  1045. ];
  1046. files.push(
  1047. ...model.info.meta.knowledge.map((item) => {
  1048. if (item?.collection_name) {
  1049. return {
  1050. id: item.collection_name,
  1051. name: item.name,
  1052. legacy: true
  1053. };
  1054. } else if (item?.collection_names) {
  1055. return {
  1056. name: item.name,
  1057. type: 'collection',
  1058. collection_names: item.collection_names,
  1059. legacy: true
  1060. };
  1061. } else {
  1062. return item;
  1063. }
  1064. })
  1065. );
  1066. history.messages[responseMessageId] = responseMessage;
  1067. }
  1068. files.push(
  1069. ...(userMessage?.files ?? []).filter((item) =>
  1070. ['doc', 'file', 'collection'].includes(item.type)
  1071. ),
  1072. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  1073. );
  1074. // Remove duplicates
  1075. files = files.filter(
  1076. (item, index, array) =>
  1077. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  1078. );
  1079. scrollToBottom();
  1080. eventTarget.dispatchEvent(
  1081. new CustomEvent('chat:start', {
  1082. detail: {
  1083. id: responseMessageId
  1084. }
  1085. })
  1086. );
  1087. await tick();
  1088. const stream =
  1089. model?.info?.params?.stream_response ??
  1090. $settings?.params?.stream_response ??
  1091. params?.stream_response ??
  1092. true;
  1093. const [res, controller] = await generateChatCompletion(localStorage.token, {
  1094. stream: stream,
  1095. model: model.id,
  1096. messages: messagesBody,
  1097. options: {
  1098. ...{ ...($settings?.params ?? {}), ...params },
  1099. stop:
  1100. (params?.stop ?? $settings?.params?.stop ?? undefined)
  1101. ? (params?.stop ? params.stop.split(',').map((token) => token.trim()) : $settings.params.stop).map(
  1102. (str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  1103. )
  1104. : undefined,
  1105. num_predict: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  1106. repeat_penalty:
  1107. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined
  1108. },
  1109. format: $settings.requestFormat ?? undefined,
  1110. keep_alive: $settings.keepAlive ?? undefined,
  1111. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  1112. files: files.length > 0 ? files : undefined,
  1113. session_id: $socket?.id,
  1114. chat_id: $chatId,
  1115. id: responseMessageId
  1116. });
  1117. if (res && res.ok) {
  1118. if (!stream) {
  1119. const response = await res.json();
  1120. console.log(response);
  1121. responseMessage.content = response.message.content;
  1122. responseMessage.info = {
  1123. eval_count: response.eval_count,
  1124. eval_duration: response.eval_duration,
  1125. load_duration: response.load_duration,
  1126. prompt_eval_count: response.prompt_eval_count,
  1127. prompt_eval_duration: response.prompt_eval_duration,
  1128. total_duration: response.total_duration
  1129. };
  1130. responseMessage.done = true;
  1131. } else {
  1132. console.log('controller', controller);
  1133. const reader = res.body
  1134. .pipeThrough(new TextDecoderStream())
  1135. .pipeThrough(splitStream('\n'))
  1136. .getReader();
  1137. while (true) {
  1138. const { value, done } = await reader.read();
  1139. if (done || stopResponseFlag || _chatId !== $chatId) {
  1140. responseMessage.done = true;
  1141. history.messages[responseMessageId] = responseMessage;
  1142. if (stopResponseFlag) {
  1143. controller.abort('User: Stop Response');
  1144. }
  1145. _response = responseMessage.content;
  1146. break;
  1147. }
  1148. try {
  1149. let lines = value.split('\n');
  1150. for (const line of lines) {
  1151. if (line !== '') {
  1152. console.log(line);
  1153. let data = JSON.parse(line);
  1154. if ('citations' in data) {
  1155. responseMessage.citations = data.citations;
  1156. // Only remove status if it was initially set
  1157. if (model?.info?.meta?.knowledge ?? false) {
  1158. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  1159. (status) => status.action !== 'knowledge_search'
  1160. );
  1161. }
  1162. continue;
  1163. }
  1164. if ('detail' in data) {
  1165. throw data;
  1166. }
  1167. if (data.done == false) {
  1168. if (responseMessage.content == '' && data.message.content == '\n') {
  1169. continue;
  1170. } else {
  1171. responseMessage.content += data.message.content;
  1172. if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
  1173. navigator.vibrate(5);
  1174. }
  1175. const messageContentParts = getMessageContentParts(
  1176. responseMessage.content,
  1177. $config?.audio?.tts?.split_on ?? 'punctuation'
  1178. );
  1179. messageContentParts.pop();
  1180. // dispatch only last sentence and make sure it hasn't been dispatched before
  1181. if (
  1182. messageContentParts.length > 0 &&
  1183. messageContentParts[messageContentParts.length - 1] !==
  1184. responseMessage.lastSentence
  1185. ) {
  1186. responseMessage.lastSentence =
  1187. messageContentParts[messageContentParts.length - 1];
  1188. eventTarget.dispatchEvent(
  1189. new CustomEvent('chat', {
  1190. detail: {
  1191. id: responseMessageId,
  1192. content: messageContentParts[messageContentParts.length - 1]
  1193. }
  1194. })
  1195. );
  1196. }
  1197. history.messages[responseMessageId] = responseMessage;
  1198. }
  1199. } else {
  1200. responseMessage.done = true;
  1201. if (responseMessage.content == '') {
  1202. responseMessage.error = {
  1203. code: 400,
  1204. content: `Oops! No text generated from Ollama, Please try again.`
  1205. };
  1206. }
  1207. responseMessage.context = data.context ?? null;
  1208. responseMessage.info = {
  1209. total_duration: data.total_duration,
  1210. load_duration: data.load_duration,
  1211. sample_count: data.sample_count,
  1212. sample_duration: data.sample_duration,
  1213. prompt_eval_count: data.prompt_eval_count,
  1214. prompt_eval_duration: data.prompt_eval_duration,
  1215. eval_count: data.eval_count,
  1216. eval_duration: data.eval_duration
  1217. };
  1218. history.messages[responseMessageId] = responseMessage;
  1219. if ($settings.notificationEnabled && !document.hasFocus()) {
  1220. const notification = new Notification(`${model.id}`, {
  1221. body: responseMessage.content,
  1222. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  1223. });
  1224. }
  1225. if ($settings?.responseAutoCopy ?? false) {
  1226. copyToClipboard(responseMessage.content);
  1227. }
  1228. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  1229. await tick();
  1230. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  1231. }
  1232. }
  1233. }
  1234. }
  1235. } catch (error) {
  1236. console.log(error);
  1237. if ('detail' in error) {
  1238. toast.error(error.detail);
  1239. }
  1240. break;
  1241. }
  1242. if (autoScroll) {
  1243. scrollToBottom();
  1244. }
  1245. }
  1246. }
  1247. } else {
  1248. if (res !== null) {
  1249. const error = await res.json();
  1250. console.log(error);
  1251. if ('detail' in error) {
  1252. toast.error(error.detail);
  1253. responseMessage.error = { content: error.detail };
  1254. } else {
  1255. toast.error(error.error);
  1256. responseMessage.error = { content: error.error };
  1257. }
  1258. } else {
  1259. toast.error(
  1260. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  1261. );
  1262. responseMessage.error = {
  1263. content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  1264. provider: 'Ollama'
  1265. })
  1266. };
  1267. }
  1268. responseMessage.done = true;
  1269. if (responseMessage.statusHistory) {
  1270. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  1271. (status) => status.action !== 'knowledge_search'
  1272. );
  1273. }
  1274. }
  1275. await saveChatHandler(_chatId);
  1276. history.messages[responseMessageId] = responseMessage;
  1277. await chatCompletedHandler(
  1278. _chatId,
  1279. model.id,
  1280. responseMessageId,
  1281. createMessagesList(responseMessageId)
  1282. );
  1283. stopResponseFlag = false;
  1284. await tick();
  1285. let lastMessageContentPart =
  1286. getMessageContentParts(
  1287. responseMessage.content,
  1288. $config?.audio?.tts?.split_on ?? 'punctuation'
  1289. )?.at(-1) ?? '';
  1290. if (lastMessageContentPart) {
  1291. eventTarget.dispatchEvent(
  1292. new CustomEvent('chat', {
  1293. detail: { id: responseMessageId, content: lastMessageContentPart }
  1294. })
  1295. );
  1296. }
  1297. eventTarget.dispatchEvent(
  1298. new CustomEvent('chat:finish', {
  1299. detail: {
  1300. id: responseMessageId,
  1301. content: responseMessage.content
  1302. }
  1303. })
  1304. );
  1305. if (autoScroll) {
  1306. scrollToBottom();
  1307. }
  1308. const messages = createMessagesList(responseMessageId);
  1309. if (messages.length == 2 && messages.at(-1).content !== '' && selectedModels[0] === model.id) {
  1310. window.history.replaceState(window.history.state, '', `/c/${_chatId}`);
  1311. const title = await generateChatTitle(messages);
  1312. await setChatTitle(_chatId, title);
  1313. if ($settings?.autoTags ?? true) {
  1314. await setChatTags(messages);
  1315. }
  1316. }
  1317. return _response;
  1318. };
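// OpenAI-compatible pathway: same overall flow, but image attachments become
// `image_url` content parts, usage reporting is requested when the model supports it,
// and the response is consumed through createOpenAITextStream, handling citations,
// arena model selection, usage info, and stream errors along the way.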
  1319. const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
  1320. let _response = null;
  1321. const responseMessage = history.messages[responseMessageId];
  1322. const userMessage = history.messages[responseMessage.parentId];
  1323. let files = JSON.parse(JSON.stringify(chatFiles));
  1324. if (model?.info?.meta?.knowledge ?? false) {
  1325. // Only initialize and add status if knowledge exists
  1326. responseMessage.statusHistory = [
  1327. {
  1328. action: 'knowledge_search',
  1329. description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
  1330. searchQuery: userMessage.content
  1331. }),
  1332. done: false
  1333. }
  1334. ];
  1335. files.push(
  1336. ...model.info.meta.knowledge.map((item) => {
  1337. if (item?.collection_name) {
  1338. return {
  1339. id: item.collection_name,
  1340. name: item.name,
  1341. legacy: true
  1342. };
  1343. } else if (item?.collection_names) {
  1344. return {
  1345. name: item.name,
  1346. type: 'collection',
  1347. collection_names: item.collection_names,
  1348. legacy: true
  1349. };
  1350. } else {
  1351. return item;
  1352. }
  1353. })
  1354. );
  1355. history.messages[responseMessageId] = responseMessage;
  1356. }
		files.push(
			...(userMessage?.files ?? []).filter((item) =>
				['doc', 'file', 'collection'].includes(item.type)
			),
			...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
		);

		// Remove duplicates
		files = files.filter(
			(item, index, array) =>
				array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
		);

		scrollToBottom();

		eventTarget.dispatchEvent(
			new CustomEvent('chat:start', {
				detail: {
					id: responseMessageId
				}
			})
		);
		await tick();

		try {
			const stream =
				model?.info?.params?.stream_response ??
				$settings?.params?.stream_response ??
				params?.stream_response ??
				true;
			const [res, controller] = await generateOpenAIChatCompletion(
				localStorage.token,
				{
					stream: stream,
					model: model.id,
					...(stream && (model.info?.meta?.capabilities?.usage ?? false)
						? {
								stream_options: {
									include_usage: true
								}
							}
						: {}),
					messages: [
						params?.system || $settings.system || (responseMessage?.userContext ?? null)
							? {
									role: 'system',
									content: `${promptTemplate(
										params?.system ?? $settings?.system ?? '',
										$user.name,
										$settings?.userLocation
											? await getAndUpdateUserLocation(localStorage.token)
											: undefined
									)}${
										(responseMessage?.userContext ?? null)
											? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
											: ''
									}`
								}
							: undefined,
						...createMessagesList(responseMessageId)
					]
						.filter((message) => message?.content?.trim())
						.map((message, idx, arr) => ({
							role: message.role,
							...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
							message.role === 'user'
								? {
										content: [
											{
												type: 'text',
												text: message?.merged?.content ?? message.content
											},
											...message.files
												.filter((file) => file.type === 'image')
												.map((file) => ({
													type: 'image_url',
													image_url: {
														url: file.url
													}
												}))
										]
									}
								: {
										content: message?.merged?.content ?? message.content
									})
						})),
					seed: params?.seed ?? $settings?.params?.seed ?? undefined,
					stop:
						(params?.stop ?? $settings?.params?.stop ?? undefined)
							? (params?.stop.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
									(str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
								)
							: undefined,
					temperature: params?.temperature ?? $settings?.params?.temperature ?? undefined,
					top_p: params?.top_p ?? $settings?.params?.top_p ?? undefined,
					frequency_penalty:
						params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined,
					max_tokens: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
					tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
					files: files.length > 0 ? files : undefined,
					session_id: $socket?.id,
					chat_id: $chatId,
					id: responseMessageId
				},
				`${WEBUI_BASE_URL}/api`
			);

			// Wait until history/message have been updated
			await tick();

			scrollToBottom();

			if (res && res.ok && res.body) {
				if (!stream) {
					const response = await res.json();
					console.log(response);

					responseMessage.content = response.choices[0].message.content;
					responseMessage.info = { ...response.usage, openai: true };
					responseMessage.done = true;
				} else {
					const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);

					for await (const update of textStream) {
						const { value, done, citations, selectedModelId, error, usage } = update;
						if (error) {
							await handleOpenAIError(error, null, model, responseMessage);
							break;
						}
						if (done || stopResponseFlag || _chatId !== $chatId) {
							responseMessage.done = true;
							history.messages[responseMessageId] = responseMessage;

							if (stopResponseFlag) {
								controller.abort('User: Stop Response');
							}

							_response = responseMessage.content;
							break;
						}

						if (usage) {
							responseMessage.info = { ...usage, openai: true, usage };
						}

						if (selectedModelId) {
							responseMessage.selectedModelId = selectedModelId;
							responseMessage.arena = true;
							continue;
						}

						if (citations) {
							responseMessage.citations = citations;
							// Only remove status if it was initially set
							if (model?.info?.meta?.knowledge ?? false) {
								responseMessage.statusHistory = responseMessage.statusHistory.filter(
									(status) => status.action !== 'knowledge_search'
								);
							}
							continue;
						}

						if (responseMessage.content == '' && value == '\n') {
							continue;
						} else {
							responseMessage.content += value;

							if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
								navigator.vibrate(5);
							}

							const messageContentParts = getMessageContentParts(
								responseMessage.content,
								$config?.audio?.tts?.split_on ?? 'punctuation'
							);
							messageContentParts.pop();

							// dispatch only last sentence and make sure it hasn't been dispatched before
							if (
								messageContentParts.length > 0 &&
								messageContentParts[messageContentParts.length - 1] !== responseMessage.lastSentence
							) {
								responseMessage.lastSentence = messageContentParts[messageContentParts.length - 1];
								eventTarget.dispatchEvent(
									new CustomEvent('chat', {
										detail: {
											id: responseMessageId,
											content: messageContentParts[messageContentParts.length - 1]
										}
									})
								);
							}

							history.messages[responseMessageId] = responseMessage;
						}

						if (autoScroll) {
							scrollToBottom();
						}
					}
				}

				if ($settings.notificationEnabled && !document.hasFocus()) {
					const notification = new Notification(`${model.id}`, {
						body: responseMessage.content,
						icon: `${WEBUI_BASE_URL}/static/favicon.png`
					});
				}

				if ($settings.responseAutoCopy) {
					copyToClipboard(responseMessage.content);
				}

				if ($settings.responseAutoPlayback && !$showCallOverlay) {
					await tick();
					document.getElementById(`speak-button-${responseMessage.id}`)?.click();
				}
			} else {
				await handleOpenAIError(null, res, model, responseMessage);
			}
		} catch (error) {
			await handleOpenAIError(error, null, model, responseMessage);
		}

		await saveChatHandler(_chatId);

		history.messages[responseMessageId] = responseMessage;

		await chatCompletedHandler(
			_chatId,
			model.id,
			responseMessageId,
			createMessagesList(responseMessageId)
		);

		stopResponseFlag = false;
		await tick();

		let lastMessageContentPart =
			getMessageContentParts(
				responseMessage.content,
				$config?.audio?.tts?.split_on ?? 'punctuation'
			)?.at(-1) ?? '';
		if (lastMessageContentPart) {
			eventTarget.dispatchEvent(
				new CustomEvent('chat', {
					detail: { id: responseMessageId, content: lastMessageContentPart }
				})
			);
		}

		eventTarget.dispatchEvent(
			new CustomEvent('chat:finish', {
				detail: {
					id: responseMessageId,
					content: responseMessage.content
				}
			})
		);

		if (autoScroll) {
			scrollToBottom();
		}

		const messages = createMessagesList(responseMessageId);
		if (messages.length == 2 && selectedModels[0] === model.id) {
			window.history.replaceState(history.state, '', `/c/${_chatId}`);

			const title = await generateChatTitle(messages);
			await setChatTitle(_chatId, title);

			if ($settings?.autoTags ?? true) {
				await setChatTags(messages);
			}
		}

		return _response;
	};
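	// Normalise the different error shapes (thrown errors, JSON bodies with `detail`,
	// `error.message`, `error`, or `message`) into a toast plus an error block on the
	// response message, and clear any pending knowledge_search status entry.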
	const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
		let errorMessage = '';
		let innerError;

		if (error) {
			innerError = error;
		} else if (res !== null) {
			innerError = await res.json();
		}
		console.error(innerError);

		if ('detail' in innerError) {
			toast.error(innerError.detail);
			errorMessage = innerError.detail;
		} else if ('error' in innerError) {
			if ('message' in innerError.error) {
				toast.error(innerError.error.message);
				errorMessage = innerError.error.message;
			} else {
				toast.error(innerError.error);
				errorMessage = innerError.error;
			}
		} else if ('message' in innerError) {
			toast.error(innerError.message);
			errorMessage = innerError.message;
		}

		responseMessage.error = {
			content:
				$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
					provider: model.name ?? model.id
				}) +
				'\n' +
				errorMessage
		};
		responseMessage.done = true;

		if (responseMessage.statusHistory) {
			responseMessage.statusHistory = responseMessage.statusHistory.filter(
				(status) => status.action !== 'knowledge_search'
			);
		}

		history.messages[responseMessage.id] = responseMessage;
	};
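	// Flag checked inside the streaming loops; setting it aborts the in-flight
	// completion on the next streamed chunk.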
	const stopResponse = () => {
		stopResponseFlag = true;
		console.log('stopResponse');
	};
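	// Re-send the parent user prompt. With a single selected model, sendPrompt picks
	// the model itself; in a multi-model chat the original response's model/modelIdx
	// is reused so only that answer is regenerated.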
	const regenerateResponse = async (message) => {
		console.log('regenerateResponse');

		if (history.currentId) {
			let userMessage = history.messages[message.parentId];
			let userPrompt = userMessage.content;

			if ((userMessage?.models ?? [...selectedModels]).length == 1) {
				// If user message has only one model selected, sendPrompt automatically selects it for regeneration
				await sendPrompt(userPrompt, userMessage.id);
			} else {
				// If there are multiple models selected, use the model of the response message for regeneration
				// e.g. many model chat
				await sendPrompt(userPrompt, userMessage.id, {
					modelId: message.model,
					modelIdx: message.modelIdx
				});
			}
		}
	};
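	// Re-open the latest completed response (done = false) and hand it back to the
	// matching provider-specific prompt handler so the model keeps generating.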
	const continueResponse = async () => {
		console.log('continueResponse');
		const _chatId = JSON.parse(JSON.stringify($chatId));

		if (history.currentId && history.messages[history.currentId].done == true) {
			const responseMessage = history.messages[history.currentId];
			responseMessage.done = false;
			await tick();

			const model = $models.filter((m) => m.id === responseMessage.model).at(0);

			if (model) {
				if (model?.owned_by === 'openai') {
					await sendPromptOpenAI(
						model,
						history.messages[responseMessage.parentId].content,
						responseMessage.id,
						_chatId
					);
				} else {
					await sendPromptOllama(
						model,
						history.messages[responseMessage.parentId].content,
						responseMessage.id,
						_chatId
					);
				}
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId: responseMessage.model }));
			}
		}
	};
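	// Stream a merged ("MoA"-style) completion over the selected sibling responses
	// into `message.merged`, saving the chat once the stream finishes.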
	const mergeResponses = async (messageId, responses, _chatId) => {
		console.log('mergeResponses', messageId, responses);
		const message = history.messages[messageId];
		const mergedResponse = {
			status: true,
			content: ''
		};
		message.merged = mergedResponse;
		history.messages[messageId] = message;

		try {
			const [res, controller] = await generateMoACompletion(
				localStorage.token,
				message.model,
				history.messages[message.parentId].content,
				responses
			);

			if (res && res.ok && res.body) {
				const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);

				for await (const update of textStream) {
					const { value, done, citations, error, usage } = update;
					if (error || done) {
						break;
					}

					if (mergedResponse.content == '' && value == '\n') {
						continue;
					} else {
						mergedResponse.content += value;
						history.messages[messageId] = message;
					}

					if (autoScroll) {
						scrollToBottom();
					}
				}

				await saveChatHandler(_chatId);
			} else {
				console.error(res);
			}
		} catch (e) {
			console.error(e);
		}
	};
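	// Ask the first selected model for a chat title when auto-titling is enabled,
	// falling back to 'New Chat' on failure or when the feature is disabled.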
	const generateChatTitle = async (messages) => {
		if ($settings?.title?.auto ?? true) {
			const lastMessage = messages.at(-1);
			const modelId = selectedModels[0];

			const title = await generateTitle(localStorage.token, modelId, messages, $chatId).catch(
				(error) => {
					console.error(error);
					return 'New Chat';
				}
			);

			return title;
		} else {
			return 'New Chat';
		}
	};
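	// Update the title store for the active chat and, outside temporary-chat mode,
	// persist it and refresh the sidebar chat list.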
	const setChatTitle = async (_chatId, title) => {
		if (_chatId === $chatId) {
			chatTitle.set(title);
		}

		if (!$temporaryChatEnabled) {
			chat = await updateChatById(localStorage.token, _chatId, { title: title });

			currentChatPage.set(1);
			await chats.set(await getChatList(localStorage.token, $currentChatPage));
		}
	};
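	// Regenerate tags for the chat: clear the existing ones, ask the first selected
	// model for new tags, add those that were not among the previous tags, then
	// refresh the chat and tag stores.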
	const setChatTags = async (messages) => {
		if (!$temporaryChatEnabled) {
			const currentTags = await getTagsById(localStorage.token, $chatId);
			if (currentTags.length > 0) {
				const res = await deleteTagsById(localStorage.token, $chatId);
				if (res) {
					allTags.set(await getAllTags(localStorage.token));
				}
			}

			const lastMessage = messages.at(-1);
			const modelId = selectedModels[0];

			let generatedTags = await generateTags(localStorage.token, modelId, messages, $chatId).catch(
				(error) => {
					console.error(error);
					return [];
				}
			);
			generatedTags = generatedTags.filter(
				(tag) => !currentTags.find((t) => t.id === tag.replaceAll(' ', '_').toLowerCase())
			);
			console.log(generatedTags);

			for (const tag of generatedTags) {
				await addTagById(localStorage.token, $chatId, tag);
			}

			chat = await getChatById(localStorage.token, $chatId);
			allTags.set(await getAllTags(localStorage.token));
		}
	};
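	// Web search pipeline for a pending response: generate a search query from the
	// conversation, run the search, and attach the resulting collection to the
	// response message as a 'web_search_results' file, tracking progress in
	// statusHistory along the way.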
	const getWebSearchResults = async (
		model: string,
		parentId: string,
		responseMessageId: string
	) => {
		const responseMessage = history.messages[responseMessageId];
		const userMessage = history.messages[parentId];
		const messages = createMessagesList(history.currentId);

		responseMessage.statusHistory = [
			{
				done: false,
				action: 'web_search',
				description: $i18n.t('Generating search query')
			}
		];
		history.messages[responseMessageId] = responseMessage;

		const prompt = userMessage.content;
		let searchQuery = await generateSearchQuery(
			localStorage.token,
			model,
			messages.filter((message) => message?.content?.trim()),
			prompt
		).catch((error) => {
			console.log(error);
			return prompt;
		});

		if (!searchQuery || searchQuery == '') {
			responseMessage.statusHistory.push({
				done: true,
				error: true,
				action: 'web_search',
				description: $i18n.t('No search query generated')
			});
			history.messages[responseMessageId] = responseMessage;
			return;
		}

		responseMessage.statusHistory.push({
			done: false,
			action: 'web_search',
			description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
		});
		history.messages[responseMessageId] = responseMessage;

		const results = await processWebSearch(localStorage.token, searchQuery).catch((error) => {
			console.log(error);
			toast.error(error);

			return null;
		});

		if (results) {
			responseMessage.statusHistory.push({
				done: true,
				action: 'web_search',
				description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
				query: searchQuery,
				urls: results.filenames
			});
			if ((responseMessage?.files ?? undefined) === undefined) {
				responseMessage.files = [];
			}
			responseMessage.files.push({
				collection_name: results.collection_name,
				name: searchQuery,
				type: 'web_search_results',
				urls: results.filenames
			});
			history.messages[responseMessageId] = responseMessage;
		} else {
			responseMessage.statusHistory.push({
				done: true,
				error: true,
				action: 'web_search',
				description: 'No search results found'
			});
			history.messages[responseMessageId] = responseMessage;
		}
	};
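	// Create the backing chat record for a brand-new conversation (or use the
	// 'local' id in temporary-chat mode) and point the chatId store at it.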
	const initChatHandler = async () => {
		if (!$temporaryChatEnabled) {
			chat = await createNewChat(localStorage.token, {
				id: $chatId,
				title: $i18n.t('New Chat'),
				models: selectedModels,
				system: $settings.system ?? undefined,
				params: params,
				history: history,
				messages: createMessagesList(history.currentId),
				tags: [],
				timestamp: Date.now()
			});

			currentChatPage.set(1);
			await chats.set(await getChatList(localStorage.token, $currentChatPage));
			await chatId.set(chat.id);
		} else {
			await chatId.set('local');
		}
		await tick();
	};
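	// Persist the current history, params and files to the chat record, but only if
	// the chat being saved is still the active one and temporary-chat mode is off.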
	const saveChatHandler = async (_chatId) => {
		if ($chatId == _chatId) {
			if (!$temporaryChatEnabled) {
				chat = await updateChatById(localStorage.token, _chatId, {
					models: selectedModels,
					history: history,
					messages: createMessagesList(history.currentId),
					params: params,
					files: chatFiles
				});

				currentChatPage.set(1);
				await chats.set(await getChatList(localStorage.token, $currentChatPage));
			}
		}
	};
</script>
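<!--
	Markup: document title, hidden TTS audio element, the event confirmation dialog,
	and the main chat layout (navbar, banners, messages pane, input, and chat controls).
-->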
<svelte:head>
	<title>
		{$chatTitle
			? `${$chatTitle.length > 30 ? `${$chatTitle.slice(0, 30)}...` : $chatTitle} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

<audio id="audioElement" src="" style="display: none;" />

<EventConfirmDialog
	bind:show={showEventConfirmation}
	title={eventConfirmationTitle}
	message={eventConfirmationMessage}
	input={eventConfirmationInput}
	inputPlaceholder={eventConfirmationInputPlaceholder}
	inputValue={eventConfirmationInputValue}
	on:confirm={(e) => {
		if (e.detail) {
			eventCallback(e.detail);
		} else {
			eventCallback(true);
		}
	}}
	on:cancel={() => {
		eventCallback(false);
	}}
/>

{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="h-screen max-h-[100dvh] {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
		id="chat-container"
	>
		{#if $settings?.backgroundImageUrl ?? null}
			<div
				class="absolute {$showSidebar
					? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
					: ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
				style="background-image: url({$settings.backgroundImageUrl}) "
			/>

			<div
				class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
			/>
		{/if}

		<Navbar
			bind:this={navbarElement}
			chat={{
				id: $chatId,
				chat: {
					title: $chatTitle,
					models: selectedModels,
					system: $settings.system ?? undefined,
					params: params,
					history: history,
					timestamp: Date.now()
				}
			}}
			title={$chatTitle}
			bind:selectedModels
			shareEnabled={!!history.currentId}
			{initNewChat}
		/>

		<PaneGroup direction="horizontal" class="w-full h-full">
			<Pane defaultSize={50} class="h-full flex w-full relative">
				{#if $banners.length > 0 && !history.currentId && !$chatId && selectedModels.length <= 1}
					<div class="absolute top-12 left-0 right-0 w-full z-30">
						<div class=" flex flex-col gap-1 w-full">
							{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
								<Banner
									{banner}
									on:dismiss={(e) => {
										const bannerId = e.detail;

										localStorage.setItem(
											'dismissedBannerIds',
											JSON.stringify(
												[
													bannerId,
													...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
												].filter((id) => $banners.find((b) => b.id === id))
											)
										);
									}}
								/>
							{/each}
						</div>
					</div>
				{/if}

				<div class="flex flex-col flex-auto z-10 w-full">
					{#if $settings?.landingPageMode === 'chat' || createMessagesList(history.currentId).length > 0}
						<div
							class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10 scrollbar-hidden"
							id="messages-container"
							bind:this={messagesContainerElement}
							on:scroll={(e) => {
								autoScroll =
									messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
									messagesContainerElement.clientHeight + 5;
							}}
						>
							<div class=" h-full w-full flex flex-col">
								<Messages
									chatId={$chatId}
									bind:history
									bind:autoScroll
									bind:prompt
									{selectedModels}
									{sendPrompt}
									{showMessage}
									{continueResponse}
									{regenerateResponse}
									{mergeResponses}
									{chatActionHandler}
									bottomPadding={files.length > 0}
									on:submit={async (e) => {
										if (e.detail) {
											// New user message
											let userPrompt = e.detail.prompt;
											let userMessageId = uuidv4();

											let userMessage = {
												id: userMessageId,
												parentId: e.detail.parentId,
												childrenIds: [],
												role: 'user',
												content: userPrompt,
												models: selectedModels
											};

											let messageParentId = e.detail.parentId;

											if (messageParentId !== null) {
												history.messages[messageParentId].childrenIds = [
													...history.messages[messageParentId].childrenIds,
													userMessageId
												];
											}

											history.messages[userMessageId] = userMessage;
											history.currentId = userMessageId;

											await tick();
											await sendPrompt(userPrompt, userMessageId);
										}
									}}
								/>
							</div>
						</div>

						<div class=" pb-[1.6rem]">
							<MessageInput
								{history}
								{selectedModels}
								bind:files
								bind:prompt
								bind:autoScroll
								bind:selectedToolIds
								bind:webSearchEnabled
								bind:atSelectedModel
								availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
									const model = $models.find((m) => m.id === e);
									if (model?.info?.meta?.toolIds ?? false) {
										return [...new Set([...a, ...model.info.meta.toolIds])];
									}
									return a;
								}, [])}
								transparentBackground={$settings?.backgroundImageUrl ?? false}
								{stopResponse}
								{createMessagePair}
								on:upload={async (e) => {
									const { type, data } = e.detail;

									if (type === 'web') {
										await uploadWeb(data);
									} else if (type === 'youtube') {
										await uploadYoutubeTranscription(data);
									} else if (type === 'google-drive') {
										await uploadGoogleDriveFile(data);
									}
								}}
								on:submit={async (e) => {
									if (e.detail) {
										await tick();
										submitPrompt(e.detail.replaceAll('\n\n', '\n'));
									}
								}}
							/>

							<div
								class="absolute bottom-1.5 text-xs text-gray-500 text-center line-clamp-1 right-0 left-0"
							>
								{$i18n.t('LLMs can make mistakes. Verify important information.')}
							</div>
						</div>
					{:else}
						<div class="overflow-auto w-full h-full flex items-center">
							<Placeholder
								{history}
								{selectedModels}
								bind:files
								bind:prompt
								bind:autoScroll
								bind:selectedToolIds
								bind:webSearchEnabled
								bind:atSelectedModel
								availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
									const model = $models.find((m) => m.id === e);
									if (model?.info?.meta?.toolIds ?? false) {
										return [...new Set([...a, ...model.info.meta.toolIds])];
									}
									return a;
								}, [])}
								transparentBackground={$settings?.backgroundImageUrl ?? false}
								{stopResponse}
								{createMessagePair}
								on:upload={async (e) => {
									const { type, data } = e.detail;

									if (type === 'web') {
										await uploadWeb(data);
									} else if (type === 'youtube') {
										await uploadYoutubeTranscription(data);
									}
								}}
								on:submit={async (e) => {
									if (e.detail) {
										await tick();
										submitPrompt(e.detail.replaceAll('\n\n', '\n'));
									}
								}}
							/>
						</div>
					{/if}
				</div>
			</Pane>

			<ChatControls
				bind:this={controlPaneComponent}
				bind:history
				bind:chatFiles
				bind:params
				bind:files
				bind:pane={controlPane}
				chatId={$chatId}
				modelId={selectedModelIds?.at(0) ?? null}
				models={selectedModelIds.reduce((a, e, i, arr) => {
					const model = $models.find((m) => m.id === e);
					if (model) {
						return [...a, model];
					}
					return a;
				}, [])}
				{submitPrompt}
				{stopResponse}
				{showMessage}
				{eventTarget}
			/>
		</PaneGroup>
	</div>
{/if}