Chat.svelte 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { getContext, onMount, tick } from 'svelte';
  6. import { goto } from '$app/navigation';
  7. import { page } from '$app/stores';
  8. import type { Writable } from 'svelte/store';
  9. import type { i18n as i18nType } from 'i18next';
  10. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  11. import {
  12. chatId,
  13. chats,
  14. config,
  15. type Model,
  16. models,
  17. settings,
  18. showSidebar,
  19. tags as _tags,
  20. WEBUI_NAME,
  21. banners,
  22. user,
  23. socket,
  24. showCallOverlay
  25. } from '$lib/stores';
  26. import {
  27. convertMessagesToHistory,
  28. copyToClipboard,
  29. promptTemplate,
  30. splitStream
  31. } from '$lib/utils';
  32. import { generateChatCompletion } from '$lib/apis/ollama';
  33. import {
  34. addTagById,
  35. createNewChat,
  36. deleteTagById,
  37. getAllChatTags,
  38. getChatById,
  39. getChatList,
  40. getTagsById,
  41. updateChatById
  42. } from '$lib/apis/chats';
  43. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  44. import { runWebSearch } from '$lib/apis/rag';
  45. import { createOpenAITextStream } from '$lib/apis/streaming';
  46. import { queryMemory } from '$lib/apis/memories';
  47. import { getUserSettings } from '$lib/apis/users';
  48. import { chatCompleted, generateTitle, generateSearchQuery } from '$lib/apis';
  49. import Banner from '../common/Banner.svelte';
  50. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  51. import Messages from '$lib/components/chat/Messages.svelte';
  52. import Navbar from '$lib/components/layout/Navbar.svelte';
  53. import CallOverlay from './MessageInput/CallOverlay.svelte';
  54. import { error } from '@sveltejs/kit';
// i18n instance supplied by the root layout through Svelte context.
const i18n: Writable<i18nType> = getContext('i18n');

// Chat id passed in from the route; empty string means a brand-new chat.
export let chatIdProp = '';

let loaded = false; // set true once loadChat() has finished for chatIdProp
let stopResponseFlag = false; // set by the UI to abort an in-flight generation
let autoScroll = true; // keep the message list pinned to the bottom while streaming
let processing = '';
let messagesContainerElement: HTMLDivElement;
let showModelSelector = true;

// Models chosen in the picker; [''] is the "nothing selected" sentinel.
let selectedModels = [''];
// Model targeted via "@model" in the prompt, if any.
let atSelectedModel: Model | undefined;

let selectedToolIds = [];
let webSearchEnabled = false;

let chat = null; // chat record returned by the chats API, once created/loaded
let tags = [];
let title = '';

let prompt = '';
let files = [];
// Linear list of messages along the currently selected branch (derived reactively).
let messages = [];
// Message tree: id -> message (linked by parentId/childrenIds), plus the
// id of the leaf message currently being displayed.
let history = {
	messages: {},
	currentId: null
};
// Derive the flat `messages` list by walking parent links from the current
// leaf up to the root, unshifting shallow copies so the result is root-first.
// NOTE(review): assumes every non-null parentId exists in history.messages;
// a dangling parentId would throw here — confirm upstream guarantees.
$: if (history.currentId !== null) {
	let _messages = [];
	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}
// When routed to an existing chat (/c/<id>), load it; on failure go home.
$: if (chatIdProp) {
	(async () => {
		if (await loadChat()) {
			await tick();
			loaded = true;
			// Defer one macrotask so the rendered DOM exists before scrolling/focusing.
			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}
  102. onMount(async () => {
  103. if (!$chatId) {
  104. await initNewChat();
  105. } else {
  106. if (!($settings.saveChatHistory ?? true)) {
  107. await goto('/');
  108. }
  109. }
  110. });
  111. //////////////////////////
  112. // Web functions
  113. //////////////////////////
// Reset all local state for a brand-new chat, seed the model selection from
// (in priority order) the `models` query param, saved user settings, or the
// server-configured defaults, and auto-submit a prompt passed via `?q=`.
const initNewChat = async () => {
	// NOTE(review): `history` here is the component's message tree (it shadows
	// window.history), so `history.state` is undefined — replaceState still
	// accepts that; confirm this is intentional.
	window.history.replaceState(history.state, '', `/`);
	await chatId.set('');
	autoScroll = true;
	title = '';
	messages = [];
	history = {
		messages: {},
		currentId: null
	};
	if ($page.url.searchParams.get('models')) {
		selectedModels = $page.url.searchParams.get('models')?.split(',');
	} else if ($settings?.models) {
		selectedModels = $settings?.models;
	} else if ($config?.default_models) {
		console.log($config?.default_models.split(',') ?? '');
		selectedModels = $config?.default_models.split(',');
	} else {
		selectedModels = [''];
	}
	if ($page.url.searchParams.get('q')) {
		prompt = $page.url.searchParams.get('q') ?? '';
		if (prompt) {
			await tick();
			// Fire-and-forget: the ?q= prompt is submitted without awaiting.
			submitPrompt(prompt);
		}
	}
	// Drop any selected model ids that no longer exist on the server.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);
	// Prefer server-side user settings; fall back to the localStorage copy.
	const userSettings = await getUserSettings(localStorage.token);
	if (userSettings) {
		settings.set(userSettings.ui);
	} else {
		settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
	}
	const chatInput = document.getElementById('chat-textarea');
	setTimeout(() => chatInput?.focus(), 0);
};
// Load the chat identified by `chatIdProp` into local state (models, history,
// title, per-chat system/params overrides). Returns true on success; returns
// null — and implicitly undefined when the fetch failed — otherwise, which the
// reactive caller treats as falsy and redirects home.
const loadChat = async () => {
	chatId.set(chatIdProp);
	chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
		await goto('/');
		return null;
	});
	if (chat) {
		tags = await getTags();
		const chatContent = chat.chat;
		if (chatContent) {
			console.log(chatContent);
			// Older chats stored a single model; normalize to an array.
			selectedModels =
				(chatContent?.models ?? undefined) !== undefined
					? chatContent.models
					: [chatContent.models ?? ''];
			// Older chats stored a flat message list; convert it to the tree form.
			history =
				(chatContent?.history ?? undefined) !== undefined
					? chatContent.history
					: convertMessagesToHistory(chatContent.messages);
			title = chatContent.title;
			const userSettings = await getUserSettings(localStorage.token);
			if (userSettings) {
				await settings.set(userSettings.ui);
			} else {
				await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
			}
			// Layer this chat's saved system prompt / params over the user settings.
			await settings.set({
				...$settings,
				system: chatContent.system ?? $settings.system,
				params: chatContent.options ?? $settings.params
			});
			autoScroll = true;
			await tick();
			// Mark the displayed leaf as done so the input is not blocked by a
			// response that was still streaming when the chat was last saved.
			if (messages.length > 0) {
				history.messages[messages.at(-1).id].done = true;
			}
			await tick();
			return true;
		} else {
			return null;
		}
	}
};
  196. const scrollToBottom = async () => {
  197. await tick();
  198. if (messagesContainerElement) {
  199. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  200. }
  201. };
  202. const createMessagesList = (responseMessageId) => {
  203. const message = history.messages[responseMessageId];
  204. if (message.parentId) {
  205. return [...createMessagesList(message.parentId), message];
  206. } else {
  207. return [message];
  208. }
  209. };
  210. const chatCompletedHandler = async (modelId, messages) => {
  211. await mermaid.run({
  212. querySelector: '.mermaid'
  213. });
  214. const res = await chatCompleted(localStorage.token, {
  215. model: modelId,
  216. messages: messages.map((m) => ({
  217. id: m.id,
  218. role: m.role,
  219. content: m.content,
  220. timestamp: m.timestamp
  221. })),
  222. chat_id: $chatId
  223. }).catch((error) => {
  224. console.error(error);
  225. return null;
  226. });
  227. if (res !== null) {
  228. // Update chat history with the new messages
  229. for (const message of res.messages) {
  230. history.messages[message.id] = {
  231. ...history.messages[message.id],
  232. ...(history.messages[message.id].content !== message.content
  233. ? { originalContent: history.messages[message.id].content }
  234. : {}),
  235. ...message
  236. };
  237. }
  238. }
  239. };
  240. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  241. return setInterval(() => {
  242. $socket?.emit('usage', {
  243. action: 'chat',
  244. model: modelId,
  245. chat_id: chatId
  246. });
  247. }, 1000);
  248. };
  249. //////////////////////////
  250. // Chat functions
  251. //////////////////////////
// Validate the current state and, if submission is allowed, append a user
// message to the history tree and hand off to sendPrompt(). Returns the
// per-model responses (empty array when validation blocked the submit).
const submitPrompt = async (userPrompt, _user = null) => {
	let _responses = [];
	console.log('submitPrompt', $chatId);

	// Drop any selected model ids that no longer exist on the server.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	if (selectedModels.includes('')) {
		toast.error($i18n.t('Model not selected'));
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done
		console.log('wait');
	} else if (
		files.length > 0 &&
		files.filter((file) => file.upload_status === false).length > 0
	) {
		// Upload not done
		toast.error(
			$i18n.t(
				`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
			)
		);
	} else {
		// Reset chat input textarea
		const chatTextAreaElement = document.getElementById('chat-textarea');

		if (chatTextAreaElement) {
			chatTextAreaElement.value = '';
			chatTextAreaElement.style.height = '';
		}

		// Snapshot the attachments (deep copy) before clearing the input state.
		const _files = JSON.parse(JSON.stringify(files));
		files = [];
		prompt = '';

		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			user: _user ?? undefined,
			content: userPrompt,
			files: _files.length > 0 ? _files : undefined,
			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
			// De-duplicated list of the models this message was sent to.
			models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
		};

		// Add message to history and Set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;

		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}

		// Wait until history/message have been updated
		await tick();

		// Send prompt
		_responses = await sendPrompt(userPrompt, userMessageId);
	}

	return _responses;
};
// Fan a prompt out to every selected model (or to `modelId` when given):
// create a placeholder assistant message per model, persist a new chat on the
// first exchange, then run the per-model completions in parallel. Returns the
// collected response texts.
const sendPrompt = async (prompt, parentId, modelId = null, newChat = true) => {
	let _responses = [];

	// If modelId is provided, use it, else use selected model
	let selectedModelIds = modelId
		? [modelId]
		: atSelectedModel !== undefined
			? [atSelectedModel.id]
			: selectedModels;

	// Create response messages for each selected model
	const responseMessageIds = {};
	for (const modelId of selectedModelIds) {
		const model = $models.filter((m) => m.id === modelId).at(0);

		if (model) {
			let responseMessageId = uuidv4();
			let responseMessage = {
				parentId: parentId,
				id: responseMessageId,
				childrenIds: [],
				role: 'assistant',
				content: '',
				model: model.id,
				modelName: model.name ?? model.id,
				userContext: null,
				timestamp: Math.floor(Date.now() / 1000) // Unix epoch
			};

			// Add message to history and Set currentId to messageId
			history.messages[responseMessageId] = responseMessage;
			history.currentId = responseMessageId;

			// Append messageId to childrenIds of parent message
			if (parentId !== null) {
				history.messages[parentId].childrenIds = [
					...history.messages[parentId].childrenIds,
					responseMessageId
				];
			}

			responseMessageIds[modelId] = responseMessageId;
		}
	}
	await tick();

	// Create new chat if only one message in messages
	if (newChat && messages.length == 2) {
		if ($settings.saveChatHistory ?? true) {
			chat = await createNewChat(localStorage.token, {
				id: $chatId,
				title: $i18n.t('New Chat'),
				models: selectedModels,
				system: $settings.system ?? undefined,
				options: {
					...($settings.params ?? {})
				},
				messages: messages,
				history: history,
				tags: [],
				timestamp: Date.now()
			});
			await chats.set(await getChatList(localStorage.token));
			await chatId.set(chat.id);
		} else {
			// History saving disabled: use a sentinel id, nothing is persisted.
			await chatId.set('local');
		}
		await tick();
	}

	// Snapshot the chat id so a mid-stream navigation can be detected.
	const _chatId = JSON.parse(JSON.stringify($chatId));

	await Promise.all(
		selectedModelIds.map(async (modelId) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);

			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);

				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}

				let responseMessageId = responseMessageIds[modelId];
				let responseMessage = history.messages[responseMessageId];

				let userContext = null;
				// When the memory feature is on, fetch relevant memories and format
				// them as a numbered, dated list for the system prompt.
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});
						if (res) {
							if (res.documents[0].length > 0) {
								userContext = res.documents.reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[index][0].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];
									acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
									return acc;
								}, []);
							}

							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;

				// Heartbeat so the server can track active usage for this model.
				const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);

				if (webSearchEnabled) {
					await getWebSearchResults(model.id, parentId, responseMessageId);
				}

				let _response = null;
				if (model?.owned_by === 'openai') {
					_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					_response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}
				_responses.push(_response);

				console.log('chatEventEmitter', chatEventEmitter);
				if (chatEventEmitter) clearInterval(chatEventEmitter);
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);

	await chats.set(await getChatList(localStorage.token));
	return _responses;
};
  435. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  436. const responseMessage = history.messages[responseId];
  437. responseMessage.statusHistory = [
  438. {
  439. done: false,
  440. action: 'web_search',
  441. description: $i18n.t('Generating search query')
  442. }
  443. ];
  444. messages = messages;
  445. const prompt = history.messages[parentId].content;
  446. let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
  447. (error) => {
  448. console.log(error);
  449. return prompt;
  450. }
  451. );
  452. if (!searchQuery) {
  453. toast.warning($i18n.t('No search query generated'));
  454. responseMessage.statusHistory.push({
  455. done: true,
  456. error: true,
  457. action: 'web_search',
  458. description: 'No search query generated'
  459. });
  460. messages = messages;
  461. }
  462. responseMessage.statusHistory.push({
  463. done: false,
  464. action: 'web_search',
  465. description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
  466. });
  467. messages = messages;
  468. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  469. console.log(error);
  470. toast.error(error);
  471. return null;
  472. });
  473. if (results) {
  474. responseMessage.statusHistory.push({
  475. done: true,
  476. action: 'web_search',
  477. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  478. query: searchQuery,
  479. urls: results.filenames
  480. });
  481. if (responseMessage?.files ?? undefined === undefined) {
  482. responseMessage.files = [];
  483. }
  484. responseMessage.files.push({
  485. collection_name: results.collection_name,
  486. name: searchQuery,
  487. type: 'web_search_results',
  488. urls: results.filenames
  489. });
  490. messages = messages;
  491. } else {
  492. responseMessage.statusHistory.push({
  493. done: true,
  494. error: true,
  495. action: 'web_search',
  496. description: 'No search results found'
  497. });
  498. messages = messages;
  499. }
  500. };
  501. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  502. let _response = null;
  503. const responseMessage = history.messages[responseMessageId];
  504. // Wait until history/message have been updated
  505. await tick();
  506. // Scroll down
  507. scrollToBottom();
  508. const messagesBody = [
  509. $settings.system || (responseMessage?.userContext ?? null)
  510. ? {
  511. role: 'system',
  512. content: `${promptTemplate($settings?.system ?? '', $user.name)}${
  513. responseMessage?.userContext ?? null
  514. ? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
  515. : ''
  516. }`
  517. }
  518. : undefined,
  519. ...messages
  520. ]
  521. .filter((message) => message?.content?.trim())
  522. .map((message, idx, arr) => {
  523. // Prepare the base message object
  524. const baseMessage = {
  525. role: message.role,
  526. content: message.content
  527. };
  528. // Extract and format image URLs if any exist
  529. const imageUrls = message.files
  530. ?.filter((file) => file.type === 'image')
  531. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  532. // Add images array only if it contains elements
  533. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  534. baseMessage.images = imageUrls;
  535. }
  536. return baseMessage;
  537. });
  538. let lastImageIndex = -1;
  539. // Find the index of the last object with images
  540. messagesBody.forEach((item, index) => {
  541. if (item.images) {
  542. lastImageIndex = index;
  543. }
  544. });
  545. // Remove images from all but the last one
  546. messagesBody.forEach((item, index) => {
  547. if (index !== lastImageIndex) {
  548. delete item.images;
  549. }
  550. });
  551. let docs = [];
  552. if (model?.info?.meta?.knowledge ?? false) {
  553. docs = model.info.meta.knowledge;
  554. }
  555. docs = [
  556. ...docs,
  557. ...messages
  558. .filter((message) => message?.files ?? null)
  559. .map((message) =>
  560. message.files.filter((item) =>
  561. ['doc', 'collection', 'web_search_results'].includes(item.type)
  562. )
  563. )
  564. .flat(1)
  565. ].filter(
  566. (item, index, array) =>
  567. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  568. );
  569. const [res, controller] = await generateChatCompletion(localStorage.token, {
  570. model: model.id,
  571. messages: messagesBody,
  572. options: {
  573. ...($settings.params ?? {}),
  574. stop:
  575. $settings?.params?.stop ?? undefined
  576. ? $settings.params.stop.map((str) =>
  577. decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  578. )
  579. : undefined,
  580. num_predict: $settings?.params?.max_tokens ?? undefined,
  581. repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
  582. },
  583. format: $settings.requestFormat ?? undefined,
  584. keep_alive: $settings.keepAlive ?? undefined,
  585. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  586. docs: docs.length > 0 ? docs : undefined,
  587. citations: docs.length > 0,
  588. chat_id: $chatId
  589. });
  590. if (res && res.ok) {
  591. console.log('controller', controller);
  592. const reader = res.body
  593. .pipeThrough(new TextDecoderStream())
  594. .pipeThrough(splitStream('\n'))
  595. .getReader();
  596. while (true) {
  597. const { value, done } = await reader.read();
  598. if (done || stopResponseFlag || _chatId !== $chatId) {
  599. responseMessage.done = true;
  600. messages = messages;
  601. if (stopResponseFlag) {
  602. controller.abort('User: Stop Response');
  603. } else {
  604. const messages = createMessagesList(responseMessageId);
  605. await chatCompletedHandler(model.id, messages);
  606. }
  607. _response = responseMessage.content;
  608. break;
  609. }
  610. try {
  611. let lines = value.split('\n');
  612. for (const line of lines) {
  613. if (line !== '') {
  614. console.log(line);
  615. let data = JSON.parse(line);
  616. if ('citations' in data) {
  617. responseMessage.citations = data.citations;
  618. continue;
  619. }
  620. if ('detail' in data) {
  621. throw data;
  622. }
  623. if (data.done == false) {
  624. if (responseMessage.content == '' && data.message.content == '\n') {
  625. continue;
  626. } else {
  627. responseMessage.content += data.message.content;
  628. messages = messages;
  629. }
  630. } else {
  631. responseMessage.done = true;
  632. if (responseMessage.content == '') {
  633. responseMessage.error = {
  634. code: 400,
  635. content: `Oops! No text generated from Ollama, Please try again.`
  636. };
  637. }
  638. responseMessage.context = data.context ?? null;
  639. responseMessage.info = {
  640. total_duration: data.total_duration,
  641. load_duration: data.load_duration,
  642. sample_count: data.sample_count,
  643. sample_duration: data.sample_duration,
  644. prompt_eval_count: data.prompt_eval_count,
  645. prompt_eval_duration: data.prompt_eval_duration,
  646. eval_count: data.eval_count,
  647. eval_duration: data.eval_duration
  648. };
  649. messages = messages;
  650. if ($settings.notificationEnabled && !document.hasFocus()) {
  651. const notification = new Notification(
  652. selectedModelfile
  653. ? `${
  654. selectedModelfile.title.charAt(0).toUpperCase() +
  655. selectedModelfile.title.slice(1)
  656. }`
  657. : `${model.id}`,
  658. {
  659. body: responseMessage.content,
  660. icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
  661. }
  662. );
  663. }
  664. if ($settings.responseAutoCopy) {
  665. copyToClipboard(responseMessage.content);
  666. }
  667. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  668. await tick();
  669. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  670. }
  671. }
  672. }
  673. }
  674. } catch (error) {
  675. console.log(error);
  676. if ('detail' in error) {
  677. toast.error(error.detail);
  678. }
  679. break;
  680. }
  681. if (autoScroll) {
  682. scrollToBottom();
  683. }
  684. }
  685. if ($chatId == _chatId) {
  686. if ($settings.saveChatHistory ?? true) {
  687. chat = await updateChatById(localStorage.token, _chatId, {
  688. messages: messages,
  689. history: history,
  690. models: selectedModels
  691. });
  692. await chats.set(await getChatList(localStorage.token));
  693. }
  694. }
  695. } else {
  696. if (res !== null) {
  697. const error = await res.json();
  698. console.log(error);
  699. if ('detail' in error) {
  700. toast.error(error.detail);
  701. responseMessage.error = { content: error.detail };
  702. } else {
  703. toast.error(error.error);
  704. responseMessage.error = { content: error.error };
  705. }
  706. } else {
  707. toast.error(
  708. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  709. );
  710. responseMessage.error = {
  711. content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  712. provider: 'Ollama'
  713. })
  714. };
  715. }
  716. responseMessage.done = true;
  717. messages = messages;
  718. }
  719. stopResponseFlag = false;
  720. await tick();
  721. if (autoScroll) {
  722. scrollToBottom();
  723. }
  724. if (messages.length == 2 && messages.at(1).content !== '') {
  725. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  726. const _title = await generateChatTitle(userPrompt);
  727. await setChatTitle(_chatId, _title);
  728. }
  729. return _response;
  730. };
  731. const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
  732. let _response = null;
  733. const responseMessage = history.messages[responseMessageId];
  734. let docs = [];
  735. if (model?.info?.meta?.knowledge ?? false) {
  736. docs = model.info.meta.knowledge;
  737. }
  738. docs = [
  739. ...docs,
  740. ...messages
  741. .filter((message) => message?.files ?? null)
  742. .map((message) =>
  743. message.files.filter((item) =>
  744. ['doc', 'collection', 'web_search_results'].includes(item.type)
  745. )
  746. )
  747. .flat(1)
  748. ].filter(
  749. (item, index, array) =>
  750. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  751. );
  752. scrollToBottom();
  753. try {
  754. const [res, controller] = await generateOpenAIChatCompletion(
  755. localStorage.token,
  756. {
  757. model: model.id,
  758. stream: true,
  759. stream_options:
  760. model.info?.meta?.capabilities?.usage ?? false
  761. ? {
  762. include_usage: true
  763. }
  764. : undefined,
  765. messages: [
  766. $settings.system || (responseMessage?.userContext ?? null)
  767. ? {
  768. role: 'system',
  769. content: `${promptTemplate($settings?.system ?? '', $user.name)}${
  770. responseMessage?.userContext ?? null
  771. ? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
  772. : ''
  773. }`
  774. }
  775. : undefined,
  776. ...messages
  777. ]
  778. .filter((message) => message?.content?.trim())
  779. .map((message, idx, arr) => ({
  780. role: message.role,
  781. ...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
  782. message.role === 'user'
  783. ? {
  784. content: [
  785. {
  786. type: 'text',
  787. text:
  788. arr.length - 1 !== idx
  789. ? message.content
  790. : message?.raContent ?? message.content
  791. },
  792. ...message.files
  793. .filter((file) => file.type === 'image')
  794. .map((file) => ({
  795. type: 'image_url',
  796. image_url: {
  797. url: file.url
  798. }
  799. }))
  800. ]
  801. }
  802. : {
  803. content:
  804. arr.length - 1 !== idx
  805. ? message.content
  806. : message?.raContent ?? message.content
  807. })
  808. })),
  809. seed: $settings?.params?.seed ?? undefined,
  810. stop:
  811. $settings?.params?.stop ?? undefined
  812. ? $settings.params.stop.map((str) =>
  813. decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  814. )
  815. : undefined,
  816. temperature: $settings?.params?.temperature ?? undefined,
  817. top_p: $settings?.params?.top_p ?? undefined,
  818. frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
  819. max_tokens: $settings?.params?.max_tokens ?? undefined,
  820. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  821. docs: docs.length > 0 ? docs : undefined,
  822. citations: docs.length > 0,
  823. chat_id: $chatId
  824. },
  825. `${OPENAI_API_BASE_URL}`
  826. );
  827. // Wait until history/message have been updated
  828. await tick();
  829. scrollToBottom();
  830. if (res && res.ok && res.body) {
  831. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  832. let lastUsage = null;
  833. for await (const update of textStream) {
  834. const { value, done, citations, error, usage } = update;
  835. if (error) {
  836. await handleOpenAIError(error, null, model, responseMessage);
  837. break;
  838. }
  839. if (done || stopResponseFlag || _chatId !== $chatId) {
  840. responseMessage.done = true;
  841. messages = messages;
  842. if (stopResponseFlag) {
  843. controller.abort('User: Stop Response');
  844. } else {
  845. const messages = createMessagesList(responseMessageId);
  846. await chatCompletedHandler(model.id, messages);
  847. }
  848. _response = responseMessage.content;
  849. break;
  850. }
  851. if (usage) {
  852. lastUsage = usage;
  853. }
  854. if (citations) {
  855. responseMessage.citations = citations;
  856. continue;
  857. }
  858. if (responseMessage.content == '' && value == '\n') {
  859. continue;
  860. } else {
  861. responseMessage.content += value;
  862. messages = messages;
  863. }
  864. if (autoScroll) {
  865. scrollToBottom();
  866. }
  867. }
  868. if ($settings.notificationEnabled && !document.hasFocus()) {
  869. const notification = new Notification(`${model.id}`, {
  870. body: responseMessage.content,
  871. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  872. });
  873. }
  874. if ($settings.responseAutoCopy) {
  875. copyToClipboard(responseMessage.content);
  876. }
  877. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  878. await tick();
  879. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  880. }
  881. if (lastUsage) {
  882. responseMessage.info = { ...lastUsage, openai: true };
  883. }
  884. if ($chatId == _chatId) {
  885. if ($settings.saveChatHistory ?? true) {
  886. chat = await updateChatById(localStorage.token, _chatId, {
  887. models: selectedModels,
  888. messages: messages,
  889. history: history
  890. });
  891. await chats.set(await getChatList(localStorage.token));
  892. }
  893. }
  894. } else {
  895. await handleOpenAIError(null, res, model, responseMessage);
  896. }
  897. } catch (error) {
  898. await handleOpenAIError(error, null, model, responseMessage);
  899. }
  900. messages = messages;
  901. stopResponseFlag = false;
  902. await tick();
  903. if (autoScroll) {
  904. scrollToBottom();
  905. }
  906. if (messages.length == 2) {
  907. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  908. const _title = await generateChatTitle(userPrompt);
  909. await setChatTitle(_chatId, _title);
  910. }
  911. return _response;
  912. };
  913. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  914. let errorMessage = '';
  915. let innerError;
  916. if (error) {
  917. innerError = error;
  918. } else if (res !== null) {
  919. innerError = await res.json();
  920. }
  921. console.error(innerError);
  922. if ('detail' in innerError) {
  923. toast.error(innerError.detail);
  924. errorMessage = innerError.detail;
  925. } else if ('error' in innerError) {
  926. if ('message' in innerError.error) {
  927. toast.error(innerError.error.message);
  928. errorMessage = innerError.error.message;
  929. } else {
  930. toast.error(innerError.error);
  931. errorMessage = innerError.error;
  932. }
  933. } else if ('message' in innerError) {
  934. toast.error(innerError.message);
  935. errorMessage = innerError.message;
  936. }
  937. responseMessage.error = {
  938. content:
  939. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  940. provider: model.name ?? model.id
  941. }) +
  942. '\n' +
  943. errorMessage
  944. };
  945. responseMessage.done = true;
  946. messages = messages;
  947. };
  948. const stopResponse = () => {
  949. stopResponseFlag = true;
  950. console.log('stopResponse');
  951. };
  952. const regenerateResponse = async (message) => {
  953. console.log('regenerateResponse');
  954. if (messages.length != 0) {
  955. let userMessage = history.messages[message.parentId];
  956. let userPrompt = userMessage.content;
  957. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  958. await sendPrompt(userPrompt, userMessage.id, undefined, false);
  959. } else {
  960. await sendPrompt(userPrompt, userMessage.id, message.model, false);
  961. }
  962. }
  963. };
  964. const continueGeneration = async () => {
  965. console.log('continueGeneration');
  966. const _chatId = JSON.parse(JSON.stringify($chatId));
  967. if (messages.length != 0 && messages.at(-1).done == true) {
  968. const responseMessage = history.messages[history.currentId];
  969. responseMessage.done = false;
  970. await tick();
  971. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  972. if (model) {
  973. if (model?.owned_by === 'openai') {
  974. await sendPromptOpenAI(
  975. model,
  976. history.messages[responseMessage.parentId].content,
  977. responseMessage.id,
  978. _chatId
  979. );
  980. } else
  981. await sendPromptOllama(
  982. model,
  983. history.messages[responseMessage.parentId].content,
  984. responseMessage.id,
  985. _chatId
  986. );
  987. }
  988. } else {
  989. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  990. }
  991. };
  992. const generateChatTitle = async (userPrompt) => {
  993. if ($settings?.title?.auto ?? true) {
  994. const title = await generateTitle(
  995. localStorage.token,
  996. selectedModels[0],
  997. userPrompt,
  998. $chatId
  999. ).catch((error) => {
  1000. console.error(error);
  1001. return 'New Chat';
  1002. });
  1003. return title;
  1004. } else {
  1005. return `${userPrompt}`;
  1006. }
  1007. };
  1008. const setChatTitle = async (_chatId, _title) => {
  1009. if (_chatId === $chatId) {
  1010. title = _title;
  1011. }
  1012. if ($settings.saveChatHistory ?? true) {
  1013. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1014. await chats.set(await getChatList(localStorage.token));
  1015. }
  1016. };
  1017. const getTags = async () => {
  1018. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1019. return [];
  1020. });
  1021. };
  1022. </script>
<svelte:head>
	<!-- Browser tab title: chat title truncated to 30 chars, suffixed with the app name -->
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

<CallOverlay {submitPrompt} bind:files />

<!-- Render once loaded (or immediately for a brand-new chat with no chatIdProp) -->
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="h-screen max-h-[100dvh] {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>

		<!-- Announcement banners: shown only on a fresh, empty chat; dismissed
		     banner ids are persisted in localStorage under 'dismissedBannerIds' -->
		{#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
			<div
				class="absolute top-[4.25rem] w-full {$showSidebar ? 'md:max-w-[calc(100%-260px)]' : ''}"
			>
				<div class=" flex flex-col gap-1 w-full">
					{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
						<Banner
							{banner}
							on:dismiss={(e) => {
								const bannerId = e.detail;

								<!-- intentionally filters out ids of banners that no longer exist -->
								localStorage.setItem(
									'dismissedBannerIds',
									JSON.stringify(
										[
											bannerId,
											...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
										].filter((id) => $banners.find((b) => b.id === id))
									)
								);
							}}
						/>
					{/each}
				</div>
			</div>
		{/if}

		<div class="flex flex-col flex-auto">
			<!-- Scrollable message pane; autoScroll stays true only while the user
			     is pinned within 5px of the bottom -->
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
				id="messages-container"
				bind:this={messagesContainerElement}
				on:scroll={(e) => {
					autoScroll =
						messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
						messagesContainerElement.clientHeight + 5;
				}}
			>
				<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
					<Messages
						chatId={$chatId}
						{selectedModels}
						{processing}
						bind:history
						bind:messages
						bind:autoScroll
						bind:prompt
						bottomPadding={files.length > 0}
						{sendPrompt}
						{continueGeneration}
						{regenerateResponse}
					/>
				</div>
			</div>

			<MessageInput
				bind:files
				bind:prompt
				bind:autoScroll
				bind:selectedToolIds
				bind:webSearchEnabled
				bind:atSelectedModel
				{selectedModels}
				{messages}
				{submitPrompt}
				{stopResponse}
			/>
		</div>
	</div>
{/if}