Chat.svelte 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { getContext, onMount, tick } from 'svelte';
  6. import { goto } from '$app/navigation';
  7. import { page } from '$app/stores';
  8. import {
  9. chatId,
  10. chats,
  11. config,
  12. type Model,
  13. models,
  14. settings,
  15. showSidebar,
  16. tags as _tags,
  17. WEBUI_NAME,
  18. banners,
  19. user,
  20. socket
  21. } from '$lib/stores';
  22. import {
  23. convertMessagesToHistory,
  24. copyToClipboard,
  25. promptTemplate,
  26. splitStream
  27. } from '$lib/utils';
  28. import { generateChatCompletion } from '$lib/apis/ollama';
  29. import {
  30. addTagById,
  31. createNewChat,
  32. deleteTagById,
  33. getAllChatTags,
  34. getChatById,
  35. getChatList,
  36. getTagsById,
  37. updateChatById
  38. } from '$lib/apis/chats';
  39. import {
  40. generateOpenAIChatCompletion,
  41. generateSearchQuery,
  42. generateTitle
  43. } from '$lib/apis/openai';
  44. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  45. import Messages from '$lib/components/chat/Messages.svelte';
  46. import Navbar from '$lib/components/layout/Navbar.svelte';
  47. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  48. import { createOpenAITextStream } from '$lib/apis/streaming';
  49. import { queryMemory } from '$lib/apis/memories';
  50. import type { Writable } from 'svelte/store';
  51. import type { i18n as i18nType } from 'i18next';
  52. import { runWebSearch } from '$lib/apis/rag';
  53. import Banner from '../common/Banner.svelte';
  54. import { getUserSettings } from '$lib/apis/users';
  55. import { chatCompleted } from '$lib/apis';
// i18n instance provided by the app root via Svelte context.
const i18n: Writable<i18nType> = getContext('i18n');

// Chat id passed in from the route (`/c/[id]`); empty string for a new chat.
export let chatIdProp = '';

let loaded = false; // true once an existing chat has been fetched and rendered
let stopResponseFlag = false; // set by stopResponse(); polled by the streaming loops
let autoScroll = true; // keep the view pinned to the bottom while streaming
let processing = '';
let messagesContainerElement: HTMLDivElement;
let showModelSelector = true;

let selectedModels = ['']; // model ids chosen in the selector ('' = none selected)
let atSelectedModel: Model | undefined; // model targeted via an '@model' mention, if any
let webSearchEnabled = false;

let chat = null; // server-side chat record (null until created/loaded)
let tags = [];
let title = '';

let prompt = ''; // current textarea contents
let files = []; // attachments pending for the next user message

// Message tree: `history` is a parent/child graph keyed by message id with a
// `currentId` leaf pointer; `messages` is the linear root->leaf branch that is
// currently displayed, derived reactively from `history` below.
let messages = [];
let history = {
	messages: {},
	currentId: null
};
// Rebuild the flat `messages` list whenever the active leaf changes: walk
// parent pointers from `history.currentId` up to the root, prepending copies.
$: if (history.currentId !== null) {
	let _messages = [];

	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}

// Load the chat whenever the route prop changes; redirect to '/' when the
// chat cannot be loaded (deleted, or belongs to another user).
$: if (chatIdProp) {
	(async () => {
		if (await loadChat()) {
			await tick();
			loaded = true;

			// Defer the scroll until after the DOM has painted the messages.
			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}
onMount(async () => {
	if (!$chatId) {
		// Fresh visit to '/': set up an empty chat.
		await initNewChat();
	} else {
		// A chat id is already active; when the user has chat-history saving
		// disabled, stored chats are not accessible — go back home.
		if (!($settings.saveChatHistory ?? true)) {
			await goto('/');
		}
	}
});
  111. //////////////////////////
  112. // Web functions
  113. //////////////////////////
// Reset all chat state for a brand-new conversation on '/': clears history,
// picks the initial model selection (URL ?models= > user settings > server
// default), optionally auto-submits a ?q= prompt, and refocuses the input.
const initNewChat = async () => {
	// NOTE(review): the module-level `history` object shadows window.history
	// here, so `history.state` is presumably undefined — replaceState appears
	// to tolerate this, but confirm the shadowing is intentional.
	window.history.replaceState(history.state, '', `/`);
	await chatId.set('');

	autoScroll = true;

	title = '';
	messages = [];
	history = {
		messages: {},
		currentId: null
	};

	// Initial model selection priority: URL param > user settings > server default.
	if ($page.url.searchParams.get('models')) {
		selectedModels = $page.url.searchParams.get('models')?.split(',');
	} else if ($settings?.models) {
		selectedModels = $settings?.models;
	} else if ($config?.default_models) {
		console.log($config?.default_models.split(',') ?? '');
		selectedModels = $config?.default_models.split(',');
	} else {
		selectedModels = [''];
	}

	// NOTE(review): the ?q= prompt is submitted before the model-id validation
	// below runs — confirm this ordering is intentional.
	if ($page.url.searchParams.get('q')) {
		prompt = $page.url.searchParams.get('q') ?? '';

		if (prompt) {
			await tick();
			submitPrompt(prompt);
		}
	}

	// Blank out any selected model id that no longer exists on the server.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	// Prefer server-stored user settings; fall back to the local copy.
	const userSettings = await getUserSettings(localStorage.token);

	if (userSettings) {
		settings.set(userSettings.ui);
	} else {
		settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
	}

	const chatInput = document.getElementById('chat-textarea');
	setTimeout(() => chatInput?.focus(), 0);
};
// Fetch the chat identified by `chatIdProp` and hydrate component state
// (models, history, title, per-chat settings). Returns true on success and
// a falsy value (null/undefined) on failure so the reactive caller can
// redirect to '/'.
const loadChat = async () => {
	chatId.set(chatIdProp);
	chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
		await goto('/');
		return null;
	});

	if (chat) {
		tags = await getTags();
		const chatContent = chat.chat;

		if (chatContent) {
			console.log(chatContent);

			// Legacy chats may store a single model value; normalize to an array.
			selectedModels =
				(chatContent?.models ?? undefined) !== undefined
					? chatContent.models
					: [chatContent.models ?? ''];
			// Legacy chats stored a flat message list; convert it to the tree form.
			history =
				(chatContent?.history ?? undefined) !== undefined
					? chatContent.history
					: convertMessagesToHistory(chatContent.messages);
			title = chatContent.title;

			const userSettings = await getUserSettings(localStorage.token);
			if (userSettings) {
				await settings.set(userSettings.ui);
			} else {
				await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
			}

			// Per-chat system prompt / params override the global settings.
			await settings.set({
				...$settings,
				system: chatContent.system ?? $settings.system,
				params: chatContent.options ?? $settings.params
			});
			autoScroll = true;
			await tick();

			// Mark the last message done so a chat interrupted mid-stream does not
			// render as still generating.
			if (messages.length > 0) {
				history.messages[messages.at(-1).id].done = true;
			}
			await tick();

			return true;
		} else {
			return null;
		}
	}
	// NOTE(review): falls through returning undefined when `chat` is null;
	// callers only test truthiness, so this behaves like a failure.
};
  196. const scrollToBottom = async () => {
  197. await tick();
  198. if (messagesContainerElement) {
  199. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  200. }
  201. };
  202. const createMessagesList = (responseMessageId) => {
  203. const message = history.messages[responseMessageId];
  204. if (message.parentId) {
  205. return [...createMessagesList(message.parentId), message];
  206. } else {
  207. return [message];
  208. }
  209. };
// Post-completion hook: render any mermaid diagrams in the new response,
// notify the backend that the model finished, and merge the (possibly
// post-processed) messages the server returns back into local history.
const chatCompletedHandler = async (modelId, messages) => {
	await mermaid.run({
		querySelector: '.mermaid'
	});

	const res = await chatCompleted(localStorage.token, {
		model: modelId,
		messages: messages.map((m) => ({
			id: m.id,
			role: m.role,
			content: m.content,
			timestamp: m.timestamp
		})),
		chat_id: $chatId
	}).catch((error) => {
		// Best-effort: a failed completion callback is logged, not surfaced.
		console.error(error);
		return null;
	});

	if (res !== null) {
		// Update chat history with the new messages; when the server altered a
		// message's content, preserve the client's version as `originalContent`.
		for (const message of res.messages) {
			history.messages[message.id] = {
				...history.messages[message.id],
				...(history.messages[message.id].content !== message.content
					? { originalContent: history.messages[message.id].content }
					: {}),
				...message
			};
		}
	}
};
  240. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  241. return setInterval(() => {
  242. $socket?.emit('usage', {
  243. action: 'chat',
  244. model: modelId,
  245. chat_id: chatId
  246. });
  247. }, 1000);
  248. };
  249. //////////////////////////
  250. // Ollama functions
  251. //////////////////////////
// Handle a user submitting a prompt: validate preconditions (model selected,
// previous response finished, uploads complete), append the user message to
// the history tree, create the server-side chat on the first message, then
// dispatch the prompt to the selected model(s) via sendPrompt.
const submitPrompt = async (userPrompt, _user = null) => {
	console.log('submitPrompt', $chatId);

	// Blank out any selected model id that no longer exists on the server.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	if (selectedModels.includes('')) {
		toast.error($i18n.t('Model not selected'));
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done
		console.log('wait');
	} else if (
		files.length > 0 &&
		files.filter((file) => file.upload_status === false).length > 0
	) {
		// Upload not done
		toast.error(
			$i18n.t(
				`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
			)
		);
	} else {
		// Reset chat message textarea height
		document.getElementById('chat-textarea').style.height = '';

		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			user: _user ?? undefined,
			content: userPrompt,
			files: files.length > 0 ? files : undefined,
			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
			// Deduplicated list of target model ids, recorded on the message.
			models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
		};

		// Add message to history and Set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;

		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}

		// Wait until history/message have been updated
		await tick();

		// Create new chat if only one message in messages
		if (messages.length == 1) {
			if ($settings.saveChatHistory ?? true) {
				chat = await createNewChat(localStorage.token, {
					id: $chatId,
					title: $i18n.t('New Chat'),
					models: selectedModels,
					system: $settings.system ?? undefined,
					options: {
						...($settings.params ?? {})
					},
					messages: messages,
					history: history,
					tags: [],
					timestamp: Date.now()
				});
				await chats.set(await getChatList(localStorage.token));
				await chatId.set(chat.id);
			} else {
				// History saving disabled: use a sentinel id, nothing is persisted.
				await chatId.set('local');
			}
			await tick();
		}

		// Reset chat input textarea
		prompt = '';
		document.getElementById('chat-textarea').style.height = '';
		files = [];

		// Send prompt
		await sendPrompt(userPrompt, userMessageId);
	}
};
// Fan the prompt out to the target model(s) in parallel. Target priority:
// explicit `modelId` arg > '@'-mentioned model > the selector's models.
// For each model: creates an assistant placeholder message in the history
// tree, gathers memory context and optional web-search results, then streams
// from the OpenAI- or Ollama-style backend as appropriate.
const sendPrompt = async (prompt, parentId, modelId = null) => {
	// Snapshot the chat id so a mid-stream navigation can be detected.
	const _chatId = JSON.parse(JSON.stringify($chatId));

	await Promise.all(
		(modelId
			? [modelId]
			: atSelectedModel !== undefined
				? [atSelectedModel.id]
				: selectedModels
		).map(async (modelId) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);

			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);

				// Warn only — the request is still sent to the model.
				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}

				// Create response message
				let responseMessageId = uuidv4();
				let responseMessage = {
					parentId: parentId,
					id: responseMessageId,
					childrenIds: [],
					role: 'assistant',
					content: '',
					model: model.id,
					modelName: model.name ?? model.id,
					userContext: null,
					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
				};

				// Add message to history and Set currentId to messageId
				history.messages[responseMessageId] = responseMessage;
				history.currentId = responseMessageId;

				// Append messageId to childrenIds of parent message
				if (parentId !== null) {
					history.messages[parentId].childrenIds = [
						...history.messages[parentId].childrenIds,
						responseMessageId
					];
				}

				await tick();

				// When the memory feature is enabled, query stored memories relevant
				// to the prompt and format them as a dated, numbered list.
				let userContext = null;
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});

						if (res) {
							if (res.documents[0].length > 0) {
								userContext = res.documents.reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[index][0].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];

									acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
									return acc;
								}, []);
							}

							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;

				// Heartbeat: report model usage while the response streams.
				const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);

				if (webSearchEnabled) {
					await getWebSearchResults(model.id, parentId, responseMessageId);
				}

				if (model?.owned_by === 'openai') {
					await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}

				console.log('chatEventEmitter', chatEventEmitter);

				if (chatEventEmitter) clearInterval(chatEventEmitter);
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);

	await chats.set(await getChatList(localStorage.token));
};
  416. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  417. const responseMessage = history.messages[responseId];
  418. responseMessage.status = {
  419. done: false,
  420. action: 'web_search',
  421. description: $i18n.t('Generating search query')
  422. };
  423. messages = messages;
  424. const prompt = history.messages[parentId].content;
  425. let searchQuery = prompt;
  426. if (prompt.length > 100) {
  427. searchQuery = await generateChatSearchQuery(model, prompt);
  428. if (!searchQuery) {
  429. toast.warning($i18n.t('No search query generated'));
  430. responseMessage.status = {
  431. ...responseMessage.status,
  432. done: true,
  433. error: true,
  434. description: 'No search query generated'
  435. };
  436. messages = messages;
  437. return;
  438. }
  439. }
  440. responseMessage.status = {
  441. ...responseMessage.status,
  442. description: $i18n.t("Searching the web for '{{searchQuery}}'", { searchQuery })
  443. };
  444. messages = messages;
  445. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  446. console.log(error);
  447. toast.error(error);
  448. return null;
  449. });
  450. if (results) {
  451. responseMessage.status = {
  452. ...responseMessage.status,
  453. done: true,
  454. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  455. urls: results.filenames
  456. };
  457. if (responseMessage?.files ?? undefined === undefined) {
  458. responseMessage.files = [];
  459. }
  460. responseMessage.files.push({
  461. collection_name: results.collection_name,
  462. name: searchQuery,
  463. type: 'web_search_results',
  464. urls: results.filenames
  465. });
  466. messages = messages;
  467. } else {
  468. responseMessage.status = {
  469. ...responseMessage.status,
  470. done: true,
  471. error: true,
  472. description: 'No search results found'
  473. };
  474. messages = messages;
  475. }
  476. };
// Stream a completion from the Ollama backend into the response message,
// updating the UI incrementally and persisting the chat when finished.
// `_chatId` is the chat id captured at send time; streaming is abandoned when
// the user navigates to a different chat ($chatId changes mid-stream).
const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
	// Only the model id is needed from here on.
	model = model.id;
	const responseMessage = history.messages[responseMessageId];

	// Wait until history/message have been updated
	await tick();

	// Scroll down
	scrollToBottom();

	// Build the request payload: an optional system message (global system
	// prompt and/or memory-derived user context) followed by the visible thread.
	const messagesBody = [
		$settings.system || (responseMessage?.userContext ?? null)
			? {
					role: 'system',
					content: `${promptTemplate($settings?.system ?? '', $user.name)}${
						responseMessage?.userContext ?? null
							? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
							: ''
					}`
				}
			: undefined,
		...messages
	]
		.filter((message) => message?.content?.trim())
		.map((message, idx, arr) => {
			// Prepare the base message object
			const baseMessage = {
				role: message.role,
				content: message.content
			};

			// Extract and format image URLs if any exist (strip everything up to
			// and including the data-URL comma, leaving the raw base64 payload).
			const imageUrls = message.files
				?.filter((file) => file.type === 'image')
				.map((file) => file.url.slice(file.url.indexOf(',') + 1));

			// Add images array only if it contains elements
			if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
				baseMessage.images = imageUrls;
			}
			return baseMessage;
		});

	let lastImageIndex = -1;

	// Find the index of the last object with images
	messagesBody.forEach((item, index) => {
		if (item.images) {
			lastImageIndex = index;
		}
	});

	// Remove images from all but the last one — only the most recent image
	// message is forwarded to the model.
	messagesBody.forEach((item, index) => {
		if (index !== lastImageIndex) {
			delete item.images;
		}
	});

	// Collect RAG document references attached anywhere in the thread.
	const docs = messages
		.filter((message) => message?.files ?? null)
		.map((message) =>
			message.files.filter((item) =>
				['doc', 'collection', 'web_search_results'].includes(item.type)
			)
		)
		.flat(1);

	const [res, controller] = await generateChatCompletion(localStorage.token, {
		model: model,
		messages: messagesBody,
		options: {
			...($settings.params ?? {}),
			// Stop strings are stored URI-encoded; decode them for the request.
			stop:
				$settings?.params?.stop ?? undefined
					? $settings.params.stop.map((str) =>
							decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
						)
					: undefined,
			num_predict: $settings?.params?.max_tokens ?? undefined,
			repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
		},
		format: $settings.requestFormat ?? undefined,
		keep_alive: $settings.keepAlive ?? undefined,
		docs: docs.length > 0 ? docs : undefined,
		citations: docs.length > 0,
		chat_id: $chatId
	});

	if (res && res.ok) {
		console.log('controller', controller);

		// The response body is newline-delimited JSON; split and parse per line.
		const reader = res.body
			.pipeThrough(new TextDecoderStream())
			.pipeThrough(splitStream('\n'))
			.getReader();

		while (true) {
			const { value, done } = await reader.read();
			// Stop when the stream ends, the user pressed stop, or the user
			// switched to a different chat mid-stream.
			if (done || stopResponseFlag || _chatId !== $chatId) {
				responseMessage.done = true;
				messages = messages;

				if (stopResponseFlag) {
					controller.abort('User: Stop Response');
				} else {
					const messages = createMessagesList(responseMessageId);
					await chatCompletedHandler(model, messages);
				}

				break;
			}

			try {
				let lines = value.split('\n');

				for (const line of lines) {
					if (line !== '') {
						console.log(line);
						let data = JSON.parse(line);

						if ('citations' in data) {
							responseMessage.citations = data.citations;
							continue;
						}

						// Server-reported error payload mid-stream.
						if ('detail' in data) {
							throw data;
						}

						if (data.done == false) {
							// Skip a leading bare newline so responses don't start blank.
							if (responseMessage.content == '' && data.message.content == '\n') {
								continue;
							} else {
								responseMessage.content += data.message.content;
								messages = messages;
							}
						} else {
							// Final chunk: record completion metadata and timing stats.
							responseMessage.done = true;

							if (responseMessage.content == '') {
								responseMessage.error = {
									code: 400,
									content: `Oops! No text generated from Ollama, Please try again.`
								};
							}

							responseMessage.context = data.context ?? null;
							responseMessage.info = {
								total_duration: data.total_duration,
								load_duration: data.load_duration,
								sample_count: data.sample_count,
								sample_duration: data.sample_duration,
								prompt_eval_count: data.prompt_eval_count,
								prompt_eval_duration: data.prompt_eval_duration,
								eval_count: data.eval_count,
								eval_duration: data.eval_duration
							};
							messages = messages;

							if ($settings.notificationEnabled && !document.hasFocus()) {
								// NOTE(review): `selectedModelfile` is not declared anywhere in
								// this file's visible scope — this branch likely throws a
								// ReferenceError when notifications are enabled. Confirm.
								const notification = new Notification(
									selectedModelfile
										? `${
												selectedModelfile.title.charAt(0).toUpperCase() +
												selectedModelfile.title.slice(1)
											}`
										: `${model}`,
									{
										body: responseMessage.content,
										icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
									}
								);
							}

							if ($settings.responseAutoCopy) {
								copyToClipboard(responseMessage.content);
							}

							if ($settings.responseAutoPlayback) {
								await tick();
								document.getElementById(`speak-button-${responseMessage.id}`)?.click();
							}
						}
					}
				}
			} catch (error) {
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
				}
				break;
			}

			if (autoScroll) {
				scrollToBottom();
			}
		}

		// Persist the updated thread unless the user navigated away or has
		// chat-history saving disabled.
		if ($chatId == _chatId) {
			if ($settings.saveChatHistory ?? true) {
				chat = await updateChatById(localStorage.token, _chatId, {
					messages: messages,
					history: history,
					models: selectedModels
				});
				await chats.set(await getChatList(localStorage.token));
			}
		}
	} else {
		// Request failed outright: surface the server error, or a generic
		// connection error when no response was received at all.
		if (res !== null) {
			const error = await res.json();
			console.log(error);
			if ('detail' in error) {
				toast.error(error.detail);
				responseMessage.error = { content: error.detail };
			} else {
				toast.error(error.error);
				responseMessage.error = { content: error.error };
			}
		} else {
			toast.error(
				$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
			);
			responseMessage.error = {
				content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
					provider: 'Ollama'
				})
			};
		}
		responseMessage.done = true;
		messages = messages;
	}

	stopResponseFlag = false;
	await tick();

	if (autoScroll) {
		scrollToBottom();
	}

	// First exchange complete: move the URL to /c/<id> and generate a title.
	if (messages.length == 2 && messages.at(1).content !== '') {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);
		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}
};
// Stream a completion from an OpenAI-compatible backend into the response
// message. Mirrors sendPromptOllama but uses the OpenAI chat format (content
// parts with image_url entries) and the SSE text-stream helper. `_chatId` is
// the chat id captured at send time; streaming stops if the user navigates away.
const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
	const responseMessage = history.messages[responseMessageId];

	// Collect RAG document references attached anywhere in the thread.
	const docs = messages
		.filter((message) => message?.files ?? null)
		.map((message) =>
			message.files.filter((item) =>
				['doc', 'collection', 'web_search_results'].includes(item.type)
			)
		)
		.flat(1);

	console.log(docs);
	scrollToBottom();

	try {
		const [res, controller] = await generateOpenAIChatCompletion(
			localStorage.token,
			{
				model: model.id,
				stream: true,
				// Request token-usage reporting only when the model advertises it.
				stream_options:
					model.info?.meta?.capabilities?.usage ?? false
						? {
								include_usage: true
							}
						: undefined,
				// Optional system message (global system prompt and/or memory-derived
				// user context) followed by the visible thread.
				messages: [
					$settings.system || (responseMessage?.userContext ?? null)
						? {
								role: 'system',
								content: `${promptTemplate($settings?.system ?? '', $user.name)}${
									responseMessage?.userContext ?? null
										? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
										: ''
								}`
							}
						: undefined,
					...messages
				]
					.filter((message) => message?.content?.trim())
					.map((message, idx, arr) => ({
						role: message.role,
						// User messages with images become multi-part content
						// (text + image_url entries); everything else is plain text.
						// The last message prefers `raContent` (RAG-augmented) if set.
						...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
						message.role === 'user'
							? {
									content: [
										{
											type: 'text',
											text:
												arr.length - 1 !== idx
													? message.content
													: message?.raContent ?? message.content
										},
										...message.files
											.filter((file) => file.type === 'image')
											.map((file) => ({
												type: 'image_url',
												image_url: {
													url: file.url
												}
											}))
									]
								}
							: {
									content:
										arr.length - 1 !== idx
											? message.content
											: message?.raContent ?? message.content
								})
					})),
				seed: $settings?.params?.seed ?? undefined,
				// Stop strings are stored URI-encoded; decode them for the request.
				stop:
					$settings?.params?.stop ?? undefined
						? $settings.params.stop.map((str) =>
								decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
							)
						: undefined,
				temperature: $settings?.params?.temperature ?? undefined,
				top_p: $settings?.params?.top_p ?? undefined,
				frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
				max_tokens: $settings?.params?.max_tokens ?? undefined,
				docs: docs.length > 0 ? docs : undefined,
				citations: docs.length > 0,
				chat_id: $chatId
			},
			`${OPENAI_API_BASE_URL}`
		);

		// Wait until history/message have been updated
		await tick();

		scrollToBottom();

		if (res && res.ok && res.body) {
			const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
			let lastUsage = null;

			for await (const update of textStream) {
				const { value, done, citations, error, usage } = update;
				if (error) {
					await handleOpenAIError(error, null, model, responseMessage);
					break;
				}
				// Stop when the stream ends, the user pressed stop, or the user
				// switched to a different chat mid-stream.
				if (done || stopResponseFlag || _chatId !== $chatId) {
					responseMessage.done = true;
					messages = messages;

					if (stopResponseFlag) {
						controller.abort('User: Stop Response');
					} else {
						const messages = createMessagesList(responseMessageId);
						await chatCompletedHandler(model.id, messages);
					}

					break;
				}

				if (usage) {
					lastUsage = usage;
				}

				if (citations) {
					responseMessage.citations = citations;
					continue;
				}

				// Skip a leading bare newline so responses don't start blank.
				if (responseMessage.content == '' && value == '\n') {
					continue;
				} else {
					responseMessage.content += value;
					messages = messages;
				}

				if (autoScroll) {
					scrollToBottom();
				}
			}

			if ($settings.notificationEnabled && !document.hasFocus()) {
				const notification = new Notification(`OpenAI ${model}`, {
					body: responseMessage.content,
					icon: `${WEBUI_BASE_URL}/static/favicon.png`
				});
			}

			if ($settings.responseAutoCopy) {
				copyToClipboard(responseMessage.content);
			}

			if ($settings.responseAutoPlayback) {
				await tick();
				document.getElementById(`speak-button-${responseMessage.id}`)?.click();
			}

			if (lastUsage) {
				responseMessage.info = { ...lastUsage, openai: true };
			}

			// Persist the updated thread unless the user navigated away or has
			// chat-history saving disabled.
			if ($chatId == _chatId) {
				if ($settings.saveChatHistory ?? true) {
					chat = await updateChatById(localStorage.token, _chatId, {
						models: selectedModels,
						messages: messages,
						history: history
					});
					await chats.set(await getChatList(localStorage.token));
				}
			}
		} else {
			await handleOpenAIError(null, res, model, responseMessage);
		}
	} catch (error) {
		await handleOpenAIError(error, null, model, responseMessage);
	}
	messages = messages;

	stopResponseFlag = false;
	await tick();

	if (autoScroll) {
		scrollToBottom();
	}

	// First exchange complete: move the URL to /c/<id> and generate a title.
	if (messages.length == 2) {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);

		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}
};
  863. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  864. let errorMessage = '';
  865. let innerError;
  866. if (error) {
  867. innerError = error;
  868. } else if (res !== null) {
  869. innerError = await res.json();
  870. }
  871. console.error(innerError);
  872. if ('detail' in innerError) {
  873. toast.error(innerError.detail);
  874. errorMessage = innerError.detail;
  875. } else if ('error' in innerError) {
  876. if ('message' in innerError.error) {
  877. toast.error(innerError.error.message);
  878. errorMessage = innerError.error.message;
  879. } else {
  880. toast.error(innerError.error);
  881. errorMessage = innerError.error;
  882. }
  883. } else if ('message' in innerError) {
  884. toast.error(innerError.message);
  885. errorMessage = innerError.message;
  886. }
  887. responseMessage.error = {
  888. content:
  889. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  890. provider: model.name ?? model.id
  891. }) +
  892. '\n' +
  893. errorMessage
  894. };
  895. responseMessage.done = true;
  896. messages = messages;
  897. };
  898. const stopResponse = () => {
  899. stopResponseFlag = true;
  900. console.log('stopResponse');
  901. };
  902. const regenerateResponse = async (message) => {
  903. console.log('regenerateResponse');
  904. if (messages.length != 0) {
  905. let userMessage = history.messages[message.parentId];
  906. let userPrompt = userMessage.content;
  907. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  908. await sendPrompt(userPrompt, userMessage.id);
  909. } else {
  910. await sendPrompt(userPrompt, userMessage.id, message.model);
  911. }
  912. }
  913. };
  914. const continueGeneration = async () => {
  915. console.log('continueGeneration');
  916. const _chatId = JSON.parse(JSON.stringify($chatId));
  917. if (messages.length != 0 && messages.at(-1).done == true) {
  918. const responseMessage = history.messages[history.currentId];
  919. responseMessage.done = false;
  920. await tick();
  921. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  922. if (model) {
  923. if (model?.owned_by === 'openai') {
  924. await sendPromptOpenAI(
  925. model,
  926. history.messages[responseMessage.parentId].content,
  927. responseMessage.id,
  928. _chatId
  929. );
  930. } else
  931. await sendPromptOllama(
  932. model,
  933. history.messages[responseMessage.parentId].content,
  934. responseMessage.id,
  935. _chatId
  936. );
  937. }
  938. } else {
  939. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  940. }
  941. };
  942. const generateChatTitle = async (userPrompt) => {
  943. if ($settings?.title?.auto ?? true) {
  944. const model = $models.find((model) => model.id === selectedModels[0]);
  945. const titleModelId =
  946. model?.owned_by === 'openai' ?? false
  947. ? $settings?.title?.modelExternal ?? selectedModels[0]
  948. : $settings?.title?.model ?? selectedModels[0];
  949. const titleModel = $models.find((model) => model.id === titleModelId);
  950. console.log(titleModel);
  951. const title = await generateTitle(
  952. localStorage.token,
  953. $settings?.title?.prompt ??
  954. $i18n.t(
  955. "Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
  956. ) + ' {{prompt}}',
  957. titleModelId,
  958. userPrompt,
  959. $chatId,
  960. titleModel?.owned_by === 'openai' ?? false
  961. ? `${OPENAI_API_BASE_URL}`
  962. : `${OLLAMA_API_BASE_URL}/v1`
  963. );
  964. return title;
  965. } else {
  966. return `${userPrompt}`;
  967. }
  968. };
  969. const generateChatSearchQuery = async (modelId: string, prompt: string) => {
  970. const model = $models.find((model) => model.id === modelId);
  971. const taskModelId =
  972. model?.owned_by === 'openai' ?? false
  973. ? $settings?.title?.modelExternal ?? modelId
  974. : $settings?.title?.model ?? modelId;
  975. const taskModel = $models.find((model) => model.id === taskModelId);
  976. const previousMessages = messages
  977. .filter((message) => message.role === 'user')
  978. .map((message) => message.content);
  979. return await generateSearchQuery(
  980. localStorage.token,
  981. taskModelId,
  982. previousMessages,
  983. prompt,
  984. taskModel?.owned_by === 'openai' ?? false
  985. ? `${OPENAI_API_BASE_URL}`
  986. : `${OLLAMA_API_BASE_URL}/v1`
  987. );
  988. };
  989. const setChatTitle = async (_chatId, _title) => {
  990. if (_chatId === $chatId) {
  991. title = _title;
  992. }
  993. if ($settings.saveChatHistory ?? true) {
  994. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  995. await chats.set(await getChatList(localStorage.token));
  996. }
  997. };
  998. const getTags = async () => {
  999. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1000. return [];
  1001. });
  1002. };
  1003. const addTag = async (tagName) => {
  1004. const res = await addTagById(localStorage.token, $chatId, tagName);
  1005. tags = await getTags();
  1006. chat = await updateChatById(localStorage.token, $chatId, {
  1007. tags: tags
  1008. });
  1009. _tags.set(await getAllChatTags(localStorage.token));
  1010. };
  1011. const deleteTag = async (tagName) => {
  1012. const res = await deleteTagById(localStorage.token, $chatId, tagName);
  1013. tags = await getTags();
  1014. chat = await updateChatById(localStorage.token, $chatId, {
  1015. tags: tags
  1016. });
  1017. _tags.set(await getAllChatTags(localStorage.token));
  1018. };
  1019. </script>
<svelte:head>
	<!-- Browser tab title: chat title truncated to 30 chars, suffixed with the app name. -->
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

<!-- Render immediately for a brand-new chat; for an existing chat (chatIdProp set), wait until it has loaded. -->
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="h-screen max-h-[100dvh] {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>

		<!-- Announcement banners: shown only on a fresh, empty chat with at most one model selected.
		     Dismissed banner ids are remembered in localStorage. -->
		{#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
			<div
				class="absolute top-[4.25rem] w-full {$showSidebar ? 'md:max-w-[calc(100%-260px)]' : ''}"
			>
				<div class=" flex flex-col gap-1 w-full">
					{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
						<Banner
							{banner}
							on:dismiss={(e) => {
								const bannerId = e.detail;

								// Record the dismissal, pruning ids that no longer
								// correspond to an active banner.
								localStorage.setItem(
									'dismissedBannerIds',
									JSON.stringify(
										[
											bannerId,
											...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
										].filter((id) => $banners.find((b) => b.id === id))
									)
								);
							}}
						/>
					{/each}
				</div>
			</div>
		{/if}

		<div class="flex flex-col flex-auto">
			<!-- Scrollable message list. Auto-scroll stays on only while the user is
			     within 5px of the bottom; scrolling up disables it. -->
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
				id="messages-container"
				bind:this={messagesContainerElement}
				on:scroll={(e) => {
					autoScroll =
						messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
						messagesContainerElement.clientHeight + 5;
				}}
			>
				<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
					<Messages
						chatId={$chatId}
						{selectedModels}
						{processing}
						bind:history
						bind:messages
						bind:autoScroll
						bind:prompt
						bottomPadding={files.length > 0}
						{sendPrompt}
						{continueGeneration}
						{regenerateResponse}
					/>
				</div>
			</div>

			<MessageInput
				bind:files
				bind:prompt
				bind:autoScroll
				bind:webSearchEnabled
				bind:atSelectedModel
				{selectedModels}
				{messages}
				{submitPrompt}
				{stopResponse}
			/>
		</div>
	</div>
{/if}