Chat.svelte 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203120412051206120712081209121012111212121312141215121612171218121912201221122212231224122512261227122812291230123112321233123412351236123712381239124012411242124312441245124612471248124912501251125212531254125512561257125812591260126112621263126412651266126712681269127012711272
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import { getContext, onMount, tick } from 'svelte';
  5. import { goto } from '$app/navigation';
  6. import { page } from '$app/stores';
  7. import {
  8. chatId,
  9. chats,
  10. config,
  11. type Model,
  12. models,
  13. settings,
  14. showSidebar,
  15. tags as _tags,
  16. WEBUI_NAME,
  17. banners
  18. } from '$lib/stores';
  19. import { convertMessagesToHistory, copyToClipboard, splitStream } from '$lib/utils';
  20. import { cancelOllamaRequest, generateChatCompletion } from '$lib/apis/ollama';
  21. import {
  22. addTagById,
  23. createNewChat,
  24. deleteTagById,
  25. getAllChatTags,
  26. getChatById,
  27. getChatList,
  28. getTagsById,
  29. updateChatById
  30. } from '$lib/apis/chats';
  31. import {
  32. generateOpenAIChatCompletion,
  33. generateSearchQuery,
  34. generateTitle
  35. } from '$lib/apis/openai';
  36. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  37. import Messages from '$lib/components/chat/Messages.svelte';
  38. import Navbar from '$lib/components/layout/Navbar.svelte';
  39. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  40. import { createOpenAITextStream } from '$lib/apis/streaming';
  41. import { queryMemory } from '$lib/apis/memories';
  42. import type { Writable } from 'svelte/store';
  43. import type { i18n as i18nType } from 'i18next';
  44. import { runWebSearch } from '$lib/apis/rag';
  45. import Banner from '../common/Banner.svelte';
  46. import { getUserSettings } from '$lib/apis/users';
  47. import { chatCompleted } from '$lib/apis';
// i18n instance provided by the root layout via Svelte context.
const i18n: Writable<i18nType> = getContext('i18n');

// Chat id passed in from the route (`/c/[id]`); empty string for a new chat.
export let chatIdProp = '';
let loaded = false;

// Set to true by stopResponse(); polled by the streaming loops to abort generation.
let stopResponseFlag = false;
// Whether the message list should keep auto-sticking to the bottom while streaming.
let autoScroll = true;
let processing = '';
let messagesContainerElement: HTMLDivElement;
// Ollama request id of the in-flight generation (used to cancel it server-side).
let currentRequestId = null;

let showModelSelector = true;
let selectedModels = [''];
// Model temporarily selected via "@model" mention, if any.
let atSelectedModel: Model | undefined;
let webSearchEnabled = false;

// Currently loaded chat record, its tags, and the per-chat UI state.
let chat = null;
let tags = [];
let title = '';
let prompt = '';
let files = [];

// Flat list of messages along the currently selected branch (derived from `history`).
let messages = [];
// Message tree: `messages` maps id -> message (linked by parentId/childrenIds);
// `currentId` is the leaf of the active branch.
let history = {
	messages: {},
	currentId: null
};
// Rebuild the linear `messages` array whenever the active branch changes:
// walk parent links from the current leaf up to the root, prepending shallow
// copies so the derived list is root-to-leaf ordered.
$: if (history.currentId !== null) {
	let _messages = [];

	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}
// When navigating to an existing chat (`/c/[id]`), load it; on failure go home.
$: if (chatIdProp) {
	(async () => {
		if (await loadChat()) {
			await tick();
			loaded = true;

			// Defer scroll until the DOM has rendered the loaded messages.
			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}
  95. onMount(async () => {
  96. if (!$chatId) {
  97. await initNewChat();
  98. } else {
  99. if (!($settings.saveChatHistory ?? true)) {
  100. await goto('/');
  101. }
  102. }
  103. });
  104. //////////////////////////
  105. // Web functions
  106. //////////////////////////
  107. const initNewChat = async () => {
  108. if (currentRequestId !== null) {
  109. await cancelOllamaRequest(localStorage.token, currentRequestId);
  110. currentRequestId = null;
  111. }
  112. window.history.replaceState(history.state, '', `/`);
  113. await chatId.set('');
  114. autoScroll = true;
  115. title = '';
  116. messages = [];
  117. history = {
  118. messages: {},
  119. currentId: null
  120. };
  121. if ($page.url.searchParams.get('models')) {
  122. selectedModels = $page.url.searchParams.get('models')?.split(',');
  123. } else if ($settings?.models) {
  124. selectedModels = $settings?.models;
  125. } else if ($config?.default_models) {
  126. console.log($config?.default_models.split(',') ?? '');
  127. selectedModels = $config?.default_models.split(',');
  128. } else {
  129. selectedModels = [''];
  130. }
  131. if ($page.url.searchParams.get('q')) {
  132. prompt = $page.url.searchParams.get('q') ?? '';
  133. if (prompt) {
  134. await tick();
  135. submitPrompt(prompt);
  136. }
  137. }
  138. selectedModels = selectedModels.map((modelId) =>
  139. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  140. );
  141. const userSettings = await getUserSettings(localStorage.token);
  142. if (userSettings) {
  143. settings.set(userSettings.ui);
  144. } else {
  145. settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  146. }
  147. const chatInput = document.getElementById('chat-textarea');
  148. setTimeout(() => chatInput?.focus(), 0);
  149. };
/**
 * Load the chat identified by `chatIdProp` into component state.
 * Returns true on success; any falsy value signals failure to the caller.
 */
const loadChat = async () => {
	chatId.set(chatIdProp);
	chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
		await goto('/');
		return null;
	});

	if (chat) {
		tags = await getTags();
		const chatContent = chat.chat;

		if (chatContent) {
			console.log(chatContent);

			// Older chats stored a single (possibly missing) model; newer ones
			// store a `models` array. Normalize to an array either way.
			selectedModels =
				(chatContent?.models ?? undefined) !== undefined
					? chatContent.models
					: [chatContent.models ?? ''];
			// Likewise, older chats stored a flat `messages` list instead of the
			// branching `history` tree; convert on the fly when needed.
			history =
				(chatContent?.history ?? undefined) !== undefined
					? chatContent.history
					: convertMessagesToHistory(chatContent.messages);
			title = chatContent.title;

			// Load base settings (server-side first, local cache as fallback)...
			const userSettings = await getUserSettings(localStorage.token);

			if (userSettings) {
				await settings.set(userSettings.ui);
			} else {
				await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
			}

			// ...then overlay this chat's own system prompt and model params.
			await settings.set({
				...$settings,
				system: chatContent.system ?? $settings.system,
				params: chatContent.options ?? $settings.params
			});
			autoScroll = true;
			await tick();

			// Mark the last message done so the UI doesn't render it as streaming.
			if (messages.length > 0) {
				history.messages[messages.at(-1).id].done = true;
			}
			await tick();

			return true;
		} else {
			return null;
		}
	}
	// NOTE(review): falls through (returns undefined) when the fetch failed;
	// the caller treats any falsy value as a failed load.
};
  193. const scrollToBottom = async () => {
  194. await tick();
  195. if (messagesContainerElement) {
  196. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  197. }
  198. };
  199. const createMessagesList = (responseMessageId) => {
  200. const message = history.messages[responseMessageId];
  201. if (message.parentId) {
  202. return [...createMessagesList(message.parentId), message];
  203. } else {
  204. return [message];
  205. }
  206. };
  207. //////////////////////////
  208. // Ollama functions
  209. //////////////////////////
/**
 * Validate state and, if clean, append the user's message to the history tree
 * and kick off generation. `_user` optionally overrides the message's user info.
 */
const submitPrompt = async (userPrompt, _user = null) => {
	console.log('submitPrompt', $chatId);
	// Blank out any selected model id that no longer exists on the server.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	if (selectedModels.includes('')) {
		toast.error($i18n.t('Model not selected'));
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done
		console.log('wait');
	} else if (
		files.length > 0 &&
		files.filter((file) => file.upload_status === false).length > 0
	) {
		// Upload not done
		toast.error(
			$i18n.t(
				`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
			)
		);
	} else {
		// Reset chat message textarea height
		document.getElementById('chat-textarea').style.height = '';

		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			user: _user ?? undefined,
			content: userPrompt,
			files: files.length > 0 ? files : undefined,
			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
			// Deduplicated list of target models for this message.
			models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
		};

		// Add message to history and Set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;

		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}

		// Wait until history/message have been updated
		await tick();

		// Create new chat if only one message in messages
		if (messages.length == 1) {
			if ($settings.saveChatHistory ?? true) {
				chat = await createNewChat(localStorage.token, {
					id: $chatId,
					title: $i18n.t('New Chat'),
					models: selectedModels,
					system: $settings.system ?? undefined,
					options: {
						...($settings.params ?? {})
					},
					messages: messages,
					history: history,
					tags: [],
					timestamp: Date.now()
				});
				await chats.set(await getChatList(localStorage.token));
				await chatId.set(chat.id);
			} else {
				// History saving disabled: keep the chat client-side only.
				await chatId.set('local');
			}
			await tick();
		}

		// Reset chat input textarea
		prompt = '';
		document.getElementById('chat-textarea').style.height = '';
		files = [];

		// Send prompt
		await sendPrompt(userPrompt, userMessageId);
	}
};
/**
 * Fan the prompt out to every target model in parallel: a single model when
 * `modelId` is given (regeneration), the "@" mention model if one is set,
 * otherwise all selected models. Creates one response message per model.
 */
const sendPrompt = async (prompt, parentId, modelId = null) => {
	// Snapshot the chat id so late stream chunks for a previous chat are ignored.
	const _chatId = JSON.parse(JSON.stringify($chatId));
	await Promise.all(
		(modelId
			? [modelId]
			: atSelectedModel !== undefined
			? [atSelectedModel.id]
			: selectedModels
		).map(async (modelId) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);

			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);
				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					// Warn only; the request is still sent.
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}

				// Create response message
				let responseMessageId = uuidv4();
				let responseMessage = {
					parentId: parentId,
					id: responseMessageId,
					childrenIds: [],
					role: 'assistant',
					content: '',
					model: model.id,
					modelName: model.name ?? model.id,
					userContext: null,
					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
				};

				// Add message to history and Set currentId to messageId
				history.messages[responseMessageId] = responseMessage;
				history.currentId = responseMessageId;

				// Append messageId to childrenIds of parent message
				if (parentId !== null) {
					history.messages[parentId].childrenIds = [
						...history.messages[parentId].childrenIds,
						responseMessageId
					];
				}

				await tick();

				// If the memory feature is enabled, pull relevant memories and
				// attach them as user context for the system prompt.
				let userContext = null;
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});

						if (res) {
							if (res.documents[0].length > 0) {
								// Format each memory as "<n>. [<date>]. <text>".
								userContext = res.documents.reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[index][0].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];
									acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
									return acc;
								}, []);
							}

							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;

				if (webSearchEnabled) {
					await getWebSearchResults(model.id, parentId, responseMessageId);
				}

				// Route to the backend matching the model's provider.
				if (model?.owned_by === 'openai') {
					await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);

	await chats.set(await getChatList(localStorage.token));
};
  371. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  372. const responseMessage = history.messages[responseId];
  373. responseMessage.status = {
  374. done: false,
  375. action: 'web_search',
  376. description: $i18n.t('Generating search query')
  377. };
  378. messages = messages;
  379. const prompt = history.messages[parentId].content;
  380. let searchQuery = prompt;
  381. if (prompt.length > 100) {
  382. searchQuery = await generateChatSearchQuery(model, prompt);
  383. if (!searchQuery) {
  384. toast.warning($i18n.t('No search query generated'));
  385. responseMessage.status = {
  386. ...responseMessage.status,
  387. done: true,
  388. error: true,
  389. description: 'No search query generated'
  390. };
  391. messages = messages;
  392. return;
  393. }
  394. }
  395. responseMessage.status = {
  396. ...responseMessage.status,
  397. description: $i18n.t("Searching the web for '{{searchQuery}}'", { searchQuery })
  398. };
  399. messages = messages;
  400. const results = await runWebSearch(localStorage.token, searchQuery);
  401. if (results === undefined) {
  402. toast.warning($i18n.t('No search results found'));
  403. responseMessage.status = {
  404. ...responseMessage.status,
  405. done: true,
  406. error: true,
  407. description: 'No search results found'
  408. };
  409. messages = messages;
  410. return;
  411. }
  412. responseMessage.status = {
  413. ...responseMessage.status,
  414. done: true,
  415. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  416. urls: results.filenames
  417. };
  418. if (responseMessage?.files ?? undefined === undefined) {
  419. responseMessage.files = [];
  420. }
  421. responseMessage.files.push({
  422. collection_name: results.collection_name,
  423. name: searchQuery,
  424. type: 'web_search_results',
  425. urls: results.filenames
  426. });
  427. messages = messages;
  428. };
/**
 * Stream a completion from the Ollama backend into the response message,
 * handling stop requests, the completion callback, desktop notifications,
 * and (for a brand-new chat) URL rewrite plus title generation.
 */
const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
	model = model.id;
	const responseMessage = history.messages[responseMessageId];

	// Wait until history/message have been updated
	await tick();

	// Scroll down
	scrollToBottom();

	// Build the request messages: optional system message (system prompt and/or
	// memory context), then the conversation with empty messages dropped.
	const messagesBody = [
		$settings.system || (responseMessage?.userContext ?? null)
			? {
					role: 'system',
					content: `${$settings?.system ?? ''}${
						responseMessage?.userContext ?? null
							? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
							: ''
					}`
			  }
			: undefined,
		...messages
	]
		.filter((message) => message?.content?.trim())
		.map((message, idx, arr) => {
			// Prepare the base message object
			const baseMessage = {
				role: message.role,
				content: message.content
			};

			// Extract and format image URLs if any exist
			// (strips the data-URL prefix up to the comma, leaving raw base64).
			const imageUrls = message.files
				?.filter((file) => file.type === 'image')
				.map((file) => file.url.slice(file.url.indexOf(',') + 1));

			// Add images array only if it contains elements
			if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
				baseMessage.images = imageUrls;
			}

			return baseMessage;
		});

	let lastImageIndex = -1;

	// Find the index of the last object with images
	messagesBody.forEach((item, index) => {
		if (item.images) {
			lastImageIndex = index;
		}
	});

	// Remove images from all but the last one
	messagesBody.forEach((item, index) => {
		if (index !== lastImageIndex) {
			delete item.images;
		}
	});

	// Collect attached documents/collections/search results for RAG.
	const docs = messages
		.filter((message) => message?.files ?? null)
		.map((message) =>
			message.files.filter((item) =>
				['doc', 'collection', 'web_search_results'].includes(item.type)
			)
		)
		.flat(1);

	const [res, controller] = await generateChatCompletion(localStorage.token, {
		model: model,
		messages: messagesBody,
		options: {
			...($settings.params ?? {}),
			// Stop strings are stored URL-encoded with escapes; decode them here.
			stop:
				$settings?.params?.stop ?? undefined
					? $settings.params.stop.map((str) =>
							decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
					  )
					: undefined,
			num_predict: $settings?.params?.max_tokens ?? undefined,
			repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
		},
		format: $settings.requestFormat ?? undefined,
		keep_alive: $settings.keepAlive ?? undefined,
		docs: docs.length > 0 ? docs : undefined,
		citations: docs.length > 0,
		chat_id: $chatId
	});

	if (res && res.ok) {
		console.log('controller', controller);

		// Read the response body as newline-delimited JSON chunks.
		const reader = res.body
			.pipeThrough(new TextDecoderStream())
			.pipeThrough(splitStream('\n'))
			.getReader();

		while (true) {
			const { value, done } = await reader.read();
			// Stop when the stream ends, the user hit stop, or the user
			// navigated to a different chat mid-stream.
			if (done || stopResponseFlag || _chatId !== $chatId) {
				responseMessage.done = true;
				messages = messages;

				if (stopResponseFlag) {
					controller.abort('User: Stop Response');
					await cancelOllamaRequest(localStorage.token, currentRequestId);
				} else {
					// Notify the backend the chat completed; it may return
					// post-processed messages to merge back into the history.
					const messages = createMessagesList(responseMessageId);
					const res = await chatCompleted(localStorage.token, {
						model: model,
						messages: messages.map((m) => ({
							id: m.id,
							role: m.role,
							content: m.content,
							timestamp: m.timestamp
						})),
						chat_id: $chatId
					}).catch((error) => {
						console.error(error);
						return null;
					});

					if (res !== null) {
						// Update chat history with the new messages
						for (const message of res.messages) {
							history.messages[message.id] = {
								...history.messages[message.id],
								originalContent: history.messages[message.id].content,
								...message
							};
						}
					}
				}

				currentRequestId = null;

				break;
			}

			try {
				let lines = value.split('\n');

				for (const line of lines) {
					if (line !== '') {
						console.log(line);
						let data = JSON.parse(line);

						if ('citations' in data) {
							responseMessage.citations = data.citations;
							continue;
						}

						if ('detail' in data) {
							// Backend reported an error mid-stream; handled in catch below.
							throw data;
						}

						if ('id' in data) {
							// First chunk carries the request id, kept for cancellation.
							console.log(data);
							currentRequestId = data.id;
						} else {
							if (data.done == false) {
								// Skip a leading bare newline; otherwise append the delta.
								if (responseMessage.content == '' && data.message.content == '\n') {
									continue;
								} else {
									responseMessage.content += data.message.content;
									messages = messages;
								}
							} else {
								// Final chunk: mark done and record generation stats.
								responseMessage.done = true;

								if (responseMessage.content == '') {
									responseMessage.error = true;
									responseMessage.content =
										'Oops! No text generated from Ollama, Please try again.';
								}

								responseMessage.context = data.context ?? null;
								responseMessage.info = {
									total_duration: data.total_duration,
									load_duration: data.load_duration,
									sample_count: data.sample_count,
									sample_duration: data.sample_duration,
									prompt_eval_count: data.prompt_eval_count,
									prompt_eval_duration: data.prompt_eval_duration,
									eval_count: data.eval_count,
									eval_duration: data.eval_duration
								};
								messages = messages;

								if ($settings.notificationEnabled && !document.hasFocus()) {
									// NOTE(review): `selectedModelfile` is not declared anywhere in
									// this file's visible scope — this branch looks like it would
									// throw a ReferenceError at runtime; confirm and either remove
									// the reference or reinstate the variable.
									const notification = new Notification(
										selectedModelfile
											? `${
													selectedModelfile.title.charAt(0).toUpperCase() +
													selectedModelfile.title.slice(1)
											  }`
											: `${model}`,
										{
											body: responseMessage.content,
											icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
										}
									);
								}

								if ($settings.responseAutoCopy) {
									copyToClipboard(responseMessage.content);
								}

								if ($settings.responseAutoPlayback) {
									await tick();
									document.getElementById(`speak-button-${responseMessage.id}`)?.click();
								}
							}
						}
					}
				}
			} catch (error) {
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
				}
				break;
			}

			if (autoScroll) {
				scrollToBottom();
			}
		}

		// Persist the updated conversation if the user is still on this chat.
		if ($chatId == _chatId) {
			if ($settings.saveChatHistory ?? true) {
				chat = await updateChatById(localStorage.token, _chatId, {
					messages: messages,
					history: history,
					models: selectedModels
				});
				await chats.set(await getChatList(localStorage.token));
			}
		}
	} else {
		// Request failed outright: surface the error on the response message.
		if (res !== null) {
			const error = await res.json();
			console.log(error);
			if ('detail' in error) {
				toast.error(error.detail);
				responseMessage.content = error.detail;
			} else {
				toast.error(error.error);
				responseMessage.content = error.error;
			}
		} else {
			toast.error(
				$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
			);
			responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
				provider: 'Ollama'
			});
		}

		responseMessage.error = true;
		responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
			provider: 'Ollama'
		});
		responseMessage.done = true;
		messages = messages;
	}

	stopResponseFlag = false;
	await tick();

	if (autoScroll) {
		scrollToBottom();
	}

	// First exchange in a new chat: move the URL to /c/<id> and generate a title.
	if (messages.length == 2 && messages.at(1).content !== '') {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);
		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}
};
/**
 * Stream a completion from an OpenAI-compatible backend into the response
 * message, handling citations, usage info, stop requests, the completion
 * callback, and (for a brand-new chat) URL rewrite plus title generation.
 */
const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
	const responseMessage = history.messages[responseMessageId];

	// Collect attached documents/collections/search results for RAG.
	const docs = messages
		.filter((message) => message?.files ?? null)
		.map((message) =>
			message.files.filter((item) =>
				['doc', 'collection', 'web_search_results'].includes(item.type)
			)
		)
		.flat(1);
	console.log(docs);

	scrollToBottom();

	try {
		const [res, controller] = await generateOpenAIChatCompletion(
			localStorage.token,
			{
				model: model.id,
				stream: true,
				// Request usage stats only when the model advertises support.
				stream_options:
					model.info?.meta?.capabilities?.usage ?? false
						? {
								include_usage: true
						  }
						: undefined,
				messages: [
					// Optional system message: system prompt and/or memory context.
					$settings.system || (responseMessage?.userContext ?? null)
						? {
								role: 'system',
								content: `${$settings?.system ?? ''}${
									responseMessage?.userContext ?? null
										? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
										: ''
								}`
						  }
						: undefined,
					...messages
				]
					.filter((message) => message?.content?.trim())
					.map((message, idx, arr) => ({
						role: message.role,
						// User messages with images use the multi-part content format;
						// the final message prefers its RAG-augmented `raContent` if set.
						...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
						message.role === 'user'
							? {
									content: [
										{
											type: 'text',
											text:
												arr.length - 1 !== idx
													? message.content
													: message?.raContent ?? message.content
										},
										...message.files
											.filter((file) => file.type === 'image')
											.map((file) => ({
												type: 'image_url',
												image_url: {
													url: file.url
												}
											}))
									]
							  }
							: {
									content:
										arr.length - 1 !== idx
											? message.content
											: message?.raContent ?? message.content
							  })
					})),
				seed: $settings?.params?.seed ?? undefined,
				// Stop strings are stored URL-encoded with escapes; decode them here.
				stop:
					$settings?.params?.stop ?? undefined
						? $settings.params.stop.map((str) =>
								decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
						  )
						: undefined,
				temperature: $settings?.params?.temperature ?? undefined,
				top_p: $settings?.params?.top_p ?? undefined,
				frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
				max_tokens: $settings?.params?.max_tokens ?? undefined,
				docs: docs.length > 0 ? docs : undefined,
				citations: docs.length > 0,
				chat_id: $chatId
			},
			`${OPENAI_API_BASE_URL}`
		);

		// Wait until history/message have been updated
		await tick();

		scrollToBottom();

		if (res && res.ok && res.body) {
			const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
			let lastUsage = null;

			for await (const update of textStream) {
				const { value, done, citations, error, usage } = update;
				if (error) {
					await handleOpenAIError(error, null, model, responseMessage);
					break;
				}
				// Stop when the stream ends, the user hit stop, or the user
				// navigated to a different chat mid-stream.
				if (done || stopResponseFlag || _chatId !== $chatId) {
					responseMessage.done = true;
					messages = messages;

					if (stopResponseFlag) {
						controller.abort('User: Stop Response');
					} else {
						// Notify the backend the chat completed; it may return
						// post-processed messages to merge back into the history.
						const messages = createMessagesList(responseMessageId);
						const res = await chatCompleted(localStorage.token, {
							model: model,
							messages: messages.map((m) => ({
								id: m.id,
								role: m.role,
								content: m.content,
								timestamp: m.timestamp
							})),
							chat_id: $chatId
						}).catch((error) => {
							console.error(error);
							return null;
						});

						if (res !== null) {
							// Update chat history with the new messages
							for (const message of res.messages) {
								history.messages[message.id] = { ...history.messages[message.id], ...message };
							}
						}
					}

					break;
				}

				if (usage) {
					lastUsage = usage;
				}

				if (citations) {
					responseMessage.citations = citations;
					continue;
				}

				// Skip a leading bare newline; otherwise append the delta.
				if (responseMessage.content == '' && value == '\n') {
					continue;
				} else {
					responseMessage.content += value;
					messages = messages;
				}

				if ($settings.notificationEnabled && !document.hasFocus()) {
					const notification = new Notification(`OpenAI ${model}`, {
						body: responseMessage.content,
						icon: `${WEBUI_BASE_URL}/static/favicon.png`
					});
				}

				if ($settings.responseAutoCopy) {
					copyToClipboard(responseMessage.content);
				}

				if ($settings.responseAutoPlayback) {
					await tick();
					document.getElementById(`speak-button-${responseMessage.id}`)?.click();
				}

				if (autoScroll) {
					scrollToBottom();
				}
			}

			if (lastUsage) {
				responseMessage.info = { ...lastUsage, openai: true };
			}

			// Persist the updated conversation if the user is still on this chat.
			if ($chatId == _chatId) {
				if ($settings.saveChatHistory ?? true) {
					chat = await updateChatById(localStorage.token, _chatId, {
						models: selectedModels,
						messages: messages,
						history: history
					});
					await chats.set(await getChatList(localStorage.token));
				}
			}
		} else {
			await handleOpenAIError(null, res, model, responseMessage);
		}
	} catch (error) {
		await handleOpenAIError(error, null, model, responseMessage);
	}
	messages = messages;

	stopResponseFlag = false;
	await tick();

	if (autoScroll) {
		scrollToBottom();
	}

	// First exchange in a new chat: move the URL to /c/<id> and generate a title.
	if (messages.length == 2) {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);

		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}
};
  863. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  864. let errorMessage = '';
  865. let innerError;
  866. if (error) {
  867. innerError = error;
  868. } else if (res !== null) {
  869. innerError = await res.json();
  870. }
  871. console.error(innerError);
  872. if ('detail' in innerError) {
  873. toast.error(innerError.detail);
  874. errorMessage = innerError.detail;
  875. } else if ('error' in innerError) {
  876. if ('message' in innerError.error) {
  877. toast.error(innerError.error.message);
  878. errorMessage = innerError.error.message;
  879. } else {
  880. toast.error(innerError.error);
  881. errorMessage = innerError.error;
  882. }
  883. } else if ('message' in innerError) {
  884. toast.error(innerError.message);
  885. errorMessage = innerError.message;
  886. }
  887. responseMessage.error = true;
  888. responseMessage.content =
  889. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  890. provider: model.name ?? model.id
  891. }) +
  892. '\n' +
  893. errorMessage;
  894. responseMessage.done = true;
  895. messages = messages;
  896. };
  897. const stopResponse = () => {
  898. stopResponseFlag = true;
  899. console.log('stopResponse');
  900. };
  901. const regenerateResponse = async (message) => {
  902. console.log('regenerateResponse');
  903. if (messages.length != 0) {
  904. let userMessage = history.messages[message.parentId];
  905. let userPrompt = userMessage.content;
  906. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  907. await sendPrompt(userPrompt, userMessage.id);
  908. } else {
  909. await sendPrompt(userPrompt, userMessage.id, message.model);
  910. }
  911. }
  912. };
  913. const continueGeneration = async () => {
  914. console.log('continueGeneration');
  915. const _chatId = JSON.parse(JSON.stringify($chatId));
  916. if (messages.length != 0 && messages.at(-1).done == true) {
  917. const responseMessage = history.messages[history.currentId];
  918. responseMessage.done = false;
  919. await tick();
  920. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  921. if (model) {
  922. if (model?.owned_by === 'openai') {
  923. await sendPromptOpenAI(
  924. model,
  925. history.messages[responseMessage.parentId].content,
  926. responseMessage.id,
  927. _chatId
  928. );
  929. } else
  930. await sendPromptOllama(
  931. model,
  932. history.messages[responseMessage.parentId].content,
  933. responseMessage.id,
  934. _chatId
  935. );
  936. }
  937. } else {
  938. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  939. }
  940. };
  941. const generateChatTitle = async (userPrompt) => {
  942. if ($settings?.title?.auto ?? true) {
  943. const model = $models.find((model) => model.id === selectedModels[0]);
  944. const titleModelId =
  945. model?.owned_by === 'openai' ?? false
  946. ? $settings?.title?.modelExternal ?? selectedModels[0]
  947. : $settings?.title?.model ?? selectedModels[0];
  948. const titleModel = $models.find((model) => model.id === titleModelId);
  949. console.log(titleModel);
  950. const title = await generateTitle(
  951. localStorage.token,
  952. $settings?.title?.prompt ??
  953. $i18n.t(
  954. "Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
  955. ) + ' {{prompt}}',
  956. titleModelId,
  957. userPrompt,
  958. titleModel?.owned_by === 'openai' ?? false
  959. ? `${OPENAI_API_BASE_URL}`
  960. : `${OLLAMA_API_BASE_URL}/v1`
  961. );
  962. return title;
  963. } else {
  964. return `${userPrompt}`;
  965. }
  966. };
  967. const generateChatSearchQuery = async (modelId: string, prompt: string) => {
  968. const model = $models.find((model) => model.id === modelId);
  969. const taskModelId =
  970. model?.owned_by === 'openai' ?? false
  971. ? $settings?.title?.modelExternal ?? modelId
  972. : $settings?.title?.model ?? modelId;
  973. const taskModel = $models.find((model) => model.id === taskModelId);
  974. const previousMessages = messages
  975. .filter((message) => message.role === 'user')
  976. .map((message) => message.content);
  977. return await generateSearchQuery(
  978. localStorage.token,
  979. taskModelId,
  980. previousMessages,
  981. prompt,
  982. taskModel?.owned_by === 'openai' ?? false
  983. ? `${OPENAI_API_BASE_URL}`
  984. : `${OLLAMA_API_BASE_URL}/v1`
  985. );
  986. };
  987. const setChatTitle = async (_chatId, _title) => {
  988. if (_chatId === $chatId) {
  989. title = _title;
  990. }
  991. if ($settings.saveChatHistory ?? true) {
  992. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  993. await chats.set(await getChatList(localStorage.token));
  994. }
  995. };
  996. const getTags = async () => {
  997. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  998. return [];
  999. });
  1000. };
  1001. const addTag = async (tagName) => {
  1002. const res = await addTagById(localStorage.token, $chatId, tagName);
  1003. tags = await getTags();
  1004. chat = await updateChatById(localStorage.token, $chatId, {
  1005. tags: tags
  1006. });
  1007. _tags.set(await getAllChatTags(localStorage.token));
  1008. };
  1009. const deleteTag = async (tagName) => {
  1010. const res = await deleteTagById(localStorage.token, $chatId, tagName);
  1011. tags = await getTags();
  1012. chat = await updateChatById(localStorage.token, $chatId, {
  1013. tags: tags
  1014. });
  1015. _tags.set(await getAllChatTags(localStorage.token));
  1016. };
  1017. </script>
<!-- Browser tab title: chat title truncated to 30 chars + app name, or app name alone.
     (Comment sits outside <title> because title content is rendered as raw text.) -->
<svelte:head>
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>
<!-- Render once loaded: immediately for a new chat (no chatIdProp), or after the
     existing chat finishes loading. -->
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="min-h-screen max-h-screen {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>
		<!-- Announcement banners: shown only on a fresh chat with at most one model;
		     dismissed banner ids are remembered in localStorage and pruned against
		     the currently-active banner set. -->
		{#if $banners.length > 0 && !$chatId && selectedModels.length <= 1}
			<div
				class="absolute top-[4.25rem] w-full {$showSidebar ? 'md:max-w-[calc(100%-260px)]' : ''}"
			>
				<div class=" flex flex-col gap-1 w-full">
					{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
						<Banner
							{banner}
							on:dismiss={(e) => {
								const bannerId = e.detail;
								localStorage.setItem(
									'dismissedBannerIds',
									JSON.stringify(
										[
											bannerId,
											...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
										].filter((id) => $banners.find((b) => b.id === id))
									)
								);
							}}
						/>
					{/each}
				</div>
			</div>
		{/if}
		<div class="flex flex-col flex-auto">
			<!-- Scrollable message list; autoScroll stays enabled while the view is
			     pinned to the bottom (5px slack for sub-pixel rounding). -->
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
				id="messages-container"
				bind:this={messagesContainerElement}
				on:scroll={(e) => {
					autoScroll =
						messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
						messagesContainerElement.clientHeight + 5;
				}}
			>
				<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
					<Messages
						chatId={$chatId}
						{selectedModels}
						{processing}
						bind:history
						bind:messages
						bind:autoScroll
						bind:prompt
						bottomPadding={files.length > 0}
						{sendPrompt}
						{continueGeneration}
						{regenerateResponse}
					/>
				</div>
			</div>
		</div>
	</div>
	<MessageInput
		bind:files
		bind:prompt
		bind:autoScroll
		bind:webSearchEnabled
		bind:atSelectedModel
		{selectedModels}
		{messages}
		{submitPrompt}
		{stopResponse}
	/>
{/if}