// index.ts

import { OPENAI_API_BASE_URL } from '$lib/constants';
import { promptTemplate } from '$lib/utils';
import { type Model, models, settings } from '$lib/stores';
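
// Fetch the current OpenAI API configuration (GET /config).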
export const getOpenAIConfig = async (token: string = '') => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/config`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res;
};
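
// Enable or disable the OpenAI API integration (POST /config/update).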
export const updateOpenAIConfig = async (token: string = '', enable_openai_api: boolean) => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/config/update`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		},
		body: JSON.stringify({
			enable_openai_api: enable_openai_api
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res;
};
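
// Retrieve the configured OpenAI-compatible API base URLs (GET /urls).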
export const getOpenAIUrls = async (token: string = '') => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/urls`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res.OPENAI_API_BASE_URLS;
};
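
// Replace the configured OpenAI-compatible API base URLs (POST /urls/update).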
export const updateOpenAIUrls = async (token: string = '', urls: string[]) => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/urls/update`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		},
		body: JSON.stringify({
			urls: urls
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res.OPENAI_API_BASE_URLS;
};
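
// Retrieve the stored OpenAI API keys (GET /keys).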
export const getOpenAIKeys = async (token: string = '') => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/keys`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res.OPENAI_API_KEYS;
};
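
// Replace the stored OpenAI API keys (POST /keys/update).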
export const updateOpenAIKeys = async (token: string = '', keys: string[]) => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/keys/update`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		},
		body: JSON.stringify({
			keys: keys
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res.OPENAI_API_KEYS;
};
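
// List the models exposed by the OpenAI endpoint, normalized to { id, name, external, custom_info } and sorted by name.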
export const getOpenAIModels = async (token: string = '') => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/models`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			error = `OpenAI: ${err?.error?.message ?? 'Network Problem'}`;
			return [];
		});

	if (error) {
		throw error;
	}

	const models = Array.isArray(res) ? res : res?.data ?? null;

	return models
		? models
				.map((model) => ({
					id: model.id,
					name: model.name ?? model.id,
					external: true,
					custom_info: model.custom_info
				}))
				.sort((a, b) => {
					return a.name.localeCompare(b.name);
				})
		: models;
};
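
// Query an arbitrary OpenAI-compatible endpoint directly for its model list; when the base URL contains 'openai', only models whose name contains 'gpt' are kept.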
export const getOpenAIModelsDirect = async (
	base_url: string = 'https://api.openai.com/v1',
	api_key: string = ''
) => {
	let error = null;

	const res = await fetch(`${base_url}/models`, {
		method: 'GET',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${api_key}`
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			error = `OpenAI: ${err?.error?.message ?? 'Network Problem'}`;
			return null;
		});

	if (error) {
		throw error;
	}

	const models = Array.isArray(res) ? res : res?.data ?? null;

	// Fall back to an empty list if the response contained no model data
	return (models ?? [])
		.map((model) => ({ id: model.id, name: model.name ?? model.id, external: true }))
		.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true))
		.sort((a, b) => {
			return a.name.localeCompare(b.name);
		});
};
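
// Start a chat completion request and return the raw Response (useful for streaming) along with an AbortController that can cancel it.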
export const generateOpenAIChatCompletion = async (
	token: string = '',
	body: object,
	url: string = OPENAI_API_BASE_URL
): Promise<[Response | null, AbortController]> => {
	const controller = new AbortController();
	let error = null;

	const res = await fetch(`${url}/chat/completions`, {
		signal: controller.signal,
		method: 'POST',
		headers: {
			Authorization: `Bearer ${token}`,
			'Content-Type': 'application/json'
		},
		body: JSON.stringify(body)
	}).catch((err) => {
		console.log(err);
		error = err;
		return null;
	});

	if (error) {
		throw error;
	}

	return [res, controller];
};
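
// Request synthesized speech for the given text, voice, and model from the OpenAI audio endpoint (POST /audio/speech).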
export const synthesizeOpenAISpeech = async (
	token: string = '',
	speaker: string = 'alloy',
	text: string = '',
	model: string = 'tts-1'
) => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/audio/speech`, {
		method: 'POST',
		headers: {
			Authorization: `Bearer ${token}`,
			'Content-Type': 'application/json'
		},
		body: JSON.stringify({
			model: model,
			input: text,
			voice: speaker
		})
	}).catch((err) => {
		console.log(err);
		error = err;
		return null;
	});

	if (error) {
		throw error;
	}

	return res;
};
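
// Ask the given model to generate a short chat title for the prompt; falls back to 'New Chat' on failure.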
export const generateTitle = async (
	token: string = '',
	template: string,
	model: string,
	prompt: string,
	url: string = OPENAI_API_BASE_URL
) => {
	let error = null;

	template = promptTemplate(template, prompt);

	console.log(template);

	const res = await fetch(`${url}/chat/completions`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify({
			model: model,
			messages: [
				{
					role: 'user',
					content: template
				}
			],
			stream: false,
			// Restricting the max tokens to 50 to avoid long titles
			max_tokens: 50
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res?.choices?.[0]?.message?.content?.replace(/["']/g, '') ?? 'New Chat';
};
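
// Ask the given model to turn the conversation into a concise web search query using few-shot examples; returns undefined on failure.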
export const generateSearchQuery = async (
	token: string = '',
	model: string,
	previousMessages: string[],
	prompt: string,
	url: string = OPENAI_API_BASE_URL
): Promise<string | undefined> => {
	let error = null;

	// TODO: Allow users to specify the prompt

	// Get the current date in the format "January 20, 2024"
	const currentDate = new Intl.DateTimeFormat('en-US', {
		year: 'numeric',
		month: 'long',
		day: '2-digit'
	}).format(new Date());
	// Yesterday's date in the same format
	const yesterdayDate = new Intl.DateTimeFormat('en-US', {
		year: 'numeric',
		month: 'long',
		day: '2-digit'
	}).format(new Date(Date.now() - 24 * 60 * 60 * 1000));

	const res = await fetch(`${url}/chat/completions`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify({
			model: model,
			// Few shot prompting
			messages: [
				{
					role: 'assistant',
					content: `You are tasked with generating web search queries. Give me an appropriate query to answer my question for google search. Answer with only the query. Today is ${currentDate}.`
				},
				{
					role: 'user',
					content: `Previous Questions:
- Who is the president of France?
Current Question: What about Mexico?`
				},
				{
					role: 'assistant',
					content: 'President of Mexico'
				},
				{
					role: 'user',
					content: `Previous questions:
- When is the next formula 1 grand prix?
Current Question: Where is it being hosted?`
				},
				{
					role: 'assistant',
					content: 'location of next formula 1 grand prix'
				},
				{
					role: 'user',
					content: 'Current Question: What type of printhead does the Epson F2270 DTG printer use?'
				},
				{
					role: 'assistant',
					content: 'Epson F2270 DTG printer printhead'
				},
				{
					role: 'user',
					content: 'What were the news yesterday?'
				},
				{
					role: 'assistant',
					content: `news ${yesterdayDate}`
				},
				{
					role: 'user',
					content: 'What is the current weather in Paris?'
				},
				{
					role: 'assistant',
					content: `weather in Paris ${currentDate}`
				},
				{
					role: 'user',
					content:
						(previousMessages.length > 0
							? `Previous Questions:\n${previousMessages.join('\n')}\n\n`
							: '') + `Current Question: ${prompt}`
				}
			],
			stream: false,
			// Restricting the max tokens to 30 to avoid long search queries
			max_tokens: 30
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			}
			return undefined;
		});

	if (error) {
		throw error;
	}

	return res?.choices?.[0]?.message?.content?.replace(/["']/g, '') ?? undefined;
};