index.ts

import { OPENAI_API_BASE_URL } from '$lib/constants';
import { promptTemplate } from '$lib/utils';
import { type Model, models, settings } from '$lib/stores';

export const getOpenAIUrls = async (token: string = '') => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/urls`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res.OPENAI_API_BASE_URLS;
};

export const updateOpenAIUrls = async (token: string = '', urls: string[]) => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/urls/update`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		},
		body: JSON.stringify({
			urls: urls
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res.OPENAI_API_BASE_URLS;
};

export const getOpenAIKeys = async (token: string = '') => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/keys`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res.OPENAI_API_KEYS;
};

export const updateOpenAIKeys = async (token: string = '', keys: string[]) => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/keys/update`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		},
		body: JSON.stringify({
			keys: keys
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res.OPENAI_API_KEYS;
};
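
// Usage sketch (illustrative, not executed): the four helpers above share one
// pattern, so an admin settings panel might persist edits like this. The
// `adminToken` variable and the example URL are placeholders, not part of this module.
//
//   const urls = await getOpenAIUrls(adminToken);
//   await updateOpenAIUrls(adminToken, [...urls, 'https://api.openai.com/v1']);
//   const keys = await getOpenAIKeys(adminToken);
//   await updateOpenAIKeys(adminToken, keys); // throws the server's `detail` string on failure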

export const getOpenAIModels = async (token: string = '') => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/models`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			error = `OpenAI: ${err?.error?.message ?? 'Network Problem'}`;
			return [];
		});

	if (error) {
		throw error;
	}

	const models = Array.isArray(res) ? res : res?.data ?? null;

	return models
		? models
				.map((model) => ({ id: model.id, name: model.name ?? model.id, external: true }))
				.sort((a, b) => {
					return a.name.localeCompare(b.name);
				})
		: models;
};

export const getOpenAIModelsDirect = async (
	base_url: string = 'https://api.openai.com/v1',
	api_key: string = ''
) => {
	let error = null;

	const res = await fetch(`${base_url}/models`, {
		method: 'GET',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${api_key}`
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			error = `OpenAI: ${err?.error?.message ?? 'Network Problem'}`;
			return null;
		});

	if (error) {
		throw error;
	}

	const models = Array.isArray(res) ? res : res?.data ?? null;

	// Fall back to an empty list so the chain below never runs on null
	// when the response has an unexpected shape.
	return (models ?? [])
		.map((model) => ({ id: model.id, name: model.name ?? model.id, external: true }))
		.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true))
		.sort((a, b) => {
			return a.name.localeCompare(b.name);
		});
};
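
// Usage sketch (illustrative, not executed): querying an OpenAI-compatible
// endpoint directly, bypassing the backend proxy. The key is a placeholder.
//
//   const models = await getOpenAIModelsDirect('https://api.openai.com/v1', 'sk-...');
//   // => [{ id: 'gpt-4', name: 'gpt-4', external: true }, ...]
//   // (for *openai* hosts, non-"gpt" models are filtered out)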

export const generateOpenAIChatCompletion = async (
	token: string = '',
	body: object,
	url: string = OPENAI_API_BASE_URL
): Promise<[Response | null, AbortController]> => {
	const controller = new AbortController();
	let error = null;

	const res = await fetch(`${url}/chat/completions`, {
		signal: controller.signal,
		method: 'POST',
		headers: {
			Authorization: `Bearer ${token}`,
			'Content-Type': 'application/json'
		},
		body: JSON.stringify(body)
	}).catch((err) => {
		console.log(err);
		error = err;
		return null;
	});

	if (error) {
		throw error;
	}

	return [res, controller];
};
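
// Usage sketch (illustrative, not executed): the returned AbortController lets a
// caller cancel a streaming completion mid-flight. `token` and the request body
// shown here are placeholders.
//
//   const [res, controller] = await generateOpenAIChatCompletion(token, {
//     model: 'gpt-3.5-turbo',
//     stream: true,
//     messages: [{ role: 'user', content: 'Hello!' }]
//   });
//   // later, e.g. from a "stop" button handler:
//   controller.abort();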

export const synthesizeOpenAISpeech = async (
	token: string = '',
	speaker: string = 'alloy',
	text: string = '',
	model: string = 'tts-1'
) => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/audio/speech`, {
		method: 'POST',
		headers: {
			Authorization: `Bearer ${token}`,
			'Content-Type': 'application/json'
		},
		body: JSON.stringify({
			model: model,
			input: text,
			voice: speaker
		})
	}).catch((err) => {
		console.log(err);
		error = err;
		return null;
	});

	if (error) {
		throw error;
	}

	return res;
};
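
// Usage sketch (illustrative, not executed): the raw Response can be turned into
// a playable blob URL. The surrounding Audio element is an assumption about the
// caller, not part of this API.
//
//   const res = await synthesizeOpenAISpeech(token, 'alloy', 'Hello there!');
//   if (res) {
//     const audio = new Audio(URL.createObjectURL(await res.blob()));
//     audio.play();
//   }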

export const generateTitle = async (
	token: string = '',
	template: string,
	model: string,
	prompt: string,
	url: string = OPENAI_API_BASE_URL
) => {
	let error = null;

	template = promptTemplate(template, prompt);
	console.log(template);

	const res = await fetch(`${url}/chat/completions`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify({
			model: model,
			messages: [
				{
					role: 'user',
					content: template
				}
			],
			stream: false,
			// Restricting the max tokens to 50 to avoid long titles
			max_tokens: 50
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res?.choices?.[0]?.message?.content?.replace(/["']/g, '') ?? 'New Chat';
};
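
// Usage sketch (illustrative, not executed): the template is expected to carry a
// placeholder that promptTemplate substitutes the prompt into ({{prompt}} here is
// an assumption about the template format used by $lib/utils).
//
//   const title = await generateTitle(
//     token,
//     'Create a concise, 3-5 word title for: {{prompt}}',
//     'gpt-3.5-turbo',
//     'How do black holes evaporate?'
//   );
//   // => e.g. 'Black Hole Evaporation', or 'New Chat' if the request fails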

export const generateSearchQuery = async (
	token: string = '',
	model: string,
	previousMessages: string[],
	prompt: string,
	url: string = OPENAI_API_BASE_URL
): Promise<string | undefined> => {
	let error = null;

	// TODO: Allow users to specify the prompt

	// Get the current date in the format "January 20, 2024"
	const currentDate = new Intl.DateTimeFormat('en-US', {
		year: 'numeric',
		month: 'long',
		day: '2-digit'
	}).format(new Date());

	// ...and yesterday's date in the same format (the few-shot "news" example below uses it)
	const yesterdayDate = new Intl.DateTimeFormat('en-US', {
		year: 'numeric',
		month: 'long',
		day: '2-digit'
	}).format(new Date(Date.now() - 24 * 60 * 60 * 1000));

	const res = await fetch(`${url}/chat/completions`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify({
			model: model,
			// Few shot prompting
			messages: [
				{
					role: 'assistant',
					content: `You are tasked with generating web search queries. Give me an appropriate query to answer my question for google search. Answer with only the query. Today is ${currentDate}.`
				},
				{
					role: 'user',
					content: `Previous Questions:
- Who is the president of France?
Current Question: What about Mexico?`
				},
				{
					role: 'assistant',
					content: 'President of Mexico'
				},
				{
					role: 'user',
					content: `Previous Questions:
- When is the next formula 1 grand prix?
Current Question: Where is it being hosted?`
				},
				{
					role: 'assistant',
					content: 'location of next formula 1 grand prix'
				},
				{
					role: 'user',
					content: 'Current Question: What type of printhead does the Epson F2270 DTG printer use?'
				},
				{
					role: 'assistant',
					content: 'Epson F2270 DTG printer printhead'
				},
				{
					role: 'user',
					content: 'What was in the news yesterday?'
				},
				{
					role: 'assistant',
					content: `news ${yesterdayDate}`
				},
				{
					role: 'user',
					content: 'What is the current weather in Paris?'
				},
				{
					role: 'assistant',
					content: `weather in Paris ${currentDate}`
				},
				{
					role: 'user',
					content:
						(previousMessages.length > 0
							? `Previous Questions:\n${previousMessages.join('\n')}\n\n`
							: '') + `Current Question: ${prompt}`
				}
			],
			stream: false,
			// Restricting the max tokens to 30 to avoid long search queries
			max_tokens: 30
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			}
			return undefined;
		});

	if (error) {
		throw error;
	}

	return res?.choices?.[0]?.message?.content?.replace(/["']/g, '') ?? undefined;
};
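
// Usage sketch (illustrative, not executed): the few-shot messages above rewrite
// a follow-up question into a standalone, date-aware search query. Values below
// are placeholders, and the returned query is model-dependent.
//
//   const query = await generateSearchQuery(
//     token,
//     'gpt-3.5-turbo',
//     ['Who is the president of France?'],
//     'How old is he?'
//   );
//   // => e.g. 'age of the president of France', or undefined on failure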