index.ts

import { OLLAMA_API_BASE_URL } from '$lib/constants';

// Fetch the Ollama server version; falls back to '0' when no version is reported.
export const getOllamaVersion = async (
	base_url: string = OLLAMA_API_BASE_URL,
	token: string = ''
) => {
	let error = null;

	const res = await fetch(`${base_url}/version`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			// Only attach the Authorization header when a token is provided.
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res?.version ?? '0';
};
// List the models available on the Ollama server (`/tags` endpoint); returns an empty array on a missing field.
export const getOllamaModels = async (
	base_url: string = OLLAMA_API_BASE_URL,
	token: string = ''
) => {
	let error = null;

	const res = await fetch(`${base_url}/tags`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res?.models ?? [];
};
// Ask the given model for a short title for a prompt (non-streaming request).
// Note: the required `model` and `prompt` parameters come after the defaulted ones,
// so callers must supply every argument (passing undefined to use a default).
export const generateTitle = async (
	base_url: string = OLLAMA_API_BASE_URL,
	token: string = '',
	model: string,
	prompt: string
) => {
	let error = null;

	const res = await fetch(`${base_url}/generate`, {
		method: 'POST',
		headers: {
			'Content-Type': 'text/event-stream',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify({
			model: model,
			prompt: `Generate a brief 3-5 word title for this question, excluding the term 'title.' Then, please reply with only the title: ${prompt}`,
			stream: false
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res?.response ?? 'New Chat';
};
// Request a chat completion. The raw fetch Response is returned unparsed so the
// caller can consume the streamed body itself.
export const generateChatCompletion = async (
	base_url: string = OLLAMA_API_BASE_URL,
	token: string = '',
	body: object
) => {
	let error = null;

	const res = await fetch(`${base_url}/chat`, {
		method: 'POST',
		headers: {
			'Content-Type': 'text/event-stream',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify(body)
	}).catch((err) => {
		error = err;
		return null;
	});

	if (error) {
		throw error;
	}

	return res;
};
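
/*
 * Usage sketch (illustrative only, not part of this module): how a caller might
 * combine these helpers inside an async function. The `localStorage.token` lookup,
 * the message shape, and the stream handling below are assumptions about the
 * surrounding app, not something this file defines.
 *
 * const token = localStorage.token ?? '';
 * const version = await getOllamaVersion(OLLAMA_API_BASE_URL, token);
 * const models = await getOllamaModels(OLLAMA_API_BASE_URL, token);
 *
 * const res = await generateChatCompletion(OLLAMA_API_BASE_URL, token, {
 * 	model: models[0]?.name,
 * 	messages: [{ role: 'user', content: 'Hello!' }]
 * });
 *
 * // The completion arrives incrementally; decode each chunk as raw text.
 * if (res?.body) {
 * 	const reader = res.body.getReader();
 * 	const decoder = new TextDecoder();
 * 	for (;;) {
 * 		const { value, done } = await reader.read();
 * 		if (done) break;
 * 		console.log(decoder.decode(value));
 * 	}
 * }
 */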