ソースを参照

fix: ollama custom url support

Timothy J. Baek 1 年前
コミット
3b3b7fb46a
2 ファイル変更、39 行追加、34 行削除
  1. +2 −2
      src/lib/components/chat/SettingsModal.svelte
  2. +37 −32
      src/routes/+page.svelte

+ 2 - 2
src/lib/components/chat/SettingsModal.svelte

@@ -65,7 +65,7 @@
 		if (API_BASE_URL === '') {
 			API_BASE_URL = BUILD_TIME_API_BASE_URL;
 		}
-		const res = await getModelTags(API_BASE_URL);
+		const res = await getModelTags(API_BASE_URL, 'ollama');
 
 		if (res) {
 			toast.success('Server connection verified');
@@ -774,7 +774,7 @@
 							<div>
 								<a href="https://github.com/ollama-webui/ollama-webui">
 									<img
-										alt="followers"
+										alt="Github Repo"
 										src="https://img.shields.io/github/stars/ollama-webui/ollama-webui?style=social&label=Star us on Github"
 									/>
 								</a>

+ 37 - 32
src/routes/+page.svelte

@@ -236,6 +236,7 @@
 		console.log(updated);
 		settings = { ...settings, ...updated };
 		localStorage.setItem('settings', JSON.stringify(settings));
+		API_BASE_URL = updated?.API_BASE_URL ?? API_BASE_URL;
 		await getModelTags();
 	};
 
@@ -374,7 +375,7 @@
 	// Ollama functions
 	//////////////////////////
 
-	const getModelTags = async (url = null) => {
+	const getModelTags = async (url = null, type = 'all') => {
 		const res = await fetch(`${url === null ? API_BASE_URL : url}/tags`, {
 			method: 'GET',
 			headers: {
@@ -394,43 +395,47 @@
 
 		console.log(res);
 
-		if (settings.OPENAI_API_KEY) {
-			// Validate OPENAI_API_KEY
-			const openaiModelRes = await fetch(`https://api.openai.com/v1/models`, {
-				method: 'GET',
-				headers: {
-					'Content-Type': 'application/json',
-					Authorization: `Bearer ${settings.OPENAI_API_KEY}`
-				}
-			})
-				.then(async (res) => {
-					if (!res.ok) throw await res.json();
-					return res.json();
+		if (type === 'all') {
+			if (settings.OPENAI_API_KEY) {
+				// Validate OPENAI_API_KEY
+				const openaiModelRes = await fetch(`https://api.openai.com/v1/models`, {
+					method: 'GET',
+					headers: {
+						'Content-Type': 'application/json',
+						Authorization: `Bearer ${settings.OPENAI_API_KEY}`
+					}
 				})
-				.catch((error) => {
-					console.log(error);
-					toast.error(`OpenAI: ${error?.error?.message ?? 'Network Problem'}`);
-					return null;
-				});
-			const openaiModels = openaiModelRes?.data ?? null;
-
-			if (openaiModels) {
-				models = [
-					...(res?.models ?? []),
-					{ name: 'hr' },
-
-					...openaiModels
-						.map((model) => ({ name: model.id, label: 'OpenAI' }))
-						.filter((model) => model.name.includes('gpt'))
-				];
+					.then(async (res) => {
+						if (!res.ok) throw await res.json();
+						return res.json();
+					})
+					.catch((error) => {
+						console.log(error);
+						toast.error(`OpenAI: ${error?.error?.message ?? 'Network Problem'}`);
+						return null;
+					});
+				const openaiModels = openaiModelRes?.data ?? null;
+
+				if (openaiModels) {
+					models = [
+						...(res?.models ?? []),
+						{ name: 'hr' },
+
+						...openaiModels
+							.map((model) => ({ name: model.id, label: 'OpenAI' }))
+							.filter((model) => model.name.includes('gpt'))
+					];
+				} else {
+					models = res?.models ?? [];
+				}
 			} else {
 				models = res?.models ?? [];
 			}
+
+			return models;
 		} else {
-			models = res?.models ?? [];
+			return res?.models ?? null;
 		}
-
-		return models;
 	};
 
 	const sendPrompt = async (userPrompt) => {