
feat: sort model list by alphabetical order

Timothy J. Baek 1 year ago
parent commit f6640c4e8b
3 changed files with 17 additions and 27 deletions
  1. src/lib/apis/ollama/index.ts (+3 -1)
  2. src/lib/apis/openai/index.ts (+4 -1)
  3. src/lib/components/chat/SettingsModal.svelte (+10 -25)

+ 3 - 1
src/lib/apis/ollama/index.ts

@@ -67,7 +67,9 @@ export const getOllamaModels = async (
 		throw error;
 	}
 
-	return res?.models ?? [];
+	return (res?.models ?? []).sort((a, b) => {
+		return a.name.localeCompare(b.name);
+	});
 };
 
 export const generateTitle = async (
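The hunk above sorts the Ollama model list by name before returning it. As a rough standalone sketch of that comparator (the simplified Model shape below is an assumption for illustration; the real /tags response carries more fields):

// Minimal sketch, assuming a reduced Model shape.
type Model = { name: string };

const sortByName = (models: Model[]): Model[] =>
	[...models].sort((a, b) => a.name.localeCompare(b.name));

// localeCompare gives locale-aware ordering, comparing base letters before case,
// so e.g. 'codellama' sorts ahead of 'Llama2' rather than after all uppercase names.

The spread copy here avoids mutating the input; the committed code sorts the response array directly, which is equivalent since that array is local to the function.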

+ 4 - 1
src/lib/apis/openai/index.ts

@@ -29,5 +29,8 @@ export const getOpenAIModels = async (
 
 	return models
 		.map((model) => ({ name: model.id, external: true }))
-		.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true));
+		.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true))
+		.sort((a, b) => {
+			return a.name.localeCompare(b.name);
+		});
 };
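The OpenAI list gets the same treatment: the comparator is chained after the existing gpt filter. A small sketch of the pipeline with a hypothetical response (the sample ids are illustrative only):

// Hypothetical input mirroring the shape mapped above.
const models = [{ id: 'whisper-1' }, { id: 'gpt-4' }, { id: 'gpt-3.5-turbo' }];
const base_url = 'https://api.openai.com/v1';

const visible = models
	.map((model) => ({ name: model.id, external: true }))
	.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true))
	.sort((a, b) => a.name.localeCompare(b.name));

// visible => [{ name: 'gpt-3.5-turbo', external: true }, { name: 'gpt-4', external: true }]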

+ 10 - 25
src/lib/components/chat/SettingsModal.svelte

@@ -7,7 +7,7 @@
 	import { config, models, settings, user, chats } from '$lib/stores';
 	import { splitStream, getGravatarURL } from '$lib/utils';
 
-	import { getOllamaVersion } from '$lib/apis/ollama';
+	import { getOllamaVersion, getOllamaModels } from '$lib/apis/ollama';
 	import { createNewChat, deleteAllChats, getAllChats, getChatList } from '$lib/apis/chats';
 	import {
 		WEB_UI_VERSION,
@@ -545,30 +545,15 @@
 
 	const getModels = async (url = '', type = 'all') => {
 		let models = [];
-		const res = await fetch(`${url ? url : $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/tags`, {
-			method: 'GET',
-			headers: {
-				Accept: 'application/json',
-				'Content-Type': 'application/json',
-				...($settings.authHeader && { Authorization: $settings.authHeader }),
-				...($user && { Authorization: `Bearer ${localStorage.token}` })
-			}
-		})
-			.then(async (res) => {
-				if (!res.ok) throw await res.json();
-				return res.json();
-			})
-			.catch((error) => {
-				console.log(error);
-				if ('detail' in error) {
-					toast.error(error.detail);
-				} else {
-					toast.error('Server connection failed');
-				}
-				return null;
-			});
-		console.log(res);
-		models.push(...(res?.models ?? []));
+		models.push(
+			...(await getOllamaModels(
+				url ? url : $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
+				localStorage.token
+			).catch((error) => {
+				toast.error(error);
+				return [];
+			}))
+		);
 
 		// If OpenAI API Key exists
 		if (type === 'all' && $settings.OPENAI_API_KEY) {
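With this change the modal delegates the fetch to the shared getOllamaModels helper instead of calling /tags inline, and falls back to an empty list (plus a toast) when the request fails. A rough reconstruction of the helper's shape, inferred from the ollama/index.ts hunk above; the exact headers and error payloads are assumptions:

// Approximate sketch of getOllamaModels, not the verbatim implementation.
export const getOllamaModels = async (base_url: string, token: string = '') => {
	const res = await fetch(`${base_url}/tags`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { Authorization: `Bearer ${token}` })
		}
	}).then(async (r) => {
		if (!r.ok) throw await r.json(); // the caller (the settings modal) catches and toasts
		return r.json();
	});

	// Same alphabetical ordering introduced in the first hunk.
	return (res?.models ?? []).sort((a, b) => a.name.localeCompare(b.name));
};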