
feat: display pipelines server in api settings

Timothy J. Baek 11 months ago
commit 5cf44ac7da

+ 10 - 8
backend/apps/openai/main.py

@@ -184,13 +184,10 @@ async def speech(request: Request, user=Depends(get_verified_user)):
 async def fetch_url(url, key):
     timeout = aiohttp.ClientTimeout(total=5)
     try:
-        if key != "":
-            headers = {"Authorization": f"Bearer {key}"}
-            async with aiohttp.ClientSession(timeout=timeout) as session:
-                async with session.get(url, headers=headers) as response:
-                    return await response.json()
-        else:
-            return None
+        headers = {"Authorization": f"Bearer {key}"}
+        async with aiohttp.ClientSession(timeout=timeout) as session:
+            async with session.get(url, headers=headers) as response:
+                return await response.json()
     except Exception as e:
         # Handle connection error here
         log.error(f"Connection error: {e}")
@@ -277,11 +274,16 @@ async def get_models(url_idx: Optional[int] = None, user=Depends(get_current_use
         return models
     else:
         url = app.state.config.OPENAI_API_BASE_URLS[url_idx]
+        key = app.state.config.OPENAI_API_KEYS[url_idx]
+
+        headers = {}
+        headers["Authorization"] = f"Bearer {key}"
+        headers["Content-Type"] = "application/json"
 
         r = None
 
         try:
-            r = requests.request(method="GET", url=f"{url}/models")
+            r = requests.request(method="GET", url=f"{url}/models", headers=headers)
             r.raise_for_status()
 
             response_data = r.json()

+ 10 - 7
src/lib/apis/openai/index.ts

@@ -206,14 +206,17 @@ export const updateOpenAIKeys = async (token: string = '', keys: string[]) => {
 export const getOpenAIModels = async (token: string, urlIdx?: number) => {
 	let error = null;
 
-	const res = await fetch(`${OPENAI_API_BASE_URL}/models${urlIdx ? `/${urlIdx}` : ''}`, {
-		method: 'GET',
-		headers: {
-			Accept: 'application/json',
-			'Content-Type': 'application/json',
-			...(token && { authorization: `Bearer ${token}` })
+	const res = await fetch(
+		`${OPENAI_API_BASE_URL}/models${typeof urlIdx === 'number' ? `/${urlIdx}` : ''}`,
+		{
+			method: 'GET',
+			headers: {
+				Accept: 'application/json',
+				'Content-Type': 'application/json',
+				...(token && { authorization: `Bearer ${token}` })
+			}
 		}
-	})
+	)
 		.then(async (res) => {
 			if (!res.ok) throw await res.json();
 			return res.json();
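
Worth noting: switching from `urlIdx ? ... : ''` to `typeof urlIdx === 'number'` means index `0` is no longer treated as falsy, so the first configured connection can be queried individually. A minimal sketch of the behavioural difference (the helper below is illustrative, not part of the codebase):

```typescript
// Illustrative only: mirrors the path construction in getOpenAIModels above.
const modelsPath = (urlIdx?: number) =>
	`/models${typeof urlIdx === 'number' ? `/${urlIdx}` : ''}`;

modelsPath(0); // "/models/0" — the old `urlIdx ? ...` check would have produced "/models"
modelsPath(2); // "/models/2"
modelsPath(); // "/models" — no index: query across all configured connections
```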

+ 39 - 3
src/lib/components/chat/Settings/Connections.svelte

@@ -34,6 +34,8 @@
 	let OPENAI_API_KEYS = [''];
 	let OPENAI_API_BASE_URLS = [''];
 
+	let pipelineUrls = {};
+
 	let ENABLE_OPENAI_API = null;
 	let ENABLE_OLLAMA_API = null;
 
@@ -48,7 +50,9 @@
 
 		if (res) {
 			toast.success($i18n.t('Server connection verified'));
-			console.log(res);
+			if (res.pipelines) {
+				pipelineUrls[OPENAI_API_BASE_URLS[idx]] = true;
+			}
 		}
 	};
 
@@ -100,6 +104,13 @@
 				})()
 			]);
 
+			OPENAI_API_BASE_URLS.forEach(async (url, idx) => {
+				const res = await getOpenAIModels(localStorage.token, idx);
+				if (res.pipelines) {
+					pipelineUrls[url] = true;
+				}
+			});
+
 			const ollamaConfig = await getOllamaConfig(localStorage.token);
 			const openaiConfig = await getOpenAIConfig(localStorage.token);
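
The `res.pipelines` checks here and in the verify handler assume that a connection backed by a Pipelines server advertises itself by including a `pipelines` flag in its `/models` payload; the exact upstream response shape is not part of this diff. A hedged sketch of the shape the frontend relies on:

```typescript
// Assumed shape only — inferred from the `res.pipelines` checks in this commit,
// not from a response schema shown in the diff.
interface OpenAIModelsResponse {
	data: { id: string; name?: string; [key: string]: unknown }[];
	pipelines?: boolean; // truthy only when the connection is a Pipelines server
}

// Illustrative helper mirroring the detection logic in the handlers above.
const markPipelineUrl = (
	pipelineUrls: Record<string, boolean>,
	url: string,
	res: OpenAIModelsResponse | null
): Record<string, boolean> => {
	if (res?.pipelines) {
		pipelineUrls[url] = true; // drives the Pipelines icon next to the URL input
	}
	return pipelineUrls;
};
```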
 
@@ -139,13 +150,38 @@
 						<div class="flex flex-col gap-1">
 							{#each OPENAI_API_BASE_URLS as url, idx}
 								<div class="flex w-full gap-2">
-									<div class="flex-1">
+									<div class="flex-1 relative">
 										<input
-											class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
+											class="w-full rounded-lg py-2 px-4 {pipelineUrls[url]
+												? 'pr-8'
+												: ''} text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
 											placeholder={$i18n.t('API Base URL')}
 											bind:value={url}
 											autocomplete="off"
 										/>
+
+										{#if pipelineUrls[url]}
+											<div class=" absolute top-2.5 right-2.5">
+												<Tooltip content="Pipelines">
+													<svg
+														xmlns="http://www.w3.org/2000/svg"
+														viewBox="0 0 24 24"
+														fill="currentColor"
+														class="size-4"
+													>
+														<path
+															d="M11.644 1.59a.75.75 0 0 1 .712 0l9.75 5.25a.75.75 0 0 1 0 1.32l-9.75 5.25a.75.75 0 0 1-.712 0l-9.75-5.25a.75.75 0 0 1 0-1.32l9.75-5.25Z"
+														/>
+														<path
+															d="m3.265 10.602 7.668 4.129a2.25 2.25 0 0 0 2.134 0l7.668-4.13 1.37.739a.75.75 0 0 1 0 1.32l-9.75 5.25a.75.75 0 0 1-.71 0l-9.75-5.25a.75.75 0 0 1 0-1.32l1.37-.738Z"
+														/>
+														<path
+															d="m10.933 19.231-7.668-4.13-1.37.739a.75.75 0 0 0 0 1.32l9.75 5.25c.221.12.489.12.71 0l9.75-5.25a.75.75 0 0 0 0-1.32l-1.37-.738-7.668 4.13a2.25 2.25 0 0 1-2.134-.001Z"
+														/>
+													</svg>
+												</Tooltip>
+											</div>
+										{/if}
 									</div>
 
 									<div class="flex-1">