
fix: litellm issue

Timothy J. Baek 1 year ago
parent
commit
f96beeee8f

+ 5 - 1
src/routes/(app)/+page.svelte

@@ -534,6 +534,8 @@
 
 		console.log(docs);
 
+		console.log(model);
+
 		const res = await generateOpenAIChatCompletion(
 			localStorage.token,
 			{
@@ -586,7 +588,9 @@
 				max_tokens: $settings?.options?.num_predict ?? undefined,
 				docs: docs.length > 0 ? docs : undefined
 			},
-			model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`
+			model.source.toLowerCase() === 'litellm'
+				? `${LITELLM_API_BASE_URL}/v1`
+				: `${OPENAI_API_BASE_URL}`
 		);
 
 		if (res && res.ok) {

+ 3 - 1
src/routes/(app)/c/[id]/+page.svelte

@@ -602,7 +602,9 @@
 				max_tokens: $settings?.options?.num_predict ?? undefined,
 				docs: docs.length > 0 ? docs : undefined
 			},
-			model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`
+			model.source.toLowerCase() === 'litellm'
+				? `${LITELLM_API_BASE_URL}/v1`
+				: `${OPENAI_API_BASE_URL}`
 		);
 
 		if (res && res.ok) {
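
Both routes now apply the same case-insensitive check on model.source when choosing the chat-completion endpoint. A minimal sketch of that pattern, assuming LITELLM_API_BASE_URL and OPENAI_API_BASE_URL are exported from $lib/constants as elsewhere in the codebase; the helper name getChatCompletionBaseUrl and the Model shape here are hypothetical, not part of the commit:

	// Sketch of the routing logic this commit fixes: compare model.source
	// case-insensitively so values like 'LiteLLM' and 'litellm' both resolve
	// to the LiteLLM proxy, while everything else falls back to the
	// OpenAI-compatible base URL.
	import { LITELLM_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';

	interface Model {
		id: string;
		source: string;
	}

	export const getChatCompletionBaseUrl = (model: Model): string =>
		model.source.toLowerCase() === 'litellm'
			? `${LITELLM_API_BASE_URL}/v1`
			: `${OPENAI_API_BASE_URL}`;

Such a helper would let both +page.svelte routes share one implementation instead of duplicating the ternary before each generateOpenAIChatCompletion call.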