浏览代码

feat: allow model config via config.json

Jun Siang Cheah 11 月之前
父节点
当前提交
e76a444ed9

+ 18 - 1
backend/apps/litellm/main.py

@@ -18,7 +18,7 @@ from pydantic import BaseModel, ConfigDict
 from typing import Optional, List
 
 from utils.utils import get_verified_user, get_current_user, get_admin_user
-from config import SRC_LOG_LEVELS, ENV
+from config import SRC_LOG_LEVELS, ENV, MODEL_CONFIG
 from constants import MESSAGES
 
 import os
@@ -67,6 +67,7 @@ with open(LITELLM_CONFIG_DIR, "r") as file:
 
 app.state.ENABLE = ENABLE_LITELLM
 app.state.CONFIG = litellm_config
+app.state.MODEL_CONFIG = MODEL_CONFIG.get("litellm", [])
 
 # Global variable to store the subprocess reference
 background_process = None
@@ -238,6 +239,8 @@ async def get_models(user=Depends(get_current_user)):
                         )
                     )
 
+            for model in data["data"]:
+                add_custom_info_to_model(model)
             return data
         except Exception as e:
 
@@ -258,6 +261,14 @@ async def get_models(user=Depends(get_current_user)):
                         "object": "model",
                         "created": int(time.time()),
                         "owned_by": "openai",
+                        "custom_info": next(
+                            (
+                                item
+                                for item in app.state.MODEL_CONFIG
+                                if item["name"] == model["model_name"]
+                            ),
+                            {},
+                        ),
                     }
                     for model in app.state.CONFIG["model_list"]
                 ],
@@ -270,6 +281,12 @@ async def get_models(user=Depends(get_current_user)):
         }
 
 
+def add_custom_info_to_model(model: dict):
+    model["custom_info"] = next(
+        (item for item in app.state.MODEL_CONFIG if item["name"] == model["id"]), {}
+    )
+
+
 @app.get("/model/info")
 async def get_model_list(user=Depends(get_admin_user)):
     return {"data": app.state.CONFIG["model_list"]}

+ 14 - 1
backend/apps/ollama/main.py

@@ -46,6 +46,7 @@ from config import (
     ENABLE_MODEL_FILTER,
     MODEL_FILTER_LIST,
     UPLOAD_DIR,
+    MODEL_CONFIG,
 )
 from utils.misc import calculate_sha256
 
@@ -64,6 +65,7 @@ app.add_middleware(
 
 app.state.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
 app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST
+app.state.MODEL_CONFIG = MODEL_CONFIG.get("ollama", [])
 
 app.state.OLLAMA_BASE_URLS = OLLAMA_BASE_URLS
 app.state.MODELS = {}
@@ -158,15 +160,26 @@ async def get_all_models():
 
     models = {
         "models": merge_models_lists(
-            map(lambda response: response["models"] if response else None, responses)
+            map(
+                lambda response: (response["models"] if response else None),
+                responses,
+            )
         )
     }
+    for model in models["models"]:
+        add_custom_info_to_model(model)
 
     app.state.MODELS = {model["model"]: model for model in models["models"]}
 
     return models
 
 
+def add_custom_info_to_model(model: dict):
+    model["custom_info"] = next(
+        (item for item in app.state.MODEL_CONFIG if item["name"] == model["model"]), {}
+    )
+
+
 @app.get("/api/tags")
 @app.get("/api/tags/{url_idx}")
 async def get_ollama_tags(

+ 12 - 1
backend/apps/openai/main.py

@@ -26,6 +26,7 @@ from config import (
     CACHE_DIR,
     ENABLE_MODEL_FILTER,
     MODEL_FILTER_LIST,
+    MODEL_CONFIG,
 )
 from typing import List, Optional
 
@@ -47,6 +48,7 @@ app.add_middleware(
 
 app.state.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
 app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST
+app.state.MODEL_CONFIG = MODEL_CONFIG.get("openai", [])
 
 app.state.OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS
 app.state.OPENAI_API_KEYS = OPENAI_API_KEYS
@@ -217,10 +219,19 @@ async def get_all_models():
             )
         }
 
+        for model in models["data"]:
+            add_custom_info_to_model(model)
+
         log.info(f"models: {models}")
         app.state.MODELS = {model["id"]: model for model in models["data"]}
 
-        return models
+    return models
+
+
+def add_custom_info_to_model(model: dict):
+    model["custom_info"] = next(
+        (item for item in app.state.MODEL_CONFIG if item["name"] == model["id"]), {}
+    )
 
 
 @app.get("/models")

+ 24 - 0
backend/apps/web/routers/configs.py

@@ -35,6 +35,19 @@ class SetDefaultSuggestionsForm(BaseModel):
     suggestions: List[PromptSuggestion]
 
 
+class ModelConfig(BaseModel):
+    id: str
+    name: str
+    displayName: str = ""
+    description: str
+    vision_capable: bool
+
+
+class SetModelConfigForm(BaseModel):
+    ollama: List[ModelConfig]
+    litellm: List[ModelConfig]
+    openai: List[ModelConfig]
+
+
 ############################
 # SetDefaultModels
 ############################
@@ -57,3 +70,14 @@ async def set_global_default_suggestions(
     data = form_data.model_dump()
     request.app.state.DEFAULT_PROMPT_SUGGESTIONS = data["suggestions"]
     return request.app.state.DEFAULT_PROMPT_SUGGESTIONS
+
+
+@router.post("/models", response_model=SetModelConfigForm)
+async def set_global_model_config(
+    request: Request,
+    form_data: SetModelConfigForm,
+    user=Depends(get_admin_user),
+):
+    data = form_data.model_dump()
+    request.app.state.MODEL_CONFIG = data
+    return request.app.state.MODEL_CONFIG

+ 2 - 0
backend/config.py

@@ -424,6 +424,8 @@ WEBHOOK_URL = os.environ.get("WEBHOOK_URL", "")
 
 ENABLE_ADMIN_EXPORT = os.environ.get("ENABLE_ADMIN_EXPORT", "True").lower() == "true"
 
+MODEL_CONFIG = CONFIG_DATA.get("models", {"ollama": [], "litellm": [], "openai": []})
+
 ####################################
 # WEBUI_SECRET_KEY
 ####################################

+ 1 - 0
backend/main.py

@@ -58,6 +58,7 @@ from config import (
     SRC_LOG_LEVELS,
     WEBHOOK_URL,
     ENABLE_ADMIN_EXPORT,
+    MODEL_CONFIG,
 )
 from constants import ERROR_MESSAGES
 

+ 2 - 1
src/lib/apis/litellm/index.ts

@@ -33,7 +33,8 @@ export const getLiteLLMModels = async (token: string = '') => {
 					id: model.id,
 					name: model.name ?? model.id,
 					external: true,
-					source: 'LiteLLM'
+					source: 'LiteLLM',
+					custom_info: model.custom_info ?? {}
 				}))
 				.sort((a, b) => {
 					return a.name.localeCompare(b.name);

+ 6 - 1
src/lib/apis/openai/index.ts

@@ -163,7 +163,12 @@ export const getOpenAIModels = async (token: string = '') => {
 
 	return models
 		? models
-				.map((model) => ({ id: model.id, name: model.name ?? model.id, external: true }))
+				.map((model) => ({
+					id: model.id,
+					name: model.name ?? model.id,
+					external: true,
+					custom_info: model.custom_info ?? {}
+				}))
 				.sort((a, b) => {
 					return a.name.localeCompare(b.name);
 				})

+ 1 - 1
src/lib/components/admin/Settings/Users.svelte

@@ -125,7 +125,7 @@
 												<option value="" disabled selected>{$i18n.t('Select a model')}</option>
 												{#each $models.filter((model) => model.id) as model}
 													<option value={model.id} class="bg-gray-100 dark:bg-gray-700"
-														>{model.name}</option
+														>{model.custom_info?.displayName ?? model.name}</option
 													>
 												{/each}
 											</select>

+ 20 - 6
src/lib/components/chat/MessageInput.svelte

@@ -1,7 +1,7 @@
 <script lang="ts">
 	import { toast } from 'svelte-sonner';
 	import { onMount, tick, getContext } from 'svelte';
-	import { modelfiles, settings, showSidebar } from '$lib/stores';
+	import { type Model, modelfiles, settings, showSidebar } from '$lib/stores';
 	import { blobToFile, calculateSHA256, findWordIndices } from '$lib/utils';
 
 	import {
@@ -27,7 +27,7 @@
 	export let stopResponse: Function;
 
 	export let autoScroll = true;
-	export let selectedModel = '';
+	export let selectedModel: Model | undefined;
 
 	let chatTextAreaElement: HTMLTextAreaElement;
 	let filesInputElement;
@@ -359,6 +359,12 @@
 					inputFiles.forEach((file) => {
 						console.log(file, file.name.split('.').at(-1));
 						if (['image/gif', 'image/jpeg', 'image/png'].includes(file['type'])) {
+							if (selectedModel !== undefined) {
+								if (!(selectedModel.custom_info?.vision_capable ?? true)) {
+									toast.error($i18n.t('Selected model does not support image inputs.'));
+									return;
+								}
+							}
 							let reader = new FileReader();
 							reader.onload = (event) => {
 								files = [
@@ -500,7 +506,7 @@
 						}}
 					/>
 
-					{#if selectedModel !== ''}
+					{#if selectedModel !== undefined}
 						<div
 							class="px-3 py-2.5 text-left w-full flex justify-between items-center absolute bottom-0 left-0 right-0 bg-gradient-to-t from-50% from-white dark:from-gray-900"
 						>
@@ -515,14 +521,16 @@
 											: `${WEBUI_BASE_URL}/static/favicon.png`)}
 								/>
 								<div>
-									Talking to <span class=" font-medium">{selectedModel.name} </span>
+									Talking to <span class=" font-medium"
+										>{selectedModel.custom_info?.displayName ?? selectedModel.name}
+									</span>
 								</div>
 							</div>
 							<div>
 								<button
 									class="flex items-center"
 									on:click={() => {
-										selectedModel = '';
+										selectedModel = undefined;
 									}}
 								>
 									<XMark />
@@ -548,6 +556,12 @@
 								const _inputFiles = Array.from(inputFiles);
 								_inputFiles.forEach((file) => {
 									if (['image/gif', 'image/jpeg', 'image/png'].includes(file['type'])) {
+										if (selectedModel !== undefined) {
+											if (!(selectedModel.custom_info?.vision_capable ?? true)) {
+												toast.error($i18n.t('Selected model does not support image inputs.'));
+												return;
+											}
+										}
 										let reader = new FileReader();
 										reader.onload = (event) => {
 											files = [
@@ -880,7 +894,7 @@
 
 									if (e.key === 'Escape') {
 										console.log('Escape');
-										selectedModel = '';
+										selectedModel = undefined;
 									}
 								}}
 								rows="1"

+ 7 - 3
src/lib/components/chat/MessageInput/Models.svelte

@@ -21,8 +21,12 @@
 	let filteredModels = [];
 
 	$: filteredModels = $models
-		.filter((p) => p.name.includes(prompt.split(' ')?.at(0)?.substring(1) ?? ''))
-		.sort((a, b) => a.name.localeCompare(b.name));
+		.filter((p) =>
+			(p.custom_info?.displayName ?? p.name).includes(prompt.split(' ')?.at(0)?.substring(1) ?? '')
+		)
+		.sort((a, b) =>
+			(a.custom_info?.displayName ?? a.name).localeCompare(b.custom_info?.displayName ?? b.name)
+		);
 
 	$: if (prompt) {
 		selectedIdx = 0;
@@ -156,7 +160,7 @@
 								on:focus={() => {}}
 							>
 								<div class=" font-medium text-black line-clamp-1">
-									{model.name}
+									{model.custom_info?.displayName ?? model.name}
 								</div>
 
 								<!-- <div class=" text-xs text-gray-600 line-clamp-1">

+ 1 - 1
src/lib/components/chat/Messages/ResponseMessage.svelte

@@ -343,7 +343,7 @@
 				{#if message.model in modelfiles}
 					{modelfiles[message.model]?.title}
 				{:else}
-					{message.model ? ` ${message.model}` : ''}
+					{message.modelName ? ` ${message.modelName}` : message.model ? ` ${message.model}` : ''}
 				{/if}
 
 				{#if message.timestamp}

+ 1 - 1
src/lib/components/chat/ModelSelector.svelte

@@ -49,7 +49,7 @@
 							.filter((model) => model.name !== 'hr')
 							.map((model) => ({
 								value: model.id,
-								label: model.name,
+								label: model.custom_info?.displayName ?? model.name,
 								info: model
 							}))}
 						bind:value={selectedModel}

+ 8 - 2
src/lib/components/chat/ModelSelector/Selector.svelte

@@ -247,7 +247,11 @@
 							<!-- {JSON.stringify(item.info)} -->
 
 							{#if item.info.external}
-								<Tooltip content={item.info?.source ?? 'External'}>
+								<Tooltip
+									content={`${item.info?.source ?? 'External'}${
+										item.info.custom_info?.description ? '<br>' : ''
+									}${item.info.custom_info?.description?.replaceAll('\n', '<br>') ?? ''}`}
+								>
 									<div class=" mr-2">
 										<svg
 											xmlns="http://www.w3.org/2000/svg"
@@ -274,7 +278,9 @@
 										item.info?.details?.quantization_level
 											? item.info?.details?.quantization_level + ' '
 											: ''
-									}${item.info.size ? `(${(item.info.size / 1024 ** 3).toFixed(1)}GB)` : ''}`}
+									}${item.info.size ? `(${(item.info.size / 1024 ** 3).toFixed(1)}GB)` : ''}${
+										item.info.custom_info?.description ? '<br>' : ''
+									}${item.info.custom_info?.description?.replaceAll('\n', '<br>') ?? ''}`}
 								>
 									<div class=" mr-2">
 										<svg

+ 5 - 2
src/lib/components/chat/Settings/Interface.svelte

@@ -244,7 +244,10 @@
 						{#each $models as model}
 							{#if model.size != null}
 								<option value={model.name} class="bg-gray-100 dark:bg-gray-700">
-									{model.name + ' (' + (model.size / 1024 ** 3).toFixed(1) + ' GB)'}
+									{(model.custom_info?.displayName ?? model.name) +
+										' (' +
+										(model.size / 1024 ** 3).toFixed(1) +
+										' GB)'}
 								</option>
 							{/if}
 						{/each}
@@ -262,7 +265,7 @@
 						{#each $models as model}
 							{#if model.name !== 'hr'}
 								<option value={model.name} class="bg-gray-100 dark:bg-gray-700">
-									{model.name}
+									{model.custom_info?.displayName ?? model.name}
 								</option>
 							{/if}
 						{/each}

+ 99 - 12
src/lib/components/chat/Settings/Models.svelte

@@ -13,7 +13,7 @@
 		uploadModel
 	} from '$lib/apis/ollama';
 	import { WEBUI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
-	import { WEBUI_NAME, models, MODEL_DOWNLOAD_POOL, user } from '$lib/stores';
+	import { WEBUI_NAME, models, MODEL_DOWNLOAD_POOL, user, config } from '$lib/stores';
 	import { splitStream } from '$lib/utils';
 	import { onMount, getContext } from 'svelte';
 	import { addLiteLLMModel, deleteLiteLLMModel, getLiteLLMModelInfo } from '$lib/apis/litellm';
@@ -67,6 +67,8 @@
 
 	let deleteModelTag = '';
 
+	let showModelInfo = false;
+
 	const updateModelsHandler = async () => {
 		for (const model of $models.filter(
 			(m) =>
@@ -587,24 +589,28 @@
 											viewBox="0 0 24 24"
 											fill="currentColor"
 											xmlns="http://www.w3.org/2000/svg"
-											><style>
+										>
+											<style>
 												.spinner_ajPY {
 													transform-origin: center;
 													animation: spinner_AtaB 0.75s infinite linear;
 												}
+
 												@keyframes spinner_AtaB {
 													100% {
 														transform: rotate(360deg);
 													}
 												}
-											</style><path
+											</style>
+											<path
 												d="M12,1A11,11,0,1,0,23,12,11,11,0,0,0,12,1Zm0,19a8,8,0,1,1,8-8A8,8,0,0,1,12,20Z"
 												opacity=".25"
-											/><path
+											/>
+											<path
 												d="M10.14,1.16a11,11,0,0,0-9,8.92A1.59,1.59,0,0,0,2.46,12,1.52,1.52,0,0,0,4.11,10.7a8,8,0,0,1,6.66-6.61A1.42,1.42,0,0,0,12,2.69h0A1.57,1.57,0,0,0,10.14,1.16Z"
 												class="spinner_ajPY"
-											/></svg
-										>
+											/>
+										</svg>
 									</div>
 								{:else}
 									<svg
@@ -705,7 +711,10 @@
 									{/if}
 									{#each $models.filter((m) => m.size != null && (selectedOllamaUrlIdx === null ? true : (m?.urls ?? []).includes(selectedOllamaUrlIdx))) as model}
 										<option value={model.name} class="bg-gray-100 dark:bg-gray-700"
-											>{model.name + ' (' + (model.size / 1024 ** 3).toFixed(1) + ' GB)'}</option
+											>{(model.custom_info?.displayName ?? model.name) +
+												' (' +
+												(model.size / 1024 ** 3).toFixed(1) +
+												' GB)'}</option
 										>
 									{/each}
 								</select>
@@ -833,24 +842,28 @@
 													viewBox="0 0 24 24"
 													fill="currentColor"
 													xmlns="http://www.w3.org/2000/svg"
-													><style>
+												>
+													<style>
 														.spinner_ajPY {
 															transform-origin: center;
 															animation: spinner_AtaB 0.75s infinite linear;
 														}
+
 														@keyframes spinner_AtaB {
 															100% {
 																transform: rotate(360deg);
 															}
 														}
-													</style><path
+													</style>
+													<path
 														d="M12,1A11,11,0,1,0,23,12,11,11,0,0,0,12,1Zm0,19a8,8,0,1,1,8-8A8,8,0,0,1,12,20Z"
 														opacity=".25"
-													/><path
+													/>
+													<path
 														d="M10.14,1.16a11,11,0,0,0-9,8.92A1.59,1.59,0,0,0,2.46,12,1.52,1.52,0,0,0,4.11,10.7a8,8,0,0,1,6.66-6.61A1.42,1.42,0,0,0,12,2.69h0A1.57,1.57,0,0,0,10.14,1.16Z"
 														class="spinner_ajPY"
-													/></svg
-												>
+													/>
+												</svg>
 											</div>
 										{:else}
 											<svg
@@ -932,6 +945,7 @@
 			<hr class=" dark:border-gray-700 my-2" />
 		{/if}
 
+		<!--TODO: Hide LiteLLM options when ENABLE_LITELLM=false-->
 		<div class=" space-y-3">
 			<div class="mt-2 space-y-3 pr-1.5">
 				<div>
@@ -1126,6 +1140,79 @@
 					{/if}
 				</div>
 			</div>
+			<hr class=" dark:border-gray-700 my-2" />
 		</div>
+
+		<!--        <div class=" space-y-3">-->
+		<!--            <div class="mt-2 space-y-3 pr-1.5">-->
+		<!--                <div>-->
+		<!--                    <div class="mb-2">-->
+		<!--                        <div class="flex justify-between items-center text-xs">-->
+		<!--                            <div class=" text-sm font-medium">{$i18n.t('Manage Model Information')}</div>-->
+		<!--                            <button-->
+		<!--                                    class=" text-xs font-medium text-gray-500"-->
+		<!--                                    type="button"-->
+		<!--                                    on:click={() => {-->
+		<!--									showModelInfo = !showModelInfo;-->
+		<!--								}}>{showModelInfo ? $i18n.t('Hide') : $i18n.t('Show')}</button-->
+		<!--                            >-->
+		<!--                        </div>-->
+		<!--                    </div>-->
+
+		<!--                    {#if showModelInfo}-->
+		<!--                        <div>-->
+		<!--                            <div class="flex justify-between items-center text-xs">-->
+		<!--                                <div class=" text-sm font-medium">{$i18n.t('Current Models')}</div>-->
+		<!--                            </div>-->
+
+		<!--                            <div class="flex gap-2">-->
+		<!--                                <div class="flex-1 pb-1">-->
+		<!--                                    <select-->
+		<!--                                            class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"-->
+		<!--                                            bind:value={selectedOllamaUrlIdx}-->
+		<!--                                            placeholder={$i18n.t('Select an existing model')}-->
+		<!--                                    >-->
+		<!--                                        {#each $config. as url, idx}-->
+		<!--                                            <option value={idx} class="bg-gray-100 dark:bg-gray-700">{url}</option>-->
+		<!--                                        {/each}-->
+		<!--                                    </select>-->
+		<!--                                </div>-->
+
+		<!--                                <div>-->
+		<!--                                    <div class="flex w-full justify-end">-->
+		<!--                                        <Tooltip content="Update All Models" placement="top">-->
+		<!--                                            <button-->
+		<!--                                                    class="p-2.5 flex gap-2 items-center bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded-lg transition"-->
+		<!--                                                    on:click={() => {-->
+		<!--											updateModelsHandler();-->
+		<!--										}}-->
+		<!--                                            >-->
+		<!--                                                <svg-->
+		<!--                                                        xmlns="http://www.w3.org/2000/svg"-->
+		<!--                                                        viewBox="0 0 16 16"-->
+		<!--                                                        fill="currentColor"-->
+		<!--                                                        class="w-4 h-4"-->
+		<!--                                                >-->
+		<!--                                                    <path-->
+		<!--                                                            d="M7 1a.75.75 0 0 1 .75.75V6h-1.5V1.75A.75.75 0 0 1 7 1ZM6.25 6v2.94L5.03 7.72a.75.75 0 0 0-1.06 1.06l2.5 2.5a.75.75 0 0 0 1.06 0l2.5-2.5a.75.75 0 1 0-1.06-1.06L7.75 8.94V6H10a2 2 0 0 1 2 2v3a2 2 0 0 1-2 2H4a2 2 0 0 1-2-2V8a2 2 0 0 1 2-2h2.25Z"-->
+		<!--                                                    />-->
+		<!--                                                    <path-->
+		<!--                                                            d="M4.268 14A2 2 0 0 0 6 15h6a2 2 0 0 0 2-2v-3a2 2 0 0 0-1-1.732V11a3 3 0 0 1-3 3H4.268Z"-->
+		<!--                                                    />-->
+		<!--                                                </svg>-->
+		<!--                                            </button>-->
+		<!--                                        </Tooltip>-->
+		<!--                                    </div>-->
+		<!--                                </div>-->
+		<!--                            </div>-->
+
+		<!--                            {#if updateModelId}-->
+		<!--                                Updating "{updateModelId}" {updateProgress ? `(${updateProgress}%)` : ''}-->
+		<!--                            {/if}-->
+		<!--                        </div>-->
+		<!--                    {/if}-->
+		<!--                </div>-->
+		<!--            </div>-->
+		<!--        </div>-->
 	</div>
 </div>

+ 4 - 1
src/lib/components/documents/Settings/General.svelte

@@ -321,7 +321,10 @@
 							{/if}
 							{#each $models.filter((m) => m.id && !m.external) as model}
 								<option value={model.name} class="bg-gray-100 dark:bg-gray-700"
-									>{model.name + ' (' + (model.size / 1024 ** 3).toFixed(1) + ' GB)'}</option
+									>{(model.custom_info?.displayName ?? model.name) +
+										' (' +
+										(model.size / 1024 ** 3).toFixed(1) +
+										' GB)'}</option
 								>
 							{/each}
 						</select>

+ 30 - 9
src/lib/stores/index.ts

@@ -39,27 +39,34 @@ export const showSidebar = writable(false);
 export const showSettings = writable(false);
 export const showChangelog = writable(false);
 
-type Model = OpenAIModel | OllamaModel;
+export type Model = OpenAIModel | OllamaModel;
 
-type OpenAIModel = {
-	id: string;
-	name: string;
-	external: boolean;
-	source?: string;
+type ModelCustomInfo = {
+	name?: string;
+	displayName?: string;
+	description?: string;
+	vision_capable?: boolean;
 };
 
-type OllamaModel = {
+type BaseModel = {
 	id: string;
 	name: string;
+	custom_info?: ModelCustomInfo;
+};
 
-	// Ollama specific fields
+interface OpenAIModel extends BaseModel {
+	external: boolean;
+	source?: string;
+}
+
+interface OllamaModel extends BaseModel {
 	details: OllamaModelDetails;
 	size: number;
 	description: string;
 	model: string;
 	modified_at: string;
 	digest: string;
-};
+}
 
 type OllamaModelDetails = {
 	parent_model: string;
@@ -129,6 +136,20 @@ type Config = {
 	default_models?: string[];
 	default_prompt_suggestions?: PromptSuggestion[];
 	trusted_header_auth?: boolean;
+	model_config?: GlobalModelConfig;
+};
+
+type GlobalModelConfig = {
+	ollama?: ModelConfig[];
+	litellm?: ModelConfig[];
+	openai?: ModelConfig[];
+};
+
+type ModelConfig = {
+	id?: string;
+	name?: string;
+	displayName?: string;
+	description?: string;
+	vision_capable?: boolean;
+};
 
 type PromptSuggestion = {

+ 57 - 40
src/routes/(app)/+page.svelte

@@ -16,7 +16,8 @@
 		config,
 		WEBUI_NAME,
 		tags as _tags,
-		showSidebar
+		showSidebar,
+		type Model
 	} from '$lib/stores';
 	import { copyToClipboard, splitStream } from '$lib/utils';
 
@@ -53,7 +54,7 @@
 	let showModelSelector = true;
 
 	let selectedModels = [''];
-	let atSelectedModel = '';
+	let atSelectedModel: Model | undefined;
 
 	let selectedModelfile = null;
 	$: selectedModelfile =
@@ -254,50 +255,66 @@
 		const _chatId = JSON.parse(JSON.stringify($chatId));
 
 		await Promise.all(
-			(atSelectedModel !== '' ? [atSelectedModel.id] : selectedModels).map(async (modelId) => {
-				console.log('modelId', modelId);
-				const model = $models.filter((m) => m.id === modelId).at(0);
-
-				if (model) {
-					// Create response message
-					let responseMessageId = uuidv4();
-					let responseMessage = {
-						parentId: parentId,
-						id: responseMessageId,
-						childrenIds: [],
-						role: 'assistant',
-						content: '',
-						model: model.id,
-						timestamp: Math.floor(Date.now() / 1000) // Unix epoch
-					};
-
-					// Add message to history and Set currentId to messageId
-					history.messages[responseMessageId] = responseMessage;
-					history.currentId = responseMessageId;
-
-					// Append messageId to childrenIds of parent message
-					if (parentId !== null) {
-						history.messages[parentId].childrenIds = [
-							...history.messages[parentId].childrenIds,
-							responseMessageId
-						];
-					}
+			(atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels).map(
+				async (modelId) => {
+					console.log('modelId', modelId);
+					const model = $models.filter((m) => m.id === modelId).at(0);
+
+					if (model) {
+						// If there are image files, check if model is vision capable
+						const hasImages = messages.some((message) =>
+							message.files?.some((file) => file.type === 'image')
+						);
+						if (hasImages && !(model.custom_info?.vision_capable ?? true)) {
+							toast.error(
+								$i18n.t('Model {{modelName}} is not vision capable', {
+									modelName: model.custom_info?.displayName ?? model.name ?? model.id
+								})
+							);
+						}
+
+						// Create response message
+						let responseMessageId = uuidv4();
+						let responseMessage = {
+							parentId: parentId,
+							id: responseMessageId,
+							childrenIds: [],
+							role: 'assistant',
+							content: '',
+							model: model.id,
+							modelName: model.custom_info?.displayName ?? model.name ?? model.id,
+							timestamp: Math.floor(Date.now() / 1000) // Unix epoch
+						};
+
+						// Add message to history and Set currentId to messageId
+						history.messages[responseMessageId] = responseMessage;
+						history.currentId = responseMessageId;
+
+						// Append messageId to childrenIds of parent message
+						if (parentId !== null) {
+							history.messages[parentId].childrenIds = [
+								...history.messages[parentId].childrenIds,
+								responseMessageId
+							];
+						}
 
-					if (model?.external) {
-						await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
-					} else if (model) {
-						await sendPromptOllama(model, prompt, responseMessageId, _chatId);
+						if (model?.external) {
+							await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
+						} else if (model) {
+							await sendPromptOllama(model, prompt, responseMessageId, _chatId);
+						}
+					} else {
+						toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
 					}
-				} else {
-					toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
 				}
-			})
+			)
 		);
 
 		await chats.set(await getChatList(localStorage.token));
 	};
 
 	const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
+		const modelName = model.custom_info?.displayName ?? model.name ?? model.id;
 		model = model.id;
 		const responseMessage = history.messages[responseMessageId];
 
@@ -702,17 +719,17 @@
 			} else {
 				toast.error(
 					$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
-						provider: model.name ?? model.id
+						provider: model.custom_info?.displayName ?? model.name ?? model.id
 					})
 				);
 				responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
-					provider: model.name ?? model.id
+					provider: model.custom_info?.displayName ?? model.name ?? model.id
 				});
 			}
 
 			responseMessage.error = true;
 			responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
-				provider: model.name ?? model.id
+				provider: model.custom_info?.displayName ?? model.name ?? model.id
 			});
 			responseMessage.done = true;
 			messages = messages;

+ 55 - 39
src/routes/(app)/c/[id]/+page.svelte

@@ -15,7 +15,8 @@
 		config,
 		WEBUI_NAME,
 		tags as _tags,
-		showSidebar
+		showSidebar,
+		type Model
 	} from '$lib/stores';
 	import { copyToClipboard, splitStream, convertMessagesToHistory } from '$lib/utils';
 
@@ -57,7 +58,7 @@
 	// let chatId = $page.params.id;
 	let showModelSelector = true;
 	let selectedModels = [''];
-	let atSelectedModel = '';
+	let atSelectedModel: Model | undefined;
 
 	let selectedModelfile = null;
 
@@ -259,43 +260,58 @@
 		const _chatId = JSON.parse(JSON.stringify($chatId));
 
 		await Promise.all(
-			(atSelectedModel !== '' ? [atSelectedModel.id] : selectedModels).map(async (modelId) => {
-				const model = $models.filter((m) => m.id === modelId).at(0);
-
-				if (model) {
-					// Create response message
-					let responseMessageId = uuidv4();
-					let responseMessage = {
-						parentId: parentId,
-						id: responseMessageId,
-						childrenIds: [],
-						role: 'assistant',
-						content: '',
-						model: model.id,
-						timestamp: Math.floor(Date.now() / 1000) // Unix epoch
-					};
-
-					// Add message to history and Set currentId to messageId
-					history.messages[responseMessageId] = responseMessage;
-					history.currentId = responseMessageId;
-
-					// Append messageId to childrenIds of parent message
-					if (parentId !== null) {
-						history.messages[parentId].childrenIds = [
-							...history.messages[parentId].childrenIds,
-							responseMessageId
-						];
-					}
+			(atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels).map(
+				async (modelId) => {
+					const model = $models.filter((m) => m.id === modelId).at(0);
+
+					if (model) {
+						// If there are image files, check if model is vision capable
+						const hasImages = messages.some((message) =>
+							message.files?.some((file) => file.type === 'image')
+						);
+						if (hasImages && !(model.custom_info?.vision_capable ?? true)) {
+							toast.error(
+								$i18n.t('Model {{modelName}} is not vision capable', {
+									modelName: model.custom_info?.displayName ?? model.name ?? model.id
+								})
+							);
+						}
+
+						// Create response message
+						let responseMessageId = uuidv4();
+						let responseMessage = {
+							parentId: parentId,
+							id: responseMessageId,
+							childrenIds: [],
+							role: 'assistant',
+							content: '',
+							model: model.id,
+							modelName: model.custom_info?.displayName ?? model.name ?? model.id,
+							timestamp: Math.floor(Date.now() / 1000) // Unix epoch
+						};
+
+						// Add message to history and Set currentId to messageId
+						history.messages[responseMessageId] = responseMessage;
+						history.currentId = responseMessageId;
+
+						// Append messageId to childrenIds of parent message
+						if (parentId !== null) {
+							history.messages[parentId].childrenIds = [
+								...history.messages[parentId].childrenIds,
+								responseMessageId
+							];
+						}
 
-					if (model?.external) {
-						await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
-					} else if (model) {
-						await sendPromptOllama(model, prompt, responseMessageId, _chatId);
+						if (model?.external) {
+							await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
+						} else if (model) {
+							await sendPromptOllama(model, prompt, responseMessageId, _chatId);
+						}
+					} else {
+						toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
 					}
-				} else {
-					toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
 				}
-			})
+			)
 		);
 
 		await chats.set(await getChatList(localStorage.token));
@@ -706,17 +722,17 @@
 			} else {
 				toast.error(
 					$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
-						provider: model.name ?? model.id
+						provider: model.custom_info?.displayName ?? model.name ?? model.id
 					})
 				);
 				responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
-					provider: model.name ?? model.id
+					provider: model.custom_info?.displayName ?? model.name ?? model.id
 				});
 			}
 
 			responseMessage.error = true;
 			responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
-				provider: model.name ?? model.id
+				provider: model.custom_info?.displayName ?? model.name ?? model.id
 			});
 			responseMessage.done = true;
 			messages = messages;

+ 1 - 1
src/routes/(app)/playground/+page.svelte

@@ -326,7 +326,7 @@
 											.filter((model) => model.name !== 'hr')
 											.map((model) => ({
 												value: model.id,
-												label: model.name,
+												label: model.custom_info?.displayName ?? model.name,
 												info: model
 											}))}
 										bind:value={selectedModelId}