
Merge pull request #1002 from open-webui/playground

feat: playground
Timothy Jaeryang Baek · 1 year ago
parent
commit
9694c6569f

+ 15 - 8
backend/apps/ollama/main.py

@@ -11,7 +11,7 @@ from pydantic import BaseModel
 from apps.web.models.users import Users
 from constants import ERROR_MESSAGES
 from utils.utils import decode_token, get_current_user, get_admin_user
-from config import OLLAMA_API_BASE_URL, WEBUI_AUTH
+from config import OLLAMA_BASE_URL, WEBUI_AUTH
 
 app = FastAPI()
 app.add_middleware(
@@ -22,7 +22,7 @@ app.add_middleware(
     allow_headers=["*"],
 )
 
-app.state.OLLAMA_API_BASE_URL = OLLAMA_API_BASE_URL
+app.state.OLLAMA_BASE_URL = OLLAMA_BASE_URL
 
 # TARGET_SERVER_URL = OLLAMA_API_BASE_URL
 
@@ -32,7 +32,7 @@ REQUEST_POOL = []
 
 @app.get("/url")
 async def get_ollama_api_url(user=Depends(get_admin_user)):
-    return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
+    return {"OLLAMA_BASE_URL": app.state.OLLAMA_BASE_URL}
 
 
 class UrlUpdateForm(BaseModel):
@@ -41,8 +41,8 @@ class UrlUpdateForm(BaseModel):
 
 @app.post("/url/update")
 async def update_ollama_api_url(form_data: UrlUpdateForm, user=Depends(get_admin_user)):
-    app.state.OLLAMA_API_BASE_URL = form_data.url
-    return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
+    app.state.OLLAMA_BASE_URL = form_data.url
+    return {"OLLAMA_BASE_URL": app.state.OLLAMA_BASE_URL}
 
 
 @app.get("/cancel/{request_id}")
@@ -57,7 +57,7 @@ async def cancel_ollama_request(request_id: str, user=Depends(get_current_user))
 
 @app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
 async def proxy(path: str, request: Request, user=Depends(get_current_user)):
-    target_url = f"{app.state.OLLAMA_API_BASE_URL}/{path}"
+    target_url = f"{app.state.OLLAMA_BASE_URL}/{path}"
 
     body = await request.body()
     headers = dict(request.headers)
@@ -91,7 +91,13 @@ async def proxy(path: str, request: Request, user=Depends(get_current_user)):
 
             def stream_content():
                 try:
-                    if path in ["chat"]:
+                    if path == "generate":
+                        data = json.loads(body.decode("utf-8"))
+
+                        if not ("stream" in data and data["stream"] == False):
+                            yield json.dumps({"id": request_id, "done": False}) + "\n"
+
+                    elif path == "chat":
                         yield json.dumps({"id": request_id, "done": False}) + "\n"
 
                     for chunk in r.iter_content(chunk_size=8192):
@@ -103,7 +109,8 @@ async def proxy(path: str, request: Request, user=Depends(get_current_user)):
                 finally:
                     if hasattr(r, "close"):
                         r.close()
-                        REQUEST_POOL.remove(request_id)
+                        if request_id in REQUEST_POOL:
+                            REQUEST_POOL.remove(request_id)
 
             r = requests.request(
                 method=request.method,
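
The notable change here is in stream_content(): the proxy now also prepends a {"id": request_id, "done": false} line for "generate" requests (unless the request body explicitly sets "stream": false), as it already did for "chat", and the REQUEST_POOL cleanup is guarded so an id that was already removed no longer raises. Below is a hedged TypeScript sketch, not part of this commit, of how a client can consume that tagged stream; splitStream is the helper used by the frontend code later in this diff, and the function name is illustrative.

import { splitStream } from '$lib/utils';

const consumeGenerateStream = async (res: Response) => {
	let requestId: string | null = null;
	let output = '';

	const reader = res.body
		.pipeThrough(new TextDecoderStream())
		.pipeThrough(splitStream('\n'))
		.getReader();

	while (true) {
		const { value, done } = await reader.read();
		if (done) break;

		for (const line of value.split('\n')) {
			if (line === '') continue;

			const data = JSON.parse(line);

			if ('id' in data) {
				// Preamble injected by the proxy; keep it so the request can be
				// cancelled later through the /cancel/{request_id} route shown above.
				requestId = data.id;
			} else if (data.done === false) {
				output += data.response;
			}
		}
	}

	return { requestId, output };
};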

+ 11 - 0
backend/config.py

@@ -211,6 +211,17 @@ if ENV == "prod":
     if OLLAMA_API_BASE_URL == "/ollama/api":
         OLLAMA_API_BASE_URL = "http://host.docker.internal:11434/api"
 
+
+OLLAMA_BASE_URL = os.environ.get("OLLAMA_BASE_URL", "")
+
+if OLLAMA_BASE_URL == "":
+    OLLAMA_BASE_URL = (
+        OLLAMA_API_BASE_URL[:-4]
+        if OLLAMA_API_BASE_URL.endswith("/api")
+        else OLLAMA_API_BASE_URL
+    )
+
+
 ####################################
 # OPENAI_API
 ####################################
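
The new OLLAMA_BASE_URL setting falls back to the legacy OLLAMA_API_BASE_URL with a trailing "/api" stripped, so existing deployments keep working without setting the new variable. For illustration only, the same rule sketched in TypeScript (the helper name is made up):

// Illustration, not part of the commit: mirrors the fallback above.
// "http://host.docker.internal:11434/api" -> "http://host.docker.internal:11434";
// values without a trailing "/api" pass through unchanged.
const deriveOllamaBaseUrl = (apiBaseUrl: string, baseUrl: string = ''): string =>
	baseUrl !== '' ? baseUrl : apiBaseUrl.endsWith('/api') ? apiBaseUrl.slice(0, -4) : apiBaseUrl;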

+ 36 - 10
src/lib/apis/ollama/index.ts

@@ -29,7 +29,7 @@ export const getOllamaAPIUrl = async (token: string = '') => {
 		throw error;
 	}
 
-	return res.OLLAMA_API_BASE_URL;
+	return res.OLLAMA_BASE_URL;
 };
 
 export const updateOllamaAPIUrl = async (token: string = '', url: string) => {
@@ -64,13 +64,13 @@ export const updateOllamaAPIUrl = async (token: string = '', url: string) => {
 		throw error;
 	}
 
-	return res.OLLAMA_API_BASE_URL;
+	return res.OLLAMA_BASE_URL;
 };
 
 export const getOllamaVersion = async (token: string = '') => {
 	let error = null;
 
-	const res = await fetch(`${OLLAMA_API_BASE_URL}/version`, {
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/api/version`, {
 		method: 'GET',
 		headers: {
 			Accept: 'application/json',
@@ -102,7 +102,7 @@ export const getOllamaVersion = async (token: string = '') => {
 export const getOllamaModels = async (token: string = '') => {
 	let error = null;
 
-	const res = await fetch(`${OLLAMA_API_BASE_URL}/tags`, {
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/api/tags`, {
 		method: 'GET',
 		headers: {
 			Accept: 'application/json',
@@ -148,7 +148,7 @@ export const generateTitle = async (
 
 	console.log(template);
 
-	const res = await fetch(`${OLLAMA_API_BASE_URL}/generate`, {
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/api/generate`, {
 		method: 'POST',
 		headers: {
 			'Content-Type': 'text/event-stream',
@@ -186,7 +186,7 @@ export const generatePrompt = async (token: string = '', model: string, conversa
 		conversation = '[no existing conversation]';
 	}
 
-	const res = await fetch(`${OLLAMA_API_BASE_URL}/generate`, {
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/api/generate`, {
 		method: 'POST',
 		headers: {
 			'Content-Type': 'text/event-stream',
@@ -217,11 +217,37 @@ export const generatePrompt = async (token: string = '', model: string, conversa
 	return res;
 };
 
+export const generateTextCompletion = async (token: string = '', model: string, text: string) => {
+	let error = null;
+
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/api/generate`, {
+		method: 'POST',
+		headers: {
+			'Content-Type': 'text/event-stream',
+			Authorization: `Bearer ${token}`
+		},
+		body: JSON.stringify({
+			model: model,
+			prompt: text,
+			stream: true
+		})
+	}).catch((err) => {
+		error = err;
+		return null;
+	});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
+
 export const generateChatCompletion = async (token: string = '', body: object) => {
 	let controller = new AbortController();
 	let error = null;
 
-	const res = await fetch(`${OLLAMA_API_BASE_URL}/chat`, {
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/api/chat`, {
 		signal: controller.signal,
 		method: 'POST',
 		headers: {
@@ -265,7 +291,7 @@ export const cancelChatCompletion = async (token: string = '', requestId: string
 export const createModel = async (token: string, tagName: string, content: string) => {
 	let error = null;
 
-	const res = await fetch(`${OLLAMA_API_BASE_URL}/create`, {
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/api/create`, {
 		method: 'POST',
 		headers: {
 			'Content-Type': 'text/event-stream',
@@ -290,7 +316,7 @@ export const createModel = async (token: string, tagName: string, content: strin
 export const deleteModel = async (token: string, tagName: string) => {
 	let error = null;
 
-	const res = await fetch(`${OLLAMA_API_BASE_URL}/delete`, {
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/api/delete`, {
 		method: 'DELETE',
 		headers: {
 			'Content-Type': 'text/event-stream',
@@ -324,7 +350,7 @@ export const deleteModel = async (token: string, tagName: string) => {
 export const pullModel = async (token: string, tagName: string) => {
 	let error = null;
 
-	const res = await fetch(`${OLLAMA_API_BASE_URL}/pull`, {
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/api/pull`, {
 		method: 'POST',
 		headers: {
 			'Content-Type': 'text/event-stream',
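
On the frontend, every call now appends /api to OLLAMA_API_BASE_URL explicitly (the configured value is a bare base URL), and a new generateTextCompletion helper streams raw completions for the playground's text-completion mode. A hedged usage sketch; the function name comes from this diff, while the model id and prompt are placeholders:

import { generateTextCompletion } from '$lib/apis/ollama';

const runPlaygroundCompletion = async () => {
	// Placeholder model id and prompt, for illustration only.
	const res = await generateTextCompletion(localStorage.token, 'llama2:latest', 'Write a haiku about the sea');

	// res is the raw fetch Response (or null if the request failed); its body streams
	// newline-delimited JSON like {"response": "...", "done": false}, preceded by the
	// {"id": ..., "done": false} line the backend proxy injects (see the sketch above).
	return res;
};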

+ 12 - 8
src/lib/components/chat/MessageInput/Models.svelte

@@ -79,14 +79,18 @@
 								throw data;
 							}
 
-							if (data.done == false) {
-								if (prompt == '' && data.response == '\n') {
-									continue;
-								} else {
-									prompt += data.response;
-									console.log(data.response);
-									chatInputElement.scrollTop = chatInputElement.scrollHeight;
-									await tick();
+							if ('id' in data) {
+								console.log(data);
+							} else {
+								if (data.done == false) {
+									if (prompt == '' && data.response == '\n') {
+										continue;
+									} else {
+										prompt += data.response;
+										console.log(data.response);
+										chatInputElement.scrollTop = chatInputElement.scrollHeight;
+										await tick();
+									}
 								}
 							}
 						}

+ 2 - 2
src/lib/components/chat/Settings/Connections.svelte

@@ -114,12 +114,12 @@
 		<hr class=" dark:border-gray-700" />
 
 		<div>
-			<div class=" mb-2.5 text-sm font-medium">Ollama API URL</div>
+			<div class=" mb-2.5 text-sm font-medium">Ollama Base URL</div>
 			<div class="flex w-full">
 				<div class="flex-1 mr-2">
 					<input
 						class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
-						placeholder="Enter URL (e.g. http://localhost:11434/api)"
+						placeholder="Enter URL (e.g. http://localhost:11434)"
 						bind:value={API_BASE_URL}
 					/>
 				</div>

+ 26 - 0
src/lib/components/layout/Sidebar.svelte

@@ -594,6 +594,32 @@
 										</div>
 										<div class=" self-center font-medium">Admin Panel</div>
 									</button>
+
+									<button
+										class="flex py-2.5 px-3.5 w-full hover:bg-gray-800 transition"
+										on:click={() => {
+											goto('/playground');
+											showDropdown = false;
+										}}
+									>
+										<div class=" self-center mr-3">
+											<svg
+												xmlns="http://www.w3.org/2000/svg"
+												fill="none"
+												viewBox="0 0 24 24"
+												stroke-width="1.5"
+												stroke="currentColor"
+												class="w-5 h-5"
+											>
+												<path
+													stroke-linecap="round"
+													stroke-linejoin="round"
+													d="m6.75 7.5 3 2.25-3 2.25m4.5 0h3m-9 8.25h13.5A2.25 2.25 0 0 0 21 18V6a2.25 2.25 0 0 0-2.25-2.25H5.25A2.25 2.25 0 0 0 3 6v12a2.25 2.25 0 0 0 2.25 2.25Z"
+												/>
+											</svg>
+										</div>
+										<div class=" self-center font-medium">Playground</div>
+									</button>
 								{/if}
 
 								<button

+ 105 - 0
src/lib/components/playground/ChatCompletion.svelte

@@ -0,0 +1,105 @@
+<script lang="ts">
+	import { onMount } from 'svelte';
+
+	export let messages = [];
+
+	onMount(() => {
+		messages.forEach((message, idx) => {
+			let textareaElement = document.getElementById(`${message.role}-${idx}-textarea`);
+			textareaElement.style.height = '';
+			textareaElement.style.height = textareaElement.scrollHeight + 'px';
+		});
+	});
+</script>
+
+<div class="py-3 space-y-3">
+	{#each messages as message, idx}
+		<div class="flex gap-2 group">
+			<div class="flex items-start pt-1">
+				<button
+					class="px-2 py-1 text-sm font-semibold uppercase min-w-[6rem] text-left dark:group-hover:bg-gray-800 rounded-lg transition"
+					on:click={() => {
+						message.role = message.role === 'user' ? 'assistant' : 'user';
+					}}>{message.role}</button
+				>
+			</div>
+
+			<div class="flex-1">
+				<textarea
+					id="{message.role}-{idx}-textarea"
+					class="w-full bg-transparent outline-none rounded-lg p-2 text-sm resize-none overflow-hidden"
+					placeholder="Enter {message.role === 'user' ? 'a user' : 'an assistant'} message here"
+					rows="1"
+					on:input={(e) => {
+						e.target.style.height = '';
+						e.target.style.height = e.target.scrollHeight + 'px';
+					}}
+					on:focus={(e) => {
+						e.target.style.height = '';
+						e.target.style.height = e.target.scrollHeight + 'px';
+
+						// e.target.style.height = Math.min(e.target.scrollHeight, 200) + 'px';
+					}}
+					bind:value={message.content}
+				/>
+			</div>
+
+			<div class=" pt-1">
+				<button
+					class=" group-hover:text-gray-500 dark:text-gray-900 dark:hover:text-gray-300 transition"
+					on:click={() => {
+						messages = messages.filter((message, messageIdx) => messageIdx !== idx);
+					}}
+				>
+					<svg
+						xmlns="http://www.w3.org/2000/svg"
+						fill="none"
+						viewBox="0 0 24 24"
+						stroke-width="2"
+						stroke="currentColor"
+						class="w-5 h-5"
+					>
+						<path
+							stroke-linecap="round"
+							stroke-linejoin="round"
+							d="M15 12H9m12 0a9 9 0 1 1-18 0 9 9 0 0 1 18 0Z"
+						/>
+					</svg>
+				</button>
+			</div>
+		</div>
+
+		<hr class=" dark:border-gray-800" />
+	{/each}
+
+	<button
+		class="flex items-center gap-2 px-2 py-1"
+		on:click={() => {
+			console.log(messages.at(-1));
+			messages.push({
+				role: (messages.at(-1)?.role ?? 'assistant') === 'user' ? 'assistant' : 'user',
+				content: ''
+			});
+			messages = messages;
+		}}
+	>
+		<div>
+			<svg
+				xmlns="http://www.w3.org/2000/svg"
+				fill="none"
+				viewBox="0 0 24 24"
+				stroke-width="1.5"
+				stroke="currentColor"
+				class="w-5 h-5"
+			>
+				<path
+					stroke-linecap="round"
+					stroke-linejoin="round"
+					d="M12 9v6m3-3H9m12 0a9 9 0 1 1-18 0 9 9 0 0 1 18 0Z"
+				/>
+			</svg>
+		</div>
+
+		<div class=" text-sm font-medium">Add message</div>
+	</button>
+</div>
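
The component auto-sizes each message textarea by clearing style.height and setting it to scrollHeight, on mount as well as on input and focus. For illustration, the same pattern factored into a small helper (the component itself keeps it inline):

const autoGrow = (el: HTMLTextAreaElement) => {
	el.style.height = ''; // reset first so the textarea can also shrink
	el.style.height = `${el.scrollHeight}px`; // then grow to fit the current content
};

// e.g. on:input={(e) => autoGrow(e.currentTarget)}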

+ 0 - 0
src/lib/components/playground/TextCompletion.svelte


+ 472 - 0
src/routes/(app)/playground/+page.svelte

@@ -0,0 +1,472 @@
+<script>
+	import { goto } from '$app/navigation';
+
+	import { onMount, tick } from 'svelte';
+
+	import { toast } from 'svelte-sonner';
+
+	import {
+		LITELLM_API_BASE_URL,
+		OLLAMA_API_BASE_URL,
+		OPENAI_API_BASE_URL,
+		WEBUI_API_BASE_URL
+	} from '$lib/constants';
+	import { WEBUI_NAME, config, user, models, settings } from '$lib/stores';
+
+	import { cancelChatCompletion, generateChatCompletion } from '$lib/apis/ollama';
+	import { generateOpenAIChatCompletion } from '$lib/apis/openai';
+
+	import { splitStream } from '$lib/utils';
+	import ChatCompletion from '$lib/components/playground/ChatCompletion.svelte';
+
+	let mode = 'chat';
+	let loaded = false;
+
+	let text = '';
+
+	let selectedModelId = '';
+
+	let loading = false;
+	let currentRequestId;
+	let stopResponseFlag = false;
+
+	let system = '';
+	let messages = [
+		{
+			role: 'user',
+			content: ''
+		}
+	];
+
+	const scrollToBottom = () => {
+		let element;
+
+		if (mode === 'chat') {
+			element = document.getElementById('messages-container');
+		} else {
+			element = document.getElementById('text-completion-textarea');
+		}
+
+		if (element) {
+			element.scrollTop = element?.scrollHeight;
+		}
+	};
+
+	// const cancelHandler = async () => {
+	// 	if (currentRequestId) {
+	// 		const res = await cancelChatCompletion(localStorage.token, currentRequestId);
+	// 		currentRequestId = null;
+	// 		loading = false;
+	// 	}
+	// };
+
+	const stopResponse = () => {
+		stopResponseFlag = true;
+		console.log('stopResponse');
+	};
+
+	const textCompletionHandler = async () => {
+		const model = $models.find((model) => model.id === selectedModelId);
+
+		const res = await generateOpenAIChatCompletion(
+			localStorage.token,
+			{
+				model: model.id,
+				stream: true,
+				messages: [
+					{
+						role: 'assistant',
+						content: text
+					}
+				]
+			},
+			model.external
+				? model.source === 'litellm'
+					? `${LITELLM_API_BASE_URL}/v1`
+					: `${OPENAI_API_BASE_URL}`
+				: `${OLLAMA_API_BASE_URL}/v1`
+		);
+
+		if (res && res.ok) {
+			const reader = res.body
+				.pipeThrough(new TextDecoderStream())
+				.pipeThrough(splitStream('\n'))
+				.getReader();
+
+			while (true) {
+				const { value, done } = await reader.read();
+				if (done || stopResponseFlag) {
+					currentRequestId = null;
+					break;
+				}
+
+				try {
+					let lines = value.split('\n');
+
+					for (const line of lines) {
+						if (line !== '') {
+							if (line === 'data: [DONE]') {
+								// responseMessage.done = true;
+								console.log('done');
+							} else {
+								let data = JSON.parse(line.replace(/^data: /, ''));
+								console.log(data);
+
+								text += data.choices[0].delta.content ?? '';
+							}
+						}
+					}
+				} catch (error) {
+					console.log(error);
+				}
+
+				scrollToBottom();
+			}
+		}
+	};
+
+	const chatCompletionHandler = async () => {
+		const model = $models.find((model) => model.id === selectedModelId);
+
+		const res = await generateOpenAIChatCompletion(
+			localStorage.token,
+			{
+				model: model.id,
+				stream: true,
+				messages: [
+					system
+						? {
+								role: 'system',
+								content: system
+						  }
+						: undefined,
+					...messages
+				].filter((message) => message)
+			},
+			model.external
+				? model.source === 'litellm'
+					? `${LITELLM_API_BASE_URL}/v1`
+					: `${OPENAI_API_BASE_URL}`
+				: `${OLLAMA_API_BASE_URL}/v1`
+		);
+
+		// const [res, controller] = await generateChatCompletion(localStorage.token, {
+		// 	model: selectedModelId,
+		// 	messages: [
+		// 		{
+		// 			role: 'assistant',
+		// 			content: text
+		// 		}
+		// 	]
+		// });
+
+		let responseMessage;
+		if (messages.at(-1)?.role === 'assistant') {
+			responseMessage = messages.at(-1);
+		} else {
+			responseMessage = {
+				role: 'assistant',
+				content: ''
+			};
+			messages.push(responseMessage);
+			messages = messages;
+		}
+
+		await tick();
+		const textareaElement = document.getElementById(`assistant-${messages.length - 1}-textarea`);
+
+		if (res && res.ok) {
+			const reader = res.body
+				.pipeThrough(new TextDecoderStream())
+				.pipeThrough(splitStream('\n'))
+				.getReader();
+
+			while (true) {
+				const { value, done } = await reader.read();
+				if (done || stopResponseFlag) {
+					break;
+				}
+
+				try {
+					let lines = value.split('\n');
+
+					for (const line of lines) {
+						if (line !== '') {
+							console.log(line);
+							if (line === 'data: [DONE]') {
+								// responseMessage.done = true;
+								messages = messages;
+							} else {
+								let data = JSON.parse(line.replace(/^data: /, ''));
+								console.log(data);
+
+								if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
+									continue;
+								} else {
+									textareaElement.style.height = textareaElement.scrollHeight + 'px';
+
+									responseMessage.content += data.choices[0].delta.content ?? '';
+									messages = messages;
+
+									textareaElement.style.height = textareaElement.scrollHeight + 'px';
+
+									await tick();
+								}
+							}
+						}
+					}
+				} catch (error) {
+					console.log(error);
+				}
+
+				scrollToBottom();
+			}
+
+			// while (true) {
+			// 	const { value, done } = await reader.read();
+			// 	if (done || stopResponseFlag) {
+			// 		if (stopResponseFlag) {
+			// 			await cancelChatCompletion(localStorage.token, currentRequestId);
+			// 		}
+
+			// 		currentRequestId = null;
+			// 		break;
+			// 	}
+
+			// 	try {
+			// 		let lines = value.split('\n');
+
+			// 		for (const line of lines) {
+			// 			if (line !== '') {
+			// 				console.log(line);
+			// 				let data = JSON.parse(line);
+
+			// 				if ('detail' in data) {
+			// 					throw data;
+			// 				}
+
+			// 				if ('id' in data) {
+			// 					console.log(data);
+			// 					currentRequestId = data.id;
+			// 				} else {
+			// 					if (data.done == false) {
+			// 						text += data.message.content;
+			// 					} else {
+			// 						console.log('done');
+			// 					}
+			// 				}
+			// 			}
+			// 		}
+			// 	} catch (error) {
+			// 		console.log(error);
+			// 	}
+
+			// 	scrollToBottom();
+			// }
+		}
+	};
+
+	const submitHandler = async () => {
+		if (selectedModelId) {
+			loading = true;
+
+			if (mode === 'complete') {
+				await textCompletionHandler();
+			} else if (mode === 'chat') {
+				await chatCompletionHandler();
+			}
+
+			loading = false;
+			stopResponseFlag = false;
+			currentRequestId = null;
+		}
+	};
+
+	onMount(async () => {
+		if ($user?.role !== 'admin') {
+			await goto('/');
+		}
+
+		if ($settings?.models) {
+			selectedModelId = $settings?.models[0];
+		} else if ($config?.default_models) {
+			selectedModelId = $config?.default_models.split(',')[0];
+		} else {
+			selectedModelId = '';
+		}
+		loaded = true;
+	});
+</script>
+
+<svelte:head>
+	<title>
+		{`Playground | ${$WEBUI_NAME}`}
+	</title>
+</svelte:head>
+
+<div class="min-h-screen max-h-[100dvh] w-full flex justify-center dark:text-white">
+	<div class=" flex flex-col justify-between w-full overflow-y-auto h-[100dvh]">
+		<div class="max-w-2xl mx-auto w-full px-3 p-3 md:px-0 h-full">
+			<div class=" flex flex-col h-full">
+				<div class="flex flex-col justify-between mb-2.5 gap-1">
+					<div class="flex justify-between items-center gap-2">
+						<div class=" text-2xl font-semibold self-center flex">
+							Playground <span class=" text-xs text-gray-500 self-center ml-1">(Beta)</span>
+						</div>
+
+						<div>
+							<button
+								class=" flex items-center gap-0.5 text-xs px-2.5 py-0.5 rounded-lg {mode ===
+									'chat' && 'text-sky-600 dark:text-sky-200 bg-sky-200/30'} {mode === 'complete' &&
+									'text-green-600 dark:text-green-200 bg-green-200/30'} "
+								on:click={() => {
+									if (mode === 'complete') {
+										mode = 'chat';
+									} else {
+										mode = 'complete';
+									}
+								}}
+							>
+								{#if mode === 'complete'}
+									Text Completion
+								{:else if mode === 'chat'}
+									Chat
+								{/if}
+
+								<div>
+									<svg
+										xmlns="http://www.w3.org/2000/svg"
+										viewBox="0 0 16 16"
+										fill="currentColor"
+										class="w-3 h-3"
+									>
+										<path
+											fill-rule="evenodd"
+											d="M5.22 10.22a.75.75 0 0 1 1.06 0L8 11.94l1.72-1.72a.75.75 0 1 1 1.06 1.06l-2.25 2.25a.75.75 0 0 1-1.06 0l-2.25-2.25a.75.75 0 0 1 0-1.06ZM10.78 5.78a.75.75 0 0 1-1.06 0L8 4.06 6.28 5.78a.75.75 0 0 1-1.06-1.06l2.25-2.25a.75.75 0 0 1 1.06 0l2.25 2.25a.75.75 0 0 1 0 1.06Z"
+											clip-rule="evenodd"
+										/>
+									</svg>
+								</div>
+							</button>
+						</div>
+					</div>
+
+					<div class="  flex gap-1 px-1">
+						<select
+							id="models"
+							class="outline-none bg-transparent text-sm font-medium rounded-lg w-full placeholder-gray-400"
+							bind:value={selectedModelId}
+						>
+							<option class=" text-gray-800" value="" selected disabled>Select a model</option>
+
+							{#each $models as model}
+								{#if model.name === 'hr'}
+									<hr />
+								{:else}
+									<option value={model.id} class="text-gray-800 text-lg"
+										>{model.name +
+											`${model.size ? ` (${(model.size / 1024 ** 3).toFixed(1)}GB)` : ''}`}</option
+									>
+								{/if}
+							{/each}
+						</select>
+
+						<!-- <button
+							class=" self-center dark:hover:text-gray-300"
+							id="open-settings-button"
+							on:click={async () => {}}
+						>
+							<svg
+								xmlns="http://www.w3.org/2000/svg"
+								fill="none"
+								viewBox="0 0 24 24"
+								stroke-width="1.5"
+								stroke="currentColor"
+								class="w-4 h-4"
+							>
+								<path
+									stroke-linecap="round"
+									stroke-linejoin="round"
+									d="M10.343 3.94c.09-.542.56-.94 1.11-.94h1.093c.55 0 1.02.398 1.11.94l.149.894c.07.424.384.764.78.93.398.164.855.142 1.205-.108l.737-.527a1.125 1.125 0 011.45.12l.773.774c.39.389.44 1.002.12 1.45l-.527.737c-.25.35-.272.806-.107 1.204.165.397.505.71.93.78l.893.15c.543.09.94.56.94 1.109v1.094c0 .55-.397 1.02-.94 1.11l-.893.149c-.425.07-.765.383-.93.78-.165.398-.143.854.107 1.204l.527.738c.32.447.269 1.06-.12 1.45l-.774.773a1.125 1.125 0 01-1.449.12l-.738-.527c-.35-.25-.806-.272-1.203-.107-.397.165-.71.505-.781.929l-.149.894c-.09.542-.56.94-1.11.94h-1.094c-.55 0-1.019-.398-1.11-.94l-.148-.894c-.071-.424-.384-.764-.781-.93-.398-.164-.854-.142-1.204.108l-.738.527c-.447.32-1.06.269-1.45-.12l-.773-.774a1.125 1.125 0 01-.12-1.45l.527-.737c.25-.35.273-.806.108-1.204-.165-.397-.505-.71-.93-.78l-.894-.15c-.542-.09-.94-.56-.94-1.109v-1.094c0-.55.398-1.02.94-1.11l.894-.149c.424-.07.765-.383.93-.78.165-.398.143-.854-.107-1.204l-.527-.738a1.125 1.125 0 01.12-1.45l.773-.773a1.125 1.125 0 011.45-.12l.737.527c.35.25.807.272 1.204.107.397-.165.71-.505.78-.929l.15-.894z"
+								/>
+								<path
+									stroke-linecap="round"
+									stroke-linejoin="round"
+									d="M15 12a3 3 0 11-6 0 3 3 0 016 0z"
+								/>
+							</svg>
+						</button> -->
+					</div>
+				</div>
+
+				{#if mode === 'chat'}
+					<div class="p-1">
+						<div class="p-3 outline outline-1 outline-gray-200 dark:outline-gray-800 rounded-lg">
+							<div class=" text-sm font-medium">System</div>
+							<textarea
+								id="system-textarea"
+								class="w-full h-full bg-transparent resize-none outline-none text-sm"
+								bind:value={system}
+								placeholder="You're a helpful assistant."
+								rows="4"
+							/>
+						</div>
+					</div>
+				{/if}
+
+				<div
+					class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0"
+					id="messages-container"
+				>
+					<div class=" h-full w-full flex flex-col">
+						<div class="flex-1 p-1">
+							{#if mode === 'complete'}
+								<textarea
+									id="text-completion-textarea"
+									class="w-full h-full p-3 bg-transparent outline outline-1 outline-gray-200 dark:outline-gray-800 resize-none rounded-lg text-sm"
+									bind:value={text}
+									placeholder="You're a helpful assistant."
+								/>
+							{:else if mode === 'chat'}
+								<ChatCompletion bind:messages />
+							{/if}
+						</div>
+					</div>
+				</div>
+
+				<div class="pb-2">
+					{#if !loading}
+						<button
+							class="px-3 py-1.5 text-sm font-medium bg-emerald-600 hover:bg-emerald-700 text-gray-50 transition rounded-lg"
+							on:click={() => {
+								submitHandler();
+							}}
+						>
+							Submit
+						</button>
+					{:else}
+						<button
+							class="px-3 py-1.5 text-sm font-medium bg-gray-100 hover:bg-gray-200 text-gray-900 transition rounded-lg"
+							on:click={() => {
+								stopResponse();
+							}}
+						>
+							Cancel
+						</button>
+					{/if}
+				</div>
+			</div>
+		</div>
+	</div>
+</div>
+
+<style>
+	.scrollbar-hidden::-webkit-scrollbar {
+		display: none; /* for Chrome, Safari and Opera */
+	}
+
+	.scrollbar-hidden {
+		-ms-overflow-style: none; /* IE and Edge */
+		scrollbar-width: none; /* Firefox */
+	}
+</style>
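
The page pipes each response body through splitStream('\n') from $lib/utils, whose implementation is not part of this diff. Assuming it is a buffering, delimiter-splitting TransformStream, a minimal sketch consistent with how it is used here could look like this:

// Assumed sketch only: the real splitStream lives in '$lib/utils' and is not shown in
// this commit. This version buffers partial segments between chunks and emits complete
// pieces split on the delimiter, which matches how the playground page consumes it.
const splitStreamSketch = (delimiter: string): TransformStream<string, string> => {
	let buffer = '';
	return new TransformStream<string, string>({
		transform(chunk, controller) {
			buffer += chunk;
			const parts = buffer.split(delimiter);
			buffer = parts.pop() ?? ''; // keep the trailing partial segment for the next chunk
			for (const part of parts) controller.enqueue(part);
		},
		flush(controller) {
			if (buffer) controller.enqueue(buffer); // emit whatever is left at end of stream
		}
	});
};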