
feat: enable buildtime API_ENDPOINT env var

Timothy J. Baek 1 year ago
parent
commit
86395a8c1f
10 changed files with 645 additions and 63 deletions
  1. .env.example (+2 -0)
  2. Dockerfile (+8 -3)
  3. README.md (+14 -4)
  4. package-lock.json (+584 -19)
  5. package.json (+2 -0)
  6. run.sh (+1 -1)
  7. src/lib/constants.ts (+18 -6)
  8. src/routes/+layout.js (+1 -1)
  9. src/routes/+page.server.ts (+0 -8)
  10. src/routes/+page.svelte (+15 -21)

+ 2 - 0
.env.example

@@ -0,0 +1,2 @@
+OLLAMA_API_ENDPOINT=""
+PUBLIC_API_ENDPOINT="$OLLAMA_API_ENDPOINT"

+ 8 - 3
Dockerfile

@@ -1,15 +1,20 @@
 # syntax=docker/dockerfile:1
 
 FROM node:latest
-
 WORKDIR /app
+
+ARG OLLAMA_API_ENDPOINT=''
+RUN echo $OLLAMA_API_ENDPOINT
+
 ENV ENV prod
 
+ENV PUBLIC_API_ENDPOINT $OLLAMA_API_ENDPOINT
+RUN echo $PUBLIC_API_ENDPOINT
+
 COPY package.json package-lock.json ./ 
 RUN npm ci
 
-
 COPY . .
 RUN npm run build
 
-CMD [ "node", "./build/index.js"]
+CMD [ "npm", "run", "start"]

+ 14 - 4
README.md

@@ -7,15 +7,25 @@ ChatGPT-Style Web Interface for Ollama 🦙
 ## Features ⭐
 
 - 🖥️ **Intuitive Interface**: Our chat interface takes inspiration from ChatGPT, ensuring a user-friendly experience.
+
 - 📱 **Responsive Design**: Enjoy a seamless experience on both desktop and mobile devices.
+
 - ⚡ **Swift Responsiveness**: Enjoy fast and responsive performance.
+
 - 🚀 **Effortless Setup**: Install seamlessly using Docker for a hassle-free experience.
+
 - 🤖 **Multiple Model Support**: Seamlessly switch between different chat models for diverse interactions.
+
 - 📜 **Chat History**: Effortlessly access and manage your conversation history.
+
 - 📤📥 **Import/Export Chat History**: Seamlessly move your chat data in and out of the platform.
+
 - ⚙️ **Fine-Tuned Control with Advanced Parameters**: Gain a deeper level of control by adjusting parameters such as temperature and defining your system prompts to tailor the conversation to your specific preferences and needs.
+
 - 💻 **Code Syntax Highlighting**: Enjoy enhanced code readability with our syntax highlighting feature.
-- 🔗 **External Ollama Server Connection**: Link to the model when Ollama is hosted on a different server via the environment variable -e OLLAMA_ENDPOINT="http://[insert your Ollama address]".
+
+- 🔗 **External Ollama Server Connection**: You can seamlessly connect to an external Ollama server hosted on a different address by setting the environment variable during the Docker build process. Execute the following command to include the Ollama API endpoint in the Docker image: `docker build --build-arg OLLAMA_API_ENDPOINT="http://[Your Ollama URL]/api" -t ollama-webui .`.
+
 - 🌟 **Continuous Updates**: We are committed to improving Ollama Web UI with regular updates and new features.
 
 ## How to Install 🚀
@@ -40,7 +50,7 @@ OLLAMA_HOST=0.0.0.0 OLLAMA_ORIGINS=* ollama serve
 
 ```bash
 docker build -t ollama-webui .
-docker run -d -p 3000:3000 --add-host=host.docker.internal:host-gateway --name ollama-webui --restart always ollama-webui
+docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui
 ```
 
 Your Ollama Web UI should now be hosted at [http://localhost:3000](http://localhost:3000). Enjoy! 😄
@@ -50,8 +60,8 @@ Your Ollama Web UI should now be hosted at [http://localhost:3000](http://localh
 If Ollama is hosted on a server other than your local machine, you can connect to it using the following environment variable:
 
 ```bash
-docker build -t ollama-webui .
-docker run -d -p 3000:3000 --add-host=host.docker.internal:host-gateway -e OLLAMA_ENDPOINT="http://[insert your ollama url]" --name ollama-webui --restart always ollama-webui
+docker build --build-arg OLLAMA_API_ENDPOINT="http://[Your Ollama URL]/api" -t ollama-webui .
+docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui
 ```
 
 ## What's Next? 🚀

+ 584 - 19
package-lock.json

The diff is not shown because the file is too large.

+ 2 - 0
package.json

@@ -3,6 +3,7 @@
 	"version": "0.0.1",
 	"private": true,
 	"scripts": {
+		"start": "http-server ./build",
 		"dev": "vite dev --host",
 		"build": "vite build",
 		"preview": "vite preview",
@@ -40,6 +41,7 @@
 		"@sveltejs/adapter-node": "^1.3.1",
 		"file-saver": "^2.0.5",
 		"highlight.js": "^11.9.0",
+		"http-server": "^14.1.1",
 		"idb": "^7.1.1",
 		"marked": "^9.1.0",
 		"svelte-french-toast": "^1.2.0",

+ 1 - 1
run.sh

@@ -1,5 +1,5 @@
 docker stop ollama-webui || true
 docker rm ollama-webui || true
 docker build -t ollama-webui .
-docker run -d -p 3000:3000 --add-host=host.docker.internal:host-gateway --name ollama-webui --restart always ollama-webui
+docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui
 docker image prune -f

+ 18 - 6
src/lib/constants.ts

@@ -1,7 +1,19 @@
-import { browser, dev } from '$app/environment';
+import { browser } from '$app/environment';
+import { PUBLIC_API_ENDPOINT } from '$env/static/public';
 
-export const API_ENDPOINT = browser
-	? `https://localhost/api`
-	: dev
-	? `http://localhost:11434/api`
-	: 'http://host.docker.internal:11434/api';
+export const API_ENDPOINT =
+	PUBLIC_API_ENDPOINT === ''
+		? browser
+			? `http://${location.hostname}:11434/api`
+			: `http://localhost:11434/api`
+		: PUBLIC_API_ENDPOINT;
+
+// Source: https://kit.svelte.dev/docs/modules#$env-static-public
+// This feature, akin to $env/static/private, exclusively incorporates environment variables
+// that are prefixed with config.kit.env.publicPrefix (usually set to PUBLIC_).
+// Consequently, these variables can be securely exposed to client-side code.
+
+// Example of the .env configuration:
+// OLLAMA_API_ENDPOINT="http://localhost:11434/api"
+// # Public
+// PUBLIC_API_ENDPOINT=$OLLAMA_API_ENDPOINT
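
When `PUBLIC_API_ENDPOINT` is left empty, the UI falls back to `http://<hostname>:11434/api` in the browser. A quick, hypothetical sanity check of that fallback, assuming a local Ollama on its default port, is to request the same `/tags` route the app fetches on mount:

```bash
# Hypothetical check: confirm the default endpoint answers before building the image.
curl http://localhost:11434/api/tags
```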

+ 1 - 1
src/routes/+layout.js

@@ -7,7 +7,7 @@ export const prerender = true;
 // you have to set ssr to false.
 // This is not the case (so set as true or comment the line)
 // Documentation: https://kit.svelte.dev/docs/page-options#ssr
-export const ssr = true;
+// export const ssr = false;
 
 // How to manage the trailing slashes in the URLs
 // the URL for about page witll be /about with 'ignore' (default)

+ 0 - 8
src/routes/+page.server.ts

@@ -1,8 +0,0 @@
-import type { PageServerLoad } from './$types';
-
-export const load: PageServerLoad = () => {
-	const API_ENDPOINT = process.env.API_ENDPOINT;
-	return {
-		API_ENDPOINT
-	};
-};

+ 15 - 21
src/routes/+page.svelte

@@ -7,23 +7,13 @@
 	const { saveAs } = fileSaver;
 	import hljs from 'highlight.js';
 	import 'highlight.js/styles/dark.min.css';
-
-	import type { PageData } from './$types';
-	import { API_ENDPOINT as DEV_API_ENDPOINT } from '$lib/constants';
+	import { API_ENDPOINT } from '$lib/constants';
 	import { onMount, tick } from 'svelte';
-	import { page } from '$app/stores';
-	const suggestions = ''; // $page.url.searchParams.get('suggestions');
 
 	import Navbar from '$lib/components/layout/Navbar.svelte';
 	import SettingsModal from '$lib/components/chat/SettingsModal.svelte';
 
-	/* export let data: PageData; */
-	/* $: ({ API_ENDPOINT } = data); */
-	/* if (!API_ENDPOINT) { */
-	/*     API_ENDPOINT = DEV_API_ENDPOINT; */
-	/* } */
-	/* console.log('API_ENDPOINT',API_ENDPOINT) */
-	/* console.log('DEV_API_ENDPOINT', DEV_API_ENDPOINT) */
+	let suggestions = ''; // $page.url.searchParams.get('suggestions');
 
 	let models = [];
 	let textareaElement;
@@ -41,20 +31,24 @@
 	let messages = [];
 
 	onMount(async () => {
-		/* console.log('API_ENDPOINT 2', API_ENDPOINT) */
-		const resp = await fetch(`${DEV_API_ENDPOINT}/tags`, {
+		console.log(API_ENDPOINT);
+		const res = await fetch(`${API_ENDPOINT}/tags`, {
 			method: 'GET',
 			headers: {
 				Accept: 'application/json',
 				'Content-Type': 'application/json'
 			}
-		});
-		if (!resp.ok) {
-			let msg = await resp.text();
-			let err = new Error(msg);
-			throw err;
-		}
-		const data = await resp.json();
+		})
+			.then(async (res) => {
+				if (!res.ok) throw await res.json();
+				return res.json();
+			})
+			.catch((error) => {
+				console.log(error);
+				return { models: [] };
+			});
+
+		const data = res;
 		models = data.models;
 
 		let settings = localStorage.getItem('settings');

Not all changed files are shown because too many files changed in this diff.