浏览代码

chore: change API_ENDPOINT to the conventional name API_BASE_URL

AJ ONeal 1 年之前
父节点
当前提交
859adee369
共有 6 个文件被更改,包括 23 次插入22 次删除
  1. 1 1
      .github/workflows/node.js.yaml
  2. 4 4
      Dockerfile
  3. 5 4
      README.md
  4. 1 1
      example.env
  5. 6 6
      src/lib/constants.ts
  6. 6 6
      src/routes/+page.svelte

+ 1 - 1
.github/workflows/node.js.yaml

@@ -7,7 +7,7 @@ jobs:
   build:
     name: 'Fmt, Lint, & Build'
     env:
-      PUBLIC_API_ENDPOINT: ''
+      PUBLIC_API_BASE_URL: ''
     runs-on: ubuntu-latest
     strategy:
       matrix:

+ 4 - 4
Dockerfile

@@ -3,13 +3,13 @@
 FROM node:latest
 WORKDIR /app
 
-ARG OLLAMA_API_ENDPOINT=''
-RUN echo $OLLAMA_API_ENDPOINT
+ARG OLLAMA_API_BASE_URL=''
+RUN echo $OLLAMA_API_BASE_URL
 
 ENV ENV prod
 
-ENV PUBLIC_API_ENDPOINT $OLLAMA_API_ENDPOINT
-RUN echo $PUBLIC_API_ENDPOINT
+ENV PUBLIC_API_BASE_URL $OLLAMA_API_BASE_URL
+RUN echo $PUBLIC_API_BASE_URL
 
 COPY package.json package-lock.json ./ 
 RUN npm ci

+ 5 - 4
README.md

@@ -24,7 +24,7 @@ ChatGPT-Style Web Interface for Ollama 🦙
 
 - 💻 **Code Syntax Highlighting**: Enjoy enhanced code readability with our syntax highlighting feature.
 
-- 🔗 **External Ollama Server Connection**: You can seamlessly connect to an external Ollama server hosted on a different address by setting the environment variable during the Docker build process. Execute the following command to include the Ollama API endpoint in the Docker image: `docker build --build-arg OLLAMA_API_ENDPOINT="http://[Your Ollama URL]/api" -t ollama-webui .`.
+- 🔗 **External Ollama Server Connection**: You can seamlessly connect to an external Ollama server hosted on a different address by setting the environment variable during the Docker build process. Execute the following command to include the Ollama API base URL in the Docker image: `docker build --build-arg OLLAMA_API_BASE_URL='http://localhost:11434/api' -t ollama-webui .`.
 
 - 🌟 **Continuous Updates**: We are committed to improving Ollama Web UI with regular updates and new features.
 
@@ -49,7 +49,8 @@ OLLAMA_HOST=0.0.0.0 OLLAMA_ORIGINS=* ollama serve
 ### Using Docker 🐳
 
 ```bash
-docker build -t ollama-webui .
+docker build --build-arg OLLAMA_API_BASE_URL='http://localhost:11434/api' -t ollama-webui .
+
 docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui
 ```
 
@@ -57,10 +58,10 @@ Your Ollama Web UI should now be hosted at [http://localhost:3000](http://localh
 
 #### Connecting to Ollama on a Different Server
 
-If Ollama is hosted on a server other than your local machine, you can connect to it using the following environment variable:
+If Ollama is hosted on a server other than your local machine, change `OLLAMA_API_BASE_URL` to match:
 
 ```bash
-docker build --build-arg OLLAMA_API_ENDPOINT="http://[Your Ollama URL]/api" -t ollama-webui .
+docker build --build-arg OLLAMA_API_BASE_URL='https://example.com/api' -t ollama-webui .
 docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui
 ```
 

+ 1 - 1
.env.example → example.env

@@ -1,4 +1,4 @@
-PUBLIC_API_ENDPOINT="http://localhost:11434/api"
+PUBLIC_API_BASE_URL="http://localhost:11434/api"
 
 OLLAMA_API_ID='my-api-token'
 OLLAMA_API_TOKEN='xxxxxxxxxxxxxxxx'

+ 6 - 6
src/lib/constants.ts

@@ -1,12 +1,12 @@
 import { browser } from '$app/environment';
-import { PUBLIC_API_ENDPOINT } from '$env/static/public';
+import { PUBLIC_API_BASE_URL } from '$env/static/public';
 
-export const API_ENDPOINT =
-	PUBLIC_API_ENDPOINT === ''
+export const API_BASE_URL =
+	PUBLIC_API_BASE_URL === ''
 		? browser
 			? `http://${location.hostname}:11434/api`
 			: `http://localhost:11434/api`
-		: PUBLIC_API_ENDPOINT;
+		: PUBLIC_API_BASE_URL;
 
 // Source: https://kit.svelte.dev/docs/modules#$env-static-public
 // This feature, akin to $env/static/private, exclusively incorporates environment variables
@@ -14,6 +14,6 @@ export const API_ENDPOINT =
 // Consequently, these variables can be securely exposed to client-side code.
 
 // Example of the .env configuration:
-// OLLAMA_API_ENDPOINT="http://localhost:11434/api"
+// OLLAMA_API_BASE_URL="http://localhost:11434/api"
 // # Public
-// PUBLIC_API_ENDPOINT=$OLLAMA_API_ENDPOINT
+// PUBLIC_API_BASE_URL=$OLLAMA_API_BASE_URL

+ 6 - 6
src/routes/+page.svelte

@@ -7,7 +7,7 @@
 	const { saveAs } = fileSaver;
 	import hljs from 'highlight.js';
 	import 'highlight.js/styles/dark.min.css';
-	import { API_ENDPOINT } from '$lib/constants';
+	import { API_BASE_URL } from '$lib/constants';
 	import { onMount, tick } from 'svelte';
 
 	import Navbar from '$lib/components/layout/Navbar.svelte';
@@ -31,8 +31,8 @@
 	let messages = [];
 
 	onMount(async () => {
-		console.log(API_ENDPOINT);
-		const res = await fetch(`${API_ENDPOINT}/tags`, {
+		console.log(API_BASE_URL);
+		const res = await fetch(`${API_BASE_URL}/tags`, {
 			method: 'GET',
 			headers: {
 				Accept: 'application/json',
@@ -277,7 +277,7 @@
 			messages = [...messages, responseMessage];
 			window.scrollTo({ top: document.body.scrollHeight });
 
-			const res = await fetch(`${API_ENDPOINT}/generate`, {
+			const res = await fetch(`${API_BASE_URL}/generate`, {
 				method: 'POST',
 				headers: {
 					'Content-Type': 'text/event-stream'
@@ -373,7 +373,7 @@
 			messages = [...messages, responseMessage];
 			window.scrollTo({ top: document.body.scrollHeight });
 
-			const res = await fetch(`${API_ENDPOINT}/generate`, {
+			const res = await fetch(`${API_BASE_URL}/generate`, {
 				method: 'POST',
 				headers: {
 					'Content-Type': 'text/event-stream'
@@ -453,7 +453,7 @@
 	const generateTitle = async (user_prompt) => {
 		console.log('generateTitle');
 
-		const res = await fetch(`${API_ENDPOINT}/generate`, {
+		const res = await fetch(`${API_BASE_URL}/generate`, {
 			method: 'POST',
 			headers: {
 				'Content-Type': 'text/event-stream'