
Merge branch 'main' of https://github.com/anuraagdjain/ollama-webui into feat/parallel-model-downloads

Anuraag Jain, 1 year ago
parent commit 17a6ca505b
68 changed files with 3903 additions and 2081 deletions
  1. .github/ISSUE_TEMPLATE/bug_report.md (+1 -1)
  2. .github/ISSUE_TEMPLATE/feature_request.md (+0 -1)
  3. .github/workflows/format-backend.yaml (+27 -0)
  4. .github/workflows/format-build-frontend.yaml (+22 -0)
  5. .github/workflows/lint-backend.disabled (+27 -0)
  6. .github/workflows/lint-frontend.disabled (+21 -0)
  7. .github/workflows/node.js.yaml (+0 -27)
  8. Dockerfile (+6 -9)
  9. README.md (+18 -4)
  10. TROUBLESHOOTING.md (+2 -2)
  11. backend/apps/ollama/main.py (+85 -93)
  12. backend/apps/ollama/old_main.py (+127 -0)
  13. backend/apps/openai/main.py (+143 -0)
  14. backend/apps/web/main.py (+14 -4)
  15. backend/apps/web/models/auths.py (+9 -1)
  16. backend/apps/web/models/chats.py (+18 -21)
  17. backend/apps/web/models/modelfiles.py (+14 -13)
  18. backend/apps/web/models/prompts.py (+115 -0)
  19. backend/apps/web/models/users.py (+17 -1)
  20. backend/apps/web/routers/auths.py (+34 -32)
  21. backend/apps/web/routers/chats.py (+16 -14)
  22. backend/apps/web/routers/configs.py (+40 -0)
  23. backend/apps/web/routers/modelfiles.py (+21 -22)
  24. backend/apps/web/routers/prompts.py (+116 -0)
  25. backend/apps/web/routers/users.py (+58 -4)
  26. backend/apps/web/routers/utils.py (+9 -8)
  27. backend/config.py (+13 -4)
  28. backend/constants.py (+5 -3)
  29. backend/main.py (+9 -2)
  30. backend/start.sh (+3 -1)
  31. backend/utils/misc.py (+7 -0)
  32. backend/utils/utils.py (+4 -1)
  33. bun.lockb (BIN)
  34. demo.gif (BIN)
  35. docker-compose.data.yaml (+1 -1)
  36. docker-compose.yaml (+1 -1)
  37. example.env (+3 -9)
  38. package-lock.json (+164 -551)
  39. package.json (+11 -9)
  40. run-ollama-docker.sh (+7 -0)
  41. src/lib/apis/configs/index.ts (+31 -0)
  42. src/lib/apis/ollama/index.ts (+107 -38)
  43. src/lib/apis/openai/index.ts (+201 -3)
  44. src/lib/apis/prompts/index.ts (+178 -0)
  45. src/lib/apis/users/index.ts (+40 -0)
  46. src/lib/components/admin/EditUserModal.svelte (+172 -0)
  47. src/lib/components/chat/MessageInput.svelte (+1 -1)
  48. src/lib/components/chat/MessageInput/PromptCommands.svelte (+56 -172)
  49. src/lib/components/chat/MessageInput/Suggestions.svelte (+4 -2)
  50. src/lib/components/chat/Messages/Placeholder.svelte (+1 -1)
  51. src/lib/components/chat/ModelSelector.svelte (+15 -2)
  52. src/lib/components/chat/SettingsModal.svelte (+310 -296)
  53. src/lib/components/common/Modal.svelte (+13 -1)
  54. src/lib/components/layout/Sidebar.svelte (+308 -231)
  55. src/lib/constants.ts (+4 -10)
  56. src/lib/stores/index.ts (+3 -0)
  57. src/lib/utils/index.ts (+2 -2)
  58. src/routes/(app)/+layout.svelte (+18 -22)
  59. src/routes/(app)/+page.svelte (+222 -221)
  60. src/routes/(app)/admin/+page.svelte (+48 -1)
  61. src/routes/(app)/c/[id]/+page.svelte (+185 -201)
  62. src/routes/(app)/modelfiles/+page.svelte (+26 -7)
  63. src/routes/(app)/modelfiles/create/+page.svelte (+2 -8)
  64. src/routes/(app)/modelfiles/edit/+page.svelte (+1 -8)
  65. src/routes/(app)/prompts/+page.svelte (+309 -0)
  66. src/routes/(app)/prompts/create/+page.svelte (+222 -0)
  67. src/routes/(app)/prompts/edit/+page.svelte (+221 -0)
  68. static/manifest.json (+15 -15)

+ 1 - 1
.github/ISSUE_TEMPLATE/bug_report.md

@@ -4,7 +4,6 @@ about: Create a report to help us improve
 title: ''
 labels: ''
 assignees: ''
-
 ---
 
 # Bug Report
@@ -31,6 +30,7 @@ assignees: ''
 ## Reproduction Details
 
 **Confirmation:**
+
 - [ ] I have read and followed all the instructions provided in the README.md.
 - [ ] I have reviewed the troubleshooting.md document.
 - [ ] I have included the browser console logs.

+ 0 - 1
.github/ISSUE_TEMPLATE/feature_request.md

@@ -4,7 +4,6 @@ about: Suggest an idea for this project
 title: ''
 labels: ''
 assignees: ''
-
 ---
 
 **Is your feature request related to a problem? Please describe.**

+ 27 - 0
.github/workflows/format-backend.yaml

@@ -0,0 +1,27 @@
+name: Python CI
+on:
+  push:
+    branches: ['main']
+  pull_request:
+jobs:
+  build:
+    name: 'Format Backend'
+    env:
+      PUBLIC_API_BASE_URL: ''
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        node-version:
+          - latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Use Python
+        uses: actions/setup-python@v4
+      - name: Use Bun
+        uses: oven-sh/setup-bun@v1
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install yapf
+      - name: Format backend
+        run: bun run format:backend

+ 22 - 0
.github/workflows/format-build-frontend.yaml

@@ -0,0 +1,22 @@
+name: Bun CI
+on:
+  push:
+    branches: ['main']
+  pull_request:
+jobs:
+  build:
+    name: 'Format & Build Frontend'
+    env:
+      PUBLIC_API_BASE_URL: ''
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Use Bun
+        uses: oven-sh/setup-bun@v1
+      - run: bun --version
+      - name: Install frontend dependencies
+        run: bun install
+      - name: Format frontend
+        run: bun run format
+      - name: Build frontend
+        run: bun run build

+ 27 - 0
.github/workflows/lint-backend.disabled

@@ -0,0 +1,27 @@
+name: Python CI
+on:
+  push:
+    branches: ['main']
+  pull_request:
+jobs:
+  build:
+    name: 'Lint Backend'
+    env:
+      PUBLIC_API_BASE_URL: ''
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        node-version:
+          - latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Use Python
+        uses: actions/setup-python@v4
+      - name: Use Bun
+        uses: oven-sh/setup-bun@v1
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install pylint
+      - name: Lint backend
+        run: bun run lint:backend

+ 21 - 0
.github/workflows/lint-frontend.disabled

@@ -0,0 +1,21 @@
+name: Bun CI
+on:
+  push:
+    branches: ['main']
+  pull_request:
+jobs:
+  build:
+    name: 'Lint Frontend'
+    env:
+      PUBLIC_API_BASE_URL: ''
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Use Bun
+        uses: oven-sh/setup-bun@v1
+      - run: bun --version
+      - name: Install frontend dependencies
+        run: bun install --frozen-lockfile
+      - run: bun run lint:frontend
+      - run: bun run lint:types
+        if: success() || failure()

+ 0 - 27
.github/workflows/node.js.yaml

@@ -1,27 +0,0 @@
-name: Node.js CI
-on:
-  push:
-    branches: ['main']
-  pull_request:
-jobs:
-  build:
-    name: 'Fmt, Lint, & Build'
-    env:
-      PUBLIC_API_BASE_URL: ''
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        node-version:
-          - latest
-    steps:
-      - uses: actions/checkout@v3
-      - name: Use Node.js ${{ matrix.node-version }}
-        uses: actions/setup-node@v3
-        with:
-          node-version: ${{ matrix.node-version }}
-      - run: node --version
-      - run: npm clean-install
-      - run: npm run fmt
-        #- run: npm run lint
-        #- run: npm run lint:types
-      - run: npm run build

+ 6 - 9
Dockerfile

@@ -2,12 +2,6 @@
 
 FROM node:alpine as build
 
-ARG OLLAMA_API_BASE_URL='/ollama/api'
-RUN echo $OLLAMA_API_BASE_URL
-
-ENV PUBLIC_API_BASE_URL $OLLAMA_API_BASE_URL
-RUN echo $PUBLIC_API_BASE_URL
-
 WORKDIR /app
 
 COPY package.json package-lock.json ./ 
@@ -18,10 +12,13 @@ RUN npm run build
 
 FROM python:3.11-slim-buster as base
 
-ARG OLLAMA_API_BASE_URL='/ollama/api'
-
 ENV ENV=prod
-ENV OLLAMA_API_BASE_URL $OLLAMA_API_BASE_URL
+
+ENV OLLAMA_API_BASE_URL "/ollama/api"
+
+ENV OPENAI_API_BASE_URL ""
+ENV OPENAI_API_KEY ""
+
 ENV WEBUI_JWT_SECRET_KEY "SECRET_KEY"
 
 WORKDIR /app

+ 18 - 4
README.md

@@ -33,6 +33,10 @@ Also check our sibling project, [OllamaHub](https://ollamahub.com/), where you c
 
 - ✒️🔢 **Full Markdown and LaTeX Support**: Elevate your LLM experience with comprehensive Markdown and LaTeX capabilities for enriched interaction.
 
+- 📜 **Prompt Preset Support**: Instantly access preset prompts using the '/' command in the chat input. Load predefined conversation starters effortlessly and expedite your interactions. Effortlessly import prompts through [OllamaHub](https://ollamahub.com/) integration.
+
+- 👍👎 **RLHF Annotation**: Empower your messages by rating them with thumbs up and thumbs down, facilitating the creation of datasets for Reinforcement Learning from Human Feedback (RLHF). Utilize your messages to train or fine-tune models, all while ensuring the confidentiality of locally saved data.
+
 - 📥🗑️ **Download/Delete Models**: Easily download or remove models directly from the web UI.
 
 - ⬆️ **GGUF File Model Creation**: Effortlessly create Ollama models by uploading GGUF files directly from the web UI. Streamlined process with options to upload from your machine or download GGUF files from Hugging Face.
@@ -194,9 +198,15 @@ While we strongly recommend using our convenient Docker container installation f
 
 The Ollama Web UI consists of two primary components: the frontend and the backend (which serves as a reverse proxy, handling static frontend files, and additional features). Both need to be running concurrently for the development environment.
 
-**Warning: Backend Dependency for Proper Functionality**
+> [!IMPORTANT]
+> The backend is required for proper functionality
+
+### Requirements 📦
 
-### TL;DR 🚀
+- 🐰 [Bun](https://bun.sh) >= 1.0.21 or 🐢 [Node.js](https://nodejs.org/en) >= 20.10
+- 🐍 [Python](https://python.org) >= 3.11
+
+### Build and Install 🛠️
 
 Run the following commands to install:
 
@@ -207,13 +217,17 @@ cd ollama-webui/
 # Copying required .env file
 cp -RPp example.env .env
 
-# Building Frontend
+# Building Frontend Using Node
 npm i
 npm run build
 
+# or Building Frontend Using Bun
+# bun install
+# bun run build
+
 # Serving Frontend with the Backend
 cd ./backend
-pip install -r requirements.txt
+pip install -r requirements.txt -U
 sh start.sh
 ```
 

+ 2 - 2
TROUBLESHOOTING.md

@@ -4,7 +4,7 @@
 
 The Ollama WebUI system is designed to streamline interactions between the client (your browser) and the Ollama API. At the heart of this design is a backend reverse proxy, enhancing security and resolving CORS issues.
 
-- **How it Works**: When you make a request (like `/ollama/api/tags`) from the Ollama WebUI, it doesn’t go directly to the Ollama API. Instead, it first reaches the Ollama WebUI backend. The backend then forwards this request to the Ollama API via the route you define in the `OLLAMA_API_BASE_URL` environment variable. For instance, a request to `/ollama/api/tags` in the WebUI is equivalent to `OLLAMA_API_BASE_URL/tags` in the backend.
+- **How it Works**: The Ollama WebUI is designed to interact with the Ollama API through a specific route. When a request is made from the WebUI to Ollama, it is not directly sent to the Ollama API. Initially, the request is sent to the Ollama WebUI backend via `/ollama/api` route. From there, the backend is responsible for forwarding the request to the Ollama API. This forwarding is accomplished by using the route specified in the `OLLAMA_API_BASE_URL` environment variable. Therefore, a request made to `/ollama/api` in the WebUI is effectively the same as making a request to `OLLAMA_API_BASE_URL` in the backend. For instance, a request to `/ollama/api/tags` in the WebUI is equivalent to `OLLAMA_API_BASE_URL/tags` in the backend.
 
 - **Security Benefits**: This design prevents direct exposure of the Ollama API to the frontend, safeguarding against potential CORS (Cross-Origin Resource Sharing) issues and unauthorized access. Requiring authentication to access the Ollama API further enhances this security layer.
 
@@ -27,6 +27,6 @@ docker run -d --network=host -v ollama-webui:/app/backend/data -e OLLAMA_API_BAS
 1. **Verify Ollama URL Format**:
    - When running the Web UI container, ensure the `OLLAMA_API_BASE_URL` is correctly set, including the `/api` suffix. (e.g., `http://192.168.1.1:11434/api` for different host setups).
    - In the Ollama WebUI, navigate to "Settings" > "General".
-   - Confirm that the Ollama Server URL is correctly set to `/ollama/api`, including the `/api` suffix.
+   - Confirm that the Ollama Server URL is correctly set to `[OLLAMA URL]/api` (e.g., `http://localhost:11434/api`), including the `/api` suffix.
 
 By following these enhanced troubleshooting steps, connection issues should be effectively resolved. For further assistance or queries, feel free to reach out to us on our community Discord.
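
To make the routing described above concrete, here is a small Python sketch of the equivalence. The host names, port, and token are assumptions for a typical local setup, not values taken from this commit; the proxied request also needs the WebUI's Bearer token, since the backend requires authentication.

import requests

WEBUI_BACKEND = "http://localhost:8080"              # assumed WebUI backend address
OLLAMA_API_BASE_URL = "http://localhost:11434/api"   # assumed upstream Ollama API
TOKEN = "<your WebUI JWT>"                           # placeholder, required by the proxy

# A request routed through the WebUI backend reverse proxy...
via_proxy = requests.get(
    f"{WEBUI_BACKEND}/ollama/api/tags",
    headers={"Authorization": f"Bearer {TOKEN}"},
)

# ...is forwarded to the same upstream route the backend would call directly:
direct = requests.get(f"{OLLAMA_API_BASE_URL}/tags")

print(via_proxy.status_code, direct.status_code)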

+ 85 - 93
backend/apps/ollama/main.py

@@ -1,119 +1,111 @@
-from flask import Flask, request, Response, jsonify
-from flask_cors import CORS
-
+from fastapi import FastAPI, Request, Response, HTTPException, Depends
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.responses import StreamingResponse
+from fastapi.concurrency import run_in_threadpool
 
 import requests
 import json
-
+from pydantic import BaseModel
 
 from apps.web.models.users import Users
 from constants import ERROR_MESSAGES
-from utils.utils import decode_token
+from utils.utils import decode_token, get_current_user
 from config import OLLAMA_API_BASE_URL, WEBUI_AUTH
 
-app = Flask(__name__)
-CORS(
-    app
-)  # Enable Cross-Origin Resource Sharing (CORS) to allow requests from different domains
+app = FastAPI()
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
 
-# Define the target server URL
-TARGET_SERVER_URL = OLLAMA_API_BASE_URL
+app.state.OLLAMA_API_BASE_URL = OLLAMA_API_BASE_URL
 
+# TARGET_SERVER_URL = OLLAMA_API_BASE_URL
 
-@app.route("/", defaults={"path": ""}, methods=["GET", "POST", "PUT", "DELETE"])
-@app.route("/<path:path>", methods=["GET", "POST", "PUT", "DELETE"])
-def proxy(path):
-    # Combine the base URL of the target server with the requested path
-    target_url = f"{TARGET_SERVER_URL}/{path}"
-    print(target_url)
 
-    # Get data from the original request
-    data = request.get_data()
-    headers = dict(request.headers)
+@app.get("/url")
+async def get_ollama_api_url(user=Depends(get_current_user)):
+    if user and user.role == "admin":
+        return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
+    else:
+        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
+
+
+class UrlUpdateForm(BaseModel):
+    url: str
 
-    # Basic RBAC support
-    if WEBUI_AUTH:
-        if "Authorization" in headers:
-            _, credentials = headers["Authorization"].split()
-            token_data = decode_token(credentials)
-            if token_data is None or "email" not in token_data:
-                return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401
-
-            user = Users.get_user_by_email(token_data["email"])
-            if user:
-                # Only user and admin roles can access
-                if user.role in ["user", "admin"]:
-                    if path in ["pull", "delete", "push", "copy", "create"]:
-                        # Only admin role can perform actions above
-                        if user.role == "admin":
-                            pass
-                        else:
-                            return (
-                                jsonify({"detail": ERROR_MESSAGES.ACCESS_PROHIBITED}),
-                                401,
-                            )
-                    else:
-                        pass
-                else:
-                    return jsonify({"detail": ERROR_MESSAGES.ACCESS_PROHIBITED}), 401
-            else:
-                return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401
-        else:
-            return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401
+
+@app.post("/url/update")
+async def update_ollama_api_url(
+    form_data: UrlUpdateForm, user=Depends(get_current_user)
+):
+    if user and user.role == "admin":
+        app.state.OLLAMA_API_BASE_URL = form_data.url
+        return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
     else:
-        pass
+        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
 
-    r = None
+
+@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
+async def proxy(path: str, request: Request, user=Depends(get_current_user)):
+    target_url = f"{app.state.OLLAMA_API_BASE_URL}/{path}"
+
+    body = await request.body()
+    headers = dict(request.headers)
+
+    if user.role in ["user", "admin"]:
+        if path in ["pull", "delete", "push", "copy", "create"]:
+            if user.role != "admin":
+                raise HTTPException(
+                    status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED
+                )
+    else:
+        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
 
     headers.pop("Host", None)
     headers.pop("Authorization", None)
     headers.pop("Origin", None)
     headers.pop("Referer", None)
 
-    try:
-        # Make a request to the target server
-        r = requests.request(
-            method=request.method,
-            url=target_url,
-            data=data,
-            headers=headers,
-            stream=True,  # Enable streaming for server-sent events
-        )
-
-        r.raise_for_status()
-
-        # Proxy the target server's response to the client
-        def generate():
-            for chunk in r.iter_content(chunk_size=8192):
-                yield chunk
-
-        response = Response(generate(), status=r.status_code)
+    r = None
 
-        # Copy headers from the target server's response to the client's response
-        for key, value in r.headers.items():
-            response.headers[key] = value
+    def get_request():
+        nonlocal r
+        try:
+            r = requests.request(
+                method=request.method,
+                url=target_url,
+                data=body,
+                headers=headers,
+                stream=True,
+            )
+
+            r.raise_for_status()
+
+            return StreamingResponse(
+                r.iter_content(chunk_size=8192),
+                status_code=r.status_code,
+                headers=dict(r.headers),
+            )
+        except Exception as e:
+            raise e
 
-        return response
+    try:
+        return await run_in_threadpool(get_request)
     except Exception as e:
-        print(e)
         error_detail = "Ollama WebUI: Server Connection Error"
-        if r != None:
-            print(r.text)
-            res = r.json()
-            if "error" in res:
-                error_detail = f"Ollama: {res['error']}"
-            print(res)
-
-        return (
-            jsonify(
-                {
-                    "detail": error_detail,
-                    "message": str(e),
-                }
-            ),
-            400,
+        if r is not None:
+            try:
+                res = r.json()
+                if "error" in res:
+                    error_detail = f"Ollama: {res['error']}"
+            except:
+                error_detail = f"Ollama: {e}"
+
+        raise HTTPException(
+            status_code=r.status_code if r else 500,
+            detail=error_detail,
         )
-
-
-if __name__ == "__main__":
-    app.run(debug=True)
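
The core of this rewrite is the threadpool-plus-streaming pattern: the blocking requests call runs in a worker thread via run_in_threadpool, and its byte iterator is handed to StreamingResponse so Ollama's streamed output passes through unchanged. A stripped-down sketch of that pattern, with the upstream URL assumed and the auth/header handling omitted:

import requests
from fastapi import FastAPI, Request
from fastapi.concurrency import run_in_threadpool
from fastapi.responses import StreamingResponse

app = FastAPI()
UPSTREAM = "http://localhost:11434/api"  # assumed Ollama API base URL


@app.api_route("/{path:path}", methods=["GET", "POST"])
async def proxy(path: str, request: Request):
    body = await request.body()

    def get_request():
        # requests is a blocking client, so it runs in a worker thread
        r = requests.request(
            method=request.method,
            url=f"{UPSTREAM}/{path}",
            data=body,
            stream=True,  # keep the connection open for streamed responses
        )
        r.raise_for_status()
        return StreamingResponse(
            r.iter_content(chunk_size=8192),
            status_code=r.status_code,
            headers=dict(r.headers),
        )

    return await run_in_threadpool(get_request)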

+ 127 - 0
backend/apps/ollama/old_main.py

@@ -0,0 +1,127 @@
+from fastapi import FastAPI, Request, Response, HTTPException, Depends
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.responses import StreamingResponse
+
+import requests
+import json
+from pydantic import BaseModel
+
+from apps.web.models.users import Users
+from constants import ERROR_MESSAGES
+from utils.utils import decode_token, get_current_user
+from config import OLLAMA_API_BASE_URL, WEBUI_AUTH
+
+import aiohttp
+
+app = FastAPI()
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+app.state.OLLAMA_API_BASE_URL = OLLAMA_API_BASE_URL
+
+# TARGET_SERVER_URL = OLLAMA_API_BASE_URL
+
+
+@app.get("/url")
+async def get_ollama_api_url(user=Depends(get_current_user)):
+    if user and user.role == "admin":
+        return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
+    else:
+        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
+
+
+class UrlUpdateForm(BaseModel):
+    url: str
+
+
+@app.post("/url/update")
+async def update_ollama_api_url(
+    form_data: UrlUpdateForm, user=Depends(get_current_user)
+):
+    if user and user.role == "admin":
+        app.state.OLLAMA_API_BASE_URL = form_data.url
+        return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
+    else:
+        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
+
+
+# async def fetch_sse(method, target_url, body, headers):
+#     async with aiohttp.ClientSession() as session:
+#         try:
+#             async with session.request(
+#                 method, target_url, data=body, headers=headers
+#             ) as response:
+#                 print(response.status)
+#                 async for line in response.content:
+#                     yield line
+#         except Exception as e:
+#             print(e)
+#             error_detail = "Ollama WebUI: Server Connection Error"
+#             yield json.dumps({"error": error_detail, "message": str(e)}).encode()
+
+
+@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
+async def proxy(path: str, request: Request, user=Depends(get_current_user)):
+    target_url = f"{app.state.OLLAMA_API_BASE_URL}/{path}"
+    print(target_url)
+
+    body = await request.body()
+    headers = dict(request.headers)
+
+    if user.role in ["user", "admin"]:
+        if path in ["pull", "delete", "push", "copy", "create"]:
+            if user.role != "admin":
+                raise HTTPException(
+                    status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED
+                )
+    else:
+        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
+
+    headers.pop("Host", None)
+    headers.pop("Authorization", None)
+    headers.pop("Origin", None)
+    headers.pop("Referer", None)
+
+    session = aiohttp.ClientSession()
+    response = None
+    try:
+        response = await session.request(
+            request.method, target_url, data=body, headers=headers
+        )
+
+        print(response)
+        if not response.ok:
+            data = await response.json()
+            print(data)
+            response.raise_for_status()
+
+        async def generate():
+            async for line in response.content:
+                print(line)
+                yield line
+            await session.close()
+
+        return StreamingResponse(generate(), response.status)
+
+    except Exception as e:
+        print(e)
+        error_detail = "Ollama WebUI: Server Connection Error"
+
+        if response is not None:
+            try:
+                res = await response.json()
+                if "error" in res:
+                    error_detail = f"Ollama: {res['error']}"
+            except:
+                error_detail = f"Ollama: {e}"
+
+        await session.close()
+        raise HTTPException(
+            status_code=response.status if response else 500,
+            detail=error_detail,
+        )

+ 143 - 0
backend/apps/openai/main.py

@@ -0,0 +1,143 @@
+from fastapi import FastAPI, Request, Response, HTTPException, Depends
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.responses import StreamingResponse, JSONResponse
+
+import requests
+import json
+from pydantic import BaseModel
+
+from apps.web.models.users import Users
+from constants import ERROR_MESSAGES
+from utils.utils import decode_token, get_current_user
+from config import OPENAI_API_BASE_URL, OPENAI_API_KEY
+
+app = FastAPI()
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+app.state.OPENAI_API_BASE_URL = OPENAI_API_BASE_URL
+app.state.OPENAI_API_KEY = OPENAI_API_KEY
+
+
+class UrlUpdateForm(BaseModel):
+    url: str
+
+
+class KeyUpdateForm(BaseModel):
+    key: str
+
+
+@app.get("/url")
+async def get_openai_url(user=Depends(get_current_user)):
+    if user and user.role == "admin":
+        return {"OPENAI_API_BASE_URL": app.state.OPENAI_API_BASE_URL}
+    else:
+        raise HTTPException(status_code=401,
+                            detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
+
+
+@app.post("/url/update")
+async def update_openai_url(form_data: UrlUpdateForm,
+                            user=Depends(get_current_user)):
+    if user and user.role == "admin":
+        app.state.OPENAI_API_BASE_URL = form_data.url
+        return {"OPENAI_API_BASE_URL": app.state.OPENAI_API_BASE_URL}
+    else:
+        raise HTTPException(status_code=401,
+                            detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
+
+
+@app.get("/key")
+async def get_openai_key(user=Depends(get_current_user)):
+    if user and user.role == "admin":
+        return {"OPENAI_API_KEY": app.state.OPENAI_API_KEY}
+    else:
+        raise HTTPException(status_code=401,
+                            detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
+
+
+@app.post("/key/update")
+async def update_openai_key(form_data: KeyUpdateForm,
+                            user=Depends(get_current_user)):
+    if user and user.role == "admin":
+        app.state.OPENAI_API_KEY = form_data.key
+        return {"OPENAI_API_KEY": app.state.OPENAI_API_KEY}
+    else:
+        raise HTTPException(status_code=401,
+                            detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
+
+
+@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
+async def proxy(path: str, request: Request, user=Depends(get_current_user)):
+    target_url = f"{app.state.OPENAI_API_BASE_URL}/{path}"
+    print(target_url, app.state.OPENAI_API_KEY)
+
+    if user.role not in ["user", "admin"]:
+        raise HTTPException(status_code=401,
+                            detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
+    if app.state.OPENAI_API_KEY == "":
+        raise HTTPException(status_code=401,
+                            detail=ERROR_MESSAGES.API_KEY_NOT_FOUND)
+
+    body = await request.body()
+    # headers = dict(request.headers)
+    # print(headers)
+
+    headers = {}
+    headers["Authorization"] = f"Bearer {app.state.OPENAI_API_KEY}"
+    headers["Content-Type"] = "application/json"
+
+    try:
+        r = requests.request(
+            method=request.method,
+            url=target_url,
+            data=body,
+            headers=headers,
+            stream=True,
+        )
+
+        r.raise_for_status()
+
+        # Check if response is SSE
+        if "text/event-stream" in r.headers.get("Content-Type", ""):
+            return StreamingResponse(
+                r.iter_content(chunk_size=8192),
+                status_code=r.status_code,
+                headers=dict(r.headers),
+            )
+        else:
+            # For non-SSE, read the response and return it
+            # response_data = (
+            #     r.json()
+            #     if r.headers.get("Content-Type", "")
+            #     == "application/json"
+            #     else r.text
+            # )
+
+            response_data = r.json()
+
+            print(type(response_data))
+
+            if "openai" in app.state.OPENAI_API_BASE_URL and path == "models":
+                response_data["data"] = list(
+                    filter(lambda model: "gpt" in model["id"],
+                           response_data["data"]))
+
+            return response_data
+    except Exception as e:
+        print(e)
+        error_detail = "Ollama WebUI: Server Connection Error"
+        if r is not None:
+            try:
+                res = r.json()
+                if "error" in res:
+                    error_detail = f"External: {res['error']}"
+            except:
+                error_detail = f"External: {e}"
+
+        raise HTTPException(status_code=r.status_code, detail=error_detail)
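
One behavioral detail of the new OpenAI proxy: when the upstream URL contains "openai" and the requested path is models, the model list is filtered down to GPT models before being returned. A tiny sketch with made-up payload data:

# Illustrative payload only, not a real OpenAI response.
response_data = {
    "data": [
        {"id": "gpt-4"},
        {"id": "gpt-3.5-turbo"},
        {"id": "whisper-1"},
        {"id": "text-embedding-ada-002"},
    ]
}

# Mirrors the `"openai" in OPENAI_API_BASE_URL and path == "models"` branch:
response_data["data"] = list(
    filter(lambda model: "gpt" in model["id"], response_data["data"])
)

print([m["id"] for m in response_data["data"]])  # ['gpt-4', 'gpt-3.5-turbo']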

+ 14 - 4
backend/apps/web/main.py

@@ -1,7 +1,7 @@
 from fastapi import FastAPI, Depends
 from fastapi.routing import APIRoute
 from fastapi.middleware.cors import CORSMiddleware
-from apps.web.routers import auths, users, chats, modelfiles, utils
+from apps.web.routers import auths, users, chats, modelfiles, prompts, configs, utils
 from config import WEBUI_VERSION, WEBUI_AUTH
 
 app = FastAPI()
@@ -9,6 +9,7 @@ app = FastAPI()
 origins = ["*"]
 
 app.state.ENABLE_SIGNUP = True
+app.state.DEFAULT_MODELS = None
 
 app.add_middleware(
     CORSMiddleware,
@@ -19,13 +20,22 @@ app.add_middleware(
 )
 
 app.include_router(auths.router, prefix="/auths", tags=["auths"])
-
 app.include_router(users.router, prefix="/users", tags=["users"])
 app.include_router(chats.router, prefix="/chats", tags=["chats"])
-app.include_router(modelfiles.router, prefix="/modelfiles", tags=["modelfiles"])
+app.include_router(modelfiles.router,
+                   prefix="/modelfiles",
+                   tags=["modelfiles"])
+app.include_router(prompts.router, prefix="/prompts", tags=["prompts"])
+
+app.include_router(configs.router, prefix="/configs", tags=["configs"])
 app.include_router(utils.router, prefix="/utils", tags=["utils"])
 
 
 @app.get("/")
 async def get_status():
-    return {"status": True, "version": WEBUI_VERSION, "auth": WEBUI_AUTH}
+    return {
+        "status": True,
+        "version": WEBUI_VERSION,
+        "auth": WEBUI_AUTH,
+        "default_models": app.state.DEFAULT_MODELS,
+    }

+ 9 - 1
backend/apps/web/models/auths.py

@@ -4,7 +4,6 @@ import time
 import uuid
 from peewee import *
 
-
 from apps.web.models.users import UserModel, Users
 from utils.utils import (
     verify_password,
@@ -123,6 +122,15 @@ class AuthsTable:
         except:
             return False
 
+    def update_email_by_id(self, id: str, email: str) -> bool:
+        try:
+            query = Auth.update(email=email).where(Auth.id == id)
+            result = query.execute()
+
+            return True if result == 1 else False
+        except:
+            return False
+
     def delete_auth_by_id(self, id: str) -> bool:
         try:
             # Delete User

+ 18 - 21
backend/apps/web/models/chats.py

@@ -3,14 +3,12 @@ from typing import List, Union, Optional
 from peewee import *
 from playhouse.shortcuts import model_to_dict
 
-
 import json
 import uuid
 import time
 
 from apps.web.internal.db import DB
 
-
 ####################
 # Chat DB Schema
 ####################
@@ -62,23 +60,23 @@ class ChatTitleIdResponse(BaseModel):
 
 
 class ChatTable:
+
     def __init__(self, db):
         self.db = db
         db.create_tables([Chat])
 
-    def insert_new_chat(self, user_id: str, form_data: ChatForm) -> Optional[ChatModel]:
+    def insert_new_chat(self, user_id: str,
+                        form_data: ChatForm) -> Optional[ChatModel]:
         id = str(uuid.uuid4())
         chat = ChatModel(
             **{
                 "id": id,
                 "user_id": user_id,
-                "title": form_data.chat["title"]
-                if "title" in form_data.chat
-                else "New Chat",
+                "title": form_data.chat["title"] if "title" in
+                form_data.chat else "New Chat",
                 "chat": json.dumps(form_data.chat),
                 "timestamp": int(time.time()),
-            }
-        )
+            })
 
         result = Chat.create(**chat.model_dump())
         return chat if result else None
@@ -111,27 +109,25 @@ class ChatTable:
         except:
             return None
 
-    def get_chat_lists_by_user_id(
-        self, user_id: str, skip: int = 0, limit: int = 50
-    ) -> List[ChatModel]:
+    def get_chat_lists_by_user_id(self,
+                                  user_id: str,
+                                  skip: int = 0,
+                                  limit: int = 50) -> List[ChatModel]:
         return [
-            ChatModel(**model_to_dict(chat))
-            for chat in Chat.select()
-            .where(Chat.user_id == user_id)
-            .order_by(Chat.timestamp.desc())
+            ChatModel(**model_to_dict(chat)) for chat in Chat.select().where(
+                Chat.user_id == user_id).order_by(Chat.timestamp.desc())
             # .limit(limit)
             # .offset(skip)
         ]
 
     def get_all_chats_by_user_id(self, user_id: str) -> List[ChatModel]:
         return [
-            ChatModel(**model_to_dict(chat))
-            for chat in Chat.select()
-            .where(Chat.user_id == user_id)
-            .order_by(Chat.timestamp.desc())
+            ChatModel(**model_to_dict(chat)) for chat in Chat.select().where(
+                Chat.user_id == user_id).order_by(Chat.timestamp.desc())
         ]
 
-    def get_chat_by_id_and_user_id(self, id: str, user_id: str) -> Optional[ChatModel]:
+    def get_chat_by_id_and_user_id(self, id: str,
+                                   user_id: str) -> Optional[ChatModel]:
         try:
             chat = Chat.get(Chat.id == id, Chat.user_id == user_id)
             return ChatModel(**model_to_dict(chat))
@@ -146,7 +142,8 @@ class ChatTable:
 
     def delete_chat_by_id_and_user_id(self, id: str, user_id: str) -> bool:
         try:
-            query = Chat.delete().where((Chat.id == id) & (Chat.user_id == user_id))
+            query = Chat.delete().where((Chat.id == id)
+                                        & (Chat.user_id == user_id))
             query.execute()  # Remove the rows, return number of rows removed.
 
             return True

+ 14 - 13
backend/apps/web/models/modelfiles.py

@@ -12,7 +12,7 @@ from apps.web.internal.db import DB
 import json
 
 ####################
-# User DB Schema
+# Modelfile DB Schema
 ####################
 
 
@@ -58,13 +58,14 @@ class ModelfileResponse(BaseModel):
 
 
 class ModelfilesTable:
+
     def __init__(self, db):
         self.db = db
         self.db.create_tables([Modelfile])
 
     def insert_new_modelfile(
-        self, user_id: str, form_data: ModelfileForm
-    ) -> Optional[ModelfileModel]:
+            self, user_id: str,
+            form_data: ModelfileForm) -> Optional[ModelfileModel]:
         if "tagName" in form_data.modelfile:
             modelfile = ModelfileModel(
                 **{
@@ -72,8 +73,7 @@ class ModelfilesTable:
                     "tag_name": form_data.modelfile["tagName"],
                     "modelfile": json.dumps(form_data.modelfile),
                     "timestamp": int(time.time()),
-                }
-            )
+                })
 
             try:
                 result = Modelfile.create(**modelfile.model_dump())
@@ -87,28 +87,29 @@ class ModelfilesTable:
         else:
             return None
 
-    def get_modelfile_by_tag_name(self, tag_name: str) -> Optional[ModelfileModel]:
+    def get_modelfile_by_tag_name(self,
+                                  tag_name: str) -> Optional[ModelfileModel]:
         try:
             modelfile = Modelfile.get(Modelfile.tag_name == tag_name)
             return ModelfileModel(**model_to_dict(modelfile))
         except:
             return None
 
-    def get_modelfiles(self, skip: int = 0, limit: int = 50) -> List[ModelfileResponse]:
+    def get_modelfiles(self,
+                       skip: int = 0,
+                       limit: int = 50) -> List[ModelfileResponse]:
         return [
             ModelfileResponse(
                 **{
                     **model_to_dict(modelfile),
-                    "modelfile": json.loads(modelfile.modelfile),
-                }
-            )
-            for modelfile in Modelfile.select()
+                    "modelfile":
+                    json.loads(modelfile.modelfile),
+                }) for modelfile in Modelfile.select()
             # .limit(limit).offset(skip)
         ]
 
     def update_modelfile_by_tag_name(
-        self, tag_name: str, modelfile: dict
-    ) -> Optional[ModelfileModel]:
+            self, tag_name: str, modelfile: dict) -> Optional[ModelfileModel]:
         try:
             query = Modelfile.update(
                 modelfile=json.dumps(modelfile),

+ 115 - 0
backend/apps/web/models/prompts.py

@@ -0,0 +1,115 @@
+from pydantic import BaseModel
+from peewee import *
+from playhouse.shortcuts import model_to_dict
+from typing import List, Union, Optional
+import time
+
+from utils.utils import decode_token
+from utils.misc import get_gravatar_url
+
+from apps.web.internal.db import DB
+
+import json
+
+####################
+# Prompts DB Schema
+####################
+
+
+class Prompt(Model):
+    command = CharField(unique=True)
+    user_id = CharField()
+    title = CharField()
+    content = TextField()
+    timestamp = DateField()
+
+    class Meta:
+        database = DB
+
+
+class PromptModel(BaseModel):
+    command: str
+    user_id: str
+    title: str
+    content: str
+    timestamp: int  # timestamp in epoch
+
+
+####################
+# Forms
+####################
+
+
+class PromptForm(BaseModel):
+    command: str
+    title: str
+    content: str
+
+
+class PromptsTable:
+
+    def __init__(self, db):
+        self.db = db
+        self.db.create_tables([Prompt])
+
+    def insert_new_prompt(self, user_id: str,
+                          form_data: PromptForm) -> Optional[PromptModel]:
+        prompt = PromptModel(
+            **{
+                "user_id": user_id,
+                "command": form_data.command,
+                "title": form_data.title,
+                "content": form_data.content,
+                "timestamp": int(time.time()),
+            })
+
+        try:
+            result = Prompt.create(**prompt.model_dump())
+            if result:
+                return prompt
+            else:
+                return None
+        except:
+            return None
+
+    def get_prompt_by_command(self, command: str) -> Optional[PromptModel]:
+        try:
+            prompt = Prompt.get(Prompt.command == command)
+            return PromptModel(**model_to_dict(prompt))
+        except:
+            return None
+
+    def get_prompts(self) -> List[PromptModel]:
+        return [
+            PromptModel(**model_to_dict(prompt)) for prompt in Prompt.select()
+            # .limit(limit).offset(skip)
+        ]
+
+    def update_prompt_by_command(
+            self, command: str,
+            form_data: PromptForm) -> Optional[PromptModel]:
+        try:
+            query = Prompt.update(
+                title=form_data.title,
+                content=form_data.content,
+                timestamp=int(time.time()),
+            ).where(Prompt.command == command)
+
+            query.execute()
+
+            prompt = Prompt.get(Prompt.command == command)
+            return PromptModel(**model_to_dict(prompt))
+        except:
+            return None
+
+    def delete_prompt_by_command(self, command: str) -> bool:
+        try:
+            query = Prompt.delete().where((Prompt.command == command))
+            query.execute()  # Remove the rows, return number of rows removed.
+
+            return True
+        except:
+            return False
+
+
+Prompts = PromptsTable(DB)
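
For readers unfamiliar with peewee, a self-contained sketch of the new schema and command-based lookup, using an in-memory SQLite database and made-up data (the real table binds to the shared DB in apps/web/internal/db.py, and the commit stores the epoch timestamp in a DateField):

import time
from peewee import SqliteDatabase, Model, CharField, TextField, BigIntegerField

db = SqliteDatabase(":memory:")  # stand-in for apps.web.internal.db.DB


class Prompt(Model):
    command = CharField(unique=True)  # e.g. "/summarize", stored with its leading slash
    user_id = CharField()
    title = CharField()
    content = TextField()
    timestamp = BigIntegerField()     # epoch seconds

    class Meta:
        database = db


db.connect()
db.create_tables([Prompt])

Prompt.create(command="/summarize", user_id="u1", title="Summarize",
              content="Summarize the following text:", timestamp=int(time.time()))

# Lookup mirrors Prompts.get_prompt_by_command(); the create route performs this
# lookup first and rejects duplicate commands with COMMAND_TAKEN.
prompt = Prompt.get(Prompt.command == "/summarize")
print(prompt.title, prompt.content)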

+ 17 - 1
backend/apps/web/models/users.py

@@ -8,7 +8,6 @@ from utils.misc import get_gravatar_url
 from apps.web.internal.db import DB
 from apps.web.models.chats import Chats
 
-
 ####################
 # User DB Schema
 ####################
@@ -45,6 +44,13 @@ class UserRoleUpdateForm(BaseModel):
     role: str
 
 
+class UserUpdateForm(BaseModel):
+    name: str
+    email: str
+    profile_image_url: str
+    password: Optional[str] = None
+
+
 class UsersTable:
     def __init__(self, db):
         self.db = db
@@ -102,6 +108,16 @@ class UsersTable:
         except:
             return None
 
+    def update_user_by_id(self, id: str, updated: dict) -> Optional[UserModel]:
+        try:
+            query = User.update(**updated).where(User.id == id)
+            query.execute()
+
+            user = User.get(User.id == id)
+            return UserModel(**model_to_dict(user))
+        except:
+            return None
+
     def delete_user_by_id(self, id: str) -> bool:
         try:
             # Delete User Chats

+ 34 - 32
backend/apps/web/routers/auths.py

@@ -18,12 +18,10 @@ from apps.web.models.auths import (
 )
 from apps.web.models.users import Users
 
-
 from utils.utils import get_password_hash, get_current_user, create_token
-from utils.misc import get_gravatar_url
+from utils.misc import get_gravatar_url, validate_email_format
 from constants import ERROR_MESSAGES
 
-
 router = APIRouter()
 
 ############################
@@ -48,9 +46,8 @@ async def get_session_user(user=Depends(get_current_user)):
 
 
 @router.post("/update/password", response_model=bool)
-async def update_password(
-    form_data: UpdatePasswordForm, session_user=Depends(get_current_user)
-):
+async def update_password(form_data: UpdatePasswordForm,
+                          session_user=Depends(get_current_user)):
     if session_user:
         user = Auths.authenticate_user(session_user.email, form_data.password)
 
@@ -95,33 +92,38 @@ async def signin(form_data: SigninForm):
 @router.post("/signup", response_model=SigninResponse)
 async def signup(request: Request, form_data: SignupForm):
     if request.app.state.ENABLE_SIGNUP:
-        if not Users.get_user_by_email(form_data.email.lower()):
-            try:
-                role = "admin" if Users.get_num_users() == 0 else "pending"
-                hashed = get_password_hash(form_data.password)
-                user = Auths.insert_new_auth(
-                    form_data.email.lower(), hashed, form_data.name, role
-                )
-
-                if user:
-                    token = create_token(data={"email": user.email})
-                    # response.set_cookie(key='token', value=token, httponly=True)
-
-                    return {
-                        "token": token,
-                        "token_type": "Bearer",
-                        "id": user.id,
-                        "email": user.email,
-                        "name": user.name,
-                        "role": user.role,
-                        "profile_image_url": user.profile_image_url,
-                    }
-                else:
-                    raise HTTPException(500, detail=ERROR_MESSAGES.CREATE_USER_ERROR)
-            except Exception as err:
-                raise HTTPException(500, detail=ERROR_MESSAGES.DEFAULT(err))
+        if validate_email_format(form_data.email.lower()):
+            if not Users.get_user_by_email(form_data.email.lower()):
+                try:
+                    role = "admin" if Users.get_num_users() == 0 else "pending"
+                    hashed = get_password_hash(form_data.password)
+                    user = Auths.insert_new_auth(form_data.email.lower(),
+                                                 hashed, form_data.name, role)
+
+                    if user:
+                        token = create_token(data={"email": user.email})
+                        # response.set_cookie(key='token', value=token, httponly=True)
+
+                        return {
+                            "token": token,
+                            "token_type": "Bearer",
+                            "id": user.id,
+                            "email": user.email,
+                            "name": user.name,
+                            "role": user.role,
+                            "profile_image_url": user.profile_image_url,
+                        }
+                    else:
+                        raise HTTPException(
+                            500, detail=ERROR_MESSAGES.CREATE_USER_ERROR)
+                except Exception as err:
+                    raise HTTPException(500,
+                                        detail=ERROR_MESSAGES.DEFAULT(err))
+            else:
+                raise HTTPException(400, detail=ERROR_MESSAGES.EMAIL_TAKEN)
         else:
-            raise HTTPException(400, detail=ERROR_MESSAGES.EMAIL_TAKEN)
+            raise HTTPException(400,
+                                detail=ERROR_MESSAGES.INVALID_EMAIL_FORMAT)
     else:
         raise HTTPException(400, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
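
Signup now rejects malformed addresses via validate_email_format from utils/misc.py; the helper's new lines are not among this commit's visible hunks, so the following is only an assumed, minimal shape of such a check:

import re

EMAIL_RE = re.compile(r"^[^@\s]+@[^@\s]+\.[^@\s]+$")  # deliberately simple, hypothetical


def validate_email_format(email: str) -> bool:
    # Returns True when the string looks like a plausible e-mail address.
    return bool(EMAIL_RE.match(email))


assert validate_email_format("user@example.com")
assert not validate_email_format("not-an-email")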
 

+ 16 - 14
backend/apps/web/routers/chats.py

@@ -17,8 +17,7 @@ from apps.web.models.chats import (
 )
 
 from utils.utils import (
-    bearer_scheme,
-)
+    bearer_scheme, )
 from constants import ERROR_MESSAGES
 
 router = APIRouter()
@@ -30,8 +29,7 @@ router = APIRouter()
 
 @router.get("/", response_model=List[ChatTitleIdResponse])
 async def get_user_chats(
-    user=Depends(get_current_user), skip: int = 0, limit: int = 50
-):
+        user=Depends(get_current_user), skip: int = 0, limit: int = 50):
     return Chats.get_chat_lists_by_user_id(user.id, skip, limit)
 
 
@@ -43,8 +41,9 @@ async def get_user_chats(
 @router.get("/all", response_model=List[ChatResponse])
 async def get_all_user_chats(user=Depends(get_current_user)):
     return [
-        ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
-        for chat in Chats.get_all_chats_by_user_id(user.id)
+        ChatResponse(**{
+            **chat.model_dump(), "chat": json.loads(chat.chat)
+        }) for chat in Chats.get_all_chats_by_user_id(user.id)
     ]
 
 
@@ -69,11 +68,12 @@ async def get_chat_by_id(id: str, user=Depends(get_current_user)):
     chat = Chats.get_chat_by_id_and_user_id(id, user.id)
 
     if chat:
-        return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
+        return ChatResponse(**{
+            **chat.model_dump(), "chat": json.loads(chat.chat)
+        })
     else:
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND
-        )
+        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
+                            detail=ERROR_MESSAGES.NOT_FOUND)
 
 
 ############################
@@ -82,15 +82,17 @@ async def get_chat_by_id(id: str, user=Depends(get_current_user)):
 
 
 @router.post("/{id}", response_model=Optional[ChatResponse])
-async def update_chat_by_id(
-    id: str, form_data: ChatForm, user=Depends(get_current_user)
-):
+async def update_chat_by_id(id: str,
+                            form_data: ChatForm,
+                            user=Depends(get_current_user)):
     chat = Chats.get_chat_by_id_and_user_id(id, user.id)
     if chat:
         updated_chat = {**json.loads(chat.chat), **form_data.chat}
 
         chat = Chats.update_chat_by_id(id, updated_chat)
-        return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
+        return ChatResponse(**{
+            **chat.model_dump(), "chat": json.loads(chat.chat)
+        })
     else:
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,

+ 40 - 0
backend/apps/web/routers/configs.py

@@ -0,0 +1,40 @@
+from fastapi import Response, Request
+from fastapi import Depends, FastAPI, HTTPException, status
+from datetime import datetime, timedelta
+from typing import List, Union
+
+from fastapi import APIRouter
+from pydantic import BaseModel
+import time
+import uuid
+
+from apps.web.models.users import Users
+
+from utils.utils import get_password_hash, get_current_user, create_token
+from utils.misc import get_gravatar_url, validate_email_format
+from constants import ERROR_MESSAGES
+
+router = APIRouter()
+
+
+class SetDefaultModelsForm(BaseModel):
+    models: str
+
+
+############################
+# SetDefaultModels
+############################
+
+
+@router.post("/default/models", response_model=str)
+async def set_global_default_models(request: Request,
+                                    form_data: SetDefaultModelsForm,
+                                    user=Depends(get_current_user)):
+    if user.role == "admin":
+        request.app.state.DEFAULT_MODELS = form_data.models
+        return request.app.state.DEFAULT_MODELS
+    else:
+        raise HTTPException(
+            status_code=status.HTTP_403_FORBIDDEN,
+            detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
+        )

+ 21 - 22
backend/apps/web/routers/modelfiles.py

@@ -24,7 +24,9 @@ router = APIRouter()
 
 
 @router.get("/", response_model=List[ModelfileResponse])
-async def get_modelfiles(skip: int = 0, limit: int = 50, user=Depends(get_current_user)):
+async def get_modelfiles(skip: int = 0,
+                         limit: int = 50,
+                         user=Depends(get_current_user)):
     return Modelfiles.get_modelfiles(skip, limit)
 
 
@@ -34,9 +36,8 @@ async def get_modelfiles(skip: int = 0, limit: int = 50, user=Depends(get_curren
 
 
 @router.post("/create", response_model=Optional[ModelfileResponse])
-async def create_new_modelfile(
-    form_data: ModelfileForm, user=Depends(get_current_user)
-):
+async def create_new_modelfile(form_data: ModelfileForm,
+                               user=Depends(get_current_user)):
     if user.role != "admin":
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,
@@ -49,9 +50,9 @@ async def create_new_modelfile(
         return ModelfileResponse(
             **{
                 **modelfile.model_dump(),
-                "modelfile": json.loads(modelfile.modelfile),
-            }
-        )
+                "modelfile":
+                json.loads(modelfile.modelfile),
+            })
     else:
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,
@@ -65,16 +66,17 @@ async def create_new_modelfile(
 
 
 @router.post("/", response_model=Optional[ModelfileResponse])
-async def get_modelfile_by_tag_name(form_data: ModelfileTagNameForm, user=Depends(get_current_user)):
+async def get_modelfile_by_tag_name(form_data: ModelfileTagNameForm,
+                                    user=Depends(get_current_user)):
     modelfile = Modelfiles.get_modelfile_by_tag_name(form_data.tag_name)
 
     if modelfile:
         return ModelfileResponse(
             **{
                 **modelfile.model_dump(),
-                "modelfile": json.loads(modelfile.modelfile),
-            }
-        )
+                "modelfile":
+                json.loads(modelfile.modelfile),
+            })
     else:
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,
@@ -88,9 +90,8 @@ async def get_modelfile_by_tag_name(form_data: ModelfileTagNameForm, user=Depend
 
 
 @router.post("/update", response_model=Optional[ModelfileResponse])
-async def update_modelfile_by_tag_name(
-    form_data: ModelfileUpdateForm, user=Depends(get_current_user)
-):
+async def update_modelfile_by_tag_name(form_data: ModelfileUpdateForm,
+                                       user=Depends(get_current_user)):
     if user.role != "admin":
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,
@@ -104,15 +105,14 @@ async def update_modelfile_by_tag_name(
         }
 
         modelfile = Modelfiles.update_modelfile_by_tag_name(
-            form_data.tag_name, updated_modelfile
-        )
+            form_data.tag_name, updated_modelfile)
 
         return ModelfileResponse(
             **{
                 **modelfile.model_dump(),
-                "modelfile": json.loads(modelfile.modelfile),
-            }
-        )
+                "modelfile":
+                json.loads(modelfile.modelfile),
+            })
     else:
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,
@@ -126,9 +126,8 @@ async def update_modelfile_by_tag_name(
 
 
 @router.delete("/delete", response_model=bool)
-async def delete_modelfile_by_tag_name(
-    form_data: ModelfileTagNameForm, user=Depends(get_current_user)
-):
+async def delete_modelfile_by_tag_name(form_data: ModelfileTagNameForm,
+                                       user=Depends(get_current_user)):
     if user.role != "admin":
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,

+ 116 - 0
backend/apps/web/routers/prompts.py

@@ -0,0 +1,116 @@
+from fastapi import Depends, FastAPI, HTTPException, status
+from datetime import datetime, timedelta
+from typing import List, Union, Optional
+
+from fastapi import APIRouter
+from pydantic import BaseModel
+import json
+
+from apps.web.models.prompts import Prompts, PromptForm, PromptModel
+
+from utils.utils import get_current_user
+from constants import ERROR_MESSAGES
+
+router = APIRouter()
+
+############################
+# GetPrompts
+############################
+
+
+@router.get("/", response_model=List[PromptModel])
+async def get_prompts(user=Depends(get_current_user)):
+    return Prompts.get_prompts()
+
+
+############################
+# CreateNewPrompt
+############################
+
+
+@router.post("/create", response_model=Optional[PromptModel])
+async def create_new_prompt(form_data: PromptForm,
+                            user=Depends(get_current_user)):
+    if user.role != "admin":
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
+        )
+
+    prompt = Prompts.get_prompt_by_command(form_data.command)
+    if prompt == None:
+        prompt = Prompts.insert_new_prompt(user.id, form_data)
+
+        if prompt:
+            return prompt
+        else:
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED,
+                detail=ERROR_MESSAGES.DEFAULT(),
+            )
+    else:
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail=ERROR_MESSAGES.COMMAND_TAKEN,
+        )
+
+
+############################
+# GetPromptByCommand
+############################
+
+
+@router.get("/{command}", response_model=Optional[PromptModel])
+async def get_prompt_by_command(command: str, user=Depends(get_current_user)):
+    prompt = Prompts.get_prompt_by_command(f"/{command}")
+
+    if prompt:
+        return prompt
+    else:
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail=ERROR_MESSAGES.NOT_FOUND,
+        )
+
+
+############################
+# UpdatePromptByCommand
+############################
+
+
+@router.post("/{command}/update", response_model=Optional[PromptModel])
+async def update_prompt_by_command(command: str,
+                                   form_data: PromptForm,
+                                   user=Depends(get_current_user)):
+    if user.role != "admin":
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
+        )
+
+    prompt = Prompts.update_prompt_by_command(f"/{command}", form_data)
+    if prompt:
+        return prompt
+    else:
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
+        )
+
+
+############################
+# DeletePromptByCommand
+############################
+
+
+@router.delete("/{command}/delete", response_model=bool)
+async def delete_prompt_by_command(command: str,
+                                   user=Depends(get_current_user)):
+    if user.role != "admin":
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
+        )
+
+    result = Prompts.delete_prompt_by_command(f"/{command}")
+    return result
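
From a client's perspective the new prompt routes are plain authenticated HTTP; a hedged sketch follows. The base URL, API mount prefix, and token are assumptions for a local setup, and the command appears to be stored with its leading slash while the URL path omits it (the router re-adds it).

import requests

BASE_URL = "http://localhost:8080/api/v1/prompts"  # assumed host and mount prefix
TOKEN = "<your WebUI JWT>"   # placeholder; create/update/delete need an admin token
headers = {"Authorization": f"Bearer {TOKEN}"}

# Create a prompt; note the leading slash in `command`.
requests.post(
    f"{BASE_URL}/create",
    headers=headers,
    json={"command": "/summarize", "title": "Summarize",
          "content": "Summarize the following text:"},
)

# Fetch it back; the path segment has no slash, the router looks up f"/{command}".
r = requests.get(f"{BASE_URL}/summarize", headers=headers)
print(r.json())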

+ 58 - 4
backend/apps/web/routers/users.py

@@ -8,14 +8,12 @@ from pydantic import BaseModel
 import time
 import uuid
 
-from apps.web.models.users import UserModel, UserRoleUpdateForm, Users
+from apps.web.models.users import UserModel, UserUpdateForm, UserRoleUpdateForm, Users
 from apps.web.models.auths import Auths
 
-
-from utils.utils import get_current_user
+from utils.utils import get_current_user, get_password_hash
 from constants import ERROR_MESSAGES
 
-
 router = APIRouter()
 
 ############################
@@ -57,6 +55,62 @@ async def update_user_role(
         )
 
 
+############################
+# UpdateUserById
+############################
+
+
+@router.post("/{user_id}/update", response_model=Optional[UserModel])
+async def update_user_by_id(
+    user_id: str, form_data: UserUpdateForm, session_user=Depends(get_current_user)
+):
+    if session_user.role != "admin":
+        raise HTTPException(
+            status_code=status.HTTP_403_FORBIDDEN,
+            detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
+        )
+
+    user = Users.get_user_by_id(user_id)
+
+    if user:
+        if form_data.email.lower() != user.email:
+            email_user = Users.get_user_by_email(form_data.email.lower())
+            if email_user:
+                raise HTTPException(
+                    status_code=status.HTTP_400_BAD_REQUEST,
+                    detail=ERROR_MESSAGES.EMAIL_TAKEN,
+                )
+
+        if form_data.password:
+            hashed = get_password_hash(form_data.password)
+            Auths.update_user_password_by_id(user_id, hashed)
+
+        Auths.update_email_by_id(user_id, form_data.email.lower())
+        updated_user = Users.update_user_by_id(
+            user_id,
+            {
+                "name": form_data.name,
+                "email": form_data.email.lower(),
+                "profile_image_url": form_data.profile_image_url,
+            },
+        )
+
+        if updated_user:
+            return updated_user
+        else:
+            raise HTTPException(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                detail=ERROR_MESSAGES.DEFAULT(),
+            )
+
+    else:
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail=ERROR_MESSAGES.USER_NOT_FOUND,
+        )
+
+
 ############################
 # DeleteUserById
 ############################
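
A similarly hedged sketch of calling the new UpdateUserById route outside the admin UI; the base URL, user id, and token are placeholders, and the `/api/v1/users` prefix is assumed from how the frontend helper builds its URL.

```python
import requests

BASE = "http://localhost:8080/api/v1/users"        # assumed mount path
HEADERS = {"Authorization": "Bearer ADMIN_TOKEN"}  # placeholder admin JWT

payload = {
    "name": "Jane Doe",
    "email": "jane@example.com",
    "profile_image_url": "/user.png",
    "password": "new-secret",  # omit or leave empty to keep the current password
}

res = requests.post(f"{BASE}/USER_ID/update", json=payload, headers=HEADERS)
if res.ok:
    print("updated:", res.json())
else:
    # e.g. 400 with EMAIL_TAKEN if another account already uses the address
    print("failed:", res.status_code, res.json().get("detail"))
```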

+ 9 - 8
backend/apps/web/routers/utils.py

@@ -9,12 +9,10 @@ import os
 import aiohttp
 import json
 
-
 from utils.misc import calculate_sha256
 
 from config import OLLAMA_API_BASE_URL
 
-
 router = APIRouter()
 
 
@@ -42,7 +40,10 @@ def parse_huggingface_url(hf_url):
         return None
 
 
-async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024):
+async def download_file_stream(url,
+                               file_path,
+                               file_name,
+                               chunk_size=1024 * 1024):
     done = False
 
     if os.path.exists(file_path):
@@ -56,7 +57,8 @@ async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024
 
     async with aiohttp.ClientSession(timeout=timeout) as session:
         async with session.get(url, headers=headers) as response:
-            total_size = int(response.headers.get("content-length", 0)) + current_size
+            total_size = int(response.headers.get("content-length",
+                                                  0)) + current_size
 
             with open(file_path, "ab+") as file:
                 async for data in response.content.iter_chunked(chunk_size):
@@ -89,9 +91,7 @@ async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024
 
 
 @router.get("/download")
-async def download(
-    url: str,
-):
+async def download(url: str):
     # url = "https://huggingface.co/TheBloke/stablelm-zephyr-3b-GGUF/resolve/main/stablelm-zephyr-3b.Q2_K.gguf"
     file_name = parse_huggingface_url(url)
 
@@ -161,4 +161,5 @@ async def upload(file: UploadFile = File(...)):
             res = {"error": str(e)}
             yield f"data: {json.dumps(res)}\n\n"
 
-    return StreamingResponse(file_write_stream(), media_type="text/event-stream")
+    return StreamingResponse(file_write_stream(),
+                             media_type="text/event-stream")
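
`download_file_stream` resumes interrupted pulls by appending to the partially written file; a synchronous sketch of the same idea with `requests`, assuming the remote server honours HTTP Range requests (URL and filename below are placeholders):

```python
import os
import requests


def download_resumable(url: str, file_path: str, chunk_size: int = 1024 * 1024):
    """Append to an existing partial file instead of restarting from zero."""
    current_size = os.path.getsize(file_path) if os.path.exists(file_path) else 0
    headers = {"Range": f"bytes={current_size}-"} if current_size else {}

    with requests.get(url, headers=headers, stream=True, timeout=600) as res:
        total_size = int(res.headers.get("content-length", 0)) + current_size
        with open(file_path, "ab") as f:
            for chunk in res.iter_content(chunk_size=chunk_size):
                f.write(chunk)
                current_size += len(chunk)
                if total_size:
                    print(f"\r{current_size * 100 // total_size}%", end="")
    print()


# download_resumable("https://example.com/model.gguf", "model.gguf")
```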

+ 13 - 4
backend/config.py

@@ -19,19 +19,28 @@ ENV = os.environ.get("ENV", "dev")
 # OLLAMA_API_BASE_URL
 ####################################
 
-OLLAMA_API_BASE_URL = os.environ.get(
-    "OLLAMA_API_BASE_URL", "http://localhost:11434/api"
-)
+OLLAMA_API_BASE_URL = os.environ.get("OLLAMA_API_BASE_URL",
+                                     "http://localhost:11434/api")
 
 if ENV == "prod":
     if OLLAMA_API_BASE_URL == "/ollama/api":
         OLLAMA_API_BASE_URL = "http://host.docker.internal:11434/api"
 
+####################################
+# OPENAI_API
+####################################
+
+OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")
+OPENAI_API_BASE_URL = os.environ.get("OPENAI_API_BASE_URL", "")
+
+if OPENAI_API_BASE_URL == "":
+    OPENAI_API_BASE_URL = "https://api.openai.com/v1"
+
 ####################################
 # WEBUI_VERSION
 ####################################
 
-WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.42")
+WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.50")
 
 ####################################
 # WEBUI_AUTH (Required for security)
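
A small sketch of how the new OpenAI settings resolve at import time: an unset or empty value falls back to the public endpoint, mirroring the logic above (the simulated environment here is only an example).

```python
import os

# Simulate an unset variable; in production this comes from the environment.
os.environ.pop("OPENAI_API_BASE_URL", None)

OPENAI_API_BASE_URL = os.environ.get("OPENAI_API_BASE_URL", "")
if OPENAI_API_BASE_URL == "":
    OPENAI_API_BASE_URL = "https://api.openai.com/v1"

print(OPENAI_API_BASE_URL)  # -> https://api.openai.com/v1 unless overridden
```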

+ 5 - 3
backend/constants.py

@@ -6,6 +6,7 @@ class MESSAGES(str, Enum):
 
 
 class ERROR_MESSAGES(str, Enum):
+
     def __str__(self) -> str:
         return super().__str__()
 
@@ -17,19 +18,20 @@ class ERROR_MESSAGES(str, Enum):
     USERNAME_TAKEN = (
         "Uh-oh! This username is already registered. Please choose another username."
     )
+    COMMAND_TAKEN = "Uh-oh! This command is already registered. Please choose another command string."
     INVALID_TOKEN = (
         "Your session has expired or the token is invalid. Please sign in again."
     )
     INVALID_CRED = "The email or password provided is incorrect. Please check for typos and try logging in again."
+    INVALID_EMAIL_FORMAT = "The email format you entered is invalid. Please double-check and make sure you're using a valid email address (e.g., yourname@example.com)."
     INVALID_PASSWORD = (
         "The password provided is incorrect. Please check for typos and try again."
     )
     UNAUTHORIZED = "401 Unauthorized"
     ACCESS_PROHIBITED = "You do not have permission to access this resource. Please contact your administrator for assistance."
     ACTION_PROHIBITED = (
-        "The requested action has been restricted as a security measure."
-    )
+        "The requested action has been restricted as a security measure.")
     NOT_FOUND = "We could not find what you're looking for :/"
     USER_NOT_FOUND = "We could not find what you're looking for :/"
-
+    API_KEY_NOT_FOUND = "Oops! It looks like there's a hiccup. The API key is missing. Please make sure to provide a valid API key to access this feature."
     MALICIOUS = "Unusual activities detected, please try again in a few minutes."

+ 9 - 2
backend/main.py

@@ -6,12 +6,15 @@ from fastapi.middleware.cors import CORSMiddleware
 from starlette.exceptions import HTTPException as StarletteHTTPException
 
 from apps.ollama.main import app as ollama_app
+from apps.openai.main import app as openai_app
+
 from apps.web.main import app as webui_app
 
 import time
 
 
 class SPAStaticFiles(StaticFiles):
+
     async def get_response(self, path: str, scope):
         try:
             return await super().get_response(path, scope)
@@ -46,5 +49,9 @@ async def check_url(request: Request, call_next):
 
 
 app.mount("/api/v1", webui_app)
-app.mount("/ollama/api", WSGIMiddleware(ollama_app))
-app.mount("/", SPAStaticFiles(directory="../build", html=True), name="spa-static-files")
+app.mount("/ollama/api", ollama_app)
+app.mount("/openai/api", openai_app)
+
+app.mount("/",
+          SPAStaticFiles(directory="../build", html=True),
+          name="spa-static-files")
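
The Ollama app is now mounted as a native FastAPI sub-application rather than through `WSGIMiddleware`, alongside a new OpenAI proxy app. A self-contained sketch of the same mounting pattern (the route and payload are illustrative stand-ins, not the project's real proxy routes):

```python
from fastapi import FastAPI

ollama_app = FastAPI()
openai_app = FastAPI()


@ollama_app.get("/version")
def version():
    # Stand-in for the real proxied endpoint.
    return {"version": "0.0.0"}


app = FastAPI()
app.mount("/ollama/api", ollama_app)
app.mount("/openai/api", openai_app)

# uvicorn sketch:app --port 8080
# GET http://localhost:8080/ollama/api/version -> {"version": "0.0.0"}
```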

+ 3 - 1
backend/start.sh

@@ -1 +1,3 @@
-uvicorn main:app --host 0.0.0.0 --port 8080 --forwarded-allow-ips '*'
+#!/usr/bin/env bash
+
+uvicorn main:app --host 0.0.0.0 --port 8080 --forwarded-allow-ips '*'

+ 7 - 0
backend/utils/misc.py

@@ -1,4 +1,5 @@
 import hashlib
+import re
 
 
 def get_gravatar_url(email):
@@ -21,3 +22,9 @@ def calculate_sha256(file):
     for chunk in iter(lambda: file.read(8192), b""):
         sha256.update(chunk)
     return sha256.hexdigest()
+
+
+def validate_email_format(email: str) -> bool:
+    if not re.match(r"[^@]+@[^@]+\.[^@]+", email):
+        return False
+    return True
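
A few usage examples for the new helper; the check is reproduced (slightly condensed) so the snippet runs on its own.

```python
import re


def validate_email_format(email: str) -> bool:
    # Lightweight check only: something@something.tld, not full RFC validation.
    return bool(re.match(r"[^@]+@[^@]+\.[^@]+", email))


assert validate_email_format("yourname@example.com") is True
assert validate_email_format("not-an-email") is False
assert validate_email_format("missing@tld") is False
```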

+ 4 - 1
backend/utils/utils.py

@@ -8,9 +8,12 @@ from passlib.context import CryptContext
 from datetime import datetime, timedelta
 import requests
 import jwt
-
+import logging
 import config
 
+logging.getLogger("passlib").setLevel(logging.ERROR)
+
+
 JWT_SECRET_KEY = config.WEBUI_JWT_SECRET_KEY
 ALGORITHM = "HS256"
 

BIN
bun.lockb


BIN
demo.gif


+ 1 - 1
docker-compose.data.yaml

@@ -3,4 +3,4 @@ version: '3.8'
 services:
   ollama:
     volumes:
-      - ${OLLAMA_DATA_DIR-./ollama-data}:/root/.ollama
+      - ${OLLAMA_DATA_DIR-./ollama-data}:/root/.ollama

+ 1 - 1
docker-compose.yaml

@@ -25,7 +25,7 @@ services:
     ports:
       - ${OLLAMA_WEBUI_PORT-3000}:8080
     environment:
-      - "OLLAMA_API_BASE_URL=http://ollama:11434/api"
+      - 'OLLAMA_API_BASE_URL=http://ollama:11434/api'
     extra_hosts:
       - host.docker.internal:host-gateway
     restart: unless-stopped

+ 3 - 9
example.env

@@ -1,12 +1,6 @@
-# If you're serving both the frontend and backend (Recommended)
-# Set the public API base URL for seamless communication
-PUBLIC_API_BASE_URL='/ollama/api'
-
-# If you're serving only the frontend (Not recommended and not fully supported)
-# Comment above and Uncomment below
-# You can use the default value or specify a custom path, e.g., '/api'
-# PUBLIC_API_BASE_URL='http://{location.hostname}:11434/api'
-
 # Ollama URL for the backend to connect
 # The path '/ollama/api' will be redirected to the specified backend URL
 OLLAMA_API_BASE_URL='http://localhost:11434/api'
+
+OPENAI_API_BASE_URL=''
+OPENAI_API_KEY=''

File diff suppressed because it is too large
+ 164 - 551
package-lock.json


+ 11 - 9
package.json

@@ -8,22 +8,23 @@
 		"preview": "vite preview",
 		"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
 		"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
-		"lint": "npm run eslint",
+		"lint": "npm run lint:frontend ; npm run lint:types ; npm run lint:backend",
+		"lint:frontend": "eslint . --fix",
 		"lint:types": "npm run check",
-		"fmt": "npm run prettier:svelte && npm run prettier",
-		"eslint": "npx -p eslint@8 -- eslint .",
-		"prettier:svelte": "npx -p prettier@2 -- prettier --plugin-search-dir . --write .",
-		"prettier": "npx -p prettier@2 -- prettier --write '**/*.{js,css,md,html,json}'"
+		"lint:backend": "pylint backend/",
+		"format": "prettier --plugin-search-dir --write '**/*.{js,ts,svelte,css,md,html,json}'",
+		"format:backend": "yapf --recursive backend -p -i"
 	},
 	"devDependencies": {
 		"@sveltejs/adapter-auto": "^2.0.0",
 		"@sveltejs/adapter-static": "^2.0.3",
-		"@sveltejs/kit": "^1.20.4",
+		"@sveltejs/kit": "^1.30.0",
 		"@tailwindcss/typography": "^0.5.10",
-		"@typescript-eslint/eslint-plugin": "^6.0.0",
-		"@typescript-eslint/parser": "^6.0.0",
+		"@types/bun": "latest",
+		"@typescript-eslint/eslint-plugin": "^6.17.0",
+		"@typescript-eslint/parser": "^6.17.0",
 		"autoprefixer": "^10.4.16",
-		"eslint": "^8.28.0",
+		"eslint": "^8.56.0",
 		"eslint-config-prettier": "^8.5.0",
 		"eslint-plugin-svelte": "^2.30.0",
 		"postcss": "^8.4.31",
@@ -40,6 +41,7 @@
 	"dependencies": {
 		"@sveltejs/adapter-node": "^1.3.1",
 		"async": "^3.2.5",
+		"dayjs": "^1.11.10",
 		"file-saver": "^2.0.5",
 		"highlight.js": "^11.9.0",
 		"idb": "^7.1.1",

+ 7 - 0
run-ollama-docker.sh

@@ -0,0 +1,7 @@
+docker rm -f ollama || true
+docker pull ollama/ollama
+# CPU Only
+docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
+# GPU Support
+# docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
+docker image prune -f

+ 31 - 0
src/lib/apis/configs/index.ts

@@ -0,0 +1,31 @@
+import { WEBUI_API_BASE_URL } from '$lib/constants';
+
+export const setDefaultModels = async (token: string, models: string) => {
+	let error = null;
+
+	const res = await fetch(`${WEBUI_API_BASE_URL}/configs/default/models`, {
+		method: 'POST',
+		headers: {
+			'Content-Type': 'application/json',
+			Authorization: `Bearer ${token}`
+		},
+		body: JSON.stringify({
+			models: models
+		})
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			console.log(err);
+			error = err.detail;
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
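
The same call made outside the UI, assuming the configs router is reachable under `/api/v1/configs` (matching the frontend helper) and that `ADMIN_TOKEN` is a valid admin JWT; the model names are placeholders.

```python
import requests

res = requests.post(
    "http://localhost:8080/api/v1/configs/default/models",  # assumed mount path
    headers={"Authorization": "Bearer ADMIN_TOKEN"},         # placeholder admin JWT
    json={"models": "llama2:latest,mistral:latest"},         # comma-separated, as the UI sends
)
res.raise_for_status()
print(res.json())
```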

+ 107 - 38
src/lib/apis/ollama/index.ts

@@ -1,12 +1,76 @@
 import { OLLAMA_API_BASE_URL } from '$lib/constants';
 
-export const getOllamaVersion = async (
-	base_url: string = OLLAMA_API_BASE_URL,
-	token: string = ''
-) => {
+export const getOllamaAPIUrl = async (token: string = '') => {
 	let error = null;
 
-	const res = await fetch(`${base_url}/version`, {
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/url`, {
+		method: 'GET',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			...(token && { authorization: `Bearer ${token}` })
+		}
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			console.log(err);
+			if ('detail' in err) {
+				error = err.detail;
+			} else {
+				error = 'Server connection failed';
+			}
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res.OLLAMA_API_BASE_URL;
+};
+
+export const updateOllamaAPIUrl = async (token: string = '', url: string) => {
+	let error = null;
+
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/url/update`, {
+		method: 'POST',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			...(token && { authorization: `Bearer ${token}` })
+		},
+		body: JSON.stringify({
+			url: url
+		})
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			console.log(err);
+			if ('detail' in err) {
+				error = err.detail;
+			} else {
+				error = 'Server connection failed';
+			}
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res.OLLAMA_API_BASE_URL;
+};
+
+export const getOllamaVersion = async (token: string = '') => {
+	let error = null;
+
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/version`, {
 		method: 'GET',
 		headers: {
 			Accept: 'application/json',
@@ -35,13 +99,10 @@ export const getOllamaVersion = async (
 	return res?.version ?? '';
 };
 
-export const getOllamaModels = async (
-	base_url: string = OLLAMA_API_BASE_URL,
-	token: string = ''
-) => {
+export const getOllamaModels = async (token: string = '') => {
 	let error = null;
 
-	const res = await fetch(`${base_url}/tags`, {
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/tags`, {
 		method: 'GET',
 		headers: {
 			Accept: 'application/json',
@@ -67,18 +128,15 @@ export const getOllamaModels = async (
 		throw error;
 	}
 
-	return res?.models ?? [];
+	return (res?.models ?? []).sort((a, b) => {
+		return a.name.localeCompare(b.name);
+	});
 };
 
-export const generateTitle = async (
-	base_url: string = OLLAMA_API_BASE_URL,
-	token: string = '',
-	model: string,
-	prompt: string
-) => {
+export const generateTitle = async (token: string = '', model: string, prompt: string) => {
 	let error = null;
 
-	const res = await fetch(`${base_url}/generate`, {
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/generate`, {
 		method: 'POST',
 		headers: {
 			'Content-Type': 'text/event-stream',
@@ -86,7 +144,7 @@ export const generateTitle = async (
 		},
 		body: JSON.stringify({
 			model: model,
-			prompt: `Generate a brief 3-5 word title for this question, excluding the term 'title.' Then, please reply with only the title: ${prompt}`,
+			prompt: `Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title': ${prompt}`,
 			stream: false
 		})
 	})
@@ -109,14 +167,10 @@ export const generateTitle = async (
 	return res?.response ?? 'New Chat';
 };
 
-export const generateChatCompletion = async (
-	base_url: string = OLLAMA_API_BASE_URL,
-	token: string = '',
-	body: object
-) => {
+export const generateChatCompletion = async (token: string = '', body: object) => {
 	let error = null;
 
-	const res = await fetch(`${base_url}/chat`, {
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/chat`, {
 		method: 'POST',
 		headers: {
 			'Content-Type': 'text/event-stream',
@@ -135,15 +189,10 @@ export const generateChatCompletion = async (
 	return res;
 };
 
-export const createModel = async (
-	base_url: string = OLLAMA_API_BASE_URL,
-	token: string,
-	tagName: string,
-	content: string
-) => {
+export const createModel = async (token: string, tagName: string, content: string) => {
 	let error = null;
 
-	const res = await fetch(`${base_url}/create`, {
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/create`, {
 		method: 'POST',
 		headers: {
 			'Content-Type': 'text/event-stream',
@@ -165,14 +214,10 @@ export const createModel = async (
 	return res;
 };
 
-export const deleteModel = async (
-	base_url: string = OLLAMA_API_BASE_URL,
-	token: string,
-	tagName: string
-) => {
+export const deleteModel = async (token: string, tagName: string) => {
 	let error = null;
 
-	const res = await fetch(`${base_url}/delete`, {
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/delete`, {
 		method: 'DELETE',
 		headers: {
 			'Content-Type': 'text/event-stream',
@@ -202,3 +247,27 @@ export const deleteModel = async (
 
 	return res;
 };
+
+export const pullModel = async (token: string, tagName: string) => {
+	let error = null;
+
+	const res = await fetch(`${OLLAMA_API_BASE_URL}/pull`, {
+		method: 'POST',
+		headers: {
+			'Content-Type': 'text/event-stream',
+			Authorization: `Bearer ${token}`
+		},
+		body: JSON.stringify({
+			name: tagName
+		})
+	}).catch((err) => {
+		error = err;
+		return null;
+	});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
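
These helpers now always talk to the backend proxy at `/ollama/api`, so the same endpoints can be scripted directly. A sketch of streaming a `pull` with `requests`, assuming the proxy relays Ollama's newline-delimited JSON progress events unchanged (token and model name are placeholders):

```python
import json
import requests

res = requests.post(
    "http://localhost:8080/ollama/api/pull",
    headers={"Authorization": "Bearer USER_TOKEN"},  # placeholder JWT
    json={"name": "llama2:latest"},
    stream=True,
)

for line in res.iter_lines():
    if not line:
        continue
    data = json.loads(line)
    if data.get("total") and data.get("completed"):
        print(f"{data['completed'] * 100 // data['total']}% of {data['total']} bytes")
    elif data.get("status"):
        print(data["status"])
```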

+ 201 - 3
src/lib/apis/openai/index.ts

@@ -1,4 +1,176 @@
-export const getOpenAIModels = async (
+import { OPENAI_API_BASE_URL } from '$lib/constants';
+
+export const getOpenAIUrl = async (token: string = '') => {
+	let error = null;
+
+	const res = await fetch(`${OPENAI_API_BASE_URL}/url`, {
+		method: 'GET',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			...(token && { authorization: `Bearer ${token}` })
+		}
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			console.log(err);
+			if ('detail' in err) {
+				error = err.detail;
+			} else {
+				error = 'Server connection failed';
+			}
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res.OPENAI_API_BASE_URL;
+};
+
+export const updateOpenAIUrl = async (token: string = '', url: string) => {
+	let error = null;
+
+	const res = await fetch(`${OPENAI_API_BASE_URL}/url/update`, {
+		method: 'POST',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			...(token && { authorization: `Bearer ${token}` })
+		},
+		body: JSON.stringify({
+			url: url
+		})
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			console.log(err);
+			if ('detail' in err) {
+				error = err.detail;
+			} else {
+				error = 'Server connection failed';
+			}
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res.OPENAI_API_BASE_URL;
+};
+
+export const getOpenAIKey = async (token: string = '') => {
+	let error = null;
+
+	const res = await fetch(`${OPENAI_API_BASE_URL}/key`, {
+		method: 'GET',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			...(token && { authorization: `Bearer ${token}` })
+		}
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			console.log(err);
+			if ('detail' in err) {
+				error = err.detail;
+			} else {
+				error = 'Server connection failed';
+			}
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res.OPENAI_API_KEY;
+};
+
+export const updateOpenAIKey = async (token: string = '', key: string) => {
+	let error = null;
+
+	const res = await fetch(`${OPENAI_API_BASE_URL}/key/update`, {
+		method: 'POST',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			...(token && { authorization: `Bearer ${token}` })
+		},
+		body: JSON.stringify({
+			key: key
+		})
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			console.log(err);
+			if ('detail' in err) {
+				error = err.detail;
+			} else {
+				error = 'Server connection failed';
+			}
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res.OPENAI_API_KEY;
+};
+
+export const getOpenAIModels = async (token: string = '') => {
+	let error = null;
+
+	const res = await fetch(`${OPENAI_API_BASE_URL}/models`, {
+		method: 'GET',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			...(token && { authorization: `Bearer ${token}` })
+		}
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			console.log(err);
+			error = `OpenAI: ${err?.error?.message ?? 'Network Problem'}`;
+			return [];
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	const models = Array.isArray(res) ? res : res?.data ?? null;
+
+	return models
+		? models
+				.map((model) => ({ name: model.id, external: true }))
+				.sort((a, b) => {
+					return a.name.localeCompare(b.name);
+				})
+		: models;
+};
+
+export const getOpenAIModelsDirect = async (
 	base_url: string = 'https://api.openai.com/v1',
 	api_key: string = ''
 ) => {
@@ -25,9 +197,35 @@ export const getOpenAIModels = async (
 		throw error;
 	}
 
-	let models = Array.isArray(res) ? res : res?.data ?? null;
+	const models = Array.isArray(res) ? res : res?.data ?? null;
 
 	return models
 		.map((model) => ({ name: model.id, external: true }))
-		.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true));
+		.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true))
+		.sort((a, b) => {
+			return a.name.localeCompare(b.name);
+		});
+};
+
+export const generateOpenAIChatCompletion = async (token: string = '', body: object) => {
+	let error = null;
+
+	const res = await fetch(`${OPENAI_API_BASE_URL}/chat/completions`, {
+		method: 'POST',
+		headers: {
+			Authorization: `Bearer ${token}`,
+			'Content-Type': 'application/json'
+		},
+		body: JSON.stringify(body)
+	}).catch((err) => {
+		console.log(err);
+		error = err;
+		return null;
+	});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
 };
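
And the matching proxy call for chat completions, assuming the backend forwards the body unchanged to the configured OpenAI-compatible endpoint and that a `gpt-3.5-turbo`-style model is available there (model name and token are placeholders):

```python
import requests

res = requests.post(
    "http://localhost:8080/openai/api/chat/completions",
    headers={"Authorization": "Bearer USER_TOKEN"},  # placeholder JWT
    json={
        "model": "gpt-3.5-turbo",
        "messages": [{"role": "user", "content": "Say hello in five words."}],
        "stream": False,
    },
)
res.raise_for_status()
print(res.json()["choices"][0]["message"]["content"])
```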

+ 178 - 0
src/lib/apis/prompts/index.ts

@@ -0,0 +1,178 @@
+import { WEBUI_API_BASE_URL } from '$lib/constants';
+
+export const createNewPrompt = async (
+	token: string,
+	command: string,
+	title: string,
+	content: string
+) => {
+	let error = null;
+
+	const res = await fetch(`${WEBUI_API_BASE_URL}/prompts/create`, {
+		method: 'POST',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			authorization: `Bearer ${token}`
+		},
+		body: JSON.stringify({
+			command: `/${command}`,
+			title: title,
+			content: content
+		})
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			error = err.detail;
+			console.log(err);
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
+
+export const getPrompts = async (token: string = '') => {
+	let error = null;
+
+	const res = await fetch(`${WEBUI_API_BASE_URL}/prompts/`, {
+		method: 'GET',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			authorization: `Bearer ${token}`
+		}
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.then((json) => {
+			return json;
+		})
+		.catch((err) => {
+			error = err.detail;
+			console.log(err);
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
+
+export const getPromptByCommand = async (token: string, command: string) => {
+	let error = null;
+
+	const res = await fetch(`${WEBUI_API_BASE_URL}/prompts/${command}`, {
+		method: 'GET',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			authorization: `Bearer ${token}`
+		}
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.then((json) => {
+			return json;
+		})
+		.catch((err) => {
+			error = err.detail;
+
+			console.log(err);
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
+
+export const updatePromptByCommand = async (
+	token: string,
+	command: string,
+	title: string,
+	content: string
+) => {
+	let error = null;
+
+	const res = await fetch(`${WEBUI_API_BASE_URL}/prompts/${command}/update`, {
+		method: 'POST',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			authorization: `Bearer ${token}`
+		},
+		body: JSON.stringify({
+			command: `/${command}`,
+			title: title,
+			content: content
+		})
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.then((json) => {
+			return json;
+		})
+		.catch((err) => {
+			error = err.detail;
+
+			console.log(err);
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
+
+export const deletePromptByCommand = async (token: string, command: string) => {
+	let error = null;
+
+	command = command.charAt(0) === '/' ? command.slice(1) : command;
+
+	const res = await fetch(`${WEBUI_API_BASE_URL}/prompts/${command}/delete`, {
+		method: 'DELETE',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			authorization: `Bearer ${token}`
+		}
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.then((json) => {
+			return json;
+		})
+		.catch((err) => {
+			error = err.detail;
+
+			console.log(err);
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};

+ 40 - 0
src/lib/apis/users/index.ts

@@ -84,3 +84,43 @@ export const deleteUserById = async (token: string, userId: string) => {
 
 	return res;
 };
+
+type UserUpdateForm = {
+	profile_image_url: string;
+	email: string;
+	name: string;
+	password: string;
+};
+
+export const updateUserById = async (token: string, userId: string, user: UserUpdateForm) => {
+	let error = null;
+
+	const res = await fetch(`${WEBUI_API_BASE_URL}/users/${userId}/update`, {
+		method: 'POST',
+		headers: {
+			'Content-Type': 'application/json',
+			Authorization: `Bearer ${token}`
+		},
+		body: JSON.stringify({
+			profile_image_url: user.profile_image_url,
+			email: user.email,
+			name: user.name,
+			password: user.password !== '' ? user.password : undefined
+		})
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			console.log(err);
+			error = err.detail;
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};

+ 172 - 0
src/lib/components/admin/EditUserModal.svelte

@@ -0,0 +1,172 @@
+<script lang="ts">
+	import toast from 'svelte-french-toast';
+	import dayjs from 'dayjs';
+	import { createEventDispatcher } from 'svelte';
+	import { onMount } from 'svelte';
+
+	import { updateUserById } from '$lib/apis/users';
+	import Modal from '../common/Modal.svelte';
+
+	const dispatch = createEventDispatcher();
+
+	export let show = false;
+	export let selectedUser;
+	export let sessionUser;
+
+	let _user = {
+		profile_image_url: '',
+		name: '',
+		email: '',
+		password: ''
+	};
+
+	const submitHandler = async () => {
+		const res = await updateUserById(localStorage.token, selectedUser.id, _user).catch((error) => {
+			toast.error(error);
+		});
+
+		if (res) {
+			dispatch('save');
+			show = false;
+		}
+	};
+
+	onMount(() => {
+		if (selectedUser) {
+			_user = selectedUser;
+			_user.password = '';
+		}
+	});
+</script>
+
+<Modal size="sm" bind:show>
+	<div>
+		<div class=" flex justify-between dark:text-gray-300 px-5 py-4">
+			<div class=" text-lg font-medium self-center">Edit User</div>
+			<button
+				class="self-center"
+				on:click={() => {
+					show = false;
+				}}
+			>
+				<svg
+					xmlns="http://www.w3.org/2000/svg"
+					viewBox="0 0 20 20"
+					fill="currentColor"
+					class="w-5 h-5"
+				>
+					<path
+						d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
+					/>
+				</svg>
+			</button>
+		</div>
+		<hr class=" dark:border-gray-800" />
+
+		<div class="flex flex-col md:flex-row w-full p-5 md:space-x-4 dark:text-gray-200">
+			<div class=" flex flex-col w-full sm:flex-row sm:justify-center sm:space-x-6">
+				<form
+					class="flex flex-col w-full"
+					on:submit|preventDefault={() => {
+						submitHandler();
+					}}
+				>
+					<div class=" flex items-center rounded-md py-2 px-4 w-full">
+						<div class=" self-center mr-5">
+							<img
+								src={selectedUser.profile_image_url}
+								class=" max-w-[55px] object-cover rounded-full"
+								alt="User profile"
+							/>
+						</div>
+
+						<div>
+							<div class=" self-center capitalize font-semibold">{selectedUser.name}</div>
+
+							<div class="text-xs text-gray-500">
+								Created at {dayjs(selectedUser.timestamp * 1000).format('MMMM DD, YYYY')}
+							</div>
+						</div>
+					</div>
+
+					<hr class=" dark:border-gray-800 my-3 w-full" />
+
+					<div class=" flex flex-col space-y-1.5">
+						<div class="flex flex-col w-full">
+							<div class=" mb-1 text-xs text-gray-500">Email</div>
+
+							<div class="flex-1">
+								<input
+									class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 disabled:text-gray-500 dark:disabled:text-gray-500 outline-none"
+									type="email"
+									bind:value={_user.email}
+									autocomplete="off"
+									required
+									disabled={_user.id == sessionUser.id}
+								/>
+							</div>
+						</div>
+
+						<div class="flex flex-col w-full">
+							<div class=" mb-1 text-xs text-gray-500">Name</div>
+
+							<div class="flex-1">
+								<input
+									class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
+									type="text"
+									bind:value={_user.name}
+									autocomplete="off"
+									required
+								/>
+							</div>
+						</div>
+
+						<div class="flex flex-col w-full">
+							<div class=" mb-1 text-xs text-gray-500">New Password</div>
+
+							<div class="flex-1">
+								<input
+									class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
+									type="password"
+									bind:value={_user.password}
+									autocomplete="new-password"
+								/>
+							</div>
+						</div>
+					</div>
+
+					<div class="flex justify-end pt-3 text-sm font-medium">
+						<button
+							class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
+							type="submit"
+						>
+							Save
+						</button>
+					</div>
+				</form>
+			</div>
+		</div>
+	</div>
+</Modal>
+
+<style>
+	input::-webkit-outer-spin-button,
+	input::-webkit-inner-spin-button {
+		/* display: none; <- Crashes Chrome on hover */
+		-webkit-appearance: none;
+		margin: 0; /* <-- Apparently some margins are still there even though it's hidden */
+	}
+
+	.tabs::-webkit-scrollbar {
+		display: none; /* for Chrome, Safari and Opera */
+	}
+
+	.tabs {
+		-ms-overflow-style: none; /* IE and Edge */
+		scrollbar-width: none; /* Firefox */
+	}
+
+	input[type='number'] {
+		-moz-appearance: textfield; /* Firefox */
+	}
+</style>

+ 1 - 1
src/lib/components/chat/MessageInput.svelte

@@ -298,7 +298,7 @@
 							id="chat-textarea"
 							class=" dark:bg-gray-800 dark:text-gray-100 outline-none w-full py-3 px-2 {fileUploadEnabled
 								? ''
-								: ' pl-4'} rounded-xl resize-none"
+								: ' pl-4'} rounded-xl resize-none h-[48px]"
 							placeholder={speechRecognitionListening ? 'Listening...' : 'Send a message'}
 							bind:value={prompt}
 							on:keypress={(e) => {

+ 56 - 172
src/lib/components/chat/MessageInput/PromptCommands.svelte

@@ -1,158 +1,13 @@
 <script lang="ts">
+	import { prompts } from '$lib/stores';
 	import { findWordIndices } from '$lib/utils';
 	import { tick } from 'svelte';
 
 	export let prompt = '';
-
 	let selectedCommandIdx = 0;
-
-	let promptCommands = [
-		{
-			command: '/article',
-			title: 'Article Generator',
-			content: `Write an article about [topic]
-
-include relevant statistics (add the links of the sources you use) and consider diverse perspectives. Write it in a [X_tone] and mention the source links in the end.`
-		},
-		{
-			command: '/backlink',
-
-			title: 'Backlink Outreach Email',
-			content: `Write a link-exchange outreach email on behalf of [your name] from [your_company] to ask for a backlink from their [website_url] to [your website url].`
-		},
-		{
-			command: '/faq',
-
-			title: 'FAQ Generator',
-			content: `Create a list of [10] frequently asked questions about [keyword] and provide answers for each one of them considering the SERP and rich result guidelines.`
-		},
-		{
-			command: '/headline',
-
-			title: 'Headline Generator',
-			content: `Generate 10 attention-grabbing headlines for an article about [your topic]`
-		},
-		{
-			command: '/product',
-
-			title: 'Product Description',
-			content: `Craft an irresistible product description that highlights the benefits of [your product]`
-		},
-		{
-			command: '/seo',
-
-			title: 'SEO Content Brief',
-			content: `Create a SEO content brief for [keyword].`
-		},
-		{
-			command: '/seo-ideas',
-
-			title: 'SEO Keyword Ideas',
-			content: `Generate a list of 20 keyword ideas on [topic].
-
-Cluster this list of keywords according to funnel stages whether they are top of the funnel, middle of the funnel or bottom of the funnel keywords.`
-		},
-		{
-			command: '/summary',
-
-			title: 'Short Summary',
-			content: `Write a summary in 50 words that summarizes [topic or keyword].`
-		},
-		{
-			command: '/email-subject',
-
-			title: 'Email Subject Line',
-			content: `Develop [5] subject lines for a cold email offering your [product or service] to a potential client.`
-		},
-		{
-			command: '/facebook-ads',
-
-			title: 'Facebook Ads',
-			content: `Create 3 variations of effective ad copy to promote [product] for [audience].
-
-Make sure they are [persuasive/playful/emotional] and mention these benefits:
-
-[Benefit 1]
-
-[Benefit 2]
-
-[Benefit 3]
-
-Finish with a call to action saying [CTA].
-
-Add 3 emojis to it.`
-		},
-		{
-			command: '/google-ads',
-
-			title: 'Google Ads',
-			content: `Create 10 google ads (a headline and a description) for [product description] targeting the keyword [keyword].
-
-The headline of the ad needs to be under 30 characters. The description needs to be under 90 characters. Format the output as a table.`
-		},
-		{
-			command: '/insta-caption',
-
-			title: 'Instagram Caption',
-			content: `Write 5 variations of Instagram captions for [product].
-
-Use friendly, human-like language that appeals to [target audience].
-
-Emphasize the unique qualities of [product],
-
-use ample emojis, and don't sound too promotional.`
-		},
-		{
-			command: '/linkedin-post',
-
-			title: 'LinkedIn Post',
-			content: `Create a narrative Linkedin post using immersive writing about [topic].
-
-Details:
-
-[give details in bullet point format]
-
-Use a mix of short and long sentences. Make it punchy and dramatic.`
-		},
-		{
-			command: '/youtube-desc',
-
-			title: 'YouTube Video',
-			content: `Write a 100-word YouTube video description that compels [audience]
-
-to watch a video on [topic]
-
-and mentions the following keywords
-
-[keyword 1]
-
-[keyword 2]
-
-[keyword 3].`
-		},
-		{
-			command: '/seo-meta',
-
-			title: 'SEO Meta',
-			content: `Suggest a meta description for the content above, make it user-friendly and with a call to action, include the keyword [keyword].`
-		},
-		{
-			command: '/eli5',
-
-			title: 'ELI5',
-			content: `You are an expert teacher with the ability to explain complex topics in simpler terms. Explain the concept of [topic] in simple terms, so that my [grade level/subject] class can understand [this concept/specific example]?`
-		},
-		{
-			command: '/emoji-translate',
-
-			title: 'Emoji Translation',
-			content: `You are an emoji expert. Using only emojis, translate the following text to emojis. [insert numbered sentences].`
-		}
-	];
-
 	let filteredPromptCommands = [];
 
-	$: filteredPromptCommands = promptCommands
+	$: filteredPromptCommands = $prompts
 		.filter((p) => p.command.includes(prompt))
 		.sort((a, b) => a.title.localeCompare(b.title));
 
@@ -195,32 +50,61 @@ and mentions the following keywords
 	<div class="md:px-2 mb-3 text-left w-full">
 		<div class="flex w-full rounded-lg border border-gray-100 dark:border-gray-700">
 			<div class=" bg-gray-100 dark:bg-gray-700 w-10 rounded-l-lg text-center">
-				<div class=" text-lg font-medium mt-2">/</div>
+				<div class=" text-lg font-semibold mt-2">/</div>
 			</div>
-			<div class=" max-h-60 overflow-y-auto bg-white w-full p-2 rounded-r-lg space-y-0.5">
-				{#each filteredPromptCommands as command, commandIdx}
-					<button
-						class=" px-3 py-1.5 rounded-lg w-full text-left {commandIdx === selectedCommandIdx
-							? ' bg-gray-100 selected-command-option-button'
-							: ''}"
-						type="button"
-						on:click={() => {
-							confirmCommand(command);
-						}}
-						on:mousemove={() => {
-							selectedCommandIdx = commandIdx;
-						}}
-						on:focus={() => {}}
-					>
-						<div class=" font-medium text-black">
-							{command.command}
-						</div>
-
-						<div class=" text-xs text-gray-600">
-							{command.title}
-						</div>
-					</button>
-				{/each}
+
+			<div class="max-h-60 flex flex-col w-full rounded-r-lg">
+				<div class=" overflow-y-auto bg-white p-2 rounded-tr-lg space-y-0.5">
+					{#each filteredPromptCommands as command, commandIdx}
+						<button
+							class=" px-3 py-1.5 rounded-lg w-full text-left {commandIdx === selectedCommandIdx
+								? ' bg-gray-100 selected-command-option-button'
+								: ''}"
+							type="button"
+							on:click={() => {
+								confirmCommand(command);
+							}}
+							on:mousemove={() => {
+								selectedCommandIdx = commandIdx;
+							}}
+							on:focus={() => {}}
+						>
+							<div class=" font-medium text-black">
+								{command.command}
+							</div>
+
+							<div class=" text-xs text-gray-600">
+								{command.title}
+							</div>
+						</button>
+					{/each}
+				</div>
+
+				<div
+					class=" px-2 pb-1 text-xs text-gray-600 bg-white rounded-br-lg flex items-center space-x-1"
+				>
+					<div>
+						<svg
+							xmlns="http://www.w3.org/2000/svg"
+							fill="none"
+							viewBox="0 0 24 24"
+							stroke-width="1.5"
+							stroke="currentColor"
+							class="w-3 h-3"
+						>
+							<path
+								stroke-linecap="round"
+								stroke-linejoin="round"
+								d="m11.25 11.25.041-.02a.75.75 0 0 1 1.063.852l-.708 2.836a.75.75 0 0 0 1.063.853l.041-.021M21 12a9 9 0 1 1-18 0 9 9 0 0 1 18 0Zm-9-3.75h.008v.008H12V8.25Z"
+							/>
+						</svg>
+					</div>
+
+					<div class="line-clamp-1">
+						Tip: Update multiple variable slots consecutively by pressing the tab key in the chat
+						input after each replacement.
+					</div>
+				</div>
 			</div>
 		</div>
 	</div>

+ 4 - 2
src/lib/components/chat/MessageInput/Suggestions.svelte

@@ -7,7 +7,7 @@
 	{#each suggestionPrompts as prompt, promptIdx}
 		<div class="{promptIdx > 1 ? 'hidden sm:inline-flex' : ''} basis-full sm:basis-1/2 p-[5px]">
 			<button
-				class=" flex-1 flex justify-between w-full px-4 py-2.5 bg-white hover:bg-gray-50 dark:bg-gray-800 dark:hover:bg-gray-700 outline outline-1 outline-gray-200 dark:outline-gray-600 rounded-lg transition group"
+				class=" flex-1 flex justify-between w-full h-full px-4 py-2.5 bg-white hover:bg-gray-50 dark:bg-gray-800 dark:hover:bg-gray-700 outline outline-1 outline-gray-200 dark:outline-gray-600 rounded-lg transition group"
 				on:click={() => {
 					submitPrompt(prompt.content);
 				}}
@@ -17,7 +17,9 @@
 						<div class="text-sm font-medium dark:text-gray-300">{prompt.title[0]}</div>
 						<div class="text-sm text-gray-500">{prompt.title[1]}</div>
 					{:else}
-						<div class=" self-center text-sm font-medium dark:text-gray-300">{prompt.content}</div>
+						<div class=" self-center text-sm font-medium dark:text-gray-300 line-clamp-2">
+							{prompt.content}
+						</div>
 					{/if}
 				</div>
 

+ 1 - 1
src/lib/components/chat/Messages/Placeholder.svelte

@@ -27,7 +27,7 @@
 					>
 						{#if model in modelfiles}
 							<img
-								src={modelfiles[model]?.imageUrl}
+								src={modelfiles[model]?.imageUrl ?? '/ollama-dark.png'}
 								alt="modelfile"
 								class=" w-20 mb-2 rounded-full {models.length > 1
 									? ' border-[5px] border-white dark:border-gray-800'

+ 15 - 2
src/lib/components/chat/ModelSelector.svelte

@@ -1,11 +1,13 @@
 <script lang="ts">
-	import { models, showSettings, settings } from '$lib/stores';
+	import { setDefaultModels } from '$lib/apis/configs';
+	import { models, showSettings, settings, user } from '$lib/stores';
+	import { onMount, tick } from 'svelte';
 	import toast from 'svelte-french-toast';
 
 	export let selectedModels = [''];
 	export let disabled = false;
 
-	const saveDefaultModel = () => {
+	const saveDefaultModel = async () => {
 		const hasEmptyModel = selectedModels.filter((it) => it === '');
 		if (hasEmptyModel.length) {
 			toast.error('Choose a model before saving...');
@@ -13,8 +15,19 @@
 		}
 		settings.set({ ...$settings, models: selectedModels });
 		localStorage.setItem('settings', JSON.stringify($settings));
+
+		if ($user.role === 'admin') {
+			console.log('setting default models globally');
+			await setDefaultModels(localStorage.token, selectedModels.join(','));
+		}
 		toast.success('Default model updated');
 	};
+
+	$: if (selectedModels.length > 0 && $models.length > 0) {
+		selectedModels = selectedModels.map((model) =>
+			$models.map((m) => m.name).includes(model) ? model : ''
+		);
+	}
 </script>
 
 <div class="flex flex-col my-2">

+ 310 - 296
src/lib/components/chat/SettingsModal.svelte

@@ -8,19 +8,30 @@
 	import { splitStream, getGravatarURL } from '$lib/utils';
 	import queue from 'async/queue';
 
-	import { getOllamaVersion } from '$lib/apis/ollama';
-	import { createNewChat, deleteAllChats, getAllChats, getChatList } from '$lib/apis/chats';
 	import {
-		WEB_UI_VERSION,
-		OLLAMA_API_BASE_URL,
-		WEBUI_API_BASE_URL,
-		WEBUI_BASE_URL
-	} from '$lib/constants';
+		getOllamaVersion,
+		getOllamaModels,
+		getOllamaAPIUrl,
+		updateOllamaAPIUrl,
+		pullModel,
+		createModel,
+		deleteModel
+	} from '$lib/apis/ollama';
+	import { createNewChat, deleteAllChats, getAllChats, getChatList } from '$lib/apis/chats';
+	import { WEB_UI_VERSION, WEBUI_API_BASE_URL } from '$lib/constants';
 
 	import Advanced from './Settings/Advanced.svelte';
 	import Modal from '../common/Modal.svelte';
 	import { updateUserPassword } from '$lib/apis/auths';
 	import { goto } from '$app/navigation';
+	import Page from '../../../routes/(app)/+page.svelte';
+	import {
+		getOpenAIKey,
+		getOpenAIModels,
+		getOpenAIUrl,
+		updateOpenAIKey,
+		updateOpenAIUrl
+	} from '$lib/apis/openai';
 
 	export let show = false;
 
@@ -34,7 +45,7 @@
 	let selectedTab = 'general';
 
 	// General
-	let API_BASE_URL = OLLAMA_API_BASE_URL;
+	let API_BASE_URL = '';
 	let themes = ['dark', 'light', 'rose-pine dark', 'rose-pine-dawn light'];
 	let theme = 'dark';
 	let notificationEnabled = false;
@@ -77,17 +88,21 @@
 
 	let deleteModelTag = '';
 
+	// External
+
+	let OPENAI_API_KEY = '';
+	let OPENAI_API_BASE_URL = '';
+
 	// Addons
 	let titleAutoGenerate = true;
 	let speechAutoSend = false;
 	let responseAutoCopy = false;
 
 	let gravatarEmail = '';
-	let OPENAI_API_KEY = '';
-	let OPENAI_API_BASE_URL = '';
+	let titleAutoGenerateModel = '';
 
 	// Chats
-
+	let saveChatHistory = true;
 	let importFiles;
 	let showDeleteConfirm = false;
 
@@ -139,22 +154,23 @@
 	// About
 	let ollamaVersion = '';
 
-	const checkOllamaConnection = async () => {
-		if (API_BASE_URL === '') {
-			API_BASE_URL = OLLAMA_API_BASE_URL;
-		}
-		const _models = await getModels(API_BASE_URL, 'ollama');
+	const updateOllamaAPIUrlHandler = async () => {
+		API_BASE_URL = await updateOllamaAPIUrl(localStorage.token, API_BASE_URL);
+		const _models = await getModels('ollama');
 
 		if (_models.length > 0) {
 			toast.success('Server connection verified');
 			await models.set(_models);
-
-			saveSettings({
-				API_BASE_URL: API_BASE_URL
-			});
 		}
 	};
 
+	const updateOpenAIHandler = async () => {
+		OPENAI_API_BASE_URL = await updateOpenAIUrl(localStorage.token, OPENAI_API_BASE_URL);
+		OPENAI_API_KEY = await updateOpenAIKey(localStorage.token, OPENAI_API_KEY);
+
+		await models.set(await getModels());
+	};
+
 	const toggleTheme = async () => {
 		if (theme === 'dark') {
 			theme = 'light';
@@ -223,60 +239,44 @@
 		}
 	};
 
-	const toggleAuthHeader = async () => {
-		authEnabled = !authEnabled;
+	const toggleSaveChatHistory = async () => {
+		saveChatHistory = !saveChatHistory;
+		console.log(saveChatHistory);
+
+		if (saveChatHistory === false) {
+			await goto('/');
+		}
+		saveSettings({ saveChatHistory: saveChatHistory });
 	};
 
 	const pullModelHandlerProcessor = async (opts:{modelName:string, callback: Function}) => {
-		console.log('Pull model name', opts.modelName);
-		
-		const res = await fetch(`${API_BASE_URL}/pull`, {
-			method: 'POST',
-			headers: {
-				'Content-Type': 'text/event-stream',
-				...($settings.authHeader && { Authorization: $settings.authHeader }),
-				...($user && { Authorization: `Bearer ${localStorage.token}` })
-			},
-			body: JSON.stringify({
-				name: opts.modelName
-			})
-		});
 
-		const reader = res.body
-			.pipeThrough(new TextDecoderStream())
-			.pipeThrough(splitStream('\n'))
-			.getReader();
-
-		while (true) {
-			const { value, done } = await reader.read();
-			if (done) break;
+		try {
+			const res = await pullModel(localStorage.token, opts.modelName);
+	
+			const reader = res.body
+				.pipeThrough(new TextDecoderStream())
+				.pipeThrough(splitStream('\n'))
+				.getReader();
 
-			try {
-				let lines = value.split('\n');
+			while (true) {
+				try {
+					const { value, done } = await reader.read();
+					if (done) break;
 
-				for (const line of lines) {
-					if (line !== '') {
-						console.log(line);
-						let data = JSON.parse(line);
-						console.log(data);
+					let lines = value.split('\n');
 
+					for (const line of lines) {
+						if (line !== '') {
+							let data = JSON.parse(line);
 						if (data.error) {
 							throw data.error;
 						}
-
 						if (data.detail) {
 							throw data.detail;
 						}
 						if (data.status) {
-							if (!data.digest) {
-								if (data.status === 'success') {
-									const notification = new Notification(`Ollama`, {
-										body: `Model '${opts.modelName}' has been successfully downloaded.`,
-										icon: '/favicon.png'
-									});
-								}
-							} else {
-								digest = data.digest;
+							if (data.digest) {
 								let downloadProgress = 0;
 								if (data.completed) {
 									downloadProgress = Math.round((data.completed / data.total) * 1000) / 10;
@@ -286,15 +286,21 @@
 								modelDownloadStatus[opts.modelName] = {pullProgress: downloadProgress, digest: data.digest};
 							}
 						}
+						}
 					}
+				} catch (error) {
+					console.log('Failed to read from data stream', error);
+					throw error;
 				}
-			} catch (error) {
-				console.error(error);
-				opts.callback({success:false, error, modelName: opts.modelName});
 			}
+			opts.callback({success: true, modelName: opts.modelName});
+		} catch (error) {
+			console.error(error);
+			opts.callback({success:false, error, modelName: opts.modelName});
 		}
-		opts.callback({success: true, modelName: opts.modelName});
-	};
+	};
 
 	const pullModelHandler = async() => {
 		if(modelDownloadStatus[modelTag]){
@@ -437,21 +443,11 @@
 		}
 
 		if (uploaded) {
-			const res = await fetch(`${$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/create`, {
-				method: 'POST',
-				headers: {
-					'Content-Type': 'text/event-stream',
-					...($settings.authHeader && { Authorization: $settings.authHeader }),
-					...($user && { Authorization: `Bearer ${localStorage.token}` })
-				},
-				body: JSON.stringify({
-					name: `${name}:latest`,
-					modelfile: `FROM @${modelFileDigest}\n${modelFileContent}`
-				})
-			}).catch((err) => {
-				console.log(err);
-				return null;
-			});
+			const res = await createModel(
+				localStorage.token,
+				`${name}:latest`,
+				`FROM @${modelFileDigest}\n${modelFileContent}`
+			);
 
 			if (res && res.ok) {
 				const reader = res.body
@@ -517,124 +513,35 @@
 	};
 
 	const deleteModelHandler = async () => {
-		const res = await fetch(`${API_BASE_URL}/delete`, {
-			method: 'DELETE',
-			headers: {
-				'Content-Type': 'text/event-stream',
-				...($settings.authHeader && { Authorization: $settings.authHeader }),
-				...($user && { Authorization: `Bearer ${localStorage.token}` })
-			},
-			body: JSON.stringify({
-				name: deleteModelTag
-			})
+		const res = await deleteModel(localStorage.token, deleteModelTag).catch((error) => {
+			toast.error(error);
 		});
 
-		const reader = res.body
-			.pipeThrough(new TextDecoderStream())
-			.pipeThrough(splitStream('\n'))
-			.getReader();
-
-		while (true) {
-			const { value, done } = await reader.read();
-			if (done) break;
-
-			try {
-				let lines = value.split('\n');
-
-				for (const line of lines) {
-					if (line !== '' && line !== 'null') {
-						console.log(line);
-						let data = JSON.parse(line);
-						console.log(data);
-
-						if (data.error) {
-							throw data.error;
-						}
-						if (data.detail) {
-							throw data.detail;
-						}
-
-						if (data.status) {
-						}
-					} else {
-						toast.success(`Deleted ${deleteModelTag}`);
-					}
-				}
-			} catch (error) {
-				console.log(error);
-				toast.error(error);
-			}
+		if (res) {
+			toast.success(`Deleted ${deleteModelTag}`);
 		}
 
 		deleteModelTag = '';
 		models.set(await getModels());
 	};
 
-	const getModels = async (url = '', type = 'all') => {
-		let models = [];
-		const res = await fetch(`${url ? url : $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/tags`, {
-			method: 'GET',
-			headers: {
-				Accept: 'application/json',
-				'Content-Type': 'application/json',
-				...($settings.authHeader && { Authorization: $settings.authHeader }),
-				...($user && { Authorization: `Bearer ${localStorage.token}` })
-			}
-		})
-			.then(async (res) => {
-				if (!res.ok) throw await res.json();
-				return res.json();
-			})
-			.catch((error) => {
+	const getModels = async (type = 'all') => {
+		const models = [];
+		models.push(
+			...(await getOllamaModels(localStorage.token).catch((error) => {
+				toast.error(error);
+				return [];
+			}))
+		);
+
+		// If OpenAI API Key exists
+		if (type === 'all' && OPENAI_API_KEY) {
+			const openAIModels = await getOpenAIModels(localStorage.token).catch((error) => {
 				console.log(error);
-				if ('detail' in error) {
-					toast.error(error.detail);
-				} else {
-					toast.error('Server connection failed');
-				}
 				return null;
 			});
-		console.log(res);
-		models.push(...(res?.models ?? []));
-
-		// If OpenAI API Key exists
-		if (type === 'all' && $settings.OPENAI_API_KEY) {
-			const API_BASE_URL = $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
 
-			// Validate OPENAI_API_KEY
-			const openaiModelRes = await fetch(`${API_BASE_URL}/models`, {
-				method: 'GET',
-				headers: {
-					'Content-Type': 'application/json',
-					Authorization: `Bearer ${$settings.OPENAI_API_KEY}`
-				}
-			})
-				.then(async (res) => {
-					if (!res.ok) throw await res.json();
-					return res.json();
-				})
-				.catch((error) => {
-					console.log(error);
-					toast.error(`OpenAI: ${error?.error?.message ?? 'Network Problem'}`);
-					return null;
-				});
-
-			const openAIModels = Array.isArray(openaiModelRes)
-				? openaiModelRes
-				: openaiModelRes?.data ?? null;
-
-			models.push(
-				...(openAIModels
-					? [
-							{ name: 'hr' },
-							...openAIModels
-								.map((model) => ({ name: model.id, external: true }))
-								.filter((model) =>
-									API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true
-								)
-					  ]
-					: [])
-			);
+			models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
 		}
 
 		return models;
@@ -666,15 +573,20 @@
 	};
 
 	onMount(async () => {
+		console.log('settings', $user.role === 'admin');
+		if ($user.role === 'admin') {
+			API_BASE_URL = await getOllamaAPIUrl(localStorage.token);
+			OPENAI_API_BASE_URL = await getOpenAIUrl(localStorage.token);
+			OPENAI_API_KEY = await getOpenAIKey(localStorage.token);
+		}
+
 		let settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
 		console.log(settings);
 
 		theme = localStorage.theme ?? 'dark';
 		notificationEnabled = settings.notificationEnabled ?? false;
 
-		API_BASE_URL = settings.API_BASE_URL ?? OLLAMA_API_BASE_URL;
 		system = settings.system ?? '';
-
 		requestFormat = settings.requestFormat ?? '';
 
 		options.seed = settings.seed ?? 0;
@@ -689,10 +601,10 @@
 		titleAutoGenerate = settings.titleAutoGenerate ?? true;
 		speechAutoSend = settings.speechAutoSend ?? false;
 		responseAutoCopy = settings.responseAutoCopy ?? false;
-
+		titleAutoGenerateModel = settings.titleAutoGenerateModel ?? '';
 		gravatarEmail = settings.gravatarEmail ?? '';
-		OPENAI_API_KEY = settings.OPENAI_API_KEY ?? '';
-		OPENAI_API_BASE_URL = settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
+
+		saveChatHistory = settings.saveChatHistory ?? true;
 
 		authEnabled = settings.authHeader !== undefined ? true : false;
 		if (authEnabled) {
@@ -700,10 +612,7 @@
 			authContent = settings.authHeader.split(' ')[1];
 		}
 
-		ollamaVersion = await getOllamaVersion(
-			API_BASE_URL ?? OLLAMA_API_BASE_URL,
-			localStorage.token
-		).catch((error) => {
+		ollamaVersion = await getOllamaVersion(localStorage.token).catch((error) => {
 			return '';
 		});
 	});
@@ -787,55 +696,57 @@
 					<div class=" self-center">Advanced</div>
 				</button>
 
-				<button
-					class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
-					'models'
-						? 'bg-gray-200 dark:bg-gray-700'
-						: ' hover:bg-gray-300 dark:hover:bg-gray-800'}"
-					on:click={() => {
-						selectedTab = 'models';
-					}}
-				>
-					<div class=" self-center mr-2">
-						<svg
-							xmlns="http://www.w3.org/2000/svg"
-							viewBox="0 0 20 20"
-							fill="currentColor"
-							class="w-4 h-4"
-						>
-							<path
-								fill-rule="evenodd"
-								d="M10 1c3.866 0 7 1.79 7 4s-3.134 4-7 4-7-1.79-7-4 3.134-4 7-4zm5.694 8.13c.464-.264.91-.583 1.306-.952V10c0 2.21-3.134 4-7 4s-7-1.79-7-4V8.178c.396.37.842.688 1.306.953C5.838 10.006 7.854 10.5 10 10.5s4.162-.494 5.694-1.37zM3 13.179V15c0 2.21 3.134 4 7 4s7-1.79 7-4v-1.822c-.396.37-.842.688-1.306.953-1.532.875-3.548 1.369-5.694 1.369s-4.162-.494-5.694-1.37A7.009 7.009 0 013 13.179z"
-								clip-rule="evenodd"
-							/>
-						</svg>
-					</div>
-					<div class=" self-center">Models</div>
-				</button>
+				{#if $user?.role === 'admin'}
+					<button
+						class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
+						'models'
+							? 'bg-gray-200 dark:bg-gray-700'
+							: ' hover:bg-gray-300 dark:hover:bg-gray-800'}"
+						on:click={() => {
+							selectedTab = 'models';
+						}}
+					>
+						<div class=" self-center mr-2">
+							<svg
+								xmlns="http://www.w3.org/2000/svg"
+								viewBox="0 0 20 20"
+								fill="currentColor"
+								class="w-4 h-4"
+							>
+								<path
+									fill-rule="evenodd"
+									d="M10 1c3.866 0 7 1.79 7 4s-3.134 4-7 4-7-1.79-7-4 3.134-4 7-4zm5.694 8.13c.464-.264.91-.583 1.306-.952V10c0 2.21-3.134 4-7 4s-7-1.79-7-4V8.178c.396.37.842.688 1.306.953C5.838 10.006 7.854 10.5 10 10.5s4.162-.494 5.694-1.37zM3 13.179V15c0 2.21 3.134 4 7 4s7-1.79 7-4v-1.822c-.396.37-.842.688-1.306.953-1.532.875-3.548 1.369-5.694 1.369s-4.162-.494-5.694-1.37A7.009 7.009 0 013 13.179z"
+									clip-rule="evenodd"
+								/>
+							</svg>
+						</div>
+						<div class=" self-center">Models</div>
+					</button>
 
-				<button
-					class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
-					'external'
-						? 'bg-gray-200 dark:bg-gray-700'
-						: ' hover:bg-gray-300 dark:hover:bg-gray-800'}"
-					on:click={() => {
-						selectedTab = 'external';
-					}}
-				>
-					<div class=" self-center mr-2">
-						<svg
-							xmlns="http://www.w3.org/2000/svg"
-							viewBox="0 0 16 16"
-							fill="currentColor"
-							class="w-4 h-4"
-						>
-							<path
-								d="M1 9.5A3.5 3.5 0 0 0 4.5 13H12a3 3 0 0 0 .917-5.857 2.503 2.503 0 0 0-3.198-3.019 3.5 3.5 0 0 0-6.628 2.171A3.5 3.5 0 0 0 1 9.5Z"
-							/>
-						</svg>
-					</div>
-					<div class=" self-center">External</div>
-				</button>
+					<button
+						class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
+						'external'
+							? 'bg-gray-200 dark:bg-gray-700'
+							: ' hover:bg-gray-300 dark:hover:bg-gray-800'}"
+						on:click={() => {
+							selectedTab = 'external';
+						}}
+					>
+						<div class=" self-center mr-2">
+							<svg
+								xmlns="http://www.w3.org/2000/svg"
+								viewBox="0 0 16 16"
+								fill="currentColor"
+								class="w-4 h-4"
+							>
+								<path
+									d="M1 9.5A3.5 3.5 0 0 0 4.5 13H12a3 3 0 0 0 .917-5.857 2.503 2.503 0 0 0-3.198-3.019 3.5 3.5 0 0 0-6.628 2.171A3.5 3.5 0 0 0 1 9.5Z"
+								/>
+							</svg>
+						</div>
+						<div class=" self-center">External</div>
+					</button>
+				{/if}
 
 				<button
 					class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
@@ -1065,51 +976,51 @@
 							</div>
 						</div>
 
-						<hr class=" dark:border-gray-700" />
-						<div>
-							<div class=" mb-2.5 text-sm font-medium">Ollama API URL</div>
-							<div class="flex w-full">
-								<div class="flex-1 mr-2">
-									<input
-										class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
-										placeholder="Enter URL (e.g. http://localhost:8080/ollama/api)"
-										bind:value={API_BASE_URL}
-									/>
-								</div>
-								<button
-									class="px-3 bg-gray-200 hover:bg-gray-300 dark:bg-gray-600 dark:hover:bg-gray-700 rounded transition"
-									on:click={() => {
-										checkOllamaConnection();
-									}}
-								>
-									<svg
-										xmlns="http://www.w3.org/2000/svg"
-										viewBox="0 0 20 20"
-										fill="currentColor"
-										class="w-4 h-4"
-									>
-										<path
-											fill-rule="evenodd"
-											d="M15.312 11.424a5.5 5.5 0 01-9.201 2.466l-.312-.311h2.433a.75.75 0 000-1.5H3.989a.75.75 0 00-.75.75v4.242a.75.75 0 001.5 0v-2.43l.31.31a7 7 0 0011.712-3.138.75.75 0 00-1.449-.39zm1.23-3.723a.75.75 0 00.219-.53V2.929a.75.75 0 00-1.5 0V5.36l-.31-.31A7 7 0 003.239 8.188a.75.75 0 101.448.389A5.5 5.5 0 0113.89 6.11l.311.31h-2.432a.75.75 0 000 1.5h4.243a.75.75 0 00.53-.219z"
-											clip-rule="evenodd"
+						{#if $user.role === 'admin'}
+							<hr class=" dark:border-gray-700" />
+							<div>
+								<div class=" mb-2.5 text-sm font-medium">Ollama API URL</div>
+								<div class="flex w-full">
+									<div class="flex-1 mr-2">
+										<input
+											class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
+											placeholder="Enter URL (e.g. http://localhost:11434/api)"
+											bind:value={API_BASE_URL}
 										/>
-									</svg>
-								</button>
-							</div>
+									</div>
+									<button
+										class="px-3 bg-gray-200 hover:bg-gray-300 dark:bg-gray-600 dark:hover:bg-gray-700 rounded transition"
+										on:click={() => {
+											updateOllamaAPIUrlHandler();
+										}}
+									>
+										<svg
+											xmlns="http://www.w3.org/2000/svg"
+											viewBox="0 0 20 20"
+											fill="currentColor"
+											class="w-4 h-4"
+										>
+											<path
+												fill-rule="evenodd"
+												d="M15.312 11.424a5.5 5.5 0 01-9.201 2.466l-.312-.311h2.433a.75.75 0 000-1.5H3.989a.75.75 0 00-.75.75v4.242a.75.75 0 001.5 0v-2.43l.31.31a7 7 0 0011.712-3.138.75.75 0 00-1.449-.39zm1.23-3.723a.75.75 0 00.219-.53V2.929a.75.75 0 00-1.5 0V5.36l-.31-.31A7 7 0 003.239 8.188a.75.75 0 101.448.389A5.5 5.5 0 0113.89 6.11l.311.31h-2.432a.75.75 0 000 1.5h4.243a.75.75 0 00.53-.219z"
+												clip-rule="evenodd"
+											/>
+										</svg>
+									</button>
+								</div>
 
-							<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
-								The field above should be set to <span
-									class=" text-gray-500 dark:text-gray-300 font-medium">'/ollama/api'</span
-								>;
-								<a
-									class=" text-gray-500 dark:text-gray-300 font-medium"
-									href="https://github.com/ollama-webui/ollama-webui#troubleshooting"
-									target="_blank"
-								>
-									Click here for help.
-								</a>
+								<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
+									Trouble accessing Ollama?
+									<a
+										class=" text-gray-300 font-medium"
+										href="https://github.com/ollama-webui/ollama-webui#troubleshooting"
+										target="_blank"
+									>
+										Click here for help.
+									</a>
+								</div>
 							</div>
-						</div>
+						{/if}
 
 						<hr class=" dark:border-gray-700" />
 
@@ -1127,7 +1038,6 @@
 								class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
 								on:click={() => {
 									saveSettings({
-										API_BASE_URL: API_BASE_URL === '' ? OLLAMA_API_BASE_URL : API_BASE_URL,
 										system: system !== '' ? system : undefined
 									});
 									show = false;
@@ -1548,10 +1458,12 @@
 					<form
 						class="flex flex-col h-full justify-between space-y-3 text-sm"
 						on:submit|preventDefault={() => {
-							saveSettings({
-								OPENAI_API_KEY: OPENAI_API_KEY !== '' ? OPENAI_API_KEY : undefined,
-								OPENAI_API_BASE_URL: OPENAI_API_BASE_URL !== '' ? OPENAI_API_BASE_URL : undefined
-							});
+							updateOpenAIHandler();
+
+							// saveSettings({
+							// 	OPENAI_API_KEY: OPENAI_API_KEY !== '' ? OPENAI_API_KEY : undefined,
+							// 	OPENAI_API_BASE_URL: OPENAI_API_BASE_URL !== '' ? OPENAI_API_BASE_URL : undefined
+							// });
 							show = false;
 						}}
 					>
@@ -1608,10 +1520,6 @@
 					<form
 						class="flex flex-col h-full justify-between space-y-3 text-sm"
 						on:submit|preventDefault={() => {
-							saveSettings({
-								gravatarEmail: gravatarEmail !== '' ? gravatarEmail : undefined,
-								gravatarUrl: gravatarEmail !== '' ? getGravatarURL(gravatarEmail) : undefined
-							});
 							show = false;
 						}}
 					>
@@ -1621,7 +1529,7 @@
 
 								<div>
 									<div class=" py-0.5 flex w-full justify-between">
-										<div class=" self-center text-xs font-medium">Title Auto Generation</div>
+										<div class=" self-center text-xs font-medium">Title Auto-Generation</div>
 
 										<button
 											class="p-1 px-3 text-xs flex rounded transition"
@@ -1683,6 +1591,54 @@
 							</div>
 
 							<hr class=" dark:border-gray-700" />
+
+							<div>
+								<div class=" mb-2.5 text-sm font-medium">Set Title Auto-Generation Model</div>
+								<div class="flex w-full">
+									<div class="flex-1 mr-2">
+										<select
+											class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
+											bind:value={titleAutoGenerateModel}
+											placeholder="Select a model"
+										>
+											<option value="" selected>Default</option>
+											{#each $models.filter((m) => m.size != null) as model}
+												<option value={model.name} class="bg-gray-100 dark:bg-gray-700"
+													>{model.name +
+														' (' +
+														(model.size / 1024 ** 3).toFixed(1) +
+														' GB)'}</option
+												>
+											{/each}
+										</select>
+									</div>
+									<button
+										class="px-3 bg-gray-200 hover:bg-gray-300 dark:bg-gray-700 dark:hover:bg-gray-800 dark:text-gray-100 rounded transition"
+										on:click={() => {
+											saveSettings({
+												titleAutoGenerateModel:
+													titleAutoGenerateModel !== '' ? titleAutoGenerateModel : undefined
+											});
+										}}
+										type="button"
+									>
+										<svg
+											xmlns="http://www.w3.org/2000/svg"
+											viewBox="0 0 16 16"
+											fill="currentColor"
+											class="w-3.5 h-3.5"
+										>
+											<path
+												fill-rule="evenodd"
+												d="M13.836 2.477a.75.75 0 0 1 .75.75v3.182a.75.75 0 0 1-.75.75h-3.182a.75.75 0 0 1 0-1.5h1.37l-.84-.841a4.5 4.5 0 0 0-7.08.932.75.75 0 0 1-1.3-.75 6 6 0 0 1 9.44-1.242l.842.84V3.227a.75.75 0 0 1 .75-.75Zm-.911 7.5A.75.75 0 0 1 13.199 11a6 6 0 0 1-9.44 1.241l-.84-.84v1.371a.75.75 0 0 1-1.5 0V9.591a.75.75 0 0 1 .75-.75H5.35a.75.75 0 0 1 0 1.5H3.98l.841.841a4.5 4.5 0 0 0 7.08-.932.75.75 0 0 1 1.025-.273Z"
+												clip-rule="evenodd"
+											/>
+										</svg>
+									</button>
+								</div>
+							</div>
+
+							<!-- <hr class=" dark:border-gray-700" />
 							<div>
 								<div class=" mb-2.5 text-sm font-medium">
 									Gravatar Email <span class=" text-gray-400 text-sm">(optional)</span>
@@ -1705,7 +1661,7 @@
 										target="_blank">Gravatar.</a
 									>
 								</div>
-							</div>
+							</div> -->
 						</div>
 
 						<div class="flex justify-end pt-3 text-sm font-medium">
@@ -1720,6 +1676,64 @@
 				{:else if selectedTab === 'chats'}
 					<div class="flex flex-col h-full justify-between space-y-3 text-sm">
 						<div class=" space-y-2">
+							<div
+								class="flex flex-col justify-between rounded-md items-center py-2 px-3.5 w-full transition"
+							>
+								<div class="flex w-full justify-between">
+									<div class=" self-center text-sm font-medium">Chat History</div>
+
+									<button
+										class="p-1 px-3 text-xs flex rounded transition"
+										type="button"
+										on:click={() => {
+											toggleSaveChatHistory();
+										}}
+									>
+										{#if saveChatHistory === true}
+											<svg
+												xmlns="http://www.w3.org/2000/svg"
+												viewBox="0 0 16 16"
+												fill="currentColor"
+												class="w-4 h-4"
+											>
+												<path d="M8 9.5a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3Z" />
+												<path
+													fill-rule="evenodd"
+													d="M1.38 8.28a.87.87 0 0 1 0-.566 7.003 7.003 0 0 1 13.238.006.87.87 0 0 1 0 .566A7.003 7.003 0 0 1 1.379 8.28ZM11 8a3 3 0 1 1-6 0 3 3 0 0 1 6 0Z"
+													clip-rule="evenodd"
+												/>
+											</svg>
+
+											<span class="ml-2 self-center"> On </span>
+										{:else}
+											<svg
+												xmlns="http://www.w3.org/2000/svg"
+												viewBox="0 0 16 16"
+												fill="currentColor"
+												class="w-4 h-4"
+											>
+												<path
+													fill-rule="evenodd"
+													d="M3.28 2.22a.75.75 0 0 0-1.06 1.06l10.5 10.5a.75.75 0 1 0 1.06-1.06l-1.322-1.323a7.012 7.012 0 0 0 2.16-3.11.87.87 0 0 0 0-.567A7.003 7.003 0 0 0 4.82 3.76l-1.54-1.54Zm3.196 3.195 1.135 1.136A1.502 1.502 0 0 1 9.45 8.389l1.136 1.135a3 3 0 0 0-4.109-4.109Z"
+													clip-rule="evenodd"
+												/>
+												<path
+													d="m7.812 10.994 1.816 1.816A7.003 7.003 0 0 1 1.38 8.28a.87.87 0 0 1 0-.566 6.985 6.985 0 0 1 1.113-2.039l2.513 2.513a3 3 0 0 0 2.806 2.806Z"
+												/>
+											</svg>
+
+											<span class="ml-2 self-center">Off</span>
+										{/if}
+									</button>
+								</div>
+
+								<div class="text-xs text-left w-full font-medium mt-0.5">
+									This setting does not sync across browsers or devices.
+								</div>
+							</div>
+
+							<hr class=" dark:border-gray-700" />
+
 							<div class="flex flex-col">
 								<input
 									id="chat-import-input"

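For reference, the new title auto-generation dropdown above only lists locally pulled models (entries that carry a size) and renders that size in gigabytes. A minimal TypeScript sketch of the same filter and label logic, with sample model values assumed for illustration:

    // Sketch of the dropdown's filter + label logic; the model list below is illustrative.
    type ModelEntry = { name: string; size?: number; external?: boolean };

    const models: ModelEntry[] = [
    	{ name: 'llama2:latest', size: 4_108_916_608 },
    	{ name: 'hr' }, // divider entry inserted between local and external models
    	{ name: 'gpt-3.5-turbo', external: true }
    ];

    const options = models
    	.filter((m) => m.size != null) // external models and the divider carry no size
    	.map((m) => `${m.name} (${(m.size! / 1024 ** 3).toFixed(1)} GB)`);

    console.log(options); // [ 'llama2:latest (3.8 GB)' ]
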
+ 13 - 1
src/lib/components/common/Modal.svelte

@@ -3,8 +3,18 @@
 	import { fade, blur } from 'svelte/transition';
 
 	export let show = true;
+	export let size = 'md';
+
 	let mounted = false;
 
+	const sizeToWidth = (size) => {
+		if (size === 'sm') {
+			return 'w-[30rem]';
+		} else {
+			return 'w-[40rem]';
+		}
+	};
+
 	onMount(() => {
 		mounted = true;
 	});
@@ -28,7 +38,9 @@
 		}}
 	>
 		<div
-			class="m-auto rounded-xl max-w-full w-[40rem] mx-2 bg-gray-50 dark:bg-gray-900 shadow-3xl"
+			class="m-auto rounded-xl max-w-full {sizeToWidth(
+				size
+			)} mx-2 bg-gray-50 dark:bg-gray-900 shadow-3xl"
 			transition:fade={{ delay: 100, duration: 200 }}
 			on:click={(e) => {
 				e.stopPropagation();

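The Modal component above now accepts a size prop: 'sm' maps to w-[30rem], anything else keeps the default w-[40rem]. A minimal usage sketch, with the surrounding markup assumed rather than taken from this diff:

    <script lang="ts">
    	import Modal from '$lib/components/common/Modal.svelte';

    	let show = false; // bound to the modal's exported show prop
    </script>

    <button on:click={() => (show = true)}>Open</button>

    <!-- 'sm' selects the narrower w-[30rem] width; omit size to fall back to w-[40rem] -->
    <Modal size="sm" bind:show>
    	<div class="p-4">Compact dialog content</div>
    </Modal>
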
+ 308 - 231
src/lib/components/layout/Sidebar.svelte

@@ -6,7 +6,7 @@
 
 	import { goto, invalidateAll } from '$app/navigation';
 	import { page } from '$app/stores';
-	import { user, chats, showSettings, chatId } from '$lib/stores';
+	import { user, chats, settings, showSettings, chatId } from '$lib/stores';
 	import { onMount } from 'svelte';
 	import { deleteChatById, getChatList, updateChatById } from '$lib/apis/chats';
 
@@ -49,6 +49,12 @@
 		await deleteChatById(localStorage.token, id);
 		await chats.set(await getChatList(localStorage.token));
 	};
+
+	const saveSettings = async (updated) => {
+		await settings.set({ ...$settings, ...updated });
+		localStorage.setItem('settings', JSON.stringify($settings));
+		location.href = '/';
+	};
 </script>
 
 <div
@@ -100,7 +106,7 @@
 		</div>
 
 		{#if $user?.role === 'admin'}
-			<div class="px-2.5 flex justify-center my-1">
+			<div class="px-2.5 flex justify-center mt-1">
 				<button
 					class="flex-grow flex space-x-3 rounded-md px-3 py-2 hover:bg-gray-900 transition"
 					on:click={async () => {
@@ -129,255 +135,326 @@
 					</div>
 				</button>
 			</div>
-		{/if}
-
-		<div class="px-2.5 mt-1 mb-2 flex justify-center space-x-2">
-			<div class="flex w-full" id="chat-search">
-				<div class="self-center pl-3 py-2 rounded-l bg-gray-950">
-					<svg
-						xmlns="http://www.w3.org/2000/svg"
-						viewBox="0 0 20 20"
-						fill="currentColor"
-						class="w-4 h-4"
-					>
-						<path
-							fill-rule="evenodd"
-							d="M9 3.5a5.5 5.5 0 100 11 5.5 5.5 0 000-11zM2 9a7 7 0 1112.452 4.391l3.328 3.329a.75.75 0 11-1.06 1.06l-3.329-3.328A7 7 0 012 9z"
-							clip-rule="evenodd"
-						/>
-					</svg>
-				</div>
 
-				<input
-					class="w-full rounded-r py-1.5 pl-2.5 pr-4 text-sm text-gray-300 bg-gray-950 outline-none"
-					placeholder="Search"
-					bind:value={search}
-				/>
+			<div class="px-2.5 flex justify-center mb-1">
+				<button
+					class="flex-grow flex space-x-3 rounded-md px-3 py-2 hover:bg-gray-900 transition"
+					on:click={async () => {
+						goto('/prompts');
+					}}
+				>
+					<div class="self-center">
+						<svg
+							xmlns="http://www.w3.org/2000/svg"
+							viewBox="0 0 16 16"
+							fill="currentColor"
+							class="w-4 h-4"
+						>
+							<path
+								fill-rule="evenodd"
+								d="M11.013 2.513a1.75 1.75 0 0 1 2.475 2.474L6.226 12.25a2.751 2.751 0 0 1-.892.596l-2.047.848a.75.75 0 0 1-.98-.98l.848-2.047a2.75 2.75 0 0 1 .596-.892l7.262-7.261Z"
+								clip-rule="evenodd"
+							/>
+						</svg>
+					</div>
 
-				<!-- <div class="self-center pr-3 py-2  bg-gray-900">
-					<svg
-						xmlns="http://www.w3.org/2000/svg"
-						fill="none"
-						viewBox="0 0 24 24"
-						stroke-width="1.5"
-						stroke="currentColor"
-						class="w-4 h-4"
-					>
-						<path
-							stroke-linecap="round"
-							stroke-linejoin="round"
-							d="M12 3c2.755 0 5.455.232 8.083.678.533.09.917.556.917 1.096v1.044a2.25 2.25 0 01-.659 1.591l-5.432 5.432a2.25 2.25 0 00-.659 1.591v2.927a2.25 2.25 0 01-1.244 2.013L9.75 21v-6.568a2.25 2.25 0 00-.659-1.591L3.659 7.409A2.25 2.25 0 013 5.818V4.774c0-.54.384-1.006.917-1.096A48.32 48.32 0 0112 3z"
-						/>
-					</svg>
-				</div> -->
+					<div class="flex self-center">
+						<div class=" self-center font-medium text-sm">Prompts</div>
+					</div>
+				</button>
 			</div>
-		</div>
+		{/if}
 
-		<div class="pl-2.5 my-2 flex-1 flex flex-col space-y-1 overflow-y-auto">
-			{#each $chats.filter((chat) => {
-				if (search === '') {
-					return true;
-				} else {
-					let title = chat.title.toLowerCase();
+		<div class="relative flex flex-col flex-1 overflow-y-auto">
+			{#if !($settings.saveChatHistory ?? true)}
+				<div class="absolute z-40 w-full h-full bg-black/90 flex justify-center">
+					<div class=" text-left px-5 py-2">
+						<div class=" font-medium">Chat History is off for this browser.</div>
+						<div class="text-xs mt-2">
+							When history is turned off, new chats on this browser won't appear in your history on
+							any of your devices. <span class=" font-semibold"
+								>This setting does not sync across browsers or devices.</span
+							>
+						</div>
 
-					if (title.includes(search)) {
-						return true;
-					} else {
-						return false;
-					}
-				}
-			}) as chat, i}
-				<div class=" w-full pr-2 relative">
-					<button
-						class=" w-full flex justify-between rounded-md px-3 py-2 hover:bg-gray-900 {chat.id ===
-						$chatId
-							? 'bg-gray-900'
-							: ''} transition whitespace-nowrap text-ellipsis"
-						on:click={() => {
-							// goto(`/c/${chat.id}`);
-							if (chat.id !== chatTitleEditId) {
-								chatTitleEditId = null;
-								chatTitle = '';
-							}
-
-							if (chat.id !== $chatId) {
-								loadChat(chat.id);
-							}
-						}}
-					>
-						<div class=" flex self-center flex-1">
-							<div class=" self-center mr-3">
+						<div class="mt-3">
+							<button
+								class="flex justify-center items-center space-x-1.5 px-3 py-2.5 rounded-lg text-xs bg-gray-200 hover:bg-gray-300 transition text-gray-800 font-medium w-full"
+								type="button"
+								on:click={() => {
+									saveSettings({
+										saveChatHistory: true
+									});
+								}}
+							>
 								<svg
 									xmlns="http://www.w3.org/2000/svg"
-									fill="none"
-									viewBox="0 0 24 24"
-									stroke-width="1.5"
-									stroke="currentColor"
-									class="w-4 h-4"
+									viewBox="0 0 16 16"
+									fill="currentColor"
+									class="w-3 h-3"
 								>
 									<path
-										stroke-linecap="round"
-										stroke-linejoin="round"
-										d="M2.25 12.76c0 1.6 1.123 2.994 2.707 3.227 1.087.16 2.185.283 3.293.369V21l4.076-4.076a1.526 1.526 0 011.037-.443 48.282 48.282 0 005.68-.494c1.584-.233 2.707-1.626 2.707-3.228V6.741c0-1.602-1.123-2.995-2.707-3.228A48.394 48.394 0 0012 3c-2.392 0-4.744.175-7.043.513C3.373 3.746 2.25 5.14 2.25 6.741v6.018z"
+										fill-rule="evenodd"
+										d="M8 1a.75.75 0 0 1 .75.75v6.5a.75.75 0 0 1-1.5 0v-6.5A.75.75 0 0 1 8 1ZM4.11 3.05a.75.75 0 0 1 0 1.06 5.5 5.5 0 1 0 7.78 0 .75.75 0 0 1 1.06-1.06 7 7 0 1 1-9.9 0 .75.75 0 0 1 1.06 0Z"
+										clip-rule="evenodd"
 									/>
 								</svg>
-							</div>
-							<div
-								class=" text-left self-center overflow-hidden {chat.id === $chatId
-									? 'w-[120px]'
-									: 'w-[180px]'} "
-							>
-								{#if chatTitleEditId === chat.id}
-									<input bind:value={chatTitle} class=" bg-transparent w-full" />
-								{:else}
-									{chat.title}
-								{/if}
-							</div>
+
+								<div>Enable Chat History</div>
+							</button>
 						</div>
-					</button>
+					</div>
+				</div>
+			{/if}
 
-					{#if chat.id === $chatId}
-						<div class=" absolute right-[22px] top-[10px]">
-							{#if chatTitleEditId === chat.id}
-								<div class="flex self-center space-x-1.5">
-									<button
-										class=" self-center hover:text-white transition"
-										on:click={() => {
-											editChatTitle(chat.id, chatTitle);
-											chatTitleEditId = null;
-											chatTitle = '';
-										}}
+			<div class="px-2.5 mt-1 mb-2 flex justify-center space-x-2">
+				<div class="flex w-full" id="chat-search">
+					<div class="self-center pl-3 py-2 rounded-l bg-gray-950">
+						<svg
+							xmlns="http://www.w3.org/2000/svg"
+							viewBox="0 0 20 20"
+							fill="currentColor"
+							class="w-4 h-4"
+						>
+							<path
+								fill-rule="evenodd"
+								d="M9 3.5a5.5 5.5 0 100 11 5.5 5.5 0 000-11zM2 9a7 7 0 1112.452 4.391l3.328 3.329a.75.75 0 11-1.06 1.06l-3.329-3.328A7 7 0 012 9z"
+								clip-rule="evenodd"
+							/>
+						</svg>
+					</div>
+
+					<input
+						class="w-full rounded-r py-1.5 pl-2.5 pr-4 text-sm text-gray-300 bg-gray-950 outline-none"
+						placeholder="Search"
+						bind:value={search}
+					/>
+
+					<!-- <div class="self-center pr-3 py-2  bg-gray-900">
+						<svg
+							xmlns="http://www.w3.org/2000/svg"
+							fill="none"
+							viewBox="0 0 24 24"
+							stroke-width="1.5"
+							stroke="currentColor"
+							class="w-4 h-4"
+						>
+							<path
+								stroke-linecap="round"
+								stroke-linejoin="round"
+								d="M12 3c2.755 0 5.455.232 8.083.678.533.09.917.556.917 1.096v1.044a2.25 2.25 0 01-.659 1.591l-5.432 5.432a2.25 2.25 0 00-.659 1.591v2.927a2.25 2.25 0 01-1.244 2.013L9.75 21v-6.568a2.25 2.25 0 00-.659-1.591L3.659 7.409A2.25 2.25 0 013 5.818V4.774c0-.54.384-1.006.917-1.096A48.32 48.32 0 0112 3z"
+							/>
+						</svg>
+					</div> -->
+				</div>
+			</div>
+
+			<div class="pl-2.5 my-2 flex-1 flex flex-col space-y-1 overflow-y-auto">
+				{#each $chats.filter((chat) => {
+					if (search === '') {
+						return true;
+					} else {
+						let title = chat.title.toLowerCase();
+
+						if (title.includes(search)) {
+							return true;
+						} else {
+							return false;
+						}
+					}
+				}) as chat, i}
+					<div class=" w-full pr-2 relative">
+						<button
+							class=" w-full flex justify-between rounded-md px-3 py-2 hover:bg-gray-900 {chat.id ===
+							$chatId
+								? 'bg-gray-900'
+								: ''} transition whitespace-nowrap text-ellipsis"
+							on:click={() => {
+								// goto(`/c/${chat.id}`);
+								if (chat.id !== chatTitleEditId) {
+									chatTitleEditId = null;
+									chatTitle = '';
+								}
+
+								if (chat.id !== $chatId) {
+									loadChat(chat.id);
+								}
+							}}
+						>
+							<div class=" flex self-center flex-1">
+								<div class=" self-center mr-3">
+									<svg
+										xmlns="http://www.w3.org/2000/svg"
+										fill="none"
+										viewBox="0 0 24 24"
+										stroke-width="1.5"
+										stroke="currentColor"
+										class="w-4 h-4"
 									>
-										<svg
-											xmlns="http://www.w3.org/2000/svg"
-											viewBox="0 0 20 20"
-											fill="currentColor"
-											class="w-4 h-4"
+										<path
+											stroke-linecap="round"
+											stroke-linejoin="round"
+											d="M2.25 12.76c0 1.6 1.123 2.994 2.707 3.227 1.087.16 2.185.283 3.293.369V21l4.076-4.076a1.526 1.526 0 011.037-.443 48.282 48.282 0 005.68-.494c1.584-.233 2.707-1.626 2.707-3.228V6.741c0-1.602-1.123-2.995-2.707-3.228A48.394 48.394 0 0012 3c-2.392 0-4.744.175-7.043.513C3.373 3.746 2.25 5.14 2.25 6.741v6.018z"
+										/>
+									</svg>
+								</div>
+								<div
+									class=" text-left self-center overflow-hidden {chat.id === $chatId
+										? 'w-[120px]'
+										: 'w-[180px]'} "
+								>
+									{#if chatTitleEditId === chat.id}
+										<input bind:value={chatTitle} class=" bg-transparent w-full" />
+									{:else}
+										{chat.title}
+									{/if}
+								</div>
+							</div>
+						</button>
+
+						{#if chat.id === $chatId}
+							<div class=" absolute right-[22px] top-[10px]">
+								{#if chatTitleEditId === chat.id}
+									<div class="flex self-center space-x-1.5">
+										<button
+											class=" self-center hover:text-white transition"
+											on:click={() => {
+												editChatTitle(chat.id, chatTitle);
+												chatTitleEditId = null;
+												chatTitle = '';
+											}}
 										>
-											<path
-												fill-rule="evenodd"
-												d="M16.704 4.153a.75.75 0 01.143 1.052l-8 10.5a.75.75 0 01-1.127.075l-4.5-4.5a.75.75 0 011.06-1.06l3.894 3.893 7.48-9.817a.75.75 0 011.05-.143z"
-												clip-rule="evenodd"
-											/>
-										</svg>
-									</button>
-									<button
-										class=" self-center hover:text-white transition"
-										on:click={() => {
-											chatTitleEditId = null;
-											chatTitle = '';
-										}}
-									>
-										<svg
-											xmlns="http://www.w3.org/2000/svg"
-											viewBox="0 0 20 20"
-											fill="currentColor"
-											class="w-4 h-4"
+											<svg
+												xmlns="http://www.w3.org/2000/svg"
+												viewBox="0 0 20 20"
+												fill="currentColor"
+												class="w-4 h-4"
+											>
+												<path
+													fill-rule="evenodd"
+													d="M16.704 4.153a.75.75 0 01.143 1.052l-8 10.5a.75.75 0 01-1.127.075l-4.5-4.5a.75.75 0 011.06-1.06l3.894 3.893 7.48-9.817a.75.75 0 011.05-.143z"
+													clip-rule="evenodd"
+												/>
+											</svg>
+										</button>
+										<button
+											class=" self-center hover:text-white transition"
+											on:click={() => {
+												chatTitleEditId = null;
+												chatTitle = '';
+											}}
 										>
-											<path
-												d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
-											/>
-										</svg>
-									</button>
-								</div>
-							{:else if chatDeleteId === chat.id}
-								<div class="flex self-center space-x-1.5">
-									<button
-										class=" self-center hover:text-white transition"
-										on:click={() => {
-											deleteChat(chat.id);
-										}}
-									>
-										<svg
-											xmlns="http://www.w3.org/2000/svg"
-											viewBox="0 0 20 20"
-											fill="currentColor"
-											class="w-4 h-4"
+											<svg
+												xmlns="http://www.w3.org/2000/svg"
+												viewBox="0 0 20 20"
+												fill="currentColor"
+												class="w-4 h-4"
+											>
+												<path
+													d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
+												/>
+											</svg>
+										</button>
+									</div>
+								{:else if chatDeleteId === chat.id}
+									<div class="flex self-center space-x-1.5">
+										<button
+											class=" self-center hover:text-white transition"
+											on:click={() => {
+												deleteChat(chat.id);
+											}}
 										>
-											<path
-												fill-rule="evenodd"
-												d="M16.704 4.153a.75.75 0 01.143 1.052l-8 10.5a.75.75 0 01-1.127.075l-4.5-4.5a.75.75 0 011.06-1.06l3.894 3.893 7.48-9.817a.75.75 0 011.05-.143z"
-												clip-rule="evenodd"
-											/>
-										</svg>
-									</button>
-									<button
-										class=" self-center hover:text-white transition"
-										on:click={() => {
-											chatDeleteId = null;
-										}}
-									>
-										<svg
-											xmlns="http://www.w3.org/2000/svg"
-											viewBox="0 0 20 20"
-											fill="currentColor"
-											class="w-4 h-4"
+											<svg
+												xmlns="http://www.w3.org/2000/svg"
+												viewBox="0 0 20 20"
+												fill="currentColor"
+												class="w-4 h-4"
+											>
+												<path
+													fill-rule="evenodd"
+													d="M16.704 4.153a.75.75 0 01.143 1.052l-8 10.5a.75.75 0 01-1.127.075l-4.5-4.5a.75.75 0 011.06-1.06l3.894 3.893 7.48-9.817a.75.75 0 011.05-.143z"
+													clip-rule="evenodd"
+												/>
+											</svg>
+										</button>
+										<button
+											class=" self-center hover:text-white transition"
+											on:click={() => {
+												chatDeleteId = null;
+											}}
 										>
-											<path
-												d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
-											/>
-										</svg>
-									</button>
-								</div>
-							{:else}
-								<div class="flex self-center space-x-1.5">
-									<button
-										id="delete-chat-button"
-										class=" hidden"
-										on:click={() => {
-											deleteChat(chat.id);
-										}}
-									/>
-									<button
-										class=" self-center hover:text-white transition"
-										on:click={() => {
-											chatTitle = chat.title;
-											chatTitleEditId = chat.id;
-											// editChatTitle(chat.id, 'a');
-										}}
-									>
-										<svg
-											xmlns="http://www.w3.org/2000/svg"
-											fill="none"
-											viewBox="0 0 24 24"
-											stroke-width="1.5"
-											stroke="currentColor"
-											class="w-4 h-4"
+											<svg
+												xmlns="http://www.w3.org/2000/svg"
+												viewBox="0 0 20 20"
+												fill="currentColor"
+												class="w-4 h-4"
+											>
+												<path
+													d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
+												/>
+											</svg>
+										</button>
+									</div>
+								{:else}
+									<div class="flex self-center space-x-1.5">
+										<button
+											id="delete-chat-button"
+											class=" hidden"
+											on:click={() => {
+												deleteChat(chat.id);
+											}}
+										/>
+										<button
+											class=" self-center hover:text-white transition"
+											on:click={() => {
+												chatTitle = chat.title;
+												chatTitleEditId = chat.id;
+												// editChatTitle(chat.id, 'a');
+											}}
 										>
-											<path
-												stroke-linecap="round"
-												stroke-linejoin="round"
-												d="M16.862 4.487l1.687-1.688a1.875 1.875 0 112.652 2.652L6.832 19.82a4.5 4.5 0 01-1.897 1.13l-2.685.8.8-2.685a4.5 4.5 0 011.13-1.897L16.863 4.487zm0 0L19.5 7.125"
-											/>
-										</svg>
-									</button>
-									<button
-										class=" self-center hover:text-white transition"
-										on:click={() => {
-											chatDeleteId = chat.id;
-										}}
-									>
-										<svg
-											xmlns="http://www.w3.org/2000/svg"
-											fill="none"
-											viewBox="0 0 24 24"
-											stroke-width="1.5"
-											stroke="currentColor"
-											class="w-4 h-4"
+											<svg
+												xmlns="http://www.w3.org/2000/svg"
+												fill="none"
+												viewBox="0 0 24 24"
+												stroke-width="1.5"
+												stroke="currentColor"
+												class="w-4 h-4"
+											>
+												<path
+													stroke-linecap="round"
+													stroke-linejoin="round"
+													d="M16.862 4.487l1.687-1.688a1.875 1.875 0 112.652 2.652L6.832 19.82a4.5 4.5 0 01-1.897 1.13l-2.685.8.8-2.685a4.5 4.5 0 011.13-1.897L16.863 4.487zm0 0L19.5 7.125"
+												/>
+											</svg>
+										</button>
+										<button
+											class=" self-center hover:text-white transition"
+											on:click={() => {
+												chatDeleteId = chat.id;
+											}}
 										>
-											<path
-												stroke-linecap="round"
-												stroke-linejoin="round"
-												d="M14.74 9l-.346 9m-4.788 0L9.26 9m9.968-3.21c.342.052.682.107 1.022.166m-1.022-.165L18.16 19.673a2.25 2.25 0 01-2.244 2.077H8.084a2.25 2.25 0 01-2.244-2.077L4.772 5.79m14.456 0a48.108 48.108 0 00-3.478-.397m-12 .562c.34-.059.68-.114 1.022-.165m0 0a48.11 48.11 0 013.478-.397m7.5 0v-.916c0-1.18-.91-2.164-2.09-2.201a51.964 51.964 0 00-3.32 0c-1.18.037-2.09 1.022-2.09 2.201v.916m7.5 0a48.667 48.667 0 00-7.5 0"
-											/>
-										</svg>
-									</button>
-								</div>
-							{/if}
-						</div>
-					{/if}
-				</div>
-			{/each}
+											<svg
+												xmlns="http://www.w3.org/2000/svg"
+												fill="none"
+												viewBox="0 0 24 24"
+												stroke-width="1.5"
+												stroke="currentColor"
+												class="w-4 h-4"
+											>
+												<path
+													stroke-linecap="round"
+													stroke-linejoin="round"
+													d="M14.74 9l-.346 9m-4.788 0L9.26 9m9.968-3.21c.342.052.682.107 1.022.166m-1.022-.165L18.16 19.673a2.25 2.25 0 01-2.244 2.077H8.084a2.25 2.25 0 01-2.244-2.077L4.772 5.79m14.456 0a48.108 48.108 0 00-3.478-.397m-12 .562c.34-.059.68-.114 1.022-.165m0 0a48.11 48.11 0 013.478-.397m7.5 0v-.916c0-1.18-.91-2.164-2.09-2.201a51.964 51.964 0 00-3.32 0c-1.18.037-2.09 1.022-2.09 2.201v.916m7.5 0a48.667 48.667 0 00-7.5 0"
+												/>
+											</svg>
+										</button>
+									</div>
+								{/if}
+							</div>
+						{/if}
+					</div>
+				{/each}
+			</div>
 		</div>
 
 		<div class="px-2.5">

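The sidebar's new saveSettings helper merges a partial update into the settings store, persists it to localStorage, and forces a full reload so every component re-reads the value. A standalone sketch of the same flow, with the Svelte store simplified to a plain object:

    // Standalone sketch of Sidebar.svelte's saveSettings flow (store access simplified).
    let settings: Record<string, unknown> = JSON.parse(localStorage.getItem('settings') ?? '{}');

    const saveSettings = (updated: Record<string, unknown>) => {
    	settings = { ...settings, ...updated };
    	localStorage.setItem('settings', JSON.stringify(settings));
    	location.href = '/'; // full reload, as in the component, so the new value takes effect everywhere
    };

    // Re-enabling history from the "Enable Chat History" overlay button:
    saveSettings({ saveChatHistory: true });
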
+ 4 - 10
src/lib/constants.ts

@@ -1,16 +1,10 @@
-import { dev, browser } from '$app/environment';
-import { PUBLIC_API_BASE_URL } from '$env/static/public';
-
-export const OLLAMA_API_BASE_URL = dev
-	? `http://${location.hostname}:8080/ollama/api`
-	: PUBLIC_API_BASE_URL === ''
-	? browser
-		? `http://${location.hostname}:11434/api`
-		: `http://localhost:11434/api`
-	: PUBLIC_API_BASE_URL;
+import { dev } from '$app/environment';
 
 export const WEBUI_BASE_URL = dev ? `http://${location.hostname}:8080` : ``;
+
 export const WEBUI_API_BASE_URL = `${WEBUI_BASE_URL}/api/v1`;
+export const OLLAMA_API_BASE_URL = `${WEBUI_BASE_URL}/ollama/api`;
+export const OPENAI_API_BASE_URL = `${WEBUI_BASE_URL}/openai/api`;
 
 export const WEB_UI_VERSION = 'v1.0.0-alpha-static';
 

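With the constants above, the frontend no longer talks to Ollama or OpenAI directly; every call is routed through the WebUI backend's /ollama/api and /openai/api proxies. A small sketch of how the base URLs resolve from those definitions:

    // Sketch: base URLs derived from the new constants (dev serves the backend on port 8080).
    const resolveBaseUrls = (dev: boolean, hostname: string) => {
    	const WEBUI_BASE_URL = dev ? `http://${hostname}:8080` : '';
    	return {
    		WEBUI_API_BASE_URL: `${WEBUI_BASE_URL}/api/v1`,
    		OLLAMA_API_BASE_URL: `${WEBUI_BASE_URL}/ollama/api`,
    		OPENAI_API_BASE_URL: `${WEBUI_BASE_URL}/openai/api`
    	};
    };

    // In production the UI and API share an origin, so the endpoints stay relative:
    console.log(resolveBaseUrls(false, 'example.com').OLLAMA_API_BASE_URL); // "/ollama/api"
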
+ 3 - 0
src/lib/stores/index.ts

@@ -6,10 +6,13 @@ export const user = writable(undefined);
 
 // Frontend
 export const theme = writable('dark');
+
 export const chatId = writable('');
+
 export const chats = writable([]);
 export const models = writable([]);
 export const modelfiles = writable([]);
+export const prompts = writable([]);
 
 export const settings = writable({});
 export const showSettings = writable(false);

+ 2 - 2
src/lib/utils/index.ts

@@ -21,7 +21,7 @@ export const splitStream = (splitOn) => {
 };
 
 export const convertMessagesToHistory = (messages) => {
-	let history = {
+	const history = {
 		messages: {},
 		currentId: null
 	};
@@ -114,7 +114,7 @@ export const checkVersion = (required, current) => {
 
 export const findWordIndices = (text) => {
 	const regex = /\[([^\]]+)\]/g;
-	let matches = [];
+	const matches = [];
 	let match;
 
 	while ((match = regex.exec(text)) !== null) {

+ 18 - 22
src/routes/(app)/+layout.svelte

@@ -9,11 +9,12 @@
 
 	import { getOllamaModels, getOllamaVersion } from '$lib/apis/ollama';
 	import { getModelfiles } from '$lib/apis/modelfiles';
+	import { getPrompts } from '$lib/apis/prompts';
 
 	import { getOpenAIModels } from '$lib/apis/openai';
 
-	import { user, showSettings, settings, models, modelfiles } from '$lib/stores';
-	import { OLLAMA_API_BASE_URL, REQUIRED_OLLAMA_VERSION, WEBUI_API_BASE_URL } from '$lib/constants';
+	import { user, showSettings, settings, models, modelfiles, prompts } from '$lib/stores';
+	import { REQUIRED_OLLAMA_VERSION, WEBUI_API_BASE_URL } from '$lib/constants';
 
 	import SettingsModal from '$lib/components/chat/SettingsModal.svelte';
 	import Sidebar from '$lib/components/layout/Sidebar.svelte';
@@ -31,36 +32,28 @@
 	const getModels = async () => {
 		let models = [];
 		models.push(
-			...(await getOllamaModels(
-				$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
-				localStorage.token
-			).catch((error) => {
+			...(await getOllamaModels(localStorage.token).catch((error) => {
 				toast.error(error);
 				return [];
 			}))
 		);
-		// If OpenAI API Key exists
-		if ($settings.OPENAI_API_KEY) {
-			const openAIModels = await getOpenAIModels(
-				$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1',
-				$settings.OPENAI_API_KEY
-			).catch((error) => {
-				console.log(error);
-				toast.error(error);
-				return null;
-			});
 
-			models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
-		}
+		// $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1',
+		// 		$settings.OPENAI_API_KEY
+
+		const openAIModels = await getOpenAIModels(localStorage.token).catch((error) => {
+			console.log(error);
+			return null;
+		});
+
+		models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
+
 		return models;
 	};
 
 	const setOllamaVersion = async (version: string = '') => {
 		if (version === '') {
-			version = await getOllamaVersion(
-				$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
-				localStorage.token
-			).catch((error) => {
+			version = await getOllamaVersion(localStorage.token).catch((error) => {
 				return '';
 			});
 		}
@@ -101,6 +94,9 @@
 			console.log();
 			await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
 			await modelfiles.set(await getModelfiles(localStorage.token));
+
+			await prompts.set(await getPrompts(localStorage.token));
+
 			console.log($modelfiles);
 
 			modelfiles.subscribe(async () => {

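getModels above now builds one list from both backends, inserting a { name: 'hr' } placeholder that the model selector renders as a divider. A minimal sketch of the merge, with model fields trimmed and sample names assumed:

    // Sketch of the merged list produced by getModels (fields trimmed; sample names assumed).
    type Model = { name: string; size?: number; external?: boolean };

    const mergeModels = (ollamaModels: Model[], openAIModels: Model[] | null): Model[] => [
    	...ollamaModels,
    	// The 'hr' placeholder is only added when the OpenAI list could be fetched.
    	...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : [])
    ];

    console.log(mergeModels([{ name: 'llama2:latest' }], [{ name: 'gpt-3.5-turbo', external: true }]));
    // → [ { name: 'llama2:latest' }, { name: 'hr' }, { name: 'gpt-3.5-turbo', external: true } ]
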
+ 222 - 221
src/routes/(app)/+page.svelte

@@ -6,8 +6,7 @@
 	import { goto } from '$app/navigation';
 	import { page } from '$app/stores';
 
-	import { models, modelfiles, user, settings, chats, chatId } from '$lib/stores';
-	import { OLLAMA_API_BASE_URL } from '$lib/constants';
+	import { models, modelfiles, user, settings, chats, chatId, config } from '$lib/stores';
 
 	import { generateChatCompletion, generateTitle } from '$lib/apis/ollama';
 	import { copyToClipboard, splitStream } from '$lib/utils';
@@ -17,6 +16,7 @@
 	import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
 	import Navbar from '$lib/components/layout/Navbar.svelte';
 	import { createNewChat, getChatList, updateChatById } from '$lib/apis/chats';
+	import { generateOpenAIChatCompletion } from '$lib/apis/openai';
 
 	let stopResponseFlag = false;
 	let autoScroll = true;
@@ -90,9 +90,18 @@
 			messages: {},
 			currentId: null
 		};
-		selectedModels = $page.url.searchParams.get('models')
-			? $page.url.searchParams.get('models')?.split(',')
-			: $settings.models ?? [''];
+
+		console.log($config);
+
+		if ($page.url.searchParams.get('models')) {
+			selectedModels = $page.url.searchParams.get('models')?.split(',');
+		} else if ($settings?.models) {
+			selectedModels = $settings?.models;
+		} else if ($config?.default_models) {
+			selectedModels = $config?.default_models.split(',');
+		} else {
+			selectedModels = [''];
+		}
 
 		let _settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
 		settings.set({
@@ -109,10 +118,14 @@
 		await Promise.all(
 			selectedModels.map(async (model) => {
 				console.log(model);
-				if ($models.filter((m) => m.name === model)[0].external) {
+				const modelTag = $models.filter((m) => m.name === model).at(0);
+
+				if (modelTag?.external) {
 					await sendPromptOpenAI(model, prompt, parentId, _chatId);
-				} else {
+				} else if (modelTag) {
 					await sendPromptOllama(model, prompt, parentId, _chatId);
+				} else {
+					toast.error(`Model ${model} not found`);
 				}
 			})
 		);
@@ -150,36 +163,32 @@
 		// Scroll down
 		window.scrollTo({ top: document.body.scrollHeight });
 
-		const res = await generateChatCompletion(
-			$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
-			localStorage.token,
-			{
-				model: model,
-				messages: [
-					$settings.system
-						? {
-								role: 'system',
-								content: $settings.system
-						  }
-						: undefined,
-					...messages
-				]
-					.filter((message) => message)
-					.map((message) => ({
-						role: message.role,
-						content: message.content,
-						...(message.files && {
-							images: message.files
-								.filter((file) => file.type === 'image')
-								.map((file) => file.url.slice(file.url.indexOf(',') + 1))
-						})
-					})),
-				options: {
-					...($settings.options ?? {})
-				},
-				format: $settings.requestFormat ?? undefined
-			}
-		);
+		const res = await generateChatCompletion(localStorage.token, {
+			model: model,
+			messages: [
+				$settings.system
+					? {
+							role: 'system',
+							content: $settings.system
+					  }
+					: undefined,
+				...messages
+			]
+				.filter((message) => message)
+				.map((message) => ({
+					role: message.role,
+					content: message.content,
+					...(message.files && {
+						images: message.files
+							.filter((file) => file.type === 'image')
+							.map((file) => file.url.slice(file.url.indexOf(',') + 1))
+					})
+				})),
+			options: {
+				...($settings.options ?? {})
+			},
+			format: $settings.requestFormat ?? undefined
+		});
 
 		if (res && res.ok) {
 			const reader = res.body
@@ -271,11 +280,13 @@
 			}
 
 			if ($chatId == _chatId) {
-				chat = await updateChatById(localStorage.token, _chatId, {
-					messages: messages,
-					history: history
-				});
-				await chats.set(await getChatList(localStorage.token));
+				if ($settings.saveChatHistory ?? true) {
+					chat = await updateChatById(localStorage.token, _chatId, {
+						messages: messages,
+						history: history
+					});
+					await chats.set(await getChatList(localStorage.token));
+				}
 			}
 		} else {
 			if (res !== null) {
@@ -313,188 +324,173 @@
 	};
 
 	const sendPromptOpenAI = async (model, userPrompt, parentId, _chatId) => {
-		if ($settings.OPENAI_API_KEY) {
-			if (models) {
-				let responseMessageId = uuidv4();
-
-				let responseMessage = {
-					parentId: parentId,
-					id: responseMessageId,
-					childrenIds: [],
-					role: 'assistant',
-					content: '',
-					model: model
-				};
-
-				history.messages[responseMessageId] = responseMessage;
-				history.currentId = responseMessageId;
-				if (parentId !== null) {
-					history.messages[parentId].childrenIds = [
-						...history.messages[parentId].childrenIds,
-						responseMessageId
-					];
-				}
+		let responseMessageId = uuidv4();
 
-				window.scrollTo({ top: document.body.scrollHeight });
+		let responseMessage = {
+			parentId: parentId,
+			id: responseMessageId,
+			childrenIds: [],
+			role: 'assistant',
+			content: '',
+			model: model
+		};
 
-				const res = await fetch(
-					`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
-					{
-						method: 'POST',
-						headers: {
-							Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
-							'Content-Type': 'application/json'
-						},
-						body: JSON.stringify({
-							model: model,
-							stream: true,
-							messages: [
-								$settings.system
-									? {
-											role: 'system',
-											content: $settings.system
-									  }
-									: undefined,
-								...messages
-							]
-								.filter((message) => message)
-								.map((message) => ({
-									role: message.role,
-									...(message.files
-										? {
-												content: [
-													{
-														type: 'text',
-														text: message.content
-													},
-													...message.files
-														.filter((file) => file.type === 'image')
-														.map((file) => ({
-															type: 'image_url',
-															image_url: {
-																url: file.url
-															}
-														}))
-												]
-										  }
-										: { content: message.content })
-								})),
-							seed: $settings.options.seed ?? undefined,
-							stop: $settings.options.stop ?? undefined,
-							temperature: $settings.options.temperature ?? undefined,
-							top_p: $settings.options.top_p ?? undefined,
-							num_ctx: $settings.options.num_ctx ?? undefined,
-							frequency_penalty: $settings.options.repeat_penalty ?? undefined,
-							max_tokens: $settings.options.num_predict ?? undefined
-						})
-					}
-				).catch((err) => {
-					console.log(err);
-					return null;
-				});
-
-				if (res && res.ok) {
-					const reader = res.body
-						.pipeThrough(new TextDecoderStream())
-						.pipeThrough(splitStream('\n'))
-						.getReader();
-
-					while (true) {
-						const { value, done } = await reader.read();
-						if (done || stopResponseFlag || _chatId !== $chatId) {
-							responseMessage.done = true;
-							messages = messages;
-							break;
-						}
+		history.messages[responseMessageId] = responseMessage;
+		history.currentId = responseMessageId;
+		if (parentId !== null) {
+			history.messages[parentId].childrenIds = [
+				...history.messages[parentId].childrenIds,
+				responseMessageId
+			];
+		}
 
-						try {
-							let lines = value.split('\n');
-
-							for (const line of lines) {
-								if (line !== '') {
-									console.log(line);
-									if (line === 'data: [DONE]') {
-										responseMessage.done = true;
-										messages = messages;
-									} else {
-										let data = JSON.parse(line.replace(/^data: /, ''));
-										console.log(data);
-
-										if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
-											continue;
-										} else {
-											responseMessage.content += data.choices[0].delta.content ?? '';
-											messages = messages;
-										}
-									}
-								}
-							}
-						} catch (error) {
-							console.log(error);
-						}
+		window.scrollTo({ top: document.body.scrollHeight });
 
-						if ($settings.notificationEnabled && !document.hasFocus()) {
-							const notification = new Notification(`OpenAI ${model}`, {
-								body: responseMessage.content,
-								icon: '/favicon.png'
-							});
-						}
+		const res = await generateOpenAIChatCompletion(localStorage.token, {
+			model: model,
+			stream: true,
+			messages: [
+				$settings.system
+					? {
+							role: 'system',
+							content: $settings.system
+					  }
+					: undefined,
+				...messages
+			]
+				.filter((message) => message)
+				.map((message) => ({
+					role: message.role,
+					...(message.files
+						? {
+								content: [
+									{
+										type: 'text',
+										text: message.content
+									},
+									...message.files
+										.filter((file) => file.type === 'image')
+										.map((file) => ({
+											type: 'image_url',
+											image_url: {
+												url: file.url
+											}
+										}))
+								]
+						  }
+						: { content: message.content })
+				})),
+			seed: $settings?.options?.seed ?? undefined,
+			stop: $settings?.options?.stop ?? undefined,
+			temperature: $settings?.options?.temperature ?? undefined,
+			top_p: $settings?.options?.top_p ?? undefined,
+			num_ctx: $settings?.options?.num_ctx ?? undefined,
+			frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
+			max_tokens: $settings?.options?.num_predict ?? undefined
+		});
 
-						if ($settings.responseAutoCopy) {
-							copyToClipboard(responseMessage.content);
-						}
+		if (res && res.ok) {
+			const reader = res.body
+				.pipeThrough(new TextDecoderStream())
+				.pipeThrough(splitStream('\n'))
+				.getReader();
 
-						if (autoScroll) {
-							window.scrollTo({ top: document.body.scrollHeight });
-						}
-					}
+			while (true) {
+				const { value, done } = await reader.read();
+				if (done || stopResponseFlag || _chatId !== $chatId) {
+					responseMessage.done = true;
+					messages = messages;
+					break;
+				}
 
-					if ($chatId == _chatId) {
-						chat = await updateChatById(localStorage.token, _chatId, {
-							messages: messages,
-							history: history
-						});
-						await chats.set(await getChatList(localStorage.token));
-					}
-				} else {
-					if (res !== null) {
-						const error = await res.json();
-						console.log(error);
-						if ('detail' in error) {
-							toast.error(error.detail);
-							responseMessage.content = error.detail;
-						} else {
-							if ('message' in error.error) {
-								toast.error(error.error.message);
-								responseMessage.content = error.error.message;
+				try {
+					let lines = value.split('\n');
+
+					for (const line of lines) {
+						if (line !== '') {
+							console.log(line);
+							if (line === 'data: [DONE]') {
+								responseMessage.done = true;
+								messages = messages;
 							} else {
-								toast.error(error.error);
-								responseMessage.content = error.error;
+								let data = JSON.parse(line.replace(/^data: /, ''));
+								console.log(data);
+
+								if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
+									continue;
+								} else {
+									responseMessage.content += data.choices[0].delta.content ?? '';
+									messages = messages;
+								}
 							}
 						}
-					} else {
-						toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
-						responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
 					}
+				} catch (error) {
+					console.log(error);
+				}
 
-					responseMessage.error = true;
-					responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
-					responseMessage.done = true;
-					messages = messages;
+				if ($settings.notificationEnabled && !document.hasFocus()) {
+					const notification = new Notification(`OpenAI ${model}`, {
+						body: responseMessage.content,
+						icon: '/favicon.png'
+					});
 				}
 
-				stopResponseFlag = false;
-				await tick();
+				if ($settings.responseAutoCopy) {
+					copyToClipboard(responseMessage.content);
+				}
 
 				if (autoScroll) {
 					window.scrollTo({ top: document.body.scrollHeight });
 				}
+			}
 
-				if (messages.length == 2) {
-					window.history.replaceState(history.state, '', `/c/${_chatId}`);
-					await setChatTitle(_chatId, userPrompt);
+			if ($chatId == _chatId) {
+				if ($settings.saveChatHistory ?? true) {
+					chat = await updateChatById(localStorage.token, _chatId, {
+						messages: messages,
+						history: history
+					});
+					await chats.set(await getChatList(localStorage.token));
+				}
+			}
+		} else {
+			if (res !== null) {
+				const error = await res.json();
+				console.log(error);
+				if ('detail' in error) {
+					toast.error(error.detail);
+					responseMessage.content = error.detail;
+				} else {
+					if ('message' in error.error) {
+						toast.error(error.error.message);
+						responseMessage.content = error.error.message;
+					} else {
+						toast.error(error.error);
+						responseMessage.content = error.error;
+					}
 				}
+			} else {
+				toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
+				responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
 			}
+
+			responseMessage.error = true;
+			responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
+			responseMessage.done = true;
+			messages = messages;
+		}
+
+		stopResponseFlag = false;
+		await tick();
+
+		if (autoScroll) {
+			window.scrollTo({ top: document.body.scrollHeight });
+		}
+
+		if (messages.length == 2) {
+			window.history.replaceState(history.state, '', `/c/${_chatId}`);
+			await setChatTitle(_chatId, userPrompt);
 		}
 	};
 
@@ -535,20 +531,24 @@
 
 			// Create new chat if only one message in messages
 			if (messages.length == 1) {
-				chat = await createNewChat(localStorage.token, {
-					id: $chatId,
-					title: 'New Chat',
-					models: selectedModels,
-					system: $settings.system ?? undefined,
-					options: {
-						...($settings.options ?? {})
-					},
-					messages: messages,
-					history: history,
-					timestamp: Date.now()
-				});
-				await chats.set(await getChatList(localStorage.token));
-				await chatId.set(chat.id);
+				if ($settings.saveChatHistory ?? true) {
+					chat = await createNewChat(localStorage.token, {
+						id: $chatId,
+						title: 'New Chat',
+						models: selectedModels,
+						system: $settings.system ?? undefined,
+						options: {
+							...($settings.options ?? {})
+						},
+						messages: messages,
+						history: history,
+						timestamp: Date.now()
+					});
+					await chats.set(await getChatList(localStorage.token));
+					await chatId.set(chat.id);
+				} else {
+					await chatId.set('local');
+				}
 				await tick();
 			}
 
@@ -582,9 +582,8 @@
 	const generateChatTitle = async (_chatId, userPrompt) => {
 		if ($settings.titleAutoGenerate ?? true) {
 			const title = await generateTitle(
-				$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
 				localStorage.token,
-				selectedModels[0],
+				$settings?.titleAutoGenerateModel ?? selectedModels[0],
 				userPrompt
 			);
 
@@ -601,8 +600,10 @@
 			title = _title;
 		}
 
-		chat = await updateChatById(localStorage.token, _chatId, { title: _title });
-		await chats.set(await getChatList(localStorage.token));
+		if ($settings.saveChatHistory ?? true) {
+			chat = await updateChatById(localStorage.token, _chatId, { title: _title });
+			await chats.set(await getChatList(localStorage.token));
+		}
 	};
 </script>
 

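Default model selection when starting a new chat now falls back in order: the models query parameter, then the user's saved settings, then the server's config.default_models. A compact sketch of that precedence, with the reactive inputs passed as plain values:

    // Sketch of the new model-selection fallback (inputs passed as plain values for clarity).
    const resolveSelectedModels = (
    	queryModels: string | null,           // $page.url.searchParams.get('models')
    	settingsModels: string[] | undefined, // $settings?.models
    	defaultModels: string | undefined     // $config?.default_models (comma-separated)
    ): string[] => {
    	if (queryModels) return queryModels.split(',');
    	if (settingsModels) return settingsModels;
    	if (defaultModels) return defaultModels.split(',');
    	return [''];
    };

    console.log(resolveSelectedModels(null, undefined, 'llama2:latest,mistral:latest'));
    // → [ 'llama2:latest', 'mistral:latest' ]
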
+ 48 - 1
src/routes/(app)/admin/+page.svelte

@@ -8,11 +8,15 @@
 
 	import { updateUserRole, getUsers, deleteUserById } from '$lib/apis/users';
 	import { getSignUpEnabledStatus, toggleSignUpEnabledStatus } from '$lib/apis/auths';
+	import EditUserModal from '$lib/components/admin/EditUserModal.svelte';
 
 	let loaded = false;
 	let users = [];
 
+	let selectedUser = null;
+
 	let signUpEnabled = true;
+	let showEditUserModal = false;
 
 	const updateRoleHandler = async (id, role) => {
 		const res = await updateUserRole(localStorage.token, id, role).catch((error) => {
@@ -25,6 +29,17 @@
 		}
 	};
 
+	const editUserPasswordHandler = async (id, password) => {
+		const res = await deleteUserById(localStorage.token, id).catch((error) => {
+			toast.error(error);
+			return null;
+		});
+		if (res) {
+			users = await getUsers(localStorage.token);
+			toast.success('Successfully updated');
+		}
+	};
+
 	const deleteUserHandler = async (id) => {
 		const res = await deleteUserById(localStorage.token, id).catch((error) => {
 			toast.error(error);
@@ -51,6 +66,17 @@
 	});
 </script>
 
+{#key selectedUser}
+	<EditUserModal
+		bind:show={showEditUserModal}
+		{selectedUser}
+		sessionUser={$user}
+		on:save={async () => {
+			users = await getUsers(localStorage.token);
+		}}
+	/>
+{/key}
+
 <div
 	class=" bg-white dark:bg-gray-800 dark:text-gray-100 min-h-screen w-full flex justify-center font-mona"
 >
@@ -154,7 +180,28 @@
 												}}>{user.role}</button
 											>
 										</td>
-										<td class="px-6 py-4 text-center flex justify-center">
+										<td class="px-6 py-4 space-x-1 text-center flex justify-center">
+											<button
+												class="self-center w-fit text-sm p-1.5 border dark:border-gray-600 rounded-xl flex"
+												on:click={async () => {
+													showEditUserModal = !showEditUserModal;
+													selectedUser = user;
+												}}
+											>
+												<svg
+													xmlns="http://www.w3.org/2000/svg"
+													viewBox="0 0 16 16"
+													fill="currentColor"
+													class="w-4 h-4"
+												>
+													<path
+														fill-rule="evenodd"
+														d="M11.013 2.513a1.75 1.75 0 0 1 2.475 2.474L6.226 12.25a2.751 2.751 0 0 1-.892.596l-2.047.848a.75.75 0 0 1-.98-.98l.848-2.047a2.75 2.75 0 0 1 .596-.892l7.262-7.261Z"
+														clip-rule="evenodd"
+													/>
+												</svg>
+											</button>
+
 											<button
 												class="self-center w-fit text-sm p-1.5 border dark:border-gray-600 rounded-xl flex"
 												on:click={async () => {

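Note that editUserPasswordHandler in the hunk above still calls deleteUserById, so the password update itself presumably happens inside the new EditUserModal. If a dedicated handler were wanted here, it might look like the following hypothetical sketch, where updateUserPasswordById is an assumed helper (not an API defined in this diff) and toast, users, and getUsers are the component's existing bindings:

    // Hypothetical only: updateUserPasswordById is an assumed helper, not part of this diff.
    const editUserPasswordHandler = async (id: string, password: string) => {
    	const res = await updateUserPasswordById(localStorage.token, id, password).catch((error) => {
    		toast.error(error);
    		return null;
    	});

    	if (res) {
    		users = await getUsers(localStorage.token);
    		toast.success('Successfully updated');
    	}
    };
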
+ 185 - 201
src/routes/(app)/c/[id]/+page.svelte

@@ -7,9 +7,10 @@
 	import { page } from '$app/stores';
 
 	import { models, modelfiles, user, settings, chats, chatId } from '$lib/stores';
-	import { OLLAMA_API_BASE_URL } from '$lib/constants';
 
 	import { generateChatCompletion, generateTitle } from '$lib/apis/ollama';
+	import { generateOpenAIChatCompletion } from '$lib/apis/openai';
+
 	import { copyToClipboard, splitStream } from '$lib/utils';
 
 	import MessageInput from '$lib/components/chat/MessageInput.svelte';
@@ -136,17 +137,20 @@
 		await Promise.all(
 			selectedModels.map(async (model) => {
 				console.log(model);
-				if ($models.filter((m) => m.name === model)[0].external) {
+				const modelTag = $models.filter((m) => m.name === model).at(0);
+
+				if (modelTag?.external) {
 					await sendPromptOpenAI(model, prompt, parentId, _chatId);
-				} else {
+				} else if (modelTag) {
 					await sendPromptOllama(model, prompt, parentId, _chatId);
+				} else {
+					toast.error(`Model ${model} not found`);
 				}
 			})
 		);
 
 		await chats.set(await getChatList(localStorage.token));
 	};
-
 	const sendPromptOllama = async (model, userPrompt, parentId, _chatId) => {
 		// Create response message
 		let responseMessageId = uuidv4();
@@ -177,36 +181,32 @@
 		// Scroll down
 		window.scrollTo({ top: document.body.scrollHeight });
 
-		const res = await generateChatCompletion(
-			$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
-			localStorage.token,
-			{
-				model: model,
-				messages: [
-					$settings.system
-						? {
-								role: 'system',
-								content: $settings.system
-						  }
-						: undefined,
-					...messages
-				]
-					.filter((message) => message)
-					.map((message) => ({
-						role: message.role,
-						content: message.content,
-						...(message.files && {
-							images: message.files
-								.filter((file) => file.type === 'image')
-								.map((file) => file.url.slice(file.url.indexOf(',') + 1))
-						})
-					})),
-				options: {
-					...($settings.options ?? {})
-				},
-				format: $settings.requestFormat ?? undefined
-			}
-		);
+		const res = await generateChatCompletion(localStorage.token, {
+			model: model,
+			messages: [
+				$settings.system
+					? {
+							role: 'system',
+							content: $settings.system
+					  }
+					: undefined,
+				...messages
+			]
+				.filter((message) => message)
+				.map((message) => ({
+					role: message.role,
+					content: message.content,
+					...(message.files && {
+						images: message.files
+							.filter((file) => file.type === 'image')
+							.map((file) => file.url.slice(file.url.indexOf(',') + 1))
+					})
+				})),
+			options: {
+				...($settings.options ?? {})
+			},
+			format: $settings.requestFormat ?? undefined
+		});
 
 		if (res && res.ok) {
 			const reader = res.body
@@ -340,188 +340,171 @@
 	};
 
 	const sendPromptOpenAI = async (model, userPrompt, parentId, _chatId) => {
-		if ($settings.OPENAI_API_KEY) {
-			if (models) {
-				let responseMessageId = uuidv4();
-
-				let responseMessage = {
-					parentId: parentId,
-					id: responseMessageId,
-					childrenIds: [],
-					role: 'assistant',
-					content: '',
-					model: model
-				};
-
-				history.messages[responseMessageId] = responseMessage;
-				history.currentId = responseMessageId;
-				if (parentId !== null) {
-					history.messages[parentId].childrenIds = [
-						...history.messages[parentId].childrenIds,
-						responseMessageId
-					];
-				}
+		let responseMessageId = uuidv4();
 
-				window.scrollTo({ top: document.body.scrollHeight });
-
-				const res = await fetch(
-					`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
-					{
-						method: 'POST',
-						headers: {
-							Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
-							'Content-Type': 'application/json'
-						},
-						body: JSON.stringify({
-							model: model,
-							stream: true,
-							messages: [
-								$settings.system
-									? {
-											role: 'system',
-											content: $settings.system
-									  }
-									: undefined,
-								...messages
-							]
-								.filter((message) => message)
-								.map((message) => ({
-									role: message.role,
-									...(message.files
-										? {
-												content: [
-													{
-														type: 'text',
-														text: message.content
-													},
-													...message.files
-														.filter((file) => file.type === 'image')
-														.map((file) => ({
-															type: 'image_url',
-															image_url: {
-																url: file.url
-															}
-														}))
-												]
-										  }
-										: { content: message.content })
-								})),
-							seed: $settings.options.seed ?? undefined,
-							stop: $settings.options.stop ?? undefined,
-							temperature: $settings.options.temperature ?? undefined,
-							top_p: $settings.options.top_p ?? undefined,
-							num_ctx: $settings.options.num_ctx ?? undefined,
-							frequency_penalty: $settings.options.repeat_penalty ?? undefined,
-							max_tokens: $settings.options.num_predict ?? undefined
-						})
-					}
-				).catch((err) => {
-					console.log(err);
-					return null;
-				});
+		let responseMessage = {
+			parentId: parentId,
+			id: responseMessageId,
+			childrenIds: [],
+			role: 'assistant',
+			content: '',
+			model: model
+		};
 
-				if (res && res.ok) {
-					const reader = res.body
-						.pipeThrough(new TextDecoderStream())
-						.pipeThrough(splitStream('\n'))
-						.getReader();
-
-					while (true) {
-						const { value, done } = await reader.read();
-						if (done || stopResponseFlag || _chatId !== $chatId) {
-							responseMessage.done = true;
-							messages = messages;
-							break;
-						}
+		history.messages[responseMessageId] = responseMessage;
+		history.currentId = responseMessageId;
+		if (parentId !== null) {
+			history.messages[parentId].childrenIds = [
+				...history.messages[parentId].childrenIds,
+				responseMessageId
+			];
+		}
 
-						try {
-							let lines = value.split('\n');
-
-							for (const line of lines) {
-								if (line !== '') {
-									console.log(line);
-									if (line === 'data: [DONE]') {
-										responseMessage.done = true;
-										messages = messages;
-									} else {
-										let data = JSON.parse(line.replace(/^data: /, ''));
-										console.log(data);
-
-										if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
-											continue;
-										} else {
-											responseMessage.content += data.choices[0].delta.content ?? '';
-											messages = messages;
-										}
-									}
-								}
-							}
-						} catch (error) {
-							console.log(error);
-						}
+		window.scrollTo({ top: document.body.scrollHeight });
 
-						if ($settings.notificationEnabled && !document.hasFocus()) {
-							const notification = new Notification(`OpenAI ${model}`, {
-								body: responseMessage.content,
-								icon: '/favicon.png'
-							});
-						}
+		const res = await generateOpenAIChatCompletion(localStorage.token, {
+			model: model,
+			stream: true,
+			messages: [
+				$settings.system
+					? {
+							role: 'system',
+							content: $settings.system
+					  }
+					: undefined,
+				...messages
+			]
+				.filter((message) => message)
+				.map((message) => ({
+					role: message.role,
+					...(message.files
+						? {
+								content: [
+									{
+										type: 'text',
+										text: message.content
+									},
+									...message.files
+										.filter((file) => file.type === 'image')
+										.map((file) => ({
+											type: 'image_url',
+											image_url: {
+												url: file.url
+											}
+										}))
+								]
+						  }
+						: { content: message.content })
+				})),
+			seed: $settings?.options?.seed ?? undefined,
+			stop: $settings?.options?.stop ?? undefined,
+			temperature: $settings?.options?.temperature ?? undefined,
+			top_p: $settings?.options?.top_p ?? undefined,
+			num_ctx: $settings?.options?.num_ctx ?? undefined,
+			frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
+			max_tokens: $settings?.options?.num_predict ?? undefined
+		});
 
-						if ($settings.responseAutoCopy) {
-							copyToClipboard(responseMessage.content);
-						}
+		if (res && res.ok) {
+			const reader = res.body
+				.pipeThrough(new TextDecoderStream())
+				.pipeThrough(splitStream('\n'))
+				.getReader();
 
-						if (autoScroll) {
-							window.scrollTo({ top: document.body.scrollHeight });
-						}
-					}
+			while (true) {
+				const { value, done } = await reader.read();
+				if (done || stopResponseFlag || _chatId !== $chatId) {
+					responseMessage.done = true;
+					messages = messages;
+					break;
+				}
 
-					if ($chatId == _chatId) {
-						chat = await updateChatById(localStorage.token, _chatId, {
-							messages: messages,
-							history: history
-						});
-						await chats.set(await getChatList(localStorage.token));
-					}
-				} else {
-					if (res !== null) {
-						const error = await res.json();
-						console.log(error);
-						if ('detail' in error) {
-							toast.error(error.detail);
-							responseMessage.content = error.detail;
-						} else {
-							if ('message' in error.error) {
-								toast.error(error.error.message);
-								responseMessage.content = error.error.message;
+				try {
+					let lines = value.split('\n');
+
+					for (const line of lines) {
+						if (line !== '') {
+							console.log(line);
+							if (line === 'data: [DONE]') {
+								responseMessage.done = true;
+								messages = messages;
 							} else {
-								toast.error(error.error);
-								responseMessage.content = error.error;
+								let data = JSON.parse(line.replace(/^data: /, ''));
+								console.log(data);
+
+								if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
+									continue;
+								} else {
+									responseMessage.content += data.choices[0].delta.content ?? '';
+									messages = messages;
+								}
 							}
 						}
-					} else {
-						toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
-						responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
 					}
+				} catch (error) {
+					console.log(error);
+				}
 
-					responseMessage.error = true;
-					responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
-					responseMessage.done = true;
-					messages = messages;
+				if ($settings.notificationEnabled && !document.hasFocus()) {
+					const notification = new Notification(`OpenAI ${model}`, {
+						body: responseMessage.content,
+						icon: '/favicon.png'
+					});
 				}
 
-				stopResponseFlag = false;
-				await tick();
+				if ($settings.responseAutoCopy) {
+					copyToClipboard(responseMessage.content);
+				}
 
 				if (autoScroll) {
 					window.scrollTo({ top: document.body.scrollHeight });
 				}
+			}
 
-				if (messages.length == 2) {
-					window.history.replaceState(history.state, '', `/c/${_chatId}`);
-					await setChatTitle(_chatId, userPrompt);
+			if ($chatId == _chatId) {
+				chat = await updateChatById(localStorage.token, _chatId, {
+					messages: messages,
+					history: history
+				});
+				await chats.set(await getChatList(localStorage.token));
+			}
+		} else {
+			if (res !== null) {
+				const error = await res.json();
+				console.log(error);
+				if ('detail' in error) {
+					toast.error(error.detail);
+					responseMessage.content = error.detail;
+				} else {
+					if ('message' in error.error) {
+						toast.error(error.error.message);
+						responseMessage.content = error.error.message;
+					} else {
+						toast.error(error.error);
+						responseMessage.content = error.error;
+					}
 				}
+			} else {
+				toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
+				responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
 			}
+
+			responseMessage.error = true;
+			// the error branches above already set responseMessage.content; skipping the generic overwrite here keeps that detail visible to the user
+			responseMessage.done = true;
+			messages = messages;
+		}
+
+		stopResponseFlag = false;
+		await tick();
+
+		if (autoScroll) {
+			window.scrollTo({ top: document.body.scrollHeight });
+		}
+
+		if (messages.length == 2) {
+			window.history.replaceState(history.state, '', `/c/${_chatId}`);
+			await setChatTitle(_chatId, userPrompt);
 		}
 	};
 
@@ -608,12 +591,7 @@
 
 	const generateChatTitle = async (_chatId, userPrompt) => {
 		if ($settings.titleAutoGenerate ?? true) {
-			const title = await generateTitle(
-				$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
-				localStorage.token,
-				selectedModels[0],
-				userPrompt
-			);
+			const title = await generateTitle(localStorage.token, selectedModels[0], userPrompt);
 
 			if (title) {
 				await setChatTitle(_chatId, title);
@@ -631,6 +609,12 @@
 		chat = await updateChatById(localStorage.token, _chatId, { title: _title });
 		await chats.set(await getChatList(localStorage.token));
 	};
+
+	onMount(async () => {
+		if (!($settings.saveChatHistory ?? true)) {
+			await goto('/');
+		}
+	});
 </script>
 
 <svelte:window
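
A note on the streaming rewrite above: sendPromptOpenAI now gets the raw Response back from generateOpenAIChatCompletion and parses the SSE stream in the component. A minimal sketch of just that reading loop, using only what the hunk shows (splitStream from $lib/utils and the `data:` line format); readOpenAIStream and onDelta are illustrative names, not part of the page:

import { splitStream } from '$lib/utils';

// Decode the streamed body, split it into lines, and forward each content delta.
// Mirrors the while/try loop in sendPromptOpenAI; the page appends deltas to responseMessage.content instead.
const readOpenAIStream = async (res: Response, onDelta: (text: string) => void) => {
	if (!res.body) return;

	const reader = res.body
		.pipeThrough(new TextDecoderStream())
		.pipeThrough(splitStream('\n'))
		.getReader();

	while (true) {
		const { value, done } = await reader.read();
		if (done) break;

		for (const line of (value ?? '').split('\n')) {
			if (line === '' || line === 'data: [DONE]') continue;
			const data = JSON.parse(line.replace(/^data: /, ''));
			onDelta(data.choices?.[0]?.delta?.content ?? '');
		}
	}
};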

+ 26 - 7
src/routes/(app)/modelfiles/+page.svelte

@@ -6,7 +6,6 @@
 	import { onMount } from 'svelte';
 
 	import { modelfiles, settings, user } from '$lib/stores';
-	import { OLLAMA_API_BASE_URL } from '$lib/constants';
 	import { createModel, deleteModel } from '$lib/apis/ollama';
 	import {
 		createNewModelfile,
@@ -20,11 +19,7 @@
 	const deleteModelHandler = async (tagName) => {
 		let success = null;
 
-		success = await deleteModel(
-			$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
-			localStorage.token,
-			tagName
-		);
+		success = await deleteModel(localStorage.token, tagName);
 
 		if (success) {
 			toast.success(`Deleted ${tagName}`);
@@ -44,7 +39,7 @@
 
 		const url = 'https://ollamahub.com';
 
-		const tab = await window.open(`${url}/create`, '_blank');
+		const tab = await window.open(`${url}/modelfiles/create`, '_blank');
 		window.addEventListener(
 			'message',
 			(event) => {
@@ -254,6 +249,30 @@
 							</svg>
 						</div>
 					</button>
+
+					<button
+						class="self-center w-fit text-sm px-3 py-1 border dark:border-gray-600 rounded-xl flex"
+						on:click={async () => {
+							saveModelfiles($modelfiles);
+						}}
+					>
+						<div class=" self-center mr-2 font-medium">Export Modelfiles</div>
+
+						<div class=" self-center">
+							<svg
+								xmlns="http://www.w3.org/2000/svg"
+								viewBox="0 0 16 16"
+								fill="currentColor"
+								class="w-3.5 h-3.5"
+							>
+								<path
+									fill-rule="evenodd"
+									d="M4 2a1.5 1.5 0 0 0-1.5 1.5v9A1.5 1.5 0 0 0 4 14h8a1.5 1.5 0 0 0 1.5-1.5V6.621a1.5 1.5 0 0 0-.44-1.06L9.94 2.439A1.5 1.5 0 0 0 8.878 2H4Zm4 3.5a.75.75 0 0 1 .75.75v2.69l.72-.72a.75.75 0 1 1 1.06 1.06l-2 2a.75.75 0 0 1-1.06 0l-2-2a.75.75 0 0 1 1.06-1.06l.72.72V6.25A.75.75 0 0 1 8 5.5Z"
+									clip-rule="evenodd"
+								/>
+							</svg>
+						</div>
+					</button>
 				</div>
 
 				{#if localModelfiles.length > 0}
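
The new "Export Modelfiles" button calls saveModelfiles($modelfiles), whose body is outside this hunk. Presumably it mirrors the prompts export further down; a sketch under that assumption, using file-saver the same way the prompts page does:

import fileSaver from 'file-saver';
const { saveAs } = fileSaver;

// Sketch only: serialize the store contents and trigger a JSON download.
const saveModelfiles = (modelfiles: object[]) => {
	const blob = new Blob([JSON.stringify(modelfiles)], {
		type: 'application/json'
	});
	saveAs(blob, `modelfiles-export-${Date.now()}.json`);
};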

+ 2 - 8
src/routes/(app)/modelfiles/create/+page.svelte

@@ -2,7 +2,6 @@
 	import { v4 as uuidv4 } from 'uuid';
 	import { toast } from 'svelte-french-toast';
 	import { goto } from '$app/navigation';
-	import { OLLAMA_API_BASE_URL } from '$lib/constants';
 	import { settings, user, config, modelfiles, models } from '$lib/stores';
 
 	import Advanced from '$lib/components/chat/Settings/Advanced.svelte';
@@ -132,12 +131,7 @@ SYSTEM """${system}"""`.replace(/^\s*\n/gm, '');
 			Object.keys(categories).filter((category) => categories[category]).length > 0 &&
 			!$models.includes(tagName)
 		) {
-			const res = await createModel(
-				$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
-				localStorage.token,
-				tagName,
-				content
-			);
+			const res = await createModel(localStorage.token, tagName, content);
 
 			if (res) {
 				const reader = res.body
@@ -641,7 +635,7 @@ SYSTEM """${system}"""`.replace(/^\s*\n/gm, '');
 						<div class=" text-sm font-semibold mb-2">Pull Progress</div>
 						<div class="w-full rounded-full dark:bg-gray-800">
 							<div
-								class="dark:bg-gray-600 text-xs font-medium text-blue-100 text-center p-0.5 leading-none rounded-full"
+								class="dark:bg-gray-600 bg-gray-500 text-xs font-medium text-gray-100 text-center p-0.5 leading-none rounded-full"
 								style="width: {Math.max(15, pullProgress ?? 0)}%"
 							>
 								{pullProgress ?? 0}%
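
The progress bar above is driven by pullProgress, which is computed outside this hunk. Assuming Ollama's streamed create/pull status objects (status, completed, total byte counts), the usual derivation looks roughly like this sketch; the field names are an assumption here, not shown in the diff:

// Sketch: turn one parsed status line of the create/pull stream into a percentage.
const toPullProgress = (data: { status?: string; completed?: number; total?: number }): number | null => {
	if (data.completed && data.total) {
		return Math.round((data.completed / data.total) * 100);
	}
	return null; // statuses without byte counts (e.g. digest verification) leave the bar unchanged
};

The template then clamps the rendered width with Math.max(15, pullProgress ?? 0) so small percentages stay visible.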

+ 1 - 8
src/routes/(app)/modelfiles/edit/+page.svelte

@@ -7,8 +7,6 @@
 	import { page } from '$app/stores';
 
 	import { settings, user, config, modelfiles } from '$lib/stores';
-
-	import { OLLAMA_API_BASE_URL } from '$lib/constants';
 	import { splitStream } from '$lib/utils';
 
 	import { createModel } from '$lib/apis/ollama';
@@ -104,12 +102,7 @@
 			content !== '' &&
 			Object.keys(categories).filter((category) => categories[category]).length > 0
 		) {
-			const res = await createModel(
-				$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
-				localStorage.token,
-				tagName,
-				content
-			);
+			const res = await createModel(localStorage.token, tagName, content);
 
 			if (res) {
 				const reader = res.body

+ 309 - 0
src/routes/(app)/prompts/+page.svelte

@@ -0,0 +1,309 @@
+<script lang="ts">
+	import toast from 'svelte-french-toast';
+	import fileSaver from 'file-saver';
+	const { saveAs } = fileSaver;
+
+	import { onMount } from 'svelte';
+	import { prompts } from '$lib/stores';
+	import { createNewPrompt, deletePromptByCommand, getPrompts } from '$lib/apis/prompts';
+	import { error } from '@sveltejs/kit';
+
+	let importFiles = '';
+	let query = '';
+
+	const sharePrompt = async (prompt) => {
+		toast.success('Redirecting you to OllamaHub');
+
+		const url = 'https://ollamahub.com';
+
+		const tab = await window.open(`${url}/prompts/create`, '_blank');
+		window.addEventListener(
+			'message',
+			(event) => {
+				if (event.origin !== url) return;
+				if (event.data === 'loaded') {
+					tab.postMessage(JSON.stringify(prompt), '*');
+				}
+			},
+			false
+		);
+	};
+
+	const deletePrompt = async (command) => {
+		await deletePromptByCommand(localStorage.token, command);
+		await prompts.set(await getPrompts(localStorage.token));
+	};
+</script>
+
+<div class="min-h-screen w-full flex justify-center dark:text-white">
+	<div class=" py-2.5 flex flex-col justify-between w-full">
+		<div class="max-w-2xl mx-auto w-full px-3 md:px-0 my-10">
+			<div class="mb-6 flex justify-between items-center">
+				<div class=" text-2xl font-semibold self-center">My Prompts</div>
+			</div>
+
+			<div class=" flex w-full space-x-2">
+				<div class="flex flex-1">
+					<div class=" self-center ml-1 mr-3">
+						<svg
+							xmlns="http://www.w3.org/2000/svg"
+							viewBox="0 0 20 20"
+							fill="currentColor"
+							class="w-4 h-4"
+						>
+							<path
+								fill-rule="evenodd"
+								d="M9 3.5a5.5 5.5 0 100 11 5.5 5.5 0 000-11zM2 9a7 7 0 1112.452 4.391l3.328 3.329a.75.75 0 11-1.06 1.06l-3.329-3.328A7 7 0 012 9z"
+								clip-rule="evenodd"
+							/>
+						</svg>
+					</div>
+					<input
+						class=" w-full text-sm pr-4 py-1 rounded-r-xl outline-none bg-transparent"
+						bind:value={query}
+						placeholder="Search Prompt"
+					/>
+				</div>
+
+				<div>
+					<a
+						class=" px-2 py-2 rounded-xl border border-gray-200 dark:border-gray-600 hover:bg-gray-100 dark:bg-gray-800 dark:hover:bg-gray-700 transition font-medium text-sm flex items-center space-x-1"
+						href="/prompts/create"
+					>
+						<svg
+							xmlns="http://www.w3.org/2000/svg"
+							viewBox="0 0 16 16"
+							fill="currentColor"
+							class="w-4 h-4"
+						>
+							<path
+								d="M8.75 3.75a.75.75 0 0 0-1.5 0v3.5h-3.5a.75.75 0 0 0 0 1.5h3.5v3.5a.75.75 0 0 0 1.5 0v-3.5h3.5a.75.75 0 0 0 0-1.5h-3.5v-3.5Z"
+							/>
+						</svg>
+					</a>
+				</div>
+			</div>
+
+			{#if $prompts.length === 0}
+				<div />
+			{:else}
+				{#each $prompts.filter((p) => query === '' || p.command.includes(query)) as prompt}
+					<hr class=" dark:border-gray-700 my-2.5" />
+					<div class=" flex space-x-4 cursor-pointer w-full mb-3">
+						<div class=" flex flex-1 space-x-4 cursor-pointer w-full">
+							<a href={`/prompts/edit?command=${encodeURIComponent(prompt.command)}`}>
+								<div class=" flex-1 self-center pl-5">
+									<div class=" font-bold">{prompt.command}</div>
+									<div class=" text-xs overflow-hidden text-ellipsis line-clamp-1">
+										{prompt.title}
+									</div>
+								</div>
+							</a>
+						</div>
+						<div class="flex flex-row space-x-1 self-center">
+							<a
+								class="self-center w-fit text-sm px-2 py-2 border dark:border-gray-600 rounded-xl"
+								type="button"
+								href={`/prompts/edit?command=${encodeURIComponent(prompt.command)}`}
+							>
+								<svg
+									xmlns="http://www.w3.org/2000/svg"
+									fill="none"
+									viewBox="0 0 24 24"
+									stroke-width="1.5"
+									stroke="currentColor"
+									class="w-4 h-4"
+								>
+									<path
+										stroke-linecap="round"
+										stroke-linejoin="round"
+										d="M16.862 4.487l1.687-1.688a1.875 1.875 0 112.652 2.652L6.832 19.82a4.5 4.5 0 01-1.897 1.13l-2.685.8.8-2.685a4.5 4.5 0 011.13-1.897L16.863 4.487zm0 0L19.5 7.125"
+									/>
+								</svg>
+							</a>
+
+							<button
+								class="self-center w-fit text-sm px-2 py-2 border dark:border-gray-600 rounded-xl"
+								type="button"
+								on:click={() => {
+									sharePrompt(prompt);
+								}}
+							>
+								<svg
+									xmlns="http://www.w3.org/2000/svg"
+									fill="none"
+									viewBox="0 0 24 24"
+									stroke-width="1.5"
+									stroke="currentColor"
+									class="w-4 h-4"
+								>
+									<path
+										stroke-linecap="round"
+										stroke-linejoin="round"
+										d="M7.217 10.907a2.25 2.25 0 100 2.186m0-2.186c.18.324.283.696.283 1.093s-.103.77-.283 1.093m0-2.186l9.566-5.314m-9.566 7.5l9.566 5.314m0 0a2.25 2.25 0 103.935 2.186 2.25 2.25 0 00-3.935-2.186zm0-12.814a2.25 2.25 0 103.933-2.185 2.25 2.25 0 00-3.933 2.185z"
+									/>
+								</svg>
+							</button>
+
+							<button
+								class="self-center w-fit text-sm px-2 py-2 border dark:border-gray-600 rounded-xl"
+								type="button"
+								on:click={() => {
+									deletePrompt(prompt.command);
+								}}
+							>
+								<svg
+									xmlns="http://www.w3.org/2000/svg"
+									fill="none"
+									viewBox="0 0 24 24"
+									stroke-width="1.5"
+									stroke="currentColor"
+									class="w-4 h-4"
+								>
+									<path
+										stroke-linecap="round"
+										stroke-linejoin="round"
+										d="M14.74 9l-.346 9m-4.788 0L9.26 9m9.968-3.21c.342.052.682.107 1.022.166m-1.022-.165L18.16 19.673a2.25 2.25 0 01-2.244 2.077H8.084a2.25 2.25 0 01-2.244-2.077L4.772 5.79m14.456 0a48.108 48.108 0 00-3.478-.397m-12 .562c.34-.059.68-.114 1.022-.165m0 0a48.11 48.11 0 013.478-.397m7.5 0v-.916c0-1.18-.91-2.164-2.09-2.201a51.964 51.964 0 00-3.32 0c-1.18.037-2.09 1.022-2.09 2.201v.916m7.5 0a48.667 48.667 0 00-7.5 0"
+									/>
+								</svg>
+							</button>
+						</div>
+					</div>
+				{/each}
+			{/if}
+
+			<hr class=" dark:border-gray-700 my-2.5" />
+
+			<div class=" flex justify-between w-full mb-3">
+				<div class="flex space-x-2">
+					<input
+						id="prompts-import-input"
+						bind:files={importFiles}
+						type="file"
+						accept=".json"
+						hidden
+						on:change={() => {
+							console.log(importFiles);
+
+							const reader = new FileReader();
+							reader.onload = async (event) => {
+								const savedPrompts = JSON.parse(event.target.result);
+								console.log(savedPrompts);
+
+								for (const prompt of savedPrompts) {
+									await createNewPrompt(
+										localStorage.token,
+										prompt.command.charAt(0) === '/' ? prompt.command.slice(1) : prompt.command,
+										prompt.title,
+										prompt.content
+									).catch((error) => {
+										toast.error(error);
+										return null;
+									});
+								}
+
+								await prompts.set(await getPrompts(localStorage.token));
+							};
+
+							reader.readAsText(importFiles[0]);
+						}}
+					/>
+
+					<button
+						class="self-center w-fit text-sm px-3 py-1 border dark:border-gray-600 rounded-xl flex"
+						on:click={async () => {
+							document.getElementById('prompts-import-input')?.click();
+						}}
+					>
+						<div class=" self-center mr-2 font-medium">Import Prompts</div>
+
+						<div class=" self-center">
+							<svg
+								xmlns="http://www.w3.org/2000/svg"
+								viewBox="0 0 16 16"
+								fill="currentColor"
+								class="w-4 h-4"
+							>
+								<path
+									fill-rule="evenodd"
+									d="M4 2a1.5 1.5 0 0 0-1.5 1.5v9A1.5 1.5 0 0 0 4 14h8a1.5 1.5 0 0 0 1.5-1.5V6.621a1.5 1.5 0 0 0-.44-1.06L9.94 2.439A1.5 1.5 0 0 0 8.878 2H4Zm4 9.5a.75.75 0 0 1-.75-.75V8.06l-.72.72a.75.75 0 0 1-1.06-1.06l2-2a.75.75 0 0 1 1.06 0l2 2a.75.75 0 1 1-1.06 1.06l-.72-.72v2.69a.75.75 0 0 1-.75.75Z"
+									clip-rule="evenodd"
+								/>
+							</svg>
+						</div>
+					</button>
+
+					<button
+						class="self-center w-fit text-sm px-3 py-1 border dark:border-gray-600 rounded-xl flex"
+						on:click={async () => {
+							// document.getElementById('modelfiles-import-input')?.click();
+							let blob = new Blob([JSON.stringify($prompts)], {
+								type: 'application/json'
+							});
+							saveAs(blob, `prompts-export-${Date.now()}.json`);
+						}}
+					>
+						<div class=" self-center mr-2 font-medium">Export Prompts</div>
+
+						<div class=" self-center">
+							<svg
+								xmlns="http://www.w3.org/2000/svg"
+								viewBox="0 0 16 16"
+								fill="currentColor"
+								class="w-4 h-4"
+							>
+								<path
+									fill-rule="evenodd"
+									d="M4 2a1.5 1.5 0 0 0-1.5 1.5v9A1.5 1.5 0 0 0 4 14h8a1.5 1.5 0 0 0 1.5-1.5V6.621a1.5 1.5 0 0 0-.44-1.06L9.94 2.439A1.5 1.5 0 0 0 8.878 2H4Zm4 3.5a.75.75 0 0 1 .75.75v2.69l.72-.72a.75.75 0 1 1 1.06 1.06l-2 2a.75.75 0 0 1-1.06 0l-2-2a.75.75 0 0 1 1.06-1.06l.72.72V6.25A.75.75 0 0 1 8 5.5Z"
+									clip-rule="evenodd"
+								/>
+							</svg>
+						</div>
+					</button>
+
+					<!-- <button
+						on:click={() => {
+							loadDefaultPrompts();
+						}}
+					>
+						dd
+					</button> -->
+				</div>
+			</div>
+
+			<div class=" my-16">
+				<div class=" text-2xl font-semibold mb-6">Made by OllamaHub Community</div>
+
+				<a
+					class=" flex space-x-4 cursor-pointer w-full mb-3"
+					href="https://ollamahub.com/?type=prompts"
+					target="_blank"
+				>
+					<div class=" self-center w-10">
+						<div
+							class="w-full h-10 flex justify-center rounded-full bg-transparent dark:bg-gray-700 border border-dashed border-gray-200"
+						>
+							<svg
+								xmlns="http://www.w3.org/2000/svg"
+								viewBox="0 0 24 24"
+								fill="currentColor"
+								class="w-6"
+							>
+								<path
+									fill-rule="evenodd"
+									d="M12 3.75a.75.75 0 01.75.75v6.75h6.75a.75.75 0 010 1.5h-6.75v6.75a.75.75 0 01-1.5 0v-6.75H4.5a.75.75 0 010-1.5h6.75V4.5a.75.75 0 01.75-.75z"
+									clip-rule="evenodd"
+								/>
+							</svg>
+						</div>
+					</div>
+
+					<div class=" self-center">
+						<div class=" font-bold">Discover a prompt</div>
+						<div class=" text-sm">Discover, download, and explore custom Prompts</div>
+					</div>
+				</a>
+			</div>
+		</div>
+	</div>
+</div>
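
The import and export buttons above read and write a flat JSON array. The shape they rely on, inferred from the handlers (only the fields the code touches are listed; the backend may store more):

// Inferred from the import/export handlers; PromptExport is an illustrative name.
type PromptExport = {
	command: string; // stored with a leading '/', which the importer strips before createNewPrompt
	title: string;
	content: string;
};

const example: PromptExport[] = [
	{
		command: '/short-summary',
		title: 'Short summary',
		content: 'Write a summary in 50 words that summarizes [topic or keyword].'
	}
];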

+ 222 - 0
src/routes/(app)/prompts/create/+page.svelte

@@ -0,0 +1,222 @@
+<script>
+	import toast from 'svelte-french-toast';
+
+	import { goto } from '$app/navigation';
+	import { prompts } from '$lib/stores';
+	import { onMount, tick } from 'svelte';
+
+	import { createNewPrompt, getPrompts } from '$lib/apis/prompts';
+
+	let loading = false;
+
+	// ///////////
+	// Prompt
+	// ///////////
+
+	let title = '';
+	let command = '';
+	let content = '';
+
+	$: command = title !== '' ? `${title.replace(/\s+/g, '-').toLowerCase()}` : '';
+
+	const submitHandler = async () => {
+		loading = true;
+
+		if (validateCommandString(command)) {
+			const prompt = await createNewPrompt(localStorage.token, command, title, content).catch(
+				(error) => {
+					toast.error(error);
+
+					return null;
+				}
+			);
+
+			if (prompt) {
+				await prompts.set(await getPrompts(localStorage.token));
+				await goto('/prompts');
+			}
+		} else {
+			toast.error('Only alphanumeric characters and hyphens are allowed in the command string.');
+		}
+
+		loading = false;
+	};
+
+	const validateCommandString = (inputString) => {
+		// Regular expression to match only alphanumeric characters and hyphen
+		const regex = /^[a-zA-Z0-9-]+$/;
+
+		// Test the input string against the regular expression
+		return regex.test(inputString);
+	};
+
+	onMount(() => {
+		window.addEventListener('message', async (event) => {
+			if (
+				!['https://ollamahub.com', 'https://www.ollamahub.com', 'http://localhost:5173'].includes(
+					event.origin
+				)
+			)
+				return;
+			const prompt = JSON.parse(event.data);
+			console.log(prompt);
+
+			title = prompt.title;
+			await tick();
+			content = prompt.content;
+			command = prompt.command;
+		});
+
+		if (window.opener ?? false) {
+			window.opener.postMessage('loaded', '*');
+		}
+	});
+</script>
+
+<div class="min-h-screen w-full flex justify-center dark:text-white">
+	<div class=" py-2.5 flex flex-col justify-between w-full">
+		<div class="max-w-2xl mx-auto w-full px-3 md:px-0 my-10">
+			<div class=" text-2xl font-semibold mb-6">My Prompts</div>
+
+			<button
+				class="flex space-x-1"
+				on:click={() => {
+					history.back();
+				}}
+			>
+				<div class=" self-center">
+					<svg
+						xmlns="http://www.w3.org/2000/svg"
+						viewBox="0 0 20 20"
+						fill="currentColor"
+						class="w-4 h-4"
+					>
+						<path
+							fill-rule="evenodd"
+							d="M17 10a.75.75 0 01-.75.75H5.612l4.158 3.96a.75.75 0 11-1.04 1.08l-5.5-5.25a.75.75 0 010-1.08l5.5-5.25a.75.75 0 111.04 1.08L5.612 9.25H16.25A.75.75 0 0117 10z"
+							clip-rule="evenodd"
+						/>
+					</svg>
+				</div>
+				<div class=" self-center font-medium text-sm">Back</div>
+			</button>
+			<hr class="my-3 dark:border-gray-700" />
+
+			<form
+				class="flex flex-col"
+				on:submit|preventDefault={() => {
+					submitHandler();
+				}}
+			>
+				<div class="my-2">
+					<div class=" text-sm font-semibold mb-2">Title*</div>
+
+					<div>
+						<input
+							class="px-3 py-1.5 text-sm w-full bg-transparent border dark:border-gray-600 outline-none rounded-lg"
+							placeholder="Add a short title for this prompt"
+							bind:value={title}
+							required
+						/>
+					</div>
+				</div>
+
+				<div class="my-2">
+					<div class=" text-sm font-semibold mb-2">Command*</div>
+
+					<div class="flex items-center mb-1">
+						<div
+							class="bg-gray-200 dark:bg-gray-600 font-bold px-3 py-1 border border-r-0 dark:border-gray-600 rounded-l-lg"
+						>
+							/
+						</div>
+						<input
+							class="px-3 py-1.5 text-sm w-full bg-transparent border dark:border-gray-600 outline-none rounded-r-lg"
+							placeholder="short-summary"
+							bind:value={command}
+							required
+						/>
+					</div>
+
+					<div class="text-xs text-gray-400 dark:text-gray-500">
+						Only <span class=" text-gray-600 dark:text-gray-300 font-medium"
+							>alphanumeric characters and hyphens</span
+						>
+						are allowed. Activate this command by typing "<span
+							class=" text-gray-600 dark:text-gray-300 font-medium"
+						>
+							/{command}
+						</span>" in the chat input.
+					</div>
+				</div>
+
+				<div class="my-2">
+					<div class="flex w-full justify-between">
+						<div class=" self-center text-sm font-semibold">Prompt Content*</div>
+					</div>
+
+					<div class="mt-2">
+						<div>
+							<textarea
+								class="px-3 py-1.5 text-sm w-full bg-transparent border dark:border-gray-600 outline-none rounded-lg"
+								placeholder={`Write a summary in 50 words that summarizes [topic or keyword].`}
+								rows="6"
+								bind:value={content}
+								required
+							/>
+						</div>
+
+						<div class="text-xs text-gray-400 dark:text-gray-500">
+							Format your variables using square brackets like this: <span
+								class=" text-gray-600 dark:text-gray-300 font-medium">[variable]</span
+							>
+							. Make sure to enclose them with
+							<span class=" text-gray-600 dark:text-gray-300 font-medium">'['</span>
+							and <span class=" text-gray-600 dark:text-gray-300 font-medium">']'</span> .
+						</div>
+					</div>
+				</div>
+
+				<div class="my-2 flex justify-end">
+					<button
+						class=" text-sm px-3 py-2 transition rounded-xl {loading
+							? ' cursor-not-allowed bg-gray-100 dark:bg-gray-800'
+							: ' bg-gray-50 hover:bg-gray-100 dark:bg-gray-700 dark:hover:bg-gray-800'} flex"
+						type="submit"
+						disabled={loading}
+					>
+						<div class=" self-center font-medium">Save & Create</div>
+
+						{#if loading}
+							<div class="ml-1.5 self-center">
+								<svg
+									class=" w-4 h-4"
+									viewBox="0 0 24 24"
+									fill="currentColor"
+									xmlns="http://www.w3.org/2000/svg"
+									><style>
+										.spinner_ajPY {
+											transform-origin: center;
+											animation: spinner_AtaB 0.75s infinite linear;
+										}
+										@keyframes spinner_AtaB {
+											100% {
+												transform: rotate(360deg);
+											}
+										}
+									</style><path
+										d="M12,1A11,11,0,1,0,23,12,11,11,0,0,0,12,1Zm0,19a8,8,0,1,1,8-8A8,8,0,0,1,12,20Z"
+										opacity=".25"
+									/><path
+										d="M10.14,1.16a11,11,0,0,0-9,8.92A1.59,1.59,0,0,0,2.46,12,1.52,1.52,0,0,0,4.11,10.7a8,8,0,0,1,6.66-6.61A1.42,1.42,0,0,0,12,2.69h0A1.57,1.57,0,0,0,10.14,1.16Z"
+										class="spinner_ajPY"
+									/></svg
+								>
+							</div>
+						{/if}
+					</button>
+				</div>
+			</form>
+		</div>
+	</div>
+</div>
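
The create page derives the command from the title reactively and gates submission on validateCommandString. A quick worked example of that pipeline, standalone and outside the component (minus the empty-title guard in the reactive statement):

// Mirrors the reactive statement and regex used above.
const toCommand = (title: string) => title.replace(/\s+/g, '-').toLowerCase();
const validateCommandString = (input: string) => /^[a-zA-Z0-9-]+$/.test(input);

console.log(toCommand('Short Summary'));               // "short-summary"  -> passes validation
console.log(validateCommandString('short-summary'));   // true
console.log(validateCommandString('résumé helper!'));  // false -> triggers the toast error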

+ 221 - 0
src/routes/(app)/prompts/edit/+page.svelte

@@ -0,0 +1,221 @@
+<script>
+	import toast from 'svelte-french-toast';
+
+	import { goto } from '$app/navigation';
+	import { prompts } from '$lib/stores';
+	import { onMount, tick } from 'svelte';
+
+	import { getPrompts, updatePromptByCommand } from '$lib/apis/prompts';
+	import { page } from '$app/stores';
+
+	let loading = false;
+
+	// ///////////
+	// Prompt
+	// ///////////
+
+	let title = '';
+	let command = '';
+	let content = '';
+
+	const updateHandler = async () => {
+		loading = true;
+
+		if (validateCommandString(command)) {
+			const prompt = await updatePromptByCommand(localStorage.token, command, title, content).catch(
+				(error) => {
+					toast.error(error);
+					return null;
+				}
+			);
+
+			if (prompt) {
+				await prompts.set(await getPrompts(localStorage.token));
+				await goto('/prompts');
+			}
+		} else {
+			toast.error('Only alphanumeric characters and hyphens are allowed in the command string.');
+		}
+
+		loading = false;
+	};
+
+	const validateCommandString = (inputString) => {
+		// Regular expression to match only alphanumeric characters and hyphen
+		const regex = /^[a-zA-Z0-9-]+$/;
+
+		// Test the input string against the regular expression
+		return regex.test(inputString);
+	};
+
+	onMount(async () => {
+		command = $page.url.searchParams.get('command');
+		if (command) {
+			const prompt = $prompts.filter((prompt) => prompt.command === command).at(0);
+
+			if (prompt) {
+				console.log(prompt);
+
+				console.log(prompt.command);
+
+				title = prompt.title;
+				await tick();
+				command = prompt.command.slice(1);
+				content = prompt.content;
+			} else {
+				goto('/prompts');
+			}
+		} else {
+			goto('/prompts');
+		}
+	});
+</script>
+
+<div class="min-h-screen w-full flex justify-center dark:text-white">
+	<div class=" py-2.5 flex flex-col justify-between w-full">
+		<div class="max-w-2xl mx-auto w-full px-3 md:px-0 my-10">
+			<div class=" text-2xl font-semibold mb-6">My Prompts</div>
+
+			<button
+				class="flex space-x-1"
+				on:click={() => {
+					history.back();
+				}}
+			>
+				<div class=" self-center">
+					<svg
+						xmlns="http://www.w3.org/2000/svg"
+						viewBox="0 0 20 20"
+						fill="currentColor"
+						class="w-4 h-4"
+					>
+						<path
+							fill-rule="evenodd"
+							d="M17 10a.75.75 0 01-.75.75H5.612l4.158 3.96a.75.75 0 11-1.04 1.08l-5.5-5.25a.75.75 0 010-1.08l5.5-5.25a.75.75 0 111.04 1.08L5.612 9.25H16.25A.75.75 0 0117 10z"
+							clip-rule="evenodd"
+						/>
+					</svg>
+				</div>
+				<div class=" self-center font-medium text-sm">Back</div>
+			</button>
+			<hr class="my-3 dark:border-gray-700" />
+
+			<form
+				class="flex flex-col"
+				on:submit|preventDefault={() => {
+					updateHandler();
+				}}
+			>
+				<div class="my-2">
+					<div class=" text-sm font-semibold mb-2">Title*</div>
+
+					<div>
+						<input
+							class="px-3 py-1.5 text-sm w-full bg-transparent border dark:border-gray-600 outline-none rounded-lg"
+							placeholder="Add a short title for this prompt"
+							bind:value={title}
+							required
+						/>
+					</div>
+				</div>
+
+				<div class="my-2">
+					<div class=" text-sm font-semibold mb-2">Command*</div>
+
+					<div class="flex items-center mb-1">
+						<div
+							class="bg-gray-200 dark:bg-gray-600 font-bold px-3 py-1 border border-r-0 dark:border-gray-600 rounded-l-lg"
+						>
+							/
+						</div>
+						<input
+							class="px-3 py-1.5 text-sm w-full bg-transparent border disabled:text-gray-500 dark:border-gray-600 outline-none rounded-r-lg"
+							placeholder="short-summary"
+							bind:value={command}
+							disabled
+							required
+						/>
+					</div>
+
+					<div class="text-xs text-gray-400 dark:text-gray-500">
+						Only <span class=" text-gray-600 dark:text-gray-300 font-medium"
+							>alphanumeric characters and hyphens</span
+						>
+						are allowed. Activate this command by typing "<span
+							class=" text-gray-600 dark:text-gray-300 font-medium"
+						>
+							/{command}
+						</span>" in the chat input.
+					</div>
+				</div>
+
+				<div class="my-2">
+					<div class="flex w-full justify-between">
+						<div class=" self-center text-sm font-semibold">Prompt Content*</div>
+					</div>
+
+					<div class="mt-2">
+						<div>
+							<textarea
+								class="px-3 py-1.5 text-sm w-full bg-transparent border dark:border-gray-600 outline-none rounded-lg"
+								placeholder={`Write a summary in 50 words that summarizes [topic or keyword].`}
+								rows="6"
+								bind:value={content}
+								required
+							/>
+						</div>
+
+						<div class="text-xs text-gray-400 dark:text-gray-500">
+							Format your variables using square brackets like this: <span
+								class=" text-gray-600 dark:text-gray-300 font-medium">[variable]</span
+							>
+							. Make sure to enclose them with
+							<span class=" text-gray-600 dark:text-gray-300 font-medium">'['</span>
+							and <span class=" text-gray-600 dark:text-gray-300 font-medium">']'</span> .
+						</div>
+					</div>
+				</div>
+
+				<div class="my-2 flex justify-end">
+					<button
+						class=" text-sm px-3 py-2 transition rounded-xl {loading
+							? ' cursor-not-allowed bg-gray-100 dark:bg-gray-800'
+							: ' bg-gray-50 hover:bg-gray-100 dark:bg-gray-700 dark:hover:bg-gray-800'} flex"
+						type="submit"
+						disabled={loading}
+					>
+						<div class=" self-center font-medium">Save & Update</div>
+
+						{#if loading}
+							<div class="ml-1.5 self-center">
+								<svg
+									class=" w-4 h-4"
+									viewBox="0 0 24 24"
+									fill="currentColor"
+									xmlns="http://www.w3.org/2000/svg"
+									><style>
+										.spinner_ajPY {
+											transform-origin: center;
+											animation: spinner_AtaB 0.75s infinite linear;
+										}
+										@keyframes spinner_AtaB {
+											100% {
+												transform: rotate(360deg);
+											}
+										}
+									</style><path
+										d="M12,1A11,11,0,1,0,23,12,11,11,0,0,0,12,1Zm0,19a8,8,0,1,1,8-8A8,8,0,0,1,12,20Z"
+										opacity=".25"
+									/><path
+										d="M10.14,1.16a11,11,0,0,0-9,8.92A1.59,1.59,0,0,0,2.46,12,1.52,1.52,0,0,0,4.11,10.7a8,8,0,0,1,6.66-6.61A1.42,1.42,0,0,0,12,2.69h0A1.57,1.57,0,0,0,10.14,1.16Z"
+										class="spinner_ajPY"
+									/></svg
+								>
+							</div>
+						{/if}
+					</button>
+				</div>
+			</form>
+		</div>
+	</div>
+</div>

+ 15 - 15
static/manifest.json

@@ -1,16 +1,16 @@
 {
-  "name": "Ollama Web UI",
-  "short_name": "Ollama",
-  "start_url": "/",
-  "display": "standalone",
-  "background_color": "#343541",
-  "theme_color": "#343541",
-  "orientation": "portrait-primary",
-  "icons": [
-    {
-      "src": "/favicon.png",
-      "type": "image/png",
-      "sizes": "844x884"
-    }
-  ]
-}
+	"name": "Ollama Web UI",
+	"short_name": "Ollama",
+	"start_url": "/",
+	"display": "standalone",
+	"background_color": "#343541",
+	"theme_color": "#343541",
+	"orientation": "portrait-primary",
+	"icons": [
+		{
+			"src": "/favicon.png",
+			"type": "image/png",
+			"sizes": "844x884"
+		}
+	]
+}

Some files were not shown because too many files changed in this diff