@@ -2,6 +2,5 @@
.vscode
.env
.venv
-*.spec
dist
ollama
@@ -1,8 +1,6 @@
FROM golang:1.20
-RUN apt-get update && apt-get install -y cmake
WORKDIR /go/src/github.com/jmorganca/ollama
COPY . .
-RUN cmake -S llama -B llama/build && cmake --build llama/build
RUN CGO_ENABLED=1 go build -ldflags '-linkmode external -extldflags "-static"' .

FROM alpine
@@ -18,7 +18,6 @@ Run large language models with `llama.cpp`.

- [Download](https://ollama.ai/download) for macOS
- Download for Windows (coming soon)
-- Docker: `docker run -p 11434:11434 ollama/ollama`

You can also build the [binary from source](#building).

@@ -1,3 +1,5 @@
+// +build darwin
+
/**
* llama.cpp - git 5bf2a2771886ee86137e01dbc7492f78fb392066
*