@@ -19,15 +19,32 @@ spec:
         image: {{ .Values.ollama.image }}
         ports:
         - containerPort: {{ .Values.ollama.servicePort }}
-        resources:
-          limits:
-            cpu: {{ .Values.ollama.resources.limits.cpu }}
-            memory: {{ .Values.ollama.resources.limits.memory }}
-            nvidia.com/gpu: {{ .Values.ollama.resources.limits.gpu }}
+        env:
+        {{- if .Values.ollama.gpu.enabled }}
+        - name: PATH
+          value: /usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+        - name: LD_LIBRARY_PATH
+          value: /usr/local/nvidia/lib:/usr/local/nvidia/lib64
+        - name: NVIDIA_DRIVER_CAPABILITIES
+          value: compute,utility
+        {{- end }}
+        {{- if .Values.ollama.resources }}
+        resources: {{- toYaml .Values.ollama.resources | nindent 10 }}
+        {{- end }}
         volumeMounts:
         - name: ollama-volume
           mountPath: /root/.ollama
         tty: true
+      {{- with .Values.ollama.nodeSelector }}
+      nodeSelector:
+        {{- toYaml . | nindent 8 }}
+      {{- end }}
+      tolerations:
+      {{- if .Values.ollama.gpu.enabled }}
+      - key: nvidia.com/gpu
+        operator: Exists
+        effect: NoSchedule
+      {{- end }}
   volumeClaimTemplates:
   - metadata:
       name: ollama-volume
@@ -35,4 +52,4 @@ spec:
       accessModes: [ "ReadWriteOnce" ]
       resources:
         requests:
-          storage: 1Gi
+          storage: {{ .Values.ollama.volumeSize }}