commit afd43d8f95 (parent e00058258c)
ollama/ollama-deployment.yaml (new file)
@@ -0,0 +1,36 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: ollama-deployment
  namespace: ollama-ns
spec:
  replicas: 1
  selector:
    matchLabels:
      app: ollama
  template:
    metadata:
      labels:
        app: ollama
    spec:
      runtimeClassName: nvidia
      containers:
        - name: ollama
          image: ollama/ollama
          env:
            - name: OLLAMA_HOST
              value: "0.0.0.0"
            - name: OLLAMA_MODELS
              value: "/models"
          ports:
            - containerPort: 11434
          resources:
            limits:
              nvidia.com/gpu: 2
          volumeMounts:
            - name: ollama-volume
              mountPath: "/models"  # must match OLLAMA_MODELS so downloaded models land on the PVC
      volumes:
        - name: ollama-volume
          persistentVolumeClaim:
            claimName: ollama-pvc
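The Deployment above references two cluster-scoped objects that are not created anywhere in this commit: the ollama-ns namespace and the nvidia RuntimeClass (normally registered by the NVIDIA GPU Operator or the nvidia-container-runtime setup). A minimal sketch of both, assuming they are created separately:

# Assumed companion objects, not part of this commit.
apiVersion: v1
kind: Namespace
metadata:
  name: ollama-ns
---
# The "nvidia" handler must already be configured on the GPU nodes;
# the name must match runtimeClassName in the Deployment above.
apiVersion: node.k8s.io/v1
kind: RuntimeClass
metadata:
  name: nvidia
handler: nvidia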
ollama/ollama-pvc.yaml (new file)
@@ -0,0 +1,12 @@
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: ollama-pvc
  namespace: ollama-ns
spec:
  accessModes:
    - ReadWriteOnce
  storageClassName: longhorn
  resources:
    requests:
      storage: 200Gi
ollama/ollama-service.yaml (new file)
@@ -0,0 +1,12 @@
apiVersion: v1
kind: Service
metadata:
  name: ollama-service
  namespace: ollama-ns
spec:
  type: LoadBalancer
  ports:
    - port: 11434
      targetPort: 11434
  selector:
    app: ollama
open-webui/open-webui-deployment.yaml (new file)
@@ -0,0 +1,30 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: open-webui-deployment
  namespace: open-webui-ns
spec:
  replicas: 1
  selector:
    matchLabels:
      app: open-webui
  template:
    metadata:
      labels:
        app: open-webui
    spec:
      containers:
        - name: open-webui
          image: ghcr.io/open-webui/open-webui:main
          ports:
            - containerPort: 8080
          env:
            - name: OLLAMA_BASE_URL
              value: "http://ollama-service.ollama-ns.svc.cluster.local:11434"  # cluster-internal DNS name of the Ollama Service in ollama-ns
          volumeMounts:
            - name: config
              mountPath: /app/backend/data
      volumes:
        - name: config
          persistentVolumeClaim:
            claimName: open-webui-pvc
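As with the Ollama manifests, the open-webui-ns namespace is assumed to already exist; a minimal sketch if it has to be created alongside this commit:

# Assumed companion object, not part of this commit.
apiVersion: v1
kind: Namespace
metadata:
  name: open-webui-ns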
open-webui/open-webui-pvc.yaml (new file)
@@ -0,0 +1,12 @@
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: open-webui-pvc
  namespace: open-webui-ns
spec:
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 5Gi
  storageClassName: longhorn
open-webui/open-webui-service.yaml (new file)
@@ -0,0 +1,13 @@
apiVersion: v1
kind: Service
metadata:
  name: open-webui-service
  namespace: open-webui-ns
spec:
  type: LoadBalancer
  ports:
    - protocol: TCP
      port: 80
      targetPort: 8080
  selector:
    app: open-webui
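Nothing in this commit ties the six manifests together, so they would be applied one by one with kubectl apply -f. A kustomization.yaml at the repository root would let the whole stack (plus the namespace sketches above, if added) go in with a single kubectl apply -k . — this is only a sketch, assuming the directory layout matches the paths in this commit:

apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
  - ollama/ollama-pvc.yaml
  - ollama/ollama-deployment.yaml
  - ollama/ollama-service.yaml
  - open-webui/open-webui-pvc.yaml
  - open-webui/open-webui-deployment.yaml
  - open-webui/open-webui-service.yaml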