---
# LiteLLM proxy Deployment (ai-core namespace).
# Depends on:
#   - Secret    litellm-master    (master API key, required)
#   - Secret    litellm-langfuse  (Langfuse creds, optional — tracing disabled if absent)
#   - ConfigMap litellm-config    (proxy config mounted at /etc/litellm/config.yaml)
apiVersion: apps/v1
kind: Deployment
metadata:
  name: litellm
  namespace: ai-core
  labels:
    app.kubernetes.io/name: litellm
spec:
  replicas: 1
  selector:
    matchLabels:
      app.kubernetes.io/name: litellm
  template:
    metadata:
      labels:
        app.kubernetes.io/name: litellm
    spec:
      containers:
        - name: litellm
          image: ghcr.io/berriai/litellm:main-v1.55.10
          args:
            - "--config"
            - "/etc/litellm/config.yaml"
          ports:
            - containerPort: 4000
              name: http
          env:
            # Master key authenticates admin/API access to the proxy.
            - name: LITELLM_MASTER_KEY
              valueFrom:
                secretKeyRef:
                  name: litellm-master
                  key: LITELLM_MASTER_KEY
            # Langfuse keys are optional: the pod still starts when the
            # Secret (or key) is missing; tracing is simply not configured.
            - name: LANGFUSE_PUBLIC_KEY
              valueFrom:
                secretKeyRef:
                  name: litellm-langfuse
                  key: public_key
                  optional: true
            - name: LANGFUSE_SECRET_KEY
              valueFrom:
                secretKeyRef:
                  name: litellm-langfuse
                  key: secret_key
                  optional: true
            - name: LANGFUSE_HOST
              value: "http://langfuse.observability.svc.cluster.local:3000"
          volumeMounts:
            - name: config
              mountPath: /etc/litellm
              readOnly: true
          resources:
            requests:
              cpu: 250m
              memory: 512Mi
            limits:
              cpu: "2"  # quoted: bare 2 is an int; quoting keeps the quantity a string
              memory: 2Gi
      volumes:
        - name: config
          configMap:
            name: litellm-config