Takamasa Takenaka e9fa90a028 Modify values.yml to match expected format
When system application-apply runs, the script downloads the
application's containers and uploads them to the local registry.
(This process includes rewriting the image repository according
to the system parameter configuration; an illustrative example
follows.)
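
For illustration only, the rewrite looks roughly like this
(the registry address registry.local:9001 and the image name
are assumptions, not values taken from this change):

    # hypothetical image reference in values.yaml before apply
    repository: docker.io/starlingx/stx-snmp
    # after apply, rewritten to pull through the local registry
    repository: registry.local:9001/docker.io/starlingx/stx-snmp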

Currently, in values.yaml of the snmp armada app, only the
stx-snmp container configuration matches the expected pattern,
so the script downloads only the stx-snmp container and not
the other two.

This fix modifies the format in values.yaml to match the
expected pattern (no value changes to repositories or tags);
a sketch of the expected layout follows.
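
A minimal sketch of the per-container layout that the
deployment template below consumes (repository and tag values
here are placeholders, not the real ones):

    snmp:
      image:
        repository: <registry>/<image>
        tag: <tag>
    subagent:
      image:
        repository: <registry>/<image>
        tag: <tag>
    trap_subagent:
      image:
        repository: <registry>/<image>
        tag: <tag>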

Test Plan:
PASS: Apply snmp-armada-app and confirm the following
      (example commands shown after this list):
      - Status becomes "applied"
      - All 3 containers download from the configured repository
      - All 3 containers are present in the local registry
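
      For example (commands are illustrative; exact application
      name and registry address may differ on a given system):

        system application-apply snmp-armada-app
        system application-show snmp-armada-app
        curl -k https://registry.local:9001/v2/_catalog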

Closes-bug: 1952654

Signed-off-by: Takamasa Takenaka <takamasa.takenaka@windriver.com>
Change-Id: I8b742a2e211717b343f459443b15e947e5c6bd92
2021-11-29 15:58:40 -03:00


apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "snmp.fullname" . }}
  labels:
    app.kubernetes.io/name: {{ include "snmp.name" . }}
    helm.sh/chart: {{ include "snmp.chart" . }}
    app.kubernetes.io/instance: {{ .Release.Name }}
    app.kubernetes.io/managed-by: {{ .Release.Service }}
spec:
  replicas: {{ .Values.replicaCount }}
  selector:
    matchLabels:
      app.kubernetes.io/name: {{ include "snmp.name" . }}
      app.kubernetes.io/instance: {{ .Release.Name }}
  template:
    metadata:
      labels:
        app.kubernetes.io/name: {{ include "snmp.name" . }}
        app.kubernetes.io/instance: {{ .Release.Name }}
      annotations:
        rollme: {{ randAlphaNum 5 | quote }}
    spec:
      imagePullSecrets:
        - name: default-registry-key
      tolerations:
        - key: "node-role.kubernetes.io/master"
          operator: "Exists"
          effect: "NoSchedule"
      containers:
        - name: {{ .Chart.Name }}
          image: "{{ .Values.snmp.image.repository }}:{{ .Values.snmp.image.tag }}"
          imagePullPolicy: {{ .Values.snmp.pull_policy }}
          ports:
            - containerPort: 161
              protocol: UDP
            - containerPort: 162
              protocol: UDP
            - containerPort: 705
              protocol: TCP
          env:
            - name: "OPTIONDEBUG"
              value: {{ .Values.image.debug }}
          # Issues with liveness and readiness: https://bit.ly/3nVRQrL
          # K8s issue related: https://github.com/kubernetes/kubernetes/issues/81713
          #
          # livenessProbe:
          #   tcpSocket:
          #     port: 705
          #   initialDelaySeconds: {{ .Values.livenessProbe.initialDelaySeconds }}
          #   periodSeconds: {{ .Values.livenessProbe.periodSeconds }}
          #   timeoutSeconds: {{ .Values.livenessProbe.timeoutSeconds }}
          #   successThreshold: {{ .Values.livenessProbe.successThreshold }}
          #   failureThreshold: {{ .Values.livenessProbe.failureThreshold }}
          # readinessProbe:
          #   tcpSocket:
          #     port: 705
          #   initialDelaySeconds: {{ .Values.readinessProbe.initialDelaySeconds }}
          #   periodSeconds: {{ .Values.readinessProbe.periodSeconds }}
          #   timeoutSeconds: {{ .Values.readinessProbe.timeoutSeconds }}
          #   successThreshold: {{ .Values.readinessProbe.successThreshold }}
          #   failureThreshold: {{ .Values.readinessProbe.failureThreshold }}
          volumeMounts:
            - name: snmpd-etc-volume
              mountPath: /etc/snmp
        - name: {{ .Values.subagent.name }}
          image: "{{ .Values.subagent.image.repository }}:{{ .Values.subagent.image.tag }}"
          imagePullPolicy: {{ .Values.subagent.pull_policy }}
          volumeMounts:
            - name: snmpd-fm-volume
              mountPath: /etc/fm/
          env:
            - name: "OPTIONDEBUG"
              value: {{ .Values.image.debug }}
            - name: "MASTER_AGENT_HOST"
              value: {{ .Values.deployment.master_agent_host | quote }}
            - name: "MASTER_AGENT_PORT"
              value: {{ .Values.deployment.master_agent_port | quote }}
            - name: "MASTER_AGENT_CONNECTION_RETRIES"
              value: {{ .Values.deployment.master_agent_connection_retries | quote }}
          # Fail on liveness and readiness as here: https://bit.ly/3nVRQrL
          # K8s issue related: https://github.com/kubernetes/kubernetes/issues/81713
          #
          # args:
          #   - /bin/sh
          #   - -c
          #   - touch /tmp/healthy
          # livenessProbe:
          #   exec:
          #     command:
          #       - cat
          #       - /tmp/healthy
          #   initialDelaySeconds: {{ .Values.livenessProbe.initialDelaySeconds }}
          #   periodSeconds: {{ .Values.livenessProbe.periodSeconds }}
          #   timeoutSeconds: {{ .Values.livenessProbe.timeoutSeconds }}
          #   successThreshold: {{ .Values.livenessProbe.successThreshold }}
          #   failureThreshold: {{ .Values.livenessProbe.failureThreshold }}
          # readinessProbe:
          #   exec:
          #     command:
          #       - cat
          #       - /tmp/healthy
          #   initialDelaySeconds: {{ .Values.readinessProbe.initialDelaySeconds }}
          #   periodSeconds: {{ .Values.readinessProbe.periodSeconds }}
          #   timeoutSeconds: {{ .Values.readinessProbe.timeoutSeconds }}
          #   successThreshold: {{ .Values.readinessProbe.successThreshold }}
          #   failureThreshold: {{ .Values.readinessProbe.failureThreshold }}
        - name: {{ .Values.trap_subagent.name }}
          image: "{{ .Values.trap_subagent.image.repository }}:{{ .Values.trap_subagent.image.tag }}"
          imagePullPolicy: {{ .Values.trap_subagent.pull_policy }}
          ports:
            - containerPort: 162
              protocol: TCP
          env:
            - name: "OPTIONDEBUG"
              value: {{ .Values.image.debug }}
            - name: "MASTER_AGENT_HOST"
              value: {{ .Values.deployment.master_agent_host | quote }}
            - name: "MASTER_AGENT_PORT"
              value: {{ .Values.deployment.master_agent_port | quote }}
            - name: "MASTER_AGENT_CONNECTION_RETRIES"
              value: {{ .Values.deployment.master_agent_connection_retries | quote }}
          # Fail on liveness and readiness as here: https://bit.ly/3nVRQrL
          # K8s issue related: https://github.com/kubernetes/kubernetes/issues/81713
          #
          # livenessProbe:
          #   tcpSocket:
          #     port: 162
          #   initialDelaySeconds: {{ .Values.livenessProbe.initialDelaySeconds }}
          #   periodSeconds: {{ .Values.livenessProbe.periodSeconds }}
          #   timeoutSeconds: {{ .Values.livenessProbe.timeoutSeconds }}
          #   successThreshold: {{ .Values.livenessProbe.successThreshold }}
          #   failureThreshold: {{ .Values.livenessProbe.failureThreshold }}
          # readinessProbe:
          #   tcpSocket:
          #     port: 162
          #   initialDelaySeconds: {{ .Values.readinessProbe.initialDelaySeconds }}
          #   periodSeconds: {{ .Values.readinessProbe.periodSeconds }}
          #   timeoutSeconds: {{ .Values.readinessProbe.timeoutSeconds }}
          #   successThreshold: {{ .Values.readinessProbe.successThreshold }}
          #   failureThreshold: {{ .Values.readinessProbe.failureThreshold }}
      volumes:
        - name: snmpd-etc-volume
          configMap:
            name: {{ .Values.configmap.name_snmpd }}
        - name: snmpd-fm-volume
          configMap:
            name: {{ .Values.configmap.name_fm }}