zfs-localpv/e2e-tests/experiments/infra-chaos/service_failure/run_e2e_test.yml

---
apiVersion: v1
kind: ConfigMap
metadata:
  name: service-failure
  namespace: e2e
data:
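  ## Test parameters for the experiment playbook go in the 'parameters.yml'
  ## block below; this ConfigMap is mounted into the test container at /mnt/
  ## via the 'parameters' volume defined in the Job further down.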
  parameters.yml: |
---
apiVersion: v1
kind: Secret
metadata:
  name: node-password
  namespace: e2e
type: Opaque
data:
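  ## Node login password, presumably used together with the USERNAME env var
  ## to reach the node. Kubernetes Secret data values must be base64-encoded,
  ## e.g. (illustrative command): echo -n '<node-password>' | base64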
  password:
---
apiVersion: batch/v1
kind: Job
metadata:
  generateName: service-failure-chaos-
  namespace: e2e
spec:
  template:
    metadata:
      labels:
        name: service-failure-chaos
    spec:
      serviceAccountName: e2e
      restartPolicy: Never
      #nodeSelector:
      #  kubernetes.io/hostname:
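      ## (Assumed usage) Uncomment the nodeSelector above and set a node
      ## hostname to pin this chaos job to a specific node.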
      tolerations:
        - key: "infra-aid"
          operator: "Equal"
          value: "observer"
          effect: "NoSchedule"
      containers:
        - name: ansibletest
          image: openebs/zfs-localpv-e2e:ci
          imagePullPolicy: IfNotPresent
          env:
            - name: ANSIBLE_STDOUT_CALLBACK
              value: default

            - name: ZFS_OPERATOR_NAMESPACE
              value: ""

            - name: APP_NAMESPACE
              value: ""

            - name: APP_LABEL
              value: ""

            - name: APP_PVC
              value: ""

            ## Specify the service type on which to perform chaos:
            ## for kubelet failure use value: kubelet
            ## for container runtime failure use one of: docker, containerd, cri-o
            - name: SVC_TYPE
              value: ""

            - name: USERNAME
              value: ""

            ## To check data persistence against a specific application, provide the value as below:
            ## (for busybox use value: "busybox", for percona use value: "mysql")
            - name: DATA_PERSISTENCE
              value: ""

            - name: NODE_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: node-password
                  key: password
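          ## Runs the service_failure test playbook shipped in the e2e image;
          ## the trailing '; exit 0' keeps the container from reporting failure
          ## even if the playbook errors (the test verdict is presumably
          ## recorded by the playbook itself rather than by the Job status).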
          command: ["/bin/bash"]
          args: ["-c", "ANSIBLE_LOCAL_TEMP=$HOME/.ansible/tmp ANSIBLE_REMOTE_TEMP=$HOME/.ansible/tmp ansible-playbook ./e2e-tests/experiments/infra-chaos/service_failure/test.yml -i /etc/ansible/hosts -vv; exit 0"]
          volumeMounts:
            - name: parameters
              mountPath: /mnt/
      volumes:
        - name: parameters
          configMap:
            name: service-failure
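
## Usage sketch (assumed workflow, not part of the original manifest):
## the Job uses 'generateName', so create it rather than applying it, e.g.:
##   kubectl create -f run_e2e_test.yml
## and follow the test run with:
##   kubectl logs -f -n e2e -l name=service-failure-chaos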