feat(e2e-test): Add e2e-tests for zfs-localpv (#298)

Signed-off-by: w3aman <aman.gupta@mayadata.io>
This commit is contained in:
Aman Gupta 2021-06-09 21:21:39 +05:30 committed by GitHub
parent 53f872fcf1
commit 4e73638b5a
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
137 changed files with 8745 additions and 0 deletions

View file

@ -0,0 +1,42 @@
---
# Busybox Deployment used as the e2e test workload; the PVC below provides
# its persistent volume. Placeholder values (testclaim, testclass,
# teststorage, lkey/lvalue) are substituted by the e2e framework before
# this manifest is applied.
#
# Fix: removed the bare `env:` key — an empty key parses as null, which
# fails strict schema validation and declared no variables anyway.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: app-busybox
  labels:
    lkey: lvalue
spec:
  selector:
    matchLabels:
      lkey: lvalue
  template:
    metadata:
      labels:
        lkey: lvalue
    spec:
      containers:
        - name: app-busybox
          imagePullPolicy: IfNotPresent
          image: gcr.io/google-containers/busybox
          command: ["/bin/sh"]
          # Keep the container alive so the volume stays mounted for checks.
          args: ["-c", "while true; do sleep 10;done"]
          volumeMounts:
            - name: data-vol
              mountPath: /busybox
      volumes:
        - name: data-vol
          persistentVolumeClaim:
            claimName: testclaim
---
kind: PersistentVolumeClaim
apiVersion: v1
metadata:
  name: testclaim
spec:
  storageClassName: testclass
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      # Placeholder replaced with the PV_CAPACITY env value (e.g. 5Gi).
      storage: teststorage

View file

@ -0,0 +1,36 @@
---
# Busybox StatefulSet test workload. The volumeClaimTemplate placeholders
# (testclass, teststorage) are substituted by the e2e framework before
# this manifest is applied.
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: app-busybox
  labels:
    lkey: lvalue
spec:
  selector:
    matchLabels:
      lkey: lvalue
  template:
    metadata:
      labels:
        lkey: lvalue
    spec:
      containers:
        - name: app-busybox
          image: gcr.io/google-containers/busybox
          imagePullPolicy: IfNotPresent
          # Write a timestamp once, then idle so the pod stays Running.
          command:
            - sh
            - -c
            - 'date > /busybox/date.txt; sync; sleep 5; sync; tail -f /dev/null;'
          volumeMounts:
            - name: testclaim
              mountPath: /busybox
  volumeClaimTemplates:
    - metadata:
        name: testclaim
      spec:
        accessModes:
          - ReadWriteOnce
        storageClassName: testclass
        resources:
          requests:
            storage: teststorage

View file

@ -0,0 +1,55 @@
---
# e2e runner Job: executes the busybox deployer ansible playbook inside the
# zfs-localpv-e2e image; behaviour is controlled by the env vars below.
apiVersion: batch/v1
kind: Job
metadata:
  generateName: busybox-deploy-
  namespace: e2e
spec:
  template:
    metadata:
      name: busybox-deploy
      labels:
        app: busybox
    spec:
      serviceAccountName: e2e
      restartPolicy: Never
      containers:
        - name: ansibletest
          image: openebs/zfs-localpv-e2e:ci
          imagePullPolicy: IfNotPresent
          env:
            - name: ANSIBLE_STDOUT_CALLBACK
              value: default
            # Name of the storage class to use for volume provisioning
            - name: STORAGE_CLASS
              value: 'zfspv-sc'
            # This is the namespace where busybox application will be deployed
            - name: APP_NAMESPACE
              value: 'busybox'
            # Application label for busybox deployment/statefulset in `key=value` format
            - name: APP_LABEL
              value: 'app=busybox'
            # Application PVC name
            - name: APP_PVC
              value: 'busybox-pvc'
            # Persistent volume storage capacity (e.g. 5Gi)
            - name: PV_CAPACITY
              value: '5Gi'
            # Use: `statefulset` to deploy busybox application as statefulset
            # Use: `deployment` to deploy busybox application as deployment
            - name: DEPLOY_TYPE
              value: 'deployment'
            # Use: `provision` to deploy the application
            # Use: `deprovision` to deprovision the application
            - name: ACTION
              value: 'provision'
          command: ["/bin/bash"]
          args: ["-c", "ansible-playbook ./e2e-tests/apps/busybox/deployers/test.yml -i /etc/ansible/hosts -v; exit 0"]

View file

@ -0,0 +1,78 @@
---
# Deploys (or deprovisions) the busybox application as a Deployment or a
# StatefulSet depending on DEPLOY_TYPE / ACTION (see test_vars.yml), and
# records start/end of test in the e2e result custom resource.
#
# Fixes: `gather_facts: False` -> canonical lowercase `false` (yamllint
# truthy rule); `'deprovision' is in action` -> the plain Jinja2 `in`
# operator, which is the idiomatic, equivalent form.
- hosts: localhost
  connection: local
  gather_facts: false
  vars_files:
    - test_vars.yml
  tasks:
    - block:
        ## Generating the testname for deployment
        - include_tasks: /e2e-tests/hack/create_testname.yml

        ## RECORD START-OF-TEST IN e2e RESULT CR
        - include_tasks: /e2e-tests/hack/update_e2e_result_resource.yml
          vars:
            status: 'SOT'

        - block:
            - block:
                ## Prerequisite tasks such as, namespace creation and replacing
                ## placeholder with test specific values, before deploying application
                - include_tasks: /e2e-tests/utils/k8s/pre_create_app_deploy.yml
                  vars:
                    application: "{{ application_statefulset }}"

                ## Deploying the application
                - include_tasks: /e2e-tests/utils/k8s/deploy_single_app.yml
                  vars:
                    application: "{{ application_statefulset }}"
              when: "'deprovision' not in action"

            - name: Deprovisioning the Application
              include_tasks: /e2e-tests/utils/k8s/deprovision_statefulset.yml
              vars:
                app_deployer: "{{ application_statefulset }}"
              when: "'deprovision' in action"
          when: lookup('env','DEPLOY_TYPE') == 'statefulset'

        - block:
            - block:
                ## Prerequisite tasks such as, namespace creation and replacing
                ## placeholder with test specific values, before deploying application
                - include_tasks: /e2e-tests/utils/k8s/pre_create_app_deploy.yml
                  vars:
                    application: "{{ application_deployment }}"

                ## Deploying the application
                - include_tasks: /e2e-tests/utils/k8s/deploy_single_app.yml
                  vars:
                    application: "{{ application_deployment }}"
              when: "'deprovision' not in action"

            - name: Deprovisioning the Application
              include_tasks: /e2e-tests/utils/k8s/deprovision_deployment.yml
              vars:
                app_deployer: "{{ application_deployment }}"
              when: "'deprovision' in action"
          when: lookup('env','DEPLOY_TYPE') == 'deployment'

        - set_fact:
            flag: "Pass"

      rescue:
        - name: Setting fail flag
          set_fact:
            flag: "Fail"

      always:
        ## RECORD END-OF-TEST IN e2e RESULT CR
        - include_tasks: /e2e-tests/hack/update_e2e_result_resource.yml
          vars:
            status: 'EOT'
View file

@ -0,0 +1,19 @@
# Variables consumed by the busybox deployers playbook (test.yml).
# All lookup('env', ...) values are injected by the e2e runner Job manifest.

# Unique test name recorded in the e2e result CR.
test_name: "busybox-{{ action }}-{{ app_ns }}"
application_name: "busybox"
# Manifest used when DEPLOY_TYPE is 'statefulset'.
application_statefulset: busybox_statefulset.yml
# Manifest used when DEPLOY_TYPE is 'deployment'.
application_deployment: busybox_deployment.yml
storage_class: "{{ lookup('env','STORAGE_CLASS') }}"
app_ns: "{{ lookup('env','APP_NAMESPACE') }}"
# Application label in key=value form, e.g. app=busybox.
app_label: "{{ lookup('env','APP_LABEL') }}"
app_pvc: "{{ lookup('env','APP_PVC') }}"
# 'deployment' or 'statefulset'.
deploy_type: "{{ lookup('env','DEPLOY_TYPE') }}"
# 'provision' or 'deprovision'.
action: "{{ lookup('env','ACTION') }}"

View file

@ -0,0 +1,80 @@
---
# RBAC objects and the liveness Job for the busybox application. The
# placeholders (app-namespace, pod-name, liveness-retry-count,
# liveness-timeout-seconds) are substituted by the liveness playbook
# before this manifest is applied.
#
# Fix: rbac.authorization.k8s.io/v1beta1 is deprecated and removed in
# Kubernetes 1.22; rbac/v1 has been GA since 1.8 with identical schema.
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: app-namespace
  labels:
    name: app-namespace
rules:
  - apiGroups: ["*"]
    resources: ["*"]
    verbs: ["*"]
---
apiVersion: v1
kind: ServiceAccount
metadata:
  name: app-namespace
  namespace: app-namespace
  labels:
    name: app-namespace
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: app-namespace
  labels:
    name: app-namespace
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: app-namespace
subjects:
  - kind: ServiceAccount
    name: app-namespace
    namespace: app-namespace
---
apiVersion: batch/v1
kind: Job
metadata:
  generateName: busybox-liveness-
  namespace: app-namespace
spec:
  template:
    metadata:
      name: busybox-liveness
      namespace: app-namespace
      labels:
        liveness: busybox-liveness
        # label used for mass-liveness check upon infra-chaos
        infra-aid: liveness
    spec:
      serviceAccountName: app-namespace
      restartPolicy: Never
      containers:
        - name: busybox-liveness
          image: openebs/busybox-client
          imagePullPolicy: Always
          env:
            # timeout for a single liveness check, in seconds
            - name: LIVENESS_TIMEOUT_SECONDS
              value: "liveness-timeout-seconds"
            # number of retries when liveness fails
            - name: LIVENESS_RETRY_COUNT
              value: "liveness-retry-count"
            # Namespace in which busybox is running
            - name: NAMESPACE
              value: app-namespace
            - name: POD_NAME
              value: pod-name
          command: ["/bin/bash"]
          args: ["-c", "./liveness.sh; exit 0"]

View file

@ -0,0 +1,57 @@
---
# e2e runner Job: executes the busybox liveness ansible playbook inside the
# zfs-localpv-e2e image.
apiVersion: batch/v1
kind: Job
metadata:
  generateName: busybox-liveness-
  namespace: e2e
spec:
  # Fail the job if the liveness run exceeds 90 minutes.
  activeDeadlineSeconds: 5400
  template:
    metadata:
      name: busybox-liveness
      namespace: e2e
      labels:
        liveness: busybox-liveness
        # label used for mass-liveness check upon infra-chaos
        infra-aid: liveness
    spec:
      serviceAccountName: e2e
      restartPolicy: Never
      containers:
        - name: ansibletest
          image: openebs/zfs-localpv-e2e:ci
          imagePullPolicy: IfNotPresent
          env:
            - name: ANSIBLE_STDOUT_CALLBACK
              value: default
            - name: MY_POD_NAME
              valueFrom:
                fieldRef:
                  fieldPath: metadata.name
            # timeout for a single liveness check, in seconds
            - name: LIVENESS_TIMEOUT_SECONDS
              value: "10"
            # number of retries when liveness fails
            - name: LIVENESS_RETRY_COUNT
              value: "5"
            # This is the namespace where busybox application is running
            - name: APP_NAMESPACE
              value: 'busybox'
            # Application label for busybox in `key=value` format
            - name: APP_LABEL
              value: 'app=busybox'
            # Use: `provision` to apply the liveness-probe checks for busybox application
            # Use: `deprovision` to deprovision the liveness-probe
            - name: ACTION
              value: 'provision'
          command: ["/bin/bash"]
          args: ["-c", "ansible-playbook ./e2e-tests/apps/busybox/liveness/test.yml -i /etc/ansible/hosts -v; exit 0"]

View file

@ -0,0 +1,96 @@
---
# Applies (ACTION=provision) or removes (ACTION=deprovision) the
# liveness-probe job for the busybox application, and records start/end
# of test in the e2e result custom resource.
#
# Fixes: `gather_facts: False` -> canonical lowercase `false` (yamllint
# truthy rule); `'deprovision' is in action` -> the plain Jinja2 `in`
# operator, which is the idiomatic, equivalent form.
- hosts: localhost
  connection: local
  gather_facts: false
  vars_files:
    - test_vars.yml
  tasks:
    - block:
        - block:
            - name: Record test instance/run ID
              set_fact:
                run_id: "{{ lookup('env','RUN_ID') }}"

            - name: Construct testname appended with runID
              set_fact:
                test_name: "{{ test_name }}-{{ run_id }}"
          when: lookup('env','RUN_ID')

        ## RECORD START-OF-TEST IN e2e RESULT CR
        - include_tasks: /e2e-tests/hack/update_e2e_result_resource.yml
          vars:
            status: 'SOT'

        - block:
            - name: Getting the application pod name
              shell: kubectl get pod -n {{ namespace }} -l {{ app_label }} -o jsonpath={.items[0].metadata.name}
              register: pod_name

            - name: Replacing the placeholder for pod-name
              replace:
                path: "{{ busybox_liveness }}"
                regexp: "pod-name"
                replace: "{{ pod_name.stdout }}"

            - name: Replacing the placeholder for namespace
              replace:
                path: "{{ busybox_liveness }}"
                regexp: "app-namespace"
                replace: "{{ namespace }}"

            - name: Replacing the placeholder for liveness-retry-count
              replace:
                path: "{{ busybox_liveness }}"
                regexp: "liveness-retry-count"
                replace: "{{ liveness_retry }}"

            - name: Replacing the placeholder for liveness-timeout
              replace:
                path: "{{ busybox_liveness }}"
                regexp: "liveness-timeout-seconds"
                replace: "{{ liveness_timeout }}"

            - name: Creating busybox-liveness job
              shell: kubectl create -f {{ busybox_liveness }}

            - name: Verifying whether liveness pod is started successfully
              shell: kubectl get pod -n {{ namespace }} -l liveness=busybox-liveness -o jsonpath={.items[0].status.phase}
              register: pod_status
              until: "'Running' in pod_status.stdout"
              delay: 5
              retries: 40

            - set_fact:
                flag: "Pass"
          when: "'deprovision' not in action"

        - block:
            - name: Getting the busybox liveness job
              shell: kubectl get job -l liveness=busybox-liveness -n {{ namespace }} -o jsonpath='{.items[0].metadata.name}'
              register: liveness_job

            - name: Deleting busybox liveness job
              shell: kubectl delete job {{ liveness_job.stdout }} -n {{ namespace }}

            - set_fact:
                flag: "Pass"
          when: "'deprovision' in action"

      rescue:
        - set_fact:
            flag: "Fail"

      always:
        ## RECORD END-OF-TEST IN e2e RESULT CR
        - include_tasks: /e2e-tests/hack/update_e2e_result_resource.yml
          vars:
            status: 'EOT'

View file

@ -0,0 +1,15 @@
# Variables consumed by the busybox liveness playbook (test.yml).
# All lookup('env', ...) values are injected by the e2e runner Job manifest.

test_name: busybox-liveness
# Namespace where the busybox application is running.
namespace: "{{ lookup('env','APP_NAMESPACE') }}"
# Application label in key=value form, e.g. app=busybox.
app_label: "{{ lookup('env','APP_LABEL') }}"
# Liveness job manifest whose placeholders the playbook fills in.
busybox_liveness: busybox_liveness.yml
liveness_retry: "{{ lookup('env','LIVENESS_RETRY_COUNT') }}"
liveness_timeout: "{{ lookup('env','LIVENESS_TIMEOUT_SECONDS') }}"
liveness_log: "liveness-running"
# 'provision' or 'deprovision'.
action: "{{ lookup('env','ACTION') }}"

View file

@ -0,0 +1,72 @@
---
# Percona (MySQL) Deployment test workload, its PVC, and a Service exposing
# port 3306. Placeholders (testclaim, testclass, teststorage, lkey/lvalue)
# are substituted by the e2e framework before this manifest is applied.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: percona
  labels:
    lkey: lvalue
spec:
  selector:
    matchLabels:
      lkey: lvalue
  template:
    metadata:
      labels:
        lkey: lvalue
    spec:
      containers:
        - name: percona
          image: openebs/tests-custom-percona:latest
          imagePullPolicy: IfNotPresent
          resources:
            limits:
              cpu: 0.5
          # Ignore the ext4 lost+found directory inside the data volume.
          args:
            - "--ignore-db-dir"
            - "lost+found"
          env:
            - name: MYSQL_ROOT_PASSWORD
              value: k8sDem0
          ports:
            - containerPort: 3306
              name: percona
          volumeMounts:
            - mountPath: /var/lib/mysql
              name: data-vol
          #<!-- BEGIN ANSIBLE MANAGED BLOCK -->
          livenessProbe:
            exec:
              command: ["bash", "sql-test.sh"]
            initialDelaySeconds: 60
            periodSeconds: 1
            timeoutSeconds: 10
          #<!-- END ANSIBLE MANAGED BLOCK -->
      volumes:
        - name: data-vol
          persistentVolumeClaim:
            claimName: testclaim
---
kind: PersistentVolumeClaim
apiVersion: v1
metadata:
  name: testclaim
spec:
  storageClassName: testclass
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: teststorage
---
apiVersion: v1
kind: Service
metadata:
  name: percona-mysql
  labels:
    lkey: lvalue
spec:
  ports:
    - port: 3306
      targetPort: 3306
  selector:
    lkey: lvalue

View file

@ -0,0 +1,52 @@
---
# e2e runner Job: executes the percona deployer ansible playbook inside the
# zfs-localpv-e2e image; behaviour is controlled by the env vars below.
apiVersion: batch/v1
kind: Job
metadata:
  generateName: percona-deploy-
  namespace: e2e
spec:
  template:
    metadata:
      name: percona-deploy
      labels:
        app: percona-deployment
    spec:
      serviceAccountName: e2e
      restartPolicy: Never
      containers:
        - name: ansibletest
          image: openebs/zfs-localpv-e2e:ci
          imagePullPolicy: IfNotPresent
          env:
            - name: ANSIBLE_STDOUT_CALLBACK
              value: default
            # Name of the storage class to use for volume provisioning
            - name: STORAGE_CLASS
              value: 'zfspv-sc'
            # This is the namespace where percona application will be deployed
            - name: APP_NAMESPACE
              value: 'percona'
            # Application label for percona deployment in `key=value` format
            - name: APP_LABEL
              value: 'app=percona'
            # Application PVC name
            - name: APP_PVC
              value: 'percona-pvc'
            # Persistent volume storage capacity (e.g. 5Gi)
            - name: PV_CAPACITY
              value: '5Gi'
            # Use: `provision` to deploy the application
            # Use: `deprovision` to deprovision the application
            - name: ACTION
              value: 'provision'
          command: ["/bin/bash"]
          args: ["-c", "ansible-playbook ./e2e-tests/apps/percona/deployers/test.yml -i /etc/ansible/hosts -v; exit 0"]

View file

@ -0,0 +1,57 @@
---
# Deploys (or deprovisions) the percona application as a Deployment and
# verifies the database accepts connections; start/end of test is recorded
# in the e2e result custom resource.
#
# Fixes: `gather_facts: False` -> canonical lowercase `false` (yamllint
# truthy rule); `'deprovision' is in action` -> the plain Jinja2 `in`
# operator, which is the idiomatic, equivalent form.
- hosts: localhost
  connection: local
  gather_facts: false
  vars_files:
    - test_vars.yml
  tasks:
    - block:
        ## Generating the testname for deployment
        - include_tasks: /e2e-tests/hack/create_testname.yml

        ## RECORD START-OF-TEST IN e2e RESULT CR
        - include_tasks: "/e2e-tests/hack/update_e2e_result_resource.yml"
          vars:
            status: 'SOT'

        - block:
            ## Prerequisite tasks such as, namespace creation and replacing
            ## placeholder with test specific values, before deploying application
            - include_tasks: /e2e-tests/utils/k8s/pre_create_app_deploy.yml
              vars:
                application: "{{ application_deployment }}"

            ## Deploying the application
            - include_tasks: /e2e-tests/utils/k8s/deploy_single_app.yml
              vars:
                application: "{{ application_deployment }}"

            ## Fetching the pod name
            - include_tasks: /e2e-tests/utils/k8s/fetch_app_pod.yml

            ## Checking the db is ready for connection
            - include_tasks: /e2e-tests/utils/applications/mysql/check_db_connection.yml
          when: "'deprovision' not in action"

        - name: Deprovisioning the Application
          include_tasks: /e2e-tests/utils/k8s/deprovision_deployment.yml
          vars:
            app_deployer: "{{ application_deployment }}"
          when: "'deprovision' in action"

        - set_fact:
            flag: "Pass"

      rescue:
        - set_fact:
            flag: "Fail"

      always:
        ## RECORD END-OF-TEST IN e2e RESULT CR
        - include_tasks: /e2e-tests/hack/update_e2e_result_resource.yml
          vars:
            status: 'EOT'

View file

@ -0,0 +1,15 @@
# Variables consumed by the percona deployers playbook (test.yml).
# All lookup('env', ...) values are injected by the e2e runner Job manifest.

# Unique test name recorded in the e2e result CR.
test_name: "percona-{{ action }}-{{ app_ns }}"
# Manifest holding the percona Deployment, PVC and Service.
application_deployment: percona.yml
application_name: "percona"
storage_class: "{{ lookup('env','STORAGE_CLASS') }}"
app_ns: "{{ lookup('env','APP_NAMESPACE') }}"
# Application label in key=value form, e.g. app=percona.
app_label: "{{ lookup('env','APP_LABEL') }}"
app_pvc: "{{ lookup('env','APP_PVC') }}"
# 'provision' or 'deprovision'.
action: "{{ lookup('env','ACTION') }}"

View file

@ -0,0 +1,57 @@
---
# Task file included by the percona workload playbook: substitutes the
# test-specific values into the loadgen job manifest ({{ percona_loadgen }})
# and the tpcc benchmark config file ({{ tpcc_conf }}).
#
# Fix: added the `---` document-start marker (Ansible / yamllint convention
# for task files).
- name: Replace the label in loadgen job spec.
  replace:
    path: "{{ percona_loadgen }}"
    regexp: "loadgen_lkey: loadgen_lvalue"
    replace: "{{ loadgen_lkey }}: {{ loadgen_lvalue }}"

- name: Replace the db-user placeholder in tpcc-config file
  replace:
    path: "{{ tpcc_conf }}"
    regexp: "test_user"
    replace: "{{ db_user }}"

- name: Replace the password placeholder in tpcc-config file
  replace:
    path: "{{ tpcc_conf }}"
    regexp: "test_password"
    replace: "{{ db_password }}"

- name: Replace the duration placeholder in tpcc-config file
  replace:
    path: "{{ tpcc_conf }}"
    regexp: "test_duration"
    replace: "{{ load_duration }}"

- name: Replace the warehouse placeholder in tpcc-config file
  replace:
    path: "{{ tpcc_conf }}"
    regexp: "test_warehouse"
    replace: "{{ test_warehouse }}"

- name: Replace the test connections placeholder in tpcc-config file
  replace:
    path: "{{ tpcc_conf }}"
    regexp: "test_connections"
    replace: "{{ test_connections }}"

- name: Replace the test warmup-period placeholder in tpcc-config file
  replace:
    path: "{{ tpcc_conf }}"
    regexp: "test_warmup_period"
    replace: "{{ test_warmup_period }}"

- name: Replace the test interval placeholder in tpcc-config file
  replace:
    path: "{{ tpcc_conf }}"
    regexp: "test_interval"
    replace: "{{ test_interval }}"

- name: Getting the Service IP of Application
  shell: kubectl get svc -n {{ app_ns }} -l {{ app_service_label }} -o jsonpath='{.items[0].spec.clusterIP}'
  register: ip

- name: Replace the Service IP placeholder
  replace:
    path: "{{ percona_loadgen }}"
    regexp: "service_ip"
    replace: "{{ ip.stdout }}"

View file

@ -0,0 +1,60 @@
---
# e2e runner Job: executes the percona workload (tpcc loadgen) ansible
# playbook inside the zfs-localpv-e2e image.
apiVersion: batch/v1
kind: Job
metadata:
  generateName: percona-loadgen-
  namespace: e2e
spec:
  template:
    metadata:
      name: percona-loadgen
      namespace: e2e
      labels:
        loadgen: percona-loadjob
    spec:
      serviceAccountName: e2e
      restartPolicy: Never
      containers:
        - name: ansibletest
          image: openebs/zfs-localpv-e2e:ci
          imagePullPolicy: IfNotPresent
          env:
            - name: ANSIBLE_STDOUT_CALLBACK
              value: default
            # This is the namespace where percona application is running
            - name: APP_NAMESPACE
              value: 'percona'
            - name: APP_LABEL
              value: 'app=percona'
            # Label applied to the spawned tpcc-bench pod, `key=value` format
            - name: LOADGEN_LABEL
              value: loadgen=percona-loadgen
            # Database user name
            - name: DB_USER
              value: root
            - name: DB_PASSWORD
              value: k8sDem0
            # Bench duration (in min)
            # TODO: Use a tpcc-template to define workload w/ more granularity
            - name: LOAD_DURATION
              value: "600"
            - name: TPCC_WAREHOUSES
              value: "1"
            - name: TPCC_CONNECTIONS
              value: "18"
            - name: TPCC_WARMUP_PERIOD
              value: "10"
            - name: LOAD_INTERVAL
              value: "10"
          command: ["/bin/bash"]
          args: ["-c", "ansible-playbook ./e2e-tests/apps/percona/workload/test.yml -i /etc/ansible/hosts -v; exit 0"]

View file

@ -0,0 +1,81 @@
---
# Generates load against the percona deployment with the tpcc-mysql
# benchmark: renders tpcc.conf and the loadgen job from placeholders,
# creates the config map and job, then verifies a tpcc database appears.
#
# Fixes: `gather_facts: False` -> canonical lowercase `false` (yamllint
# truthy rule); dropped `-it` from `kubectl exec` — no TTY exists inside
# the runner pod, so `-t` only produces "Unable to use a TTY" noise.
- hosts: localhost
  connection: local
  gather_facts: false
  vars_files:
    - test_vars.yml
  tasks:
    - block:
        ## Generating the testname for deployment
        - include_tasks: /e2e-tests/hack/create_testname.yml

        # RECORD START-OF-TEST IN e2e RESULT CR
        - include_tasks: /e2e-tests/hack/update_e2e_result_resource.yml
          vars:
            status: 'SOT'

        - name: Checking the status of test specific namespace.
          include_tasks: /e2e-tests/utils/k8s/status_testns.yml

        - name: Get the application label value from env
          set_fact:
            app_lkey: "{{ app_label.split('=')[0] }}"
            app_lvalue: "{{ app_label.split('=')[1] }}"

        - name: Checking whether application is running
          include_tasks: /e2e-tests/utils/k8s/status_app_pod.yml

        - name: Obtaining the loadgen pod label from env.
          set_fact:
            loadgen_lkey: "{{ loadgen_label.split('=')[0] }}"
            loadgen_lvalue: "{{ loadgen_label.split('=')[1] }}"

        - name: Replace default values/placeholder with test-specific values
          include_tasks: ./replace.yml

        - name: Checking for configmap
          shell: kubectl get configmap -n {{ app_ns }}
          register: configmap

        - name: Creating a kubernetes config map to hold the tpcc benchmark config
          shell: kubectl create configmap tpcc-config --from-file {{ tpcc_conf }} -n {{ app_ns }}
          when: "'tpcc-config' not in configmap.stdout"

        - name: Create Percona Loadgen Job
          shell: kubectl apply -f {{ percona_loadgen }} -n {{ app_ns }}

        - name: Verify load-gen pod is running
          shell: kubectl get pods -n {{ app_ns }} -l {{ loadgen_label }} -o jsonpath='{.items[0].status.phase}'
          args:
            executable: /bin/bash
          register: result
          until: "'Running' in result.stdout"
          delay: 5
          retries: 60

        - name: Getting the Percona POD name
          shell: kubectl get po -n {{ app_ns }} -l {{ app_label }} -o jsonpath='{.items[0].metadata.name}'
          register: pod_name

        - name: Verifying load-generation
          shell: kubectl exec {{ pod_name.stdout }} -n {{ app_ns }} -- mysql -u{{ db_user }} -p{{ db_password }} -e "show databases"
          register: output
          until: "'tpcc-' in output.stdout"
          delay: 5
          retries: 120

        - set_fact:
            flag: "Pass"

      rescue:
        - set_fact:
            flag: "Fail"

      always:
        ## RECORD END-OF-TEST IN e2e RESULT CR
        - include_tasks: /e2e-tests/hack/update_e2e_result_resource.yml
          vars:
            status: 'EOT'

View file

@ -0,0 +1,27 @@
# Variables consumed by the percona workload playbook (test.yml).
# All lookup('env', ...) values are injected by the e2e runner Job manifest.

test_name: percona-loadgen-{{ app_ns }}
# Loadgen job manifest whose placeholders replace.yml fills in.
percona_loadgen: tpcc_bench.yml
app_ns: "{{ lookup('env','APP_NAMESPACE') }}"
app_label: "{{ lookup('env','APP_LABEL') }}"
# Label used to locate the percona Service (same as the app label).
app_service_label: "{{ lookup('env','APP_LABEL') }}"
# Label of the spawned tpcc-bench pod, key=value form.
loadgen_label: "{{ lookup('env','LOADGEN_LABEL') }}"
db_user: "{{ lookup('env','DB_USER') }}"
db_password: "{{ lookup('env','DB_PASSWORD') }}"
# tpcc run duration.
load_duration: "{{ lookup('env','LOAD_DURATION') }}"
test_warehouse: "{{ lookup('env','TPCC_WAREHOUSES') }}"
test_connections: "{{ lookup('env','TPCC_CONNECTIONS') }}"
test_warmup_period: "{{ lookup('env','TPCC_WARMUP_PERIOD') }}"
test_interval: "{{ lookup('env','LOAD_INTERVAL') }}"
# Benchmark config file whose placeholders replace.yml fills in.
tpcc_conf: tpcc.conf

View file

@ -0,0 +1,9 @@
{
"db_user": "test_user",
"db_password": "test_password",
"warehouses": "test_warehouse",
"connections": "test_connections",
"warmup_period": "test_warmup_period",
"run_duration": "test_duration",
"interval": "test_interval"
}

View file

@ -0,0 +1,27 @@
---
# Job running the tpcc-mysql benchmark client against the percona service.
# The loadgen_lkey/loadgen_lvalue and service_ip placeholders are replaced
# by the workload playbook; tpcc.conf is mounted from the tpcc-config
# ConfigMap created by that same playbook.
apiVersion: batch/v1
kind: Job
metadata:
  name: tpcc-bench
spec:
  template:
    metadata:
      name: tpcc-bench
      labels:
        loadgen_lkey: loadgen_lvalue
    spec:
      restartPolicy: Never
      containers:
        - name: tpcc-bench
          image: openebs/tests-tpcc-client
          command: ["/bin/bash"]
          args: ["-c", "./tpcc-runner.sh service_ip tpcc.conf; exit 0"]
          tty: true
          volumeMounts:
            - name: tpcc-configmap
              mountPath: /tpcc-mysql/tpcc.conf
              subPath: tpcc.conf
      volumes:
        - name: tpcc-configmap
          configMap:
            name: tpcc-config