---
# Gitlab-CI Workflow
# stages:
# build:
# - Runs on commits on master or tags that match the pattern v1.0, v2.1rc1
# deploy-staging:
# - Runs on commits on master or tags that match the pattern v1.0, v2.1rc1 (continues deployment)
# deploy-production:
# - Runs on tags that match the pattern v1.0, v2.1rc1 (manual deployment)
# Global variables shared by all jobs.
variables:
  CONTAINER_REPO: "docker.io/parity/substrate-api-sidecar"
  HELM_NAMESPACE: "substrate-api-sidecar"
  HELM_RELEASE_NAME_1: "kusama-sidecar"
  HELM_RELEASE_NAME_2: "polkadot-sidecar"
  HELM_CUSTOM_VALUES_FILE_1: "values-kusama.yaml"
  HELM_CUSTOM_VALUES_FILE_2: "values-polkadot.yaml"
  HELM_REPO: "helm/"
  DOCKERFILE_DIRECTORY: "./"
  CI_IMAGE: "quay.io/buildah/stable"

# `retry`, `interruptible` and `before_script` are job-level keywords; to
# apply them to every job they must live under `default:` (they are not
# valid as stand-alone top-level keys).
default:
  retry:
    max: 2
    when:
      - runner_system_failure
      - unknown_failure
      - api_failure
  interruptible: true
  before_script:
    # Derive the image tag: release tags (v1.0, v2.1rc1, ...) get their own
    # tag plus `latest`; everything else is tagged <short-sha>-beta.
    - |-
      if [[ $CI_COMMIT_TAG =~ ^v[0-9]+\.[0-9]+.*$ ]]; then
        export DOCKER_IMAGE_TAG="${CI_COMMIT_TAG}"
        export BUILD_LATEST_IMAGE="true"
      else
        export DOCKER_IMAGE_TAG="${CI_COMMIT_SHORT_SHA}-beta"
      fi
# Pipeline stage order; jobs in a later stage wait for the earlier stages.
stages:
  - build
  - deploy-staging
  - deploy-production
  - benchmark
  - check-benchmark
  - push-benchmark
# Shared artifact collection: keep ./artifacts/ for one day on success.
.collect-artifacts: &collect-artifacts
  artifacts:
    name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}"
    when: on_success
    expire_in: 1 days
    paths:
      - ./artifacts/
# Run on PR pipelines (branch names that are pure digits), never on
# pipelines triggered by another pipeline.
.test-refs: &test-refs
  rules:
    - if: $CI_PIPELINE_SOURCE == "pipeline"
      when: never
    - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs
# Same PR-only condition as .test-refs, but the job must be started manually.
.test-refs-manual: &test-refs-manual
  rules:
    - if: $CI_PIPELINE_SOURCE == "pipeline"
      when: never
    - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs
      when: manual
# Run on schedules, master, and release tags.
# The anchor must be a mapping with a `rules:` key (not a bare sequence),
# otherwise jobs cannot merge it via `<<: *publish-refs`.
.publish-refs: &publish-refs
  rules:
    - if: $CI_PIPELINE_SOURCE == "pipeline"
      when: never
    - if: $CI_PIPELINE_SOURCE == "schedule"
    - if: $CI_COMMIT_REF_NAME == "master"
    - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1
# Base for image-build jobs: build stage, buildah image.
.dockerize: &dockerize
  stage: build
  image: $CI_IMAGE
# Base for jobs that run on the Kubernetes runners.
.kubernetes-env: &kubernetes-env
  image: $CI_IMAGE
  tags:
    - kubernetes-parity-build
# Shared deployment script: fetch per-network Helm values from the
# `helm-custom-values` ConfigMap, then upgrade/install both sidecar releases.
.deploy-k8s: &deploy-k8s
  image: paritytech/kubetools:3.5.3
  script:
    - |-
      kubectl get cm helm-custom-values -n $HELM_NAMESPACE -o jsonpath='{.data.values-kusama\.yaml}' > values-kusama.yaml
      kubectl get cm helm-custom-values -n $HELM_NAMESPACE -o jsonpath='{.data.values-polkadot\.yaml}' > values-polkadot.yaml
    # Kusama release; --atomic rolls back automatically on failure.
    - helm upgrade
      --install
      --atomic
      --timeout 120s
      --namespace $HELM_NAMESPACE
      --set image.repository="${CONTAINER_REPO}"
      --set image.tag="${DOCKER_IMAGE_TAG}"
      --values ${HELM_CUSTOM_VALUES_FILE_1}
      ${HELM_RELEASE_NAME_1} ${HELM_REPO}
    # Polkadot release.
    - helm upgrade
      --install
      --atomic
      --timeout 120s
      --namespace $HELM_NAMESPACE
      --set image.repository="${CONTAINER_REPO}"
      --set image.tag="${DOCKER_IMAGE_TAG}"
      --values ${HELM_CUSTOM_VALUES_FILE_2}
      ${HELM_RELEASE_NAME_2} ${HELM_REPO}
  tags:
    - kubernetes-parity-build
# Build the image and push it to Docker Hub (master / tags / schedules).
build:
  <<: *dockerize
  <<: *publish-refs
  script:
    - echo building "$CONTAINER_REPO:$DOCKER_IMAGE_TAG"
    # Release tags (BUILD_LATEST_IMAGE set by the default before_script) also
    # get the `latest` tag; otherwise only the versioned tag is built.
    # The `else` was missing, which ran both builds on tags and none otherwise.
    - if [[ $BUILD_LATEST_IMAGE ]]; then
        buildah bud
        --format=docker
        --tag "$CONTAINER_REPO:$DOCKER_IMAGE_TAG"
        --tag "$CONTAINER_REPO:latest" "$DOCKERFILE_DIRECTORY";
      else
        buildah bud
        --format=docker
        --tag "$CONTAINER_REPO:$DOCKER_IMAGE_TAG" "$DOCKERFILE_DIRECTORY";
      fi
    - echo ${Docker_Hub_Pass_Parity} |
      buildah login --username ${Docker_Hub_User_Parity} --password-stdin docker.io
    - echo pushing "$CONTAINER_REPO:$DOCKER_IMAGE_TAG"
    # Always push the versioned tag (once — the original pushed it twice);
    # push `latest` only for release tags.
    - buildah push --format=v2s2 "$CONTAINER_REPO:$DOCKER_IMAGE_TAG"
    - if [[ $BUILD_LATEST_IMAGE ]]; then
        buildah push --format=v2s2 "$CONTAINER_REPO:latest";
      fi
# checks that the docker image can be built without publishing
build-pr:
  <<: *dockerize
  <<: *test-refs
  script:
    - echo building "$CONTAINER_REPO:$DOCKER_IMAGE_TAG"
    # Mirror of the `build` job's build step, without the push.
    # The closing `fi` was missing in the source.
    - if [[ $BUILD_LATEST_IMAGE ]]; then
        buildah bud
        --format=docker
        --tag "$CONTAINER_REPO:$DOCKER_IMAGE_TAG"
        --tag "$CONTAINER_REPO:latest" "$DOCKERFILE_DIRECTORY";
      else
        buildah bud
        --format=docker
        --tag "$CONTAINER_REPO:$DOCKER_IMAGE_TAG" "$DOCKERFILE_DIRECTORY";
      fi
# The job-name lines were lost in the source; reconstructed from the `stage:`
# values and the header comments (staging: continuous on master/tags;
# production: manual on release tags).
deploy-staging:
  stage: deploy-staging
  <<: *deploy-k8s
  <<: *publish-refs

deploy-production:
  stage: deploy-production
  <<: *deploy-k8s
  rules:
    - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1
      when: manual
# Shared benchmark job: start sidecar locally, hammer it with wrk, and save
# the report as an artifact for the check/push stages.
.benchmark-template: &benchmark-template
  stage: benchmark
  <<: *collect-artifacts
  <<: *kubernetes-env
  # The default before_script (DOCKER_IMAGE_TAG derivation) is not needed here.
  before_script: []
  variables:
    POLKADOT_ADDRESS: "ws://substrate-api-sidecar-benchmark-node.nodes:9944"
    BENCHMARK_DURATION: "15m" # Tests duration
    BENCHMARK_THREADS: 4 # Total number of threads to use
    BENCHMARK_CONNECTIONS: 12 # Total number of HTTP connections to keep open with each thread handling N = connections/threads
    BENCHMARK_TIMEOUT: "120s" # Record a timeout if a response is not received within this amount of time
    BENCHMARK_OPTS: "--latency" # Additional options, --latency: print detailed latency statistics
    BENCHMARK_SCRIPT: "./scripts/ci/benchmarks/lightweight-bench.lua" # https://github.com/wg/wrk/blob/master/SCRIPTING
    CI_IMAGE: "paritytech/node-wrk:latest"
  script:
    - export SAS_SUBSTRATE_URL=$POLKADOT_ADDRESS
    # NOTE(review): the source contained a run of stray line numbers
    # (copy/paste residue) between this step and `yarn`; any original steps
    # in that span are unrecoverable here — confirm against VCS history.
    - yarn
    - yarn build
    # Start sidecar in the background and give it time to come up.
    - yarn run main &
    - sleep 10
    - wrk -d${BENCHMARK_DURATION}
      -t${BENCHMARK_THREADS}
      -c${BENCHMARK_CONNECTIONS}
      --timeout ${BENCHMARK_TIMEOUT}
      ${BENCHMARK_OPTS}
      -s ${BENCHMARK_SCRIPT} "http://127.0.0.1:8080" > result.txt
    - cat result.txt
    - mkdir artifacts/
    - mv result.txt artifacts/
  tags:
    - kubernetes-parity-build
# Benchmark run for publish refs (master / tags / schedules).
benchmark:
  <<: *benchmark-template
  <<: *publish-refs
  needs:
    - build
# manual step to run benchmarks in PR pipeline
benchmark-manual-pr:
  <<: *benchmark-template
  <<: *test-refs-manual
  needs:
    - build-pr
# Compare the average request time from the benchmark report against a
# threshold. The `variables:` and `script:` keys were lost in the source.
check-benchmark:
  stage: check-benchmark
  <<: *publish-refs
  <<: *kubernetes-env
  # The script reads artifacts/result.txt, which only the `benchmark` job
  # produces — fetch its artifacts explicitly.
  needs:
    - job: benchmark
      artifacts: true
  variables:
    GITHUB_REPO: "paritytech/substrate-api-sidecar"
    CI_IMAGE: "paritytech/benchmarks:latest"
  script:
    - export RESULT=$(cat artifacts/result.txt | grep AvgRequestTime | awk '{print $2}')
    # NOTE(review): $THRESHOLD is not defined anywhere in this file —
    # confirm it is supplied as a project/group CI variable.
    - check_single_bench_result -g $GITHUB_REPO
      -c $THRESHOLD
      -v $RESULT
# Push the benchmark result to the Prometheus pushgateway, both as a common
# metric and with a commit-specific label. The `variables:` key wrapping
# PROMETHEUS_URL / CI_IMAGE was lost in the source.
push-benchmark:
  stage: push-benchmark
  <<: *publish-refs
  <<: *kubernetes-env
  needs:
    - job: check-benchmark
      artifacts: true
  variables:
    PROMETHEUS_URL: "https://pushgateway.parity-build.parity.io"
    CI_IMAGE: "paritytech/benchmarks:latest"
  script:
    - export RESULT=$(cat artifacts/result.txt | grep AvgRequestTime | awk '{print $2}')
    - push_bench_result -t common
      -p $CI_PROJECT_NAME
      -n sidecar
      -r $RESULT
      -u ms
      -s $PROMETHEUS_URL
    - push_bench_result -t specific
      -p $CI_PROJECT_NAME
      -n sidecar
      -r $RESULT
      -l 'commit="'$CI_COMMIT_SHORT_SHA'"'
      -u ms
      -s $PROMETHEUS_URL