# This file is part of .gitlab-ci.yml
# Here are all jobs that are executed during "publish" stage

publish-rustdoc:
  stage: publish
  extends:
    - .kubernetes-env
    - .publish-gh-pages-refs
  variables:
    CI_IMAGE: node:18
    GIT_DEPTH: 100
    RUSTDOCS_DEPLOY_REFS: "master"
  needs:
    - job: build-rustdoc
      artifacts: true
    - job: build-implementers-guide
      artifacts: true
  script:
    # If $CI_COMMIT_REF_NAME doesn't match one of $RUSTDOCS_DEPLOY_REFS space-separated values, we
    # exit immediately.
    # Putting spaces at the front and back to ensure we are not matching just any substring, but the
    # whole space-separated value.
    - '[[ " ${RUSTDOCS_DEPLOY_REFS} " =~ " ${CI_COMMIT_REF_NAME} " ]] || exit 0'
    # setup ssh
    - eval $(ssh-agent)
    - ssh-add - <<< ${GITHUB_SSH_PRIV_KEY}
    - mkdir ~/.ssh && touch ~/.ssh/known_hosts
    - ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts
    # Set git config
    - git config user.email "devops-team@parity.io"
    - git config user.name "${GITHUB_USER}"
    - git config remote.origin.url "git@github.com:/paritytech/${CI_PROJECT_NAME}.git"
    - git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"
    - git fetch origin gh-pages
    # Save README and docs
    - cp -r ./crate-docs/ /tmp/doc/
    - cp -r ./artifacts/book/ /tmp/
    - cp README.md /tmp/doc/
    # we don't need to commit changes because we copy docs to /tmp
    - git checkout gh-pages --force
    # Enable if docs needed for other refs
    # Install `index-tpl-crud` and generate index.html based on RUSTDOCS_DEPLOY_REFS
    # - which index-tpl-crud &> /dev/null || yarn global add @substrate/index-tpl-crud
    # - index-tpl-crud upsert ./index.html ${CI_COMMIT_REF_NAME}
    # Ensure the destination dir doesn't exist.
    - rm -rf ${CI_COMMIT_REF_NAME}
    - rm -rf book/
    - mv -f /tmp/doc ${CI_COMMIT_REF_NAME}
    # dir for implementers guide
    - mkdir -p book
    - mv /tmp/book/html/* book/
    # Upload files
    - git add --all
    # `git commit` has an exit code of > 0 if there is nothing to commit.
    # This causes GitLab to exit immediately and marks this job failed.
    # We don't want to mark the entire job failed if there's nothing to
    # publish though, hence the `|| echo` fallback.
    - git commit -m "___Updated docs for ${CI_COMMIT_REF_NAME}___" || echo "___Nothing to commit___"
    - git push origin gh-pages --force
    # artificial sleep to publish gh-pages
    - sleep 300
  after_script:
    - rm -rf .git/ ./*

publish-subsystem-benchmarks:
  stage: publish
  variables:
    CI_IMAGE: "paritytech/tools:latest"
  extends:
    - .kubernetes-env
    - .publish-gh-pages-refs
  needs:
    - job: subsystem-benchmark-availability-recovery
      artifacts: true
    - job: subsystem-benchmark-availability-distribution
      artifacts: true
    - job: publish-rustdoc
      artifacts: false
  script:
    # setup ssh
    - eval $(ssh-agent)
    - ssh-add - <<< ${GITHUB_SSH_PRIV_KEY}
    - mkdir ~/.ssh && touch ~/.ssh/known_hosts
    - ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts
    # Set git config
    - rm -rf .git/config
    - git config user.email "devops-team@parity.io"
    - git config user.name "${GITHUB_USER}"
    - git config remote.origin.url "git@github.com:/paritytech/${CI_PROJECT_NAME}.git"
    - git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"
    - git fetch origin gh-pages
    # Push result to github
    - git checkout gh-pages --force
    - mkdir -p bench/gitlab/ || echo "Directory exists"
    - rm -rf bench/gitlab/*.json || echo "No json files"
    - cp -r charts/*.json bench/gitlab/
    - git add bench/gitlab/
    - git commit -m "Add json files with benchmark results for ${CI_COMMIT_REF_NAME}"
    - git push origin gh-pages
    # artificial sleep to publish gh-pages
    - sleep 300
  allow_failure: true
  after_script:
    - rm -rf .git/ ./*

trigger_workflow:
  stage: deploy
  extends:
    - .kubernetes-env
    - .publish-gh-pages-refs
  needs:
    - job: publish-subsystem-benchmarks
      artifacts: false
    - job: subsystem-benchmark-availability-recovery
      artifacts: true
    - job: subsystem-benchmark-availability-distribution
      artifacts: true
  script:
    - echo "Triggering workflow"
    - >
      for benchmark in $(ls charts/*.json); do
        export benchmark_name=$(basename $benchmark);
        echo "Benchmark: $benchmark_name";
        export benchmark_dir=$(echo $benchmark_name | sed 's/\.json//');
        curl -q -X POST \
          -H "Accept: application/vnd.github.v3+json" \
          -H "Authorization: token $GITHUB_TOKEN" \
          https://api.github.com/repos/paritytech/${CI_PROJECT_NAME}/actions/workflows/subsystem-benchmarks.yml/dispatches \
          -d "{\"ref\":\"refs/heads/master\",\"inputs\":{\"benchmark-data-dir-path\":\"$benchmark_dir\",\"output-file-path\":\"$benchmark_name\"}}";
        sleep 300;
      done
  allow_failure: true

# note: images are used not only in zombienet but also in rococo, wococo and versi
.build-push-image:
  image: $BUILDAH_IMAGE
  extends:
    - .zombienet-refs
  variables:
    DOCKERFILE: "" # docker/path-to.Dockerfile
    IMAGE_NAME: "" # docker.io/paritypr/image_name
  script:
    # Dockertag should differ in a merge queue
    - if [[ $CI_COMMIT_REF_NAME == *"gh-readonly-queue"* ]]; then export DOCKER_IMAGES_VERSION="${CI_COMMIT_SHORT_SHA}"; fi
    - $BUILDAH_COMMAND build
      --format=docker
      --build-arg VCS_REF="${CI_COMMIT_SHA}"
      --build-arg BUILD_DATE="$(date -u '+%Y-%m-%dT%H:%M:%SZ')"
      --build-arg IMAGE_NAME="${IMAGE_NAME}"
      --build-arg ZOMBIENET_IMAGE="${ZOMBIENET_IMAGE}"
      --tag "$IMAGE_NAME:${DOCKER_IMAGES_VERSION}"
      --file ${DOCKERFILE} .
    - echo "$PARITYPR_PASS" | buildah login --username "$PARITYPR_USER" --password-stdin docker.io
    - $BUILDAH_COMMAND info
    - $BUILDAH_COMMAND push --format=v2s2 "$IMAGE_NAME:${DOCKER_IMAGES_VERSION}"
  after_script:
    - buildah logout --all

build-push-image-polkadot-parachain-debug:
  stage: publish
  extends:
    - .kubernetes-env
    - .common-refs
    - .build-push-image
  needs:
    - job: build-linux-stable-cumulus
      artifacts: true
  variables:
    DOCKERFILE: "docker/dockerfiles/polkadot-parachain/polkadot-parachain-debug_unsigned_injected.Dockerfile"
    IMAGE_NAME: "docker.io/paritypr/polkadot-parachain-debug"

build-push-image-test-parachain:
  stage: publish
  extends:
    - .kubernetes-env
    - .common-refs
    - .build-push-image
  needs:
    - job: build-test-parachain
      artifacts: true
  variables:
    DOCKERFILE: "docker/dockerfiles/test-parachain_injected.Dockerfile"
    IMAGE_NAME: "docker.io/paritypr/test-parachain"

build-push-image-polkadot-debug:
  stage: publish
  extends:
    - .kubernetes-env
    - .common-refs
    - .build-push-image
  needs:
    - job: build-linux-stable
      artifacts: true
  variables:
    DOCKERFILE: "docker/dockerfiles/polkadot/polkadot_injected_debug.Dockerfile"
    IMAGE_NAME: "docker.io/paritypr/polkadot-debug"

build-push-image-colander:
  stage: publish
  extends:
    - .kubernetes-env
    - .common-refs
    - .build-push-image
  needs:
    - job: build-test-collators
      artifacts: true
  variables:
    DOCKERFILE: "docker/dockerfiles/collator_injected.Dockerfile"
    IMAGE_NAME: "docker.io/paritypr/colander"

build-push-image-malus:
  stage: publish
  extends:
    - .kubernetes-env
    - .common-refs
    - .build-push-image
  needs:
    - job: build-malus
      artifacts: true
  variables:
    DOCKERFILE: "docker/dockerfiles/malus_injected.Dockerfile"
    IMAGE_NAME: "docker.io/paritypr/malus"

build-push-image-substrate-pr:
  stage: publish
  extends:
    - .kubernetes-env
    - .common-refs
    - .build-push-image
  needs:
    - job: build-linux-substrate
      artifacts: true
  variables:
    DOCKERFILE: "docker/dockerfiles/substrate_injected.Dockerfile"
    IMAGE_NAME: "docker.io/paritypr/substrate"

# Unlike the other images, the bridges+zombienet image is based on the Zombienet image, which pulls
# the required binaries from other freshly built images (polkadot and cumulus).
build-push-image-bridges-zombienet-tests:
  stage: publish
  extends:
    - .kubernetes-env
    - .common-refs
    - .build-push-image
  needs:
    - job: build-linux-stable
      artifacts: true
    - job: build-linux-stable-cumulus
      artifacts: true
    - job: prepare-bridges-zombienet-artifacts
      artifacts: true
  variables:
    DOCKERFILE: "docker/dockerfiles/bridges_zombienet_tests_injected.Dockerfile"
    IMAGE_NAME: "docker.io/paritypr/bridges-zombienet-tests"
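
# Illustrative only, never executed by CI: a minimal local sketch of the `.build-push-image` build
# step above, assuming `buildah` is installed and using example DOCKERFILE/IMAGE_NAME values taken
# from the `build-push-image-malus` job. The "local-test" tag is hypothetical, and the
# ZOMBIENET_IMAGE build-arg used by the template is omitted here for brevity.
#
#   export DOCKERFILE="docker/dockerfiles/malus_injected.Dockerfile"   # example value from above
#   export IMAGE_NAME="docker.io/paritypr/malus"                       # example value from above
#   export DOCKER_IMAGES_VERSION="local-test"                          # hypothetical local tag
#   buildah build --format=docker \
#     --build-arg VCS_REF="$(git rev-parse HEAD)" \
#     --build-arg BUILD_DATE="$(date -u '+%Y-%m-%dT%H:%M:%SZ')" \
#     --build-arg IMAGE_NAME="${IMAGE_NAME}" \
#     --tag "${IMAGE_NAME}:${DOCKER_IMAGES_VERSION}" \
#     --file "${DOCKERFILE}" .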