name: RC Build
on:
workflow_call:
inputs:
binary:
description: Binary to be built for the release
required: true
default: polkadot
type: string
package:
description: Package to be built; for now it can be polkadot, polkadot-parachain-bin, or polkadot-omni-node
required: true
type: string
release_tag:
description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-rcX) or polkadot-stableYYMM-X(-rcX)
required: true
type: string
target:
description: Target triple for which the artifacts are being built (e.g. x86_64-unknown-linux-gnu)
required: true
type: string
secrets:
PGP_KMS_KEY:
required: true
PGP_KMS_HASH:
required: true
AWS_ACCESS_KEY_ID:
required: true
AWS_SECRET_ACCESS_KEY:
required: true
AWS_DEFAULT_REGION:
required: true
AWS_RELEASE_ACCESS_KEY_ID:
required: true
AWS_RELEASE_SECRET_ACCESS_KEY:
required: true
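# Example: a minimal, hypothetical caller sketch (the file name and values below are
# assumptions, not taken from this diff). Because the jobs use fromJSON(inputs.binary),
# the `binary` input is expected to be a JSON array encoded as a string:
#
#   jobs:
#     build-rc:
#       uses: ./.github/workflows/release-reusable-rc-build.yml
#       with:
#         binary: '["polkadot", "polkadot-prepare-worker", "polkadot-execute-worker"]'
#         package: polkadot
#         release_tag: polkadot-stable2412-rc1
#         target: x86_64-unknown-linux-gnu
#       secrets: inherit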
permissions:
id-token: write
contents: read
attestations: write
jobs:
set-image:
# GitHub Actions allows using 'env' in a container context.
# However, env variables don't work for forks: https://github.com/orgs/community/discussions/44322
# This workaround sets the container image for each job using 'set-image' job output.
runs-on: ubuntu-latest
outputs:
IMAGE: ${{ steps.set_image.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- id: set_image
run: cat .github/env >> $GITHUB_OUTPUT
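# $GITHUB_OUTPUT expects KEY=value lines, so this works as long as .github/env only
# contains such lines. A hypothetical example of its content (image name and tag are
# assumptions, not the real values):
#
#   IMAGE=docker.io/paritytech/ci-unified:bullseye-1.81.0
#
# which makes ${{ needs.set-image.outputs.IMAGE }} resolve to that image reference.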
build-rc:
if: ${{ inputs.target == 'x86_64-unknown-linux-gnu' }}
needs: [set-image]
runs-on: ubuntu-latest-m
environment: release
container:
image: ${{ needs.set-image.outputs.IMAGE }}
strategy:
matrix:
binaries: ${{ fromJSON(inputs.binary) }}
env:
PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
steps:
- name: Install pgpkms
run: |
# Install pgpkms that is used to sign built artifacts
python3 -m pip install "pgpkms @ git+https://github.com/paritytech-release/pgpkms.git@e7f806f99e9be5c52f0b4a536b7d4ef9c3e695ed"
which pgpkms
- name: Checkout sources
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
with:
ref: ${{ inputs.release_tag }}
fetch-depth: 0
- name: Import gpg keys
shell: bash
run: |
. ./.github/scripts/common/lib.sh
import_gpg_keys
- name: Build binary
run: |
git config --global --add safe.directory "${GITHUB_WORKSPACE}" #avoid "detected dubious ownership" error
./.github/scripts/release/build-linux-release.sh ${{ matrix.binaries }} ${{ inputs.package }}
- name: Generate artifact attestation
uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
with:
subject-path: /artifacts/${{ matrix.binaries }}/${{ matrix.binaries }}
- name: Sign artifacts
working-directory: /artifacts/${{ matrix.binaries }}
run: |
python3 -m pgpkms sign --input ${{ matrix.binaries }} -o ${{ matrix.binaries }}.asc
- name: Check sha256 ${{ matrix.binaries }}
working-directory: /artifacts/${{ matrix.binaries }}
shell: bash
run: |
. "${GITHUB_WORKSPACE}"/.github/scripts/common/lib.sh
echo "Checking binary ${{ matrix.binaries }}"
check_sha256 ${{ matrix.binaries }}
- name: Check GPG ${{ matrix.binaries }}
working-directory: /artifacts/${{ matrix.binaries }}
shell: bash
run: |
. "${GITHUB_WORKSPACE}"/.github/scripts/common/lib.sh
check_gpg ${{ matrix.binaries }}
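# check_sha256 and check_gpg come from .github/scripts/common/lib.sh, which is not part
# of this diff. A rough sketch of what such helpers typically do (the function bodies
# below are assumptions, not the repository's actual implementation):
#
#   check_sha256() {
#     # verify the binary against the <binary>.sha256 file produced by the build script
#     sha256sum -c "$1.sha256"
#   }
#   check_gpg() {
#     # verify the detached ASCII-armored signature created in the "Sign artifacts" step
#     gpg --verify "$1.asc" "$1"
#   }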
- name: Upload ${{ matrix.binaries }} artifacts
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
name: ${{ matrix.binaries }}_${{ inputs.target }}
path: /artifacts/${{ matrix.binaries }}
build-macos-rc:
if: ${{ inputs.target == 'aarch64-apple-darwin' }}
runs-on: parity-macos
environment: release
strategy:
matrix:
binaries: ${{ fromJSON(inputs.binary) }}
env:
PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
steps:
- name: Checkout sources
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
with:
ref: ${{ inputs.release_tag }}
fetch-depth: 0
- name: Set rust version from env file
run: |
RUST_VERSION=$(cat .github/env | sed -E 's/.*ci-unified:([^-]+)-([^-]+).*/\2/')
echo $RUST_VERSION
echo "RUST_VERSION=${RUST_VERSION}" >> $GITHUB_ENV
- name: Set workspace environment variable
# relevant for artifacts upload, which cannot interpolate GitHub Actions variable syntax when
# used within valid paths. We cannot use root-based paths either, since the root is set as read-only
# on the `parity-macos` runner.
run: echo "ARTIFACTS_PATH=${GITHUB_WORKSPACE}/artifacts/${{ matrix.binaries }}" >> $GITHUB_ENV
- name: Set up Homebrew
uses: Homebrew/actions/setup-homebrew@1ccc07ccd54b6048295516a3eb89b192c35057dc # master from 12.09.2024
- name: Set homebrew binaries location on path
run: echo "/opt/homebrew/bin" >> $GITHUB_PATH
- name: Install rust ${{ env.RUST_VERSION }}
uses: actions-rust-lang/setup-rust-toolchain@11df97af8e8102fd60b60a77dfbf58d40cd843b8 # v1.10.1
with:
cache: false
toolchain: ${{ env.RUST_VERSION }}
target: wasm32-unknown-unknown
components: cargo, clippy, rust-docs, rust-src, rustfmt, rustc, rust-std
- name: cargo info
run: |
echo "######## rustup show ########"
rustup show
echo "######## cargo --version ########"
cargo --version
- name: Install protobuf
run: brew install protobuf
- name: Install gpg
run: |
brew install gnupg
# Setup so that keyserver.ubuntu.com can be resolved.
# See: https://github.com/actions/runner-images/issues/9777
mkdir -p ~/.gnupg/
touch ~/.gnupg/dirmngr.conf
echo "standard-resolver" > ~/.gnupg/dirmngr.conf
- name: Install sha256sum
run: |
brew install coreutils
- name: Install pgpkms
run: |
# Install pgpkms that is used to sign built artifacts
python3 -m pip install "pgpkms @ git+https://github.com/paritytech-release/pgpkms.git@e7f806f99e9be5c52f0b4a536b7d4ef9c3e695ed"
- name: Import gpg keys
shell: bash
run: |
. ./.github/scripts/common/lib.sh
import_gpg_keys
- name: Build binary
run: |
git config --global --add safe.directory "${GITHUB_WORKSPACE}" #avoid "detected dubious ownership" error
./.github/scripts/release/build-macos-release.sh ${{ matrix.binaries }} ${{ inputs.package }}
- name: Generate artifact attestation
uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
with:
subject-path: ${{ env.ARTIFACTS_PATH }}/${{ matrix.binaries }}
- name: Sign artifacts
working-directory: ${{ env.ARTIFACTS_PATH }}
run: |
python3 -m pgpkms sign --input ${{ matrix.binaries }} -o ${{ matrix.binaries }}.asc
- name: Check sha256 ${{ matrix.binaries }}
working-directory: ${{ env.ARTIFACTS_PATH }}
shell: bash
run: |
. "${GITHUB_WORKSPACE}"/.github/scripts/common/lib.sh
echo "Checking binary ${{ matrix.binaries }}"
check_sha256 ${{ matrix.binaries }}
- name: Check GPG ${{ matrix.binaries }}
working-directory: ${{ env.ARTIFACTS_PATH }}
shell: bash
run: |
. "${GITHUB_WORKSPACE}"/.github/scripts/common/lib.sh
check_gpg ${{ matrix.binaries }}
- name: Upload ${{ matrix.binaries }} artifacts
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
name: ${{ matrix.binaries }}_${{ inputs.target }}
path: ${{ env.ARTIFACTS_PATH }}
build-polkadot-deb-package:
if: ${{ inputs.package == 'polkadot' && inputs.target == 'x86_64-unknown-linux-gnu' }}
needs: [build-rc]
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
with:
ref: ${{ inputs.release_tag }}
fetch-depth: 0
- name: Download polkadot_x86_64-unknown-linux-gnu artifacts
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: polkadot_x86_64-unknown-linux-gnu
path: target/production
merge-multiple: true
- name: Download polkadot-execute-worker_x86_64-unknown-linux-gnu artifacts
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: polkadot-execute-worker_x86_64-unknown-linux-gnu
path: target/production
merge-multiple: true
- name: Download polkadot-prepare-worker_x86_64-unknown-linux-gnu artifacts
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: polkadot-prepare-worker_x86_64-unknown-linux-gnu
path: target/production
merge-multiple: true
- name: Build polkadot deb package
shell: bash
run: |
. "${GITHUB_WORKSPACE}"/.github/scripts/common/lib.sh
VERSION=$(get_polkadot_node_version_from_code)
. "${GITHUB_WORKSPACE}"/.github/scripts/release/build-deb.sh ${{ inputs.package }} ${VERSION}
- name: Generate artifact attestation
uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
with:
subject-path: target/production/*.deb
- name: Upload ${{ inputs.package }} artifacts
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
name: ${{ inputs.package }}_${{ inputs.target }}
path: target/production
overwrite: true
upload-polkadot-artifacts-to-s3:
if: ${{ inputs.package == 'polkadot' && inputs.target == 'x86_64-unknown-linux-gnu' }}
needs: [build-polkadot-deb-package]
uses: ./.github/workflows/release-reusable-s3-upload.yml
with:
package: ${{ inputs.package }}
release_tag: ${{ inputs.release_tag }}
target: ${{ inputs.target }}
secrets:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
upload-polkadot-parachain-artifacts-to-s3:
if: ${{ inputs.package == 'polkadot-parachain-bin' && inputs.target == 'x86_64-unknown-linux-gnu' }}
needs: [build-rc]
uses: ./.github/workflows/release-reusable-s3-upload.yml
with:
package: polkadot-parachain
release_tag: ${{ inputs.release_tag }}
target: ${{ inputs.target }}
secrets:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
upload-polkadot-omni-node-artifacts-to-s3:
if: ${{ inputs.package == 'polkadot-omni-node' && inputs.target == 'x86_64-unknown-linux-gnu' }}
needs: [build-rc]
uses: ./.github/workflows/release-reusable-s3-upload.yml
with:
package: ${{ inputs.package }}
release_tag: ${{ inputs.release_tag }}
target: ${{ inputs.target }}
secrets:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
upload-frame-omni-bencher-artifacts-to-s3:
if: ${{ inputs.package == 'frame-omni-bencher' && inputs.target == 'x86_64-unknown-linux-gnu' }}
needs: [build-rc]
uses: ./.github/workflows/release-reusable-s3-upload.yml
with:
package: ${{ inputs.package }}
release_tag: ${{ inputs.release_tag }}
target: ${{ inputs.target }}
secrets:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
upload-chain-spec-builder-artifacts-to-s3:
if: ${{ inputs.package == 'staging-chain-spec-builder' && inputs.target == 'x86_64-unknown-linux-gnu' }}
needs: [build-rc]
uses: ./.github/workflows/release-reusable-s3-upload.yml
with:
package: chain-spec-builder
release_tag: ${{ inputs.release_tag }}
target: ${{ inputs.target }}
secrets:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
upload-polkadot-macos-artifacts-to-s3:
if: ${{ inputs.package == 'polkadot' && inputs.target == 'aarch64-apple-darwin' }}
# TODO: add and use a `build-polkadot-homebrew-package` which packs all `polkadot` binaries:
# `polkadot`, `polkadot-prepare-worker` and `polkadot-execute-worker`.
needs: [build-macos-rc]
uses: ./.github/workflows/release-reusable-s3-upload.yml
with:
package: ${{ inputs.package }}
release_tag: ${{ inputs.release_tag }}
target: ${{ inputs.target }}
secrets:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
upload-polkadot-prepare-worker-macos-artifacts-to-s3:
if: ${{ inputs.package == 'polkadot' && inputs.target == 'aarch64-apple-darwin' }}
needs: [build-macos-rc]
uses: ./.github/workflows/release-reusable-s3-upload.yml
with:
package: polkadot-prepare-worker
release_tag: ${{ inputs.release_tag }}
target: ${{ inputs.target }}
secrets:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
upload-polkadot-execute-worker-macos-artifacts-to-s3:
if: ${{ inputs.package == 'polkadot' && inputs.target == 'aarch64-apple-darwin' }}
needs: [build-macos-rc]
uses: ./.github/workflows/release-reusable-s3-upload.yml
with:
package: polkadot-execute-worker
release_tag: ${{ inputs.release_tag }}
target: ${{ inputs.target }}
secrets:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
upload-polkadot-omni-node-macos-artifacts-to-s3:
if: ${{ inputs.package == 'polkadot-omni-node' && inputs.target == 'aarch64-apple-darwin' }}
needs: [build-macos-rc]
uses: ./.github/workflows/release-reusable-s3-upload.yml
with:
package: ${{ inputs.package }}
release_tag: ${{ inputs.release_tag }}
target: ${{ inputs.target }}
secrets:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
upload-polkadot-parachain-macos-artifacts-to-s3:
if: ${{ inputs.package == 'polkadot-parachain-bin' && inputs.target == 'aarch64-apple-darwin' }}
needs: [build-macos-rc]
uses: ./.github/workflows/release-reusable-s3-upload.yml
with:
package: polkadot-parachain
release_tag: ${{ inputs.release_tag }}
target: ${{ inputs.target }}
secrets:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
upload-frame-omni-bencher-macos-artifacts-to-s3:
if: ${{ inputs.package == 'frame-omni-bencher' && inputs.target == 'aarch64-apple-darwin' }}
needs: [build-macos-rc]
uses: ./.github/workflows/release-reusable-s3-upload.yml
with:
package: ${{ inputs.package }}
release_tag: ${{ inputs.release_tag }}
target: ${{ inputs.target }}
secrets:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
upload-chain-spec-builder-macos-artifacts-to-s3:
if: ${{ inputs.package == 'staging-chain-spec-builder' && inputs.target == 'aarch64-apple-darwin' }}
needs: [build-macos-rc]
uses: ./.github/workflows/release-reusable-s3-upload.yml
with:
package: chain-spec-builder
release_tag: ${{ inputs.release_tag }}
target: ${{ inputs.target }}
secrets:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
name: Upload to s3
on:
workflow_call:
inputs:
package:
description: Package to be built; for now it is either polkadot or polkadot-parachain-bin
required: true
type: string
release_tag:
description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-X)-rcX or polkadot-stableYYMM-rcX
required: true
type: string
target:
description: Target triple for which the artifacts are being uploaded (e.g. aarch64-apple-darwin)
required: true
type: string
secrets:
AWS_DEFAULT_REGION:
required: true
AWS_RELEASE_ACCESS_KEY_ID:
required: true
AWS_RELEASE_SECRET_ACCESS_KEY:
required: true
jobs:
upload-artifacts-to-s3:
runs-on: ubuntu-latest
environment: release
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
AWS_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
steps:
- name: Checkout
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Download amd64 artifacts
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: ${{ inputs.package }}_${{ inputs.target }}
path: release-artifacts/${{ inputs.target }}/${{ inputs.package }}
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 # v4.1.0
with:
aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ env.AWS_REGION }}
- name: Upload ${{ inputs.package }} artifacts to s3
run: |
. ./.github/scripts/release/release_lib.sh
upload_s3_release ${{ inputs.package }} ${{ inputs.release_tag }} ${{ inputs.target }}
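# upload_s3_release lives in .github/scripts/release/release_lib.sh and is not shown in
# this diff. A hedged sketch of the kind of AWS CLI call such a helper would wrap (the
# bucket name and key layout below are assumptions):
#
#   aws s3 sync "release-artifacts/${TARGET}/${PRODUCT}" \
#     "s3://<releases-bucket>/${PRODUCT}/${RELEASE_TAG}/${TARGET}/"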
name: Srtool build
env:
SUBWASM_VERSION: 0.20.0
SUBWASM_VERSION: 0.21.0
TOML_CLI_VERSION: 0.2.4
on:
......@@ -9,14 +9,18 @@ on:
inputs:
excluded_runtimes:
type: string
build_opts:
type: string
profile:
type: string
outputs:
published_runtimes:
value: ${{ jobs.find-runtimes.outputs.runtime }}
schedule:
- cron: "00 02 * * 1" # 2AM weekly on monday
workflow_dispatch:
permissions:
id-token: write
attestations: write
contents: read
jobs:
find-runtimes:
......@@ -26,7 +30,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.0.0
uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.0.0
with:
fetch-depth: 0
......@@ -37,7 +41,8 @@ jobs:
sudo dpkg -i toml.deb
toml --version; jq --version
- name: Scan runtimes
- name: Scan and get runtimes list
id: get_runtimes_list
env:
EXCLUDED_RUNTIMES: ${{ inputs.excluded_runtimes }}:"substrate-test"
run: |
......@@ -49,13 +54,6 @@ jobs:
MATRIX=$(find_runtimes | tee runtimes_list.json)
echo $MATRIX
- name: Get runtimes list
id: get_runtimes_list
run: |
ls -al
MATRIX=$(cat runtimes_list.json)
echo $MATRIX
echo "runtime=$MATRIX" >> $GITHUB_OUTPUT
srtool:
......@@ -67,16 +65,19 @@ jobs:
matrix: ${{ fromJSON(needs.find-runtimes.outputs.runtime) }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.0.0
- uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.0.0
with:
fetch-depth: 0
- name: Srtool build
id: srtool_build
uses: chevdor/srtool-actions@v0.9.2
env:
BUILD_OPTS: ${{ inputs.build_opts }}
with:
chain: ${{ matrix.chain }}
runtime_dir: ${{ matrix.runtime_dir }}
profile: ${{ inputs.profile }}
- name: Summary
run: |
......@@ -85,6 +86,11 @@ jobs:
echo "Compact Runtime: ${{ steps.srtool_build.outputs.wasm }}"
echo "Compressed Runtime: ${{ steps.srtool_build.outputs.wasm_compressed }}"
- name: Generate artifact attestation
uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
with:
subject-path: ${{ steps.srtool_build.outputs.wasm }}
# We now get extra information thanks to subwasm
- name: Install subwasm
run: |
......
# Reusable workflow to perform checks and generate conditions for other workflows.
# Currently it checks if any Rust (build-related) file is changed
# and if the current (caller) workflow file is changed.
# Example:
#
# jobs:
# changes:
# permissions:
# pull-requests: read
# uses: ./.github/workflows/reusable-check-changed-files.yml
# some-job:
# needs: changes
# if: ${{ needs.changes.outputs.rust }}
# .......
name: Check changes files
on:
workflow_call:
# Map the workflow outputs to job outputs
outputs:
rust:
value: ${{ jobs.changes.outputs.rust }}
description: "true if any of the build-related OR current (caller) workflow files have changed"
current-workflow:
value: ${{ jobs.changes.outputs.current-workflow }}
description: "true if current (caller) workflow file has changed"
jobs:
changes:
runs-on: ubuntu-latest
permissions:
pull-requests: read
outputs:
# true if current workflow (caller) file is changed
rust: ${{ steps.filter.outputs.rust == 'true' || steps.filter.outputs.current-workflow == 'true' }}
current-workflow: ${{ steps.filter.outputs.current-workflow }}
steps:
- id: current-file
run: echo "current-workflow-file=$(echo ${{ github.workflow_ref }} | sed -nE "s/.*(\.github\/workflows\/[a-zA-Z0-9_-]*\.y[a]?ml)@refs.*/\1/p")" >> $GITHUB_OUTPUT
- run: echo "${{ steps.current-file.outputs.current-workflow-file }}"
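# github.workflow_ref has the form <owner>/<repo>/.github/workflows/<file>@<ref>, e.g.
#   paritytech/polkadot-sdk/.github/workflows/tests-linux-stable.yml@refs/heads/master
# (the workflow file name here is only an illustration). The sed expression above strips
# the owner/repo prefix and the @refs suffix, leaving
#   .github/workflows/tests-linux-stable.yml
# which is then fed into the filter step below as a path pattern.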
# For pull requests it's not necessary to check out the code
- name: Checkout
if: github.event_name != 'pull_request'
uses: actions/checkout@v4
- id: filter
uses: dorny/paths-filter@v3
with:
predicate-quantifier: "every"
# current-workflow - check if the current (caller) workflow file is changed
# rust - check if any Rust (build-related) file is changed
filters: |
current-workflow:
- '${{ steps.current-file.outputs.current-workflow-file }}'
rust:
- '**/*'
- '!.github/**/*'
- '!prdoc/**/*'
- '!docs/**/*'
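# With predicate-quantifier set to "every", a changed file only satisfies the `rust`
# filter if it matches all four patterns above, i.e. any path that is NOT under .github/,
# prdoc/ or docs/. For example, a change to substrate/frame/foo/src/lib.rs (hypothetical
# path) sets `rust` to true, while a change touching only docs/README.md does not.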
# The workflow is not part of reusable-preflight.yml to allow testing CI in draft.
name: Preflight isdraft
on:
workflow_call:
# Map the workflow outputs to job outputs
jobs:
isdraft:
runs-on: ubuntu-latest
if: github.event.pull_request.draft == false || contains(github.event.pull_request.labels.*.name, 'A5-run-CI')
steps:
- name: echo test
shell: bash
run: echo "PR is not draft, starting CI"
# Reusable workflow to set various useful variables
# and to perform checks and generate conditions for other workflows.
# Currently it checks if any Rust (build-related) file is changed
# and if the current (caller) workflow file is changed.
# Example:
#
# jobs:
# preflight:
# uses: ./.github/workflows/reusable-preflight.yml
# some-job:
# needs: preflight
# if: ${{ needs.preflight.outputs.changes_rust }}
# .......
name: Preflight
on:
workflow_call:
# Map the workflow outputs to job outputs
outputs:
changes_rust:
value: ${{ jobs.preflight.outputs.changes_rust }}
changes_currentWorkflow:
value: ${{ jobs.preflight.outputs.changes_currentWorkflow }}
IMAGE:
value: ${{ jobs.preflight.outputs.IMAGE }}
description: "CI image"
# Runners
# https://github.com/paritytech/ci_cd/wiki/GitHub#paritytech-self-hosted-runners
RUNNER:
value: ${{ jobs.preflight.outputs.RUNNER }}
description: |
Main runner for resource-intensive tasks
By default we use spot machines that can be terminated at any time.
Merge queues use persistent runners to avoid being kicked out of the queue when a runner is terminated.
RUNNER_OLDLINUX:
value: ${{ jobs.preflight.outputs.RUNNER_OLDLINUX }}
description: |
parity-oldlinux
By default we use spot machines that can be terminated at any time.
Merge queues use persistent runners to avoid being kicked out of the queue when a runner is terminated.
RUNNER_DEFAULT:
value: ${{ jobs.preflight.outputs.RUNNER_DEFAULT }}
description: "Relatively lightweight runner. When `ubuntu-latest` is not enough"
RUNNER_WEIGHTS:
value: ${{ jobs.preflight.outputs.RUNNER_WEIGHTS }}
RUNNER_BENCHMARK:
value: ${{ jobs.preflight.outputs.RUNNER_BENCHMARK }}
RUNNER_MACOS:
value: ${{ jobs.preflight.outputs.RUNNER_MACOS }}
# Vars
SOURCE_REF_SLUG:
value: ${{ jobs.preflight.outputs.SOURCE_REF_SLUG }}
description: "Name of the current branch for `push` or source branch for `pull_request` with `/` replaced by `_`. Does not exists in merge_group"
REF_SLUG:
value: ${{ jobs.preflight.outputs.REF_SLUG }}
description: |
Name of the current revision (depending on the event) with `/` replaced by `_`, e.g:
push - master
pull_request - 49_merge
merge_group - gh-readonly-queue_master_pr-49-38d43798a986430231c828b2c762997f818ac012
COMMIT_SHA:
value: ${{ jobs.preflight.outputs.COMMIT_SHA }}
description: "Sha of the current revision"
COMMIT_SHA_SHORT:
value: ${{ jobs.preflight.outputs.COMMIT_SHA_SHORT }}
description: "Sha of the current revision, 8-symbols long"
jobs:
#
#
#
preflight:
runs-on: ubuntu-latest
outputs:
changes_rust: ${{ steps.set_changes.outputs.rust_any_changed || steps.set_changes.outputs.currentWorkflow_any_changed }}
changes_currentWorkflow: ${{ steps.set_changes.outputs.currentWorkflow_any_changed }}
IMAGE: ${{ steps.set_image.outputs.IMAGE }}
# Runners
# https://github.com/paritytech/ci_cd/wiki/GitHub#paritytech-self-hosted-runners
RUNNER: ${{ steps.set_runner.outputs.RUNNER }}
RUNNER_OLDLINUX: ${{ steps.set_runner.outputs.RUNNER_OLDLINUX }}
RUNNER_DEFAULT: ${{ steps.set_runner.outputs.RUNNER_DEFAULT }}
RUNNER_WEIGHTS: ${{ steps.set_runner.outputs.RUNNER_WEIGHTS }}
RUNNER_BENCHMARK: ${{ steps.set_runner.outputs.RUNNER_BENCHMARK }}
RUNNER_MACOS: ${{ steps.set_runner.outputs.RUNNER_MACOS }}
SOURCE_REF_SLUG: ${{ steps.set_vars.outputs.SOURCE_REF_SLUG }}
REF_SLUG: ${{ steps.set_vars.outputs.REF_SLUG }}
COMMIT_SHA: ${{ steps.set_vars.outputs.COMMIT_SHA }}
COMMIT_SHA_SHORT: ${{ steps.set_vars.outputs.COMMIT_SHA_SHORT }}
steps:
- uses: actions/checkout@v4
#
# Set changes
#
- name: Current file
id: current_file
shell: bash
run: |
echo "currentWorkflowFile=$(echo ${{ github.workflow_ref }} | sed -nE "s/.*(\.github\/workflows\/[a-zA-Z0-9_-]*\.y[a]?ml)@refs.*/\1/p")" >> $GITHUB_OUTPUT
echo "currentActionDir=$(echo ${{ github.action_path }} | sed -nE "s/.*(\.github\/actions\/[a-zA-Z0-9_-]*)/\1/p")" >> $GITHUB_OUTPUT
- name: Set changes
id: set_changes
uses: tj-actions/changed-files@v45
with:
files_yaml: |
rust:
- '**/*'
- '!.github/**/*'
- '!prdoc/**/*'
- '!docs/**/*'
currentWorkflow:
- '${{ steps.current_file.outputs.currentWorkflowFile }}'
- '.github/workflows/reusable-preflight.yml'
#
# Set image
#
- name: Set image
id: set_image
shell: bash
run: cat .github/env >> $GITHUB_OUTPUT
#
# Set runner
#
# By default we use spot machines that can be terminated at any time.
# Merge queues use persistent runners to avoid being kicked out of the queue when a runner is terminated.
#
- name: Set runner
id: set_runner
shell: bash
run: |
echo "RUNNER_DEFAULT=parity-default" >> $GITHUB_OUTPUT
echo "RUNNER_WEIGHTS=parity-weights" >> $GITHUB_OUTPUT
echo "RUNNER_BENCHMARK=parity-benchmark" >> $GITHUB_OUTPUT
echo "RUNNER_MACOS=parity-macos" >> $GITHUB_OUTPUT
#
# Run merge queues on persistent runners
if [[ $GITHUB_REF_NAME == *"gh-readonly-queue"* ]]; then
echo "RUNNER=parity-large-persistent" >> $GITHUB_OUTPUT
echo "RUNNER_OLDLINUX=parity-oldlinux-persistent" >> $GITHUB_OUTPUT
else
echo "RUNNER=parity-large" >> $GITHUB_OUTPUT
echo "RUNNER_OLDLINUX=parity-oldlinux" >> $GITHUB_OUTPUT
fi
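# Worked example of the branch check above: in a merge queue GITHUB_REF_NAME looks like
# gh-readonly-queue/master/pr-49-38d43798a986430231c828b2c762997f818ac012 (see the
# REF_SLUG description), so the condition matches and the persistent runners are chosen;
# for a regular pull request or push it does not match, and the spot runners are used.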
#
# Set vars
#
- name: Set vars
id: set_vars
shell: bash
run: |
export SOURCE_REF_NAME=${{ github.head_ref || github.ref_name }}
echo "SOURCE_REF_SLUG=${SOURCE_REF_NAME//\//_}" >> $GITHUB_OUTPUT
#
export COMMIT_SHA=${{ github.sha }}
echo "COMMIT_SHA=$COMMIT_SHA" >> $GITHUB_OUTPUT
echo "COMMIT_SHA_SHORT=${COMMIT_SHA:0:8}" >> $GITHUB_OUTPUT
#
export REF_NAME=${{ github.ref_name }}
echo "REF_SLUG=${REF_NAME//\//_}" >> $GITHUB_OUTPUT
- name: log
shell: bash
run: |
echo "workflow file: ${{ steps.current_file.outputs.currentWorkflowFile }}"
echo "Modified: ${{ steps.set_changes.outputs.modified_keys }}"
#
#
#
ci-versions:
needs: [preflight]
runs-on: ubuntu-latest
container:
image: ${{ needs.preflight.outputs.IMAGE }}
steps:
- uses: actions/checkout@v4
- name: Info rust
run: |
rustup show
cargo --version
cargo +nightly --version
cargo clippy --version
echo "yarn version: $(yarn --version)"
echo $( substrate-contracts-node --version | awk 'NF' )
estuary --version
cargo-contract --version
taplo --version
- name: Info forklift
run: forklift version
- name: Info vars
run: |
echo "COMMIT_SHA: ${{ needs.preflight.outputs.COMMIT_SHA }}"
echo "COMMIT_SHA_SHORT: ${{ needs.preflight.outputs.COMMIT_SHA_SHORT }}"
echo "SOURCE_REF_SLUG: ${{ needs.preflight.outputs.SOURCE_REF_SLUG }}"
echo "REF_SLUG: ${{ needs.preflight.outputs.REF_SLUG }}"
echo "RUNNER: ${{ needs.preflight.outputs.RUNNER }}"
echo "IMAGE: ${{ needs.preflight.outputs.IMAGE }}"
#
echo "github.ref: ${{ github.ref }}"
echo "github.ref_name: ${{ github.ref_name }}"
echo "github.sha: ${{ github.sha }}"
\ No newline at end of file
......@@ -15,7 +15,6 @@ on:
jobs:
review-approvals:
runs-on: ubuntu-latest
environment: master
steps:
- name: Generate token
id: app_token
......@@ -30,7 +29,7 @@ jobs:
with:
artifact-name: pr_number
- name: "Evaluates PR reviews and assigns reviewers"
uses: paritytech/review-bot@v2.6.0
uses: paritytech/review-bot@v2.7.0
with:
repo-token: ${{ steps.app_token.outputs.token }}
team-token: ${{ steps.app_token.outputs.token }}
......
[
{
"name": "dev",
"package": "kitchensink-runtime",
"path": "substrate/frame",
"header": "substrate/HEADER-APACHE2",
"template": "substrate/.maintain/frame-weight-template.hbs",
"bench_features": "runtime-benchmarks",
"bench_flags": "--exclude-pallets=pallet_xcm,pallet_xcm_benchmarks::fungible,pallet_xcm_benchmarks::generic,pallet_nomination_pools,pallet_remark,pallet_transaction_storage,pallet_election_provider_multi_block,pallet_election_provider_multi_block::signed,pallet_election_provider_multi_block::unsigned,pallet_election_provider_multi_block::verifier",
"uri": null,
"is_relay": false
},
{
"name": "westend",
"package": "westend-runtime",
"path": "polkadot/runtime/westend",
"header": "polkadot/file_header.txt",
"template": "polkadot/xcm/pallet-xcm-benchmarks/template.hbs",
"bench_flags": "",
"bench_features": "runtime-benchmarks",
"uri": "wss://try-runtime-westend.polkadot.io:443",
"is_relay": true
},
{
"name": "rococo",
"package": "rococo-runtime",
"path": "polkadot/runtime/rococo",
"header": "polkadot/file_header.txt",
"template": "polkadot/xcm/pallet-xcm-benchmarks/template.hbs",
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://try-runtime-rococo.polkadot.io:443",
"is_relay": true
},
{
"name": "asset-hub-westend",
"package": "asset-hub-westend-runtime",
"path": "cumulus/parachains/runtimes/assets/asset-hub-westend",
"header": "cumulus/file_header.txt",
"template": "cumulus/templates/xcm-bench-template.hbs",
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://westend-asset-hub-rpc.polkadot.io:443",
"is_relay": false
},
{
"name": "asset-hub-rococo",
"package": "asset-hub-rococo-runtime",
"path": "cumulus/parachains/runtimes/assets/asset-hub-rococo",
"header": "cumulus/file_header.txt",
"template": "cumulus/templates/xcm-bench-template.hbs",
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://rococo-asset-hub-rpc.polkadot.io:443",
"is_relay": false
},
{
"name": "bridge-hub-rococo",
"package": "bridge-hub-rococo-runtime",
"path": "cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo",
"header": "cumulus/file_header.txt",
"template": "cumulus/templates/xcm-bench-template.hbs",
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://rococo-bridge-hub-rpc.polkadot.io:443",
"is_relay": false
},
{
"name": "bridge-hub-westend",
"package": "bridge-hub-westend-runtime",
"path": "cumulus/parachains/runtimes/bridge-hubs/bridge-hub-westend",
"header": "cumulus/file_header.txt",
"template": "cumulus/templates/xcm-bench-template.hbs",
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://westend-bridge-hub-rpc.polkadot.io:443",
"is_relay": false
},
{
"name": "collectives-westend",
"package": "collectives-westend-runtime",
"path": "cumulus/parachains/runtimes/collectives/collectives-westend",
"header": "cumulus/file_header.txt",
"template": "cumulus/templates/xcm-bench-template.hbs",
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://westend-collectives-rpc.polkadot.io:443",
"is_relay": false
},
{
"name": "coretime-rococo",
"package": "coretime-rococo-runtime",
"path": "cumulus/parachains/runtimes/coretime/coretime-rococo",
"header": "cumulus/file_header.txt",
"template": "cumulus/templates/xcm-bench-template.hbs",
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://rococo-coretime-rpc.polkadot.io:443",
"is_relay": false
},
{
"name": "coretime-westend",
"package": "coretime-westend-runtime",
"path": "cumulus/parachains/runtimes/coretime/coretime-westend",
"header": "cumulus/file_header.txt",
"template": "cumulus/templates/xcm-bench-template.hbs",
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://westend-coretime-rpc.polkadot.io:443",
"is_relay": false
},
{
"name": "glutton-westend",
"package": "glutton-westend-runtime",
"path": "cumulus/parachains/runtimes/glutton/glutton-westend",
"header": "cumulus/file_header.txt",
"template": "cumulus/templates/xcm-bench-template.hbs",
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": null,
"is_relay": false
},
{
"name": "people-rococo",
"package": "people-rococo-runtime",
"path": "cumulus/parachains/runtimes/people/people-rococo",
"header": "cumulus/file_header.txt",
"template": "cumulus/templates/xcm-bench-template.hbs",
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://rococo-people-rpc.polkadot.io:443",
"is_relay": false
},
{
"name": "people-westend",
"package": "people-westend-runtime",
"path": "cumulus/parachains/runtimes/people/people-westend",
"header": "cumulus/file_header.txt",
"template": "cumulus/templates/xcm-bench-template.hbs",
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://westend-people-rpc.polkadot.io:443",
"is_relay": false
}
]
name: EVM test suite
on:
push:
branches:
- master
pull_request:
types: [opened, synchronize, reopened, ready_for_review]
merge_group:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
isdraft:
uses: ./.github/workflows/reusable-isdraft.yml
preflight:
needs: isdraft
uses: ./.github/workflows/reusable-preflight.yml
evm-test-suite:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
if: ${{ needs.preflight.outputs.changes_rust }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
env:
# Enable debug assertions since we are running optimized builds for testing
# but still want to have debug assertions.
RUSTFLAGS: "-C debug-assertions"
RUST_BACKTRACE: 1
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
forklift cargo build --locked --profile production -p pallet-revive-eth-rpc --bin eth-rpc
forklift cargo build --bin substrate-node
- name: Checkout evm-tests
uses: actions/checkout@v4
with:
repository: paritytech/evm-test-suite
ref: 7762a35a380023a800d213b8ff98f3fb45500661
path: evm-test-suite
- uses: actions/setup-node@v4
with:
node-version: 22
- name: script
env:
# EVM tests don't work with batchSize 300 on self-hosted runners in a Docker container
BATCH_SIZE: 100
run: |
echo "Change to the evm-test-suite directory"
cd evm-test-suite
echo "Download the resolc binary"
wget https://github.com/paritytech/revive/releases/download/v0.1.0-dev.9/resolc -q
chmod +x resolc
mv resolc /usr/local/bin
resolc --version
echo "Check that binaries are in place"
export NODE_BIN_PATH=$(readlink -f ../target/debug/substrate-node)
export ETH_RPC_PATH=$(readlink -f ../target/production/eth-rpc)
export RESOLC_PATH=/usr/local/bin/resolc
echo $NODE_BIN_PATH $ETH_RPC_PATH $RESOLC_PATH
echo "Install npm dependencies"
npm install
# cat matter-labs-tests/hardhat.config.ts | grep batchSize
echo "Installing solc"
wget https://github.com/ethereum/solidity/releases/download/v0.8.28/solc-static-linux -q
chmod +x solc-static-linux
mv solc-static-linux /usr/local/bin/solc
echo "Run the tests"
echo "bash init.sh --kitchensink -- --matter-labs -- $NODE_BIN_PATH $ETH_RPC_PATH $RESOLC_PATH"
bash init.sh --kitchensink -- --matter-labs -- $NODE_BIN_PATH $ETH_RPC_PATH $RESOLC_PATH
- name: Collect tests results
if: always()
uses: actions/upload-artifact@v4
with:
name: evm-test-suite-${{ github.sha }}
path: evm-test-suite/test-logs/matter-labs-tests.log
confirm-required-test-evm-jobs-passed:
runs-on: ubuntu-latest
name: All EVM test jobs passed
# If any new job gets added, be sure to add it to this array
needs:
- evm-test-suite
if: always() && !cancelled()
steps:
- run: |
tee resultfile <<< '${{ toJSON(needs) }}'
FAILURES=$(cat resultfile | grep '"result": "failure"' | wc -l)
if [ $FAILURES -gt 0 ]; then
echo "### At least one required job failed ❌" >> $GITHUB_STEP_SUMMARY
exit 1
else
echo '### Good job! All the required jobs passed 🚀' >> $GITHUB_STEP_SUMMARY
fi
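# For reference, toJSON(needs) renders the needs context roughly as
#   { "evm-test-suite": { "result": "success", "outputs": {} } }
# so grepping for '"result": "failure"' counts how many of the required jobs failed.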
# GHA for test-linux-stable-int, test-linux-stable, test-linux-stable-oldkernel
name: tests linux stable coverage
on:
push:
branches:
- master
pull_request:
types: [opened, synchronize, reopened, ready_for_review, labeled]
merge_group:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
preflight:
uses: ./.github/workflows/reusable-preflight.yml
if: contains(github.event.label.name, 'GHA-coverage') || contains(github.event.pull_request.labels.*.name, 'GHA-coverage')
#
#
#
test-linux-stable-coverage:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 120
container:
image: ${{ needs.preflight.outputs.IMAGE }}
env:
RUST_TOOLCHAIN: stable
# Enable debug assertions since we are running optimized builds for testing
# but still want to have debug assertions.
#
# -Cinstrument-coverage slows everything down but it is necessary for code coverage
# https://doc.rust-lang.org/rustc/instrument-coverage.html
RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings -Cinstrument-coverage"
LLVM_PROFILE_FILE: "/__w/polkadot-sdk/polkadot-sdk/target/coverage/cargo-test-${{ matrix.ci_node_index }}-%p-%m.profraw"
strategy:
fail-fast: false
matrix:
ci_node_index: [1, 2, 3, 4, 5]
ci_node_total: [5]
steps:
- name: Checkout
uses: actions/checkout@v4
- run: rustup component add llvm-tools-preview
- run: cargo install cargo-llvm-cov
- run: mkdir -p target/coverage
# Some tests are excluded because they run very slowly or fail with -Cinstrument-coverage
- name: run tests
run: >
time cargo llvm-cov nextest
--no-report --release
--workspace
--locked --no-fail-fast
--features try-runtime,ci-only-tests,experimental
--filter-expr "
!test(/.*benchmark.*/)
- test(/recovers_from_only_chunks_if_pov_large::case_1/)
- test(/participation_requests_reprioritized_for_newly_included/)
- test(/availability_is_recovered_from_chunks_if_no_group_provided::case_1/)
- test(/rejects_missing_inherent_digest/)
- test(/availability_is_recovered_from_chunks_even_if_backing_group_supplied_if_chunks_only::case_1/)
- test(/availability_is_recovered_from_chunks_if_no_group_provided::case_2/)
- test(/all_security_features_work/)
- test(/nonexistent_cache_dir/)
- test(/recovers_from_only_chunks_if_pov_large::case_3/)
- test(/recovers_from_only_chunks_if_pov_large::case_2/)
- test(/authoring_blocks/)
- test(/rejects_missing_seals/)
- test(/generate_chain_spec/)
- test(/get_preset/)
- test(/list_presets/)
- test(/tests::receive_rate_limit_is_enforced/)
- test(/polkadot-availability-recovery/)
"
--partition count:${{ matrix.ci_node_index }}/${{ matrix.ci_node_total }}
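# --partition count:K/N makes nextest run only the K-th of N equally sized slices of the
# selected tests, so the matrix above (ci_node_index 1..5 with ci_node_total 5) spreads
# the coverage run across five parallel jobs that together execute every selected test
# exactly once.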
- name: generate report
run: cargo llvm-cov report --release --codecov --output-path coverage-${{ matrix.ci_node_index }}.lcov
- name: upload report
uses: actions/upload-artifact@v4
with:
name: coverage-report-${{ matrix.ci_node_index }}.lcov
path: coverage-${{ matrix.ci_node_index }}.lcov
#
#
# Upload to codecov
upload-reports:
needs: [test-linux-stable-coverage]
runs-on: ubuntu-latest
steps:
- name: Download artifacts
uses: actions/download-artifact@v4
with:
path: reports
pattern: coverage-report-*
merge-multiple: true
- run: ls -al reports/
- name: Upload to Codecov
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
directory: reports
root_dir: /__w/polkadot-sdk/polkadot-sdk/
#
#
#
remove-label:
runs-on: ubuntu-latest
needs: [upload-reports]
if: github.event_name == 'pull_request'
steps:
- uses: actions/checkout@v4
- uses: actions-ecosystem/action-remove-labels@v1
with:
labels: GHA-coverage
......@@ -6,42 +6,26 @@ on:
branches:
- master
pull_request:
types: [opened, synchronize, reopened, ready_for_review, labeled]
types: [opened, synchronize, reopened, ready_for_review]
merge_group:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
changes:
# TODO: remove once migration is complete or this workflow is fully stable
if: contains(github.event.label.name, 'GHA-migration')
permissions:
pull-requests: read
uses: ./.github/workflows/reusable-check-changed-files.yml
set-image:
# GitHub Actions allows using 'env' in a container context.
# However, env variables don't work for forks: https://github.com/orgs/community/discussions/44322
# This workaround sets the container image for each job using 'set-image' job output.
needs: changes
if: ${{ needs.changes.outputs.rust }}
runs-on: ubuntu-latest
outputs:
IMAGE: ${{ steps.set_image.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@v4
- id: set_image
run: cat .github/env >> $GITHUB_OUTPUT
isdraft:
uses: ./.github/workflows/reusable-isdraft.yml
preflight:
needs: isdraft
uses: ./.github/workflows/reusable-preflight.yml
test-linux-stable-int:
needs: [set-image, changes]
if: ${{ needs.changes.outputs.rust }}
runs-on: arc-runners-polkadot-sdk-beefy
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_rust }}
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.set-image.outputs.IMAGE }}
image: ${{ needs.preflight.outputs.IMAGE }}
env:
RUSTFLAGS: "-C debug-assertions -D warnings"
RUST_BACKTRACE: 1
......@@ -53,16 +37,23 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: script
id: required
run: WASM_BUILD_NO_COLOR=1 forklift cargo test -p staging-node-cli --release --locked -- --ignored
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
app-key: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_KEY }}
# https://github.com/paritytech/ci_cd/issues/864
test-linux-stable-runtime-benchmarks:
needs: [set-image, changes]
if: ${{ needs.changes.outputs.rust }}
runs-on: arc-runners-polkadot-sdk-beefy
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_rust }}
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.set-image.outputs.IMAGE }}
image: ${{ needs.preflight.outputs.IMAGE }}
env:
RUST_TOOLCHAIN: stable
# Enable debug assertions since we are running optimized builds for testing
......@@ -72,20 +63,31 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: forklift cargo nextest run --workspace --features runtime-benchmarks benchmark --locked --cargo-profile testnet
id: required
run: forklift cargo nextest run --workspace --features runtime-benchmarks benchmark --locked --cargo-profile testnet --cargo-quiet
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
app-key: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_KEY }}
test-linux-stable:
needs: [set-image, changes]
if: ${{ needs.changes.outputs.rust }}
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_rust }}
runs-on: ${{ matrix.runners }}
timeout-minutes: 60
strategy:
fail-fast: false
matrix:
partition: [1/3, 2/3, 3/3]
runners: [arc-runners-polkadot-sdk-beefy, oldlinux]
runners:
[
"${{ needs.preflight.outputs.RUNNER }}",
"${{ needs.preflight.outputs.RUNNER_OLDLINUX }}",
]
container:
image: ${{ needs.set-image.outputs.IMAGE }}
image: ${{ needs.preflight.outputs.IMAGE }}
# needed for tests that use unshare syscall
options: --security-opt seccomp=unconfined
env:
......@@ -97,6 +99,7 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: script
id: required
run: |
# Fixes "detected dubious ownership" error in the ci
git config --global --add safe.directory '*'
......@@ -105,12 +108,61 @@ jobs:
--locked \
--release \
--no-fail-fast \
--features try-runtime,experimental,riscv,ci-only-tests \
--cargo-quiet \
--features try-runtime,experimental,ci-only-tests \
--partition count:${{ matrix.partition }}
# run runtime-api tests with `enable-staging-api` feature on the 1st node
- name: runtime-api tests
if: ${{ matrix.partition == '1/3' }}
run: forklift cargo nextest run -p sp-api-test --features enable-staging-api
run: forklift cargo nextest run -p sp-api-test --features enable-staging-api --cargo-quiet
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
app-key: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_KEY }}
# some tests do not run with `try-runtime` feature enabled
# https://github.com/paritytech/polkadot-sdk/pull/4251#discussion_r1624282143
#
# all_security_features_work and nonexistent_cache_dir are currently skipped
# because runners don't have the necessary permissions to run them
test-linux-stable-no-try-runtime:
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_rust }}
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
strategy:
fail-fast: false
matrix:
partition: [1/2, 2/2]
env:
RUST_TOOLCHAIN: stable
# Enable debug assertions since we are running optimized builds for testing
# but still want to have debug assertions.
RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
id: required
run: |
forklift cargo nextest run --workspace \
--locked \
--release \
--no-fail-fast \
--cargo-quiet \
--features experimental,ci-only-tests \
--filter-expr " !test(/all_security_features_work/) - test(/nonexistent_cache_dir/)" \
--partition count:${{ matrix.partition }} \
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
app-key: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_KEY }}
confirm-required-jobs-passed:
runs-on: ubuntu-latest
......@@ -121,6 +173,16 @@ jobs:
test-linux-stable-int,
test-linux-stable-runtime-benchmarks,
test-linux-stable,
test-linux-stable-no-try-runtime,
]
if: always() && !cancelled()
steps:
- run: echo '### Good job! All the tests passed 🚀' >> $GITHUB_STEP_SUMMARY
- run: |
tee resultfile <<< '${{ toJSON(needs) }}'
FAILURES=$(cat resultfile | grep '"result": "failure"' | wc -l)
if [ $FAILURES -gt 0 ]; then
echo "### At least one required job failed ❌" >> $GITHUB_STEP_SUMMARY
exit 1
else
echo '### Good job! All the required jobs passed 🚀' >> $GITHUB_STEP_SUMMARY
fi
name: tests misc
on:
push:
branches:
- master
pull_request:
types: [opened, synchronize, reopened, ready_for_review]
merge_group:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
# Jobs in this workflow depend on each other only to limit the peak number of spawned workers
jobs:
isdraft:
uses: ./.github/workflows/reusable-isdraft.yml
preflight:
needs: isdraft
uses: ./.github/workflows/reusable-preflight.yml
# more information about this job can be found here:
# https://github.com/paritytech/substrate/pull/3778
test-full-crypto-feature:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
if: ${{ needs.preflight.outputs.changes_rust }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
env:
# Enable debug assertions since we are running optimized builds for testing
# but still want to have debug assertions.
RUSTFLAGS: "-C debug-assertions"
RUST_BACKTRACE: 1
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
cd substrate/primitives/core/
forklift cargo build --locked --no-default-features --features full_crypto
cd ../application-crypto
forklift cargo build --locked --no-default-features --features full_crypto
test-frame-examples-compile-to-wasm:
timeout-minutes: 20
# into one job
needs: [preflight, test-full-crypto-feature]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
if: ${{ needs.preflight.outputs.changes_rust }}
container:
image: ${{ needs.preflight.outputs.IMAGE }}
env:
# Enable debug assertions since we are running optimized builds for testing
# but still want to have debug assertions.
RUSTFLAGS: "-C debug-assertions"
RUST_BACKTRACE: 1
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
cd substrate/frame/examples/offchain-worker/
forklift cargo build --locked --target=wasm32-unknown-unknown --no-default-features
cd ../basic
forklift cargo build --locked --target=wasm32-unknown-unknown --no-default-features
test-frame-ui:
timeout-minutes: 60
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
if: ${{ needs.preflight.outputs.changes_rust }}
container:
image: ${{ needs.preflight.outputs.IMAGE }}
env:
# Enable debug assertions since we are running optimized builds for testing
# but still want to have debug assertions.
RUSTFLAGS: "-C debug-assertions -D warnings"
RUST_BACKTRACE: 1
SKIP_WASM_BUILD: 1
# Ensure we run the UI tests.
RUN_UI_TESTS: 1
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
cargo version
forklift cargo test --locked -q --profile testnet -p frame-support-test --features=frame-feature-testing,no-metadata-docs,try-runtime,experimental ui
forklift cargo test --locked -q --profile testnet -p frame-support-test --features=frame-feature-testing,frame-feature-testing-2,no-metadata-docs,try-runtime,experimental ui
forklift cargo test --locked -q --profile testnet -p xcm-procedural ui
forklift cargo test --locked -q --profile testnet -p frame-election-provider-solution-type ui
forklift cargo test --locked -q --profile testnet -p sp-api-test ui
# There are multiple versions of sp-runtime-interface in the repo, so we point to the manifest.
forklift cargo test --locked -q --profile testnet --manifest-path substrate/primitives/runtime-interface/Cargo.toml ui
test-deterministic-wasm:
timeout-minutes: 20
needs: [preflight, test-frame-examples-compile-to-wasm]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
if: ${{ needs.preflight.outputs.changes_rust }}
container:
image: ${{ needs.preflight.outputs.IMAGE }}
env:
WASM_BUILD_NO_COLOR: 1
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
# build runtime
forklift cargo build -q --locked --release -p westend-runtime -p rococo-runtime
# make checksum
sha256sum target/release/wbuild/*-runtime/target/wasm32-unknown-unknown/release/*.wasm > checksum.sha256
cargo clean
# build again
forklift cargo build -q --locked --release -p westend-runtime -p rococo-runtime
# confirm checksum
sha256sum -c checksum.sha256
cargo-check-benches:
needs: [preflight]
if: ${{ github.event_name == 'pull_request' || github.event_name == 'merge_group' }}
timeout-minutes: 60
strategy:
matrix:
branch: [master, current]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
container:
image: ${{ needs.preflight.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
# if branch is master, use the branch, otherwise set empty string, so it uses the current context
# either PR (including forks) or merge group (main repo)
ref: ${{ matrix.branch == 'master' && matrix.branch || '' }}
- name: script
run: |
ARTIFACTS_DIR=./artifacts
BENCH_TRIE_READ=::trie::read::small
BENCH_NODE_IMPORT=::node::import::sr25519::transfer_keep_alive::paritydb::small
mkdir -p $ARTIFACTS_DIR
SKIP_WASM_BUILD=1 forklift cargo check --locked --benches --all;
forklift cargo run --locked --release -p node-bench -- $BENCH_TRIE_READ --json | tee $ARTIFACTS_DIR/bench_trie_read_small.json;
forklift cargo run --locked --release -p node-bench -- $BENCH_NODE_IMPORT --json | tee $ARTIFACTS_DIR/bench_transfer_keep_alive.json
- name: Upload artifacts
uses: actions/upload-artifact@v4.3.6
with:
path: ./artifacts
name: cargo-check-benches-${{ matrix.branch }}-${{ github.sha }}
retention-days: 1
node-bench-regression-guard:
timeout-minutes: 20
if: always() && !cancelled()
runs-on: ubuntu-latest
needs: [preflight, cargo-check-benches]
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Download artifact (master run)
uses: actions/download-artifact@v4.1.8
continue-on-error: true
with:
name: cargo-check-benches-master-${{ github.sha }}
path: ./artifacts/master
- name: Download artifact (current run)
uses: actions/download-artifact@v4.1.8
continue-on-error: true
with:
name: cargo-check-benches-current-${{ github.sha }}
path: ./artifacts/current
- name: script
id: compare
run: |
if [ "${{ github.ref_name }}" = "master" ]; then
echo -e "Exiting on master branch"
exit 0
fi
# fail if no artifacts
if [ ! -d ./artifacts/master ] || [ ! -d ./artifacts/current ]; then
echo "No artifacts found"
exit 1
fi
docker run --rm \
-v $PWD/artifacts/master:/artifacts/master \
-v $PWD/artifacts/current:/artifacts/current \
paritytech/node-bench-regression-guard:latest \
node-bench-regression-guard --reference /artifacts/master --compare-with /artifacts/current
if [ $? -ne 0 ]; then
FAILED_MSG='### node-bench-regression-guard failed ❌, check the regression in *cargo-check-benches* job'
echo $FAILED_MSG
echo $FAILED_MSG >> $GITHUB_STEP_SUMMARY
exit 1
else
echo "### node-bench-regression-guard passed ✅" >> $GITHUB_STEP_SUMMARY
fi
test-node-metrics:
needs: [preflight]
timeout-minutes: 30
runs-on: ${{ needs.preflight.outputs.RUNNER }}
if: ${{ needs.preflight.outputs.changes_rust }}
container:
image: ${{ needs.preflight.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Run tests
id: tests
env:
RUST_TOOLCHAIN: stable
# Enable debug assertions since we are running optimized builds for testing
# but still want to have debug assertions.
RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings"
run: |
forklift cargo build --bin polkadot-execute-worker --bin polkadot-prepare-worker --profile testnet --verbose --locked
mkdir -p ./artifacts
forklift cargo test --profile testnet --locked --features=runtime-metrics -p polkadot-node-metrics > ./artifacts/log.txt
echo "Metrics test passed"
- name: Upload artifacts if failed
if: ${{ steps.tests.outcome != 'success' }}
uses: actions/upload-artifact@v4.3.6
with:
name: node-metrics-failed
path: ./artifacts
# more information about this job can be found here:
# https://github.com/paritytech/substrate/pull/6916
check-tracing:
timeout-minutes: 20
needs: [preflight, test-node-metrics]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
if: ${{ needs.preflight.outputs.changes_rust }}
container:
image: ${{ needs.preflight.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
forklift cargo test --locked --manifest-path ./substrate/primitives/tracing/Cargo.toml --no-default-features
forklift cargo test --locked --manifest-path ./substrate/primitives/tracing/Cargo.toml --no-default-features --features=with-tracing
check-metadata-hash:
timeout-minutes: 20
needs: [preflight, check-tracing]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
if: ${{ needs.preflight.outputs.changes_rust }}
container:
image: ${{ needs.preflight.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
forklift cargo build --locked -p westend-runtime --features metadata-hash
# disabled until https://github.com/paritytech/polkadot-sdk/issues/5812 is resolved
# cargo-hfuzz:
# timeout-minutes: 20
# needs: [preflight, check-metadata-hash]
# runs-on: ${{ needs.preflight.outputs.RUNNER }}
# container:
# image: ${{ needs.preflight.outputs.IMAGE }}
# env:
# # max 10s per iteration, 60s per file
# HFUZZ_RUN_ARGS: |
# --exit_upon_crash
# --exit_code_upon_crash 1
# --timeout 10
# --run_time 60
# # use git version of honggfuzz-rs until v0.5.56 is out, we need a few recent changes:
# # https://github.com/rust-fuzz/honggfuzz-rs/pull/75 to avoid breakage on debian
# # https://github.com/rust-fuzz/honggfuzz-rs/pull/81 fix to the above pr
# # https://github.com/rust-fuzz/honggfuzz-rs/pull/82 fix for handling absolute CARGO_TARGET_DIR
# HFUZZ_BUILD_ARGS: |
# --config=patch.crates-io.honggfuzz.git="https://github.com/altaua/honggfuzz-rs"
# --config=patch.crates-io.honggfuzz.rev="205f7c8c059a0d98fe1cb912cdac84f324cb6981"
# steps:
# - name: Checkout
# uses: actions/checkout@v4
# - name: Run honggfuzz
# run: |
# cd substrate/primitives/arithmetic/fuzzer
# forklift cargo hfuzz build
# for target in $(cargo read-manifest | jq -r '.targets | .[] | .name');
# do
# forklift cargo hfuzz run "$target" || { printf "fuzzing failure for %s\n" "$target"; exit 1; };
# done
# - name: Upload artifacts
# uses: actions/upload-artifact@v4.3.6
# with:
# path: substrate/primitives/arithmetic/fuzzer/hfuzz_workspace/
# name: hfuzz-${{ github.sha }}
cargo-check-each-crate:
timeout-minutes: 70
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
if: ${{ needs.preflight.outputs.changes_rust }}
container:
image: ${{ needs.preflight.outputs.IMAGE }}
env:
RUSTFLAGS: "-D warnings"
CI_JOB_NAME: cargo-check-each-crate
strategy:
matrix:
index: [1, 2, 3, 4, 5, 6, 7] # 7 parallel jobs
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Check Rust
run: |
rustup show
rustup +nightly show
- name: script
run: |
mkdir -p /github/home/.forklift
cp .forklift/config.toml /github/home/.forklift/config.toml
PYTHONUNBUFFERED=x .github/scripts/check-each-crate.py ${{ matrix.index }} ${{ strategy.job-total }}
cargo-check-all-crate-macos:
timeout-minutes: 30
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER_MACOS }}
if: ${{ needs.preflight.outputs.changes_rust }}
env:
SKIP_WASM_BUILD: 1
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Set rust version from env file
run: |
RUST_VERSION=$(cat .github/env | sed -E 's/.*ci-unified:([^-]+)-([^-]+).*/\2/')
echo $RUST_VERSION
echo "RUST_VERSION=${RUST_VERSION}" >> $GITHUB_ENV
- name: Set up Homebrew
uses: Homebrew/actions/setup-homebrew@1ccc07ccd54b6048295516a3eb89b192c35057dc # master from 12.09.2024
- name: Install rust ${{ env.RUST_VERSION }}
uses: actions-rust-lang/setup-rust-toolchain@11df97af8e8102fd60b60a77dfbf58d40cd843b8 # v1.10.1
with:
cache: false
toolchain: ${{ env.RUST_VERSION }}
target: wasm32-unknown-unknown
components: cargo, clippy, rust-docs, rust-src, rustfmt, rustc, rust-std
- name: Install protobuf
run: brew install protobuf
- name: cargo info
run: |
echo "######## rustup show ########"
rustup show
echo "######## cargo --version ########"
cargo --version
- name: Run cargo check
run: cargo check --workspace --locked
confirm-required-test-misc-jobs-passed:
runs-on: ubuntu-latest
name: All test misc tests passed
# If any new job gets added, be sure to add it to this array
needs:
- test-full-crypto-feature
- test-frame-examples-compile-to-wasm
- test-frame-ui
- cargo-check-benches
- node-bench-regression-guard
- test-node-metrics
- check-tracing
- cargo-check-each-crate
- test-deterministic-wasm
- cargo-check-all-crate-macos
# - cargo-hfuzz removed from required for now, as it's flaky
if: always() && !cancelled()
steps:
- run: |
tee resultfile <<< '${{ toJSON(needs) }}'
FAILURES=$(cat resultfile | grep '"result": "failure"' | wc -l)
if [ $FAILURES -gt 0 ]; then
echo "### At least one required job failed ❌" >> $GITHUB_STEP_SUMMARY
exit 1
else
echo '### Good job! All the required jobs passed 🚀' >> $GITHUB_STEP_SUMMARY
fi
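# A sketch (assumed shape) of the `needs` JSON written to `resultfile` above, so the
# grep on '"result": "failure"' is easier to follow; job names and results are illustrative only:
#
#   {
#     "test-node-metrics": { "result": "success", "outputs": {} },
#     "check-tracing": { "result": "failure", "outputs": {} }
#   }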
......@@ -5,38 +5,27 @@ on:
branches:
- master
pull_request:
types: [ opened, synchronize, reopened, ready_for_review ]
types: [opened, synchronize, reopened, ready_for_review]
merge_group:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
changes:
permissions:
pull-requests: read
uses: ./.github/workflows/reusable-check-changed-files.yml
set-image:
# GitHub Actions allows using 'env' in a container context.
# However, env variables don't work for forks: https://github.com/orgs/community/discussions/44322
# This workaround sets the container image for each job using 'set-image' job output.
runs-on: ubuntu-latest
outputs:
IMAGE: ${{ steps.set_image.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@v4
- id: set_image
run: cat .github/env >> $GITHUB_OUTPUT
isdraft:
uses: ./.github/workflows/reusable-isdraft.yml
preflight:
needs: isdraft
uses: ./.github/workflows/reusable-preflight.yml
# This job runs all benchmarks defined in the `/bin/node/runtime` once to check that there are no errors.
quick-benchmarks:
needs: [ set-image, changes ]
if: ${{ needs.changes.outputs.rust }}
runs-on: arc-runners-polkadot-sdk-beefy
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_rust }}
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.set-image.outputs.IMAGE }}
image: ${{ needs.preflight.outputs.IMAGE }}
env:
RUSTFLAGS: "-C debug-assertions -D warnings"
RUST_BACKTRACE: "full"
......@@ -46,16 +35,16 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: time forklift cargo run --locked --release -p staging-node-cli --bin substrate-node --features runtime-benchmarks -- benchmark pallet --chain dev --pallet "*" --extrinsic "*" --steps 2 --repeat 1 --quiet
run: forklift cargo run --locked --release -p staging-node-cli --bin substrate-node --features runtime-benchmarks --quiet -- benchmark pallet --chain dev --pallet "*" --exclude-pallets=pallet_election_provider_multi_block,pallet_election_provider_multi_block::signed,pallet_election_provider_multi_block::unsigned,pallet_election_provider_multi_block::verifier --extrinsic "*" --steps 2 --repeat 1 --quiet
# cf https://github.com/paritytech/polkadot-sdk/issues/1652
test-syscalls:
needs: [ set-image, changes ]
if: ${{ needs.changes.outputs.rust }}
runs-on: arc-runners-polkadot-sdk-beefy
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_rust }}
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.set-image.outputs.IMAGE }}
image: ${{ needs.preflight.outputs.IMAGE }}
continue-on-error: true # this rarely triggers in practice
env:
SKIP_WASM_BUILD: 1
......@@ -63,28 +52,28 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: script
id: test
run: |
forklift cargo build --locked --profile production --target x86_64-unknown-linux-musl --bin polkadot-execute-worker --bin polkadot-prepare-worker
forklift cargo build --locked --profile production --target x86_64-unknown-linux-musl --bin polkadot-execute-worker --bin polkadot-prepare-worker --quiet
cd polkadot/scripts/list-syscalls
./list-syscalls.rb ../../../target/x86_64-unknown-linux-musl/production/polkadot-execute-worker --only-used-syscalls | diff -u execute-worker-syscalls -
./list-syscalls.rb ../../../target/x86_64-unknown-linux-musl/production/polkadot-prepare-worker --only-used-syscalls | diff -u prepare-worker-syscalls -
# todo:
# after_script:
# - if [[ "$CI_JOB_STATUS" == "failed" ]]; then
# printf "The x86_64 syscalls used by the worker binaries have changed. Please review if this is expected and update polkadot/scripts/list-syscalls/*-worker-syscalls as needed.\n";
# fi
- name: on_failure
if: failure() && steps.test.outcome == 'failure'
run: |
echo "The x86_64 syscalls used by the worker binaries have changed. Please review if this is expected and update polkadot/scripts/list-syscalls/*-worker-syscalls as needed." >> $GITHUB_STEP_SUMMARY
cargo-check-all-benches:
needs: [ set-image, changes ]
if: ${{ needs.changes.outputs.rust }}
runs-on: arc-runners-polkadot-sdk-beefy
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_rust }}
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.set-image.outputs.IMAGE }}
image: ${{ needs.preflight.outputs.IMAGE }}
env:
SKIP_WASM_BUILD: 1
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: time forklift cargo check --all --benches
run: forklift cargo check --all --benches --quiet
# Reusable workflow to set various useful variables
# and to perform checks and generate conditions for other workflows.
# Currently it checks if any Rust (build-related) file is changed
# and if the current (caller) workflow file is changed.
# Example:
#
# jobs:
# preflight:
# uses: ./.github/workflows/reusable-preflight.yml
# some-job:
# needs: [preflight]
# if: ${{ needs.preflight.outputs.changes_rust }}
# .......
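# A slightly fuller caller sketch (job names are illustrative; the outputs used
# below are the ones declared in this file):
#
# jobs:
#   preflight:
#     uses: ./.github/workflows/zombienet-reusable-preflight.yml
#   some-zombienet-test:
#     needs: [preflight]
#     if: ${{ needs.preflight.outputs.changes_substrate }}
#     runs-on: ${{ needs.preflight.outputs.ZOMBIENET_RUNNER }}
#     container:
#       image: ${{ needs.preflight.outputs.ZOMBIENET_IMAGE }}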
name: Zombienet Preflight
on:
workflow_call:
# Map the workflow outputs to job outputs
outputs:
changes_substrate:
value: ${{ jobs.preflight.outputs.changes_substrate }}
description: |
True iff there are changes in the substrate directory or the current workflow
changes_cumulus:
value: ${{ jobs.preflight.outputs.changes_cumulus }}
description: |
True iff there are changes in the cumulus directory or the current workflow
changes_polkadot:
value: ${{ jobs.preflight.outputs.changes_polkadot }}
description: |
True iff there are changes in the polkadot directory or the current workflow
changes_bridges:
value: ${{ jobs.preflight.outputs.changes_bridges }}
description: |
True iff there are changes in the bridges directory or the current workflow
changes_templates:
value: ${{ jobs.preflight.outputs.changes_templates }}
description: |
True iff there are changes in the templates directory or the current workflow
ZOMBIENET_IMAGE:
value: ${{ jobs.preflight.outputs.ZOMBIENET_IMAGE }}
description: "ZOMBIENET CI image"
ZOMBIENET_RUNNER:
value: ${{ jobs.preflight.outputs.ZOMBIENET_RUNNER }}
description: |
Main runner for zombienet tests.
TEMP_IMAGES_BASE:
value: ${{ jobs.preflight.outputs.TEMP_IMAGES_BASE }}
description: |
Base location for 'temp' images used in tests.
DOCKER_IMAGES_VERSION:
value: ${{ jobs.preflight.outputs.DOCKER_IMAGES_VERSION }}
description: |
Version for temp docker images.
SOURCE_REF_SLUG:
value: ${{ jobs.preflight.outputs.SOURCE_REF_SLUG }}
BUILD_RUN_ID:
value: ${{ jobs.wait_build_images.outputs.BUILD_RUN_ID }}
description: |
Id of the build run, needed to download the artifacts.
# Zombie vars
PUSHGATEWAY_URL:
value: ${{ jobs.preflight.outputs.PUSHGATEWAY_URL }}
description: "Gateway (url) to push metrics related to test."
DEBUG:
value: ${{ jobs.preflight.outputs.DEBUG }}
description: "Debug value to zombienet v1 tests."
ZOMBIE_PROVIDER:
value: ${{ jobs.preflight.outputs.ZOMBIE_PROVIDER }}
description: "Provider to use in zombienet-sdk tests."
RUST_LOG:
value: ${{ jobs.preflight.outputs.RUST_LOG }}
description: "Log value to use in zombinet-sdk tests."
RUN_IN_CI:
value: ${{ jobs.preflight.outputs.RUN_IN_CI }}
description: "Internal flag to make zombienet aware of the env."
KUBERNETES_CPU_REQUEST:
value: ${{ jobs.preflight.outputs.KUBERNETES_CPU_REQUEST }}
description: "Base cpu (request) for pod runner."
KUBERNETES_MEMORY_REQUEST:
value: ${{ jobs.preflight.outputs.KUBERNETES_MEMORY_REQUEST }}
description: "Base memory (request) for pod runner."
jobs:
#
#
#
preflight:
runs-on: ubuntu-latest
outputs:
changes_substrate: ${{ steps.set_changes.outputs.substrate_any_changed || steps.set_changes.outputs.currentWorkflow_any_changed }}
changes_cumulus: ${{ steps.set_changes.outputs.cumulus_any_changed || steps.set_changes.outputs.currentWorkflow_any_changed }}
changes_polkadot: ${{ steps.set_changes.outputs.polkadot_any_changed || steps.set_changes.outputs.currentWorkflow_any_changed }}
changes_bridges: ${{ steps.set_changes.outputs.bridges_any_changed || steps.set_changes.outputs.currentWorkflow_any_changed }}
changes_templates: ${{ steps.set_changes.outputs.templates_any_changed || steps.set_changes.outputs.currentWorkflow_any_changed }}
ZOMBIENET_IMAGE: ${{ steps.set_vars.outputs.ZOMBIENET_IMAGE }}
ZOMBIENET_RUNNER: ${{ steps.set_vars.outputs.ZOMBIENET_RUNNER }}
TEMP_IMAGES_BASE: ${{ steps.set_vars.outputs.TEMP_IMAGES_BASE }}
# images versions
DOCKER_IMAGES_VERSION: ${{ steps.set_images_version.outputs.DOCKER_IMAGES_VERSION }}
# common vars
PUSHGATEWAY_URL: ${{ steps.set_vars.outputs.PUSHGATEWAY_URL }}
SOURCE_REF_SLUG: ${{ steps.set_vars.outputs.SOURCE_REF_SLUG }}
DEBUG: ${{ steps.set_vars.outputs.DEBUG }}
ZOMBIE_PROVIDER: ${{ steps.set_vars.outputs.ZOMBIE_PROVIDER }}
RUST_LOG: ${{ steps.set_vars.outputs.RUST_LOG }}
RUN_IN_CI: ${{ steps.set_vars.outputs.RUN_IN_CI }}
KUBERNETES_CPU_REQUEST: ${{ steps.set_vars.outputs.KUBERNETES_CPU_REQUEST }}
KUBERNETES_MEMORY_REQUEST: ${{ steps.set_vars.outputs.KUBERNETES_MEMORY_REQUEST }}
steps:
- uses: actions/checkout@v4
#
# Set changes
#
- name: Current file
id: current_file
shell: bash
run: |
echo "currentWorkflowFile=$(echo ${{ github.workflow_ref }} | sed -nE "s/.*(\.github\/workflows\/[a-zA-Z0-9_-]*\.y[a]?ml)@refs.*/\1/p")" >> $GITHUB_OUTPUT
echo "currentActionDir=$(echo ${{ github.action_path }} | sed -nE "s/.*(\.github\/actions\/[a-zA-Z0-9_-]*)/\1/p")" >> $GITHUB_OUTPUT
- name: Set changes
id: set_changes
uses: tj-actions/changed-files@v45
with:
files_yaml: |
substrate:
- 'substrate/**/*'
cumulus:
- 'cumulus/**/*'
polkadot:
- 'polkadot/**/*'
bridges:
- 'bridges/**/*'
templates:
- 'templates/**/*'
currentWorkflow:
- '${{ steps.current_file.outputs.currentWorkflowFile }}'
- '.github/workflows/zombienet-reusable-preflight.yml'
- '.github/zombienet-env'
#
# Set environment vars (including runner/image)
#
- name: Set vars
id: set_vars
shell: bash
run: |
export SOURCE_REF_NAME=${{ github.head_ref || github.ref_name }}
echo "SOURCE_REF_SLUG=${SOURCE_REF_NAME//\//_}" >> $GITHUB_OUTPUT
#
cat .github/zombienet-env >> $GITHUB_OUTPUT
cat .github/zombienet-env
#
#
#
- name: Set docker images version
id: set_images_version
shell: bash
run: |
export DOCKER_IMAGES_VERSION=${{ github.event.pull_request.head.sha }}
if [[ ${{ github.event_name }} == "merge_group" ]]; then export DOCKER_IMAGES_VERSION="${GITHUB_SHA::8}"; fi
echo "DOCKER_IMAGES_VERSION=${DOCKER_IMAGES_VERSION}" >> $GITHUB_OUTPUT
- name: log
shell: bash
run: |
echo "workflow file: ${{ steps.current_file.outputs.currentWorkflowFile }}"
echo "Modified: ${{ steps.set_changes.outputs.modified_keys }}"
echo "ZOMBIENET_IMAGE: ${{ steps.set_vars.outputs.ZOMBIENET_IMAGE }}"
#
#
#
ci-env:
needs: [preflight]
runs-on: ubuntu-latest
steps:
- name: Info vars
run: |
echo "ZOMBIENET_IMAGE: ${{ needs.preflight.outputs.ZOMBIENET_IMAGE }}"
echo "ZOMBIENET_RUNNER: ${{ needs.preflight.outputs.ZOMBIENET_RUNNER }}"
echo "DOCKER_IMAGES_VERSION: ${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
echo "SOURCE_REF_SLUG: ${{ needs.preflight.outputs.SOURCE_REF_SLUG }}"
echo "PUSHGATEWAY_URL: ${{ needs.preflight.outputs.PUSHGATEWAY_URL }}"
echo "DEBUG: ${{ needs.preflight.outputs.DEBUG }}"
echo "ZOMBIE_PROVIDER: ${{ needs.preflight.outputs.ZOMBIE_PROVIDER }}"
echo "RUST_LOG: ${{ needs.preflight.outputs.RUST_LOG }}"
echo "RUN_IN_CI: ${{ needs.preflight.outputs.RUN_IN_CI }}"
echo "KUBERNETES_CPU_REQUEST: ${{ needs.preflight.outputs.KUBERNETES_CPU_REQUEST }}"
echo "KUBERNETES_MEMORY_REQUEST: ${{ needs.preflight.outputs.KUBERNETES_MEMORY_REQUEST }}"
#
echo "github.ref: ${{ github.ref }}"
echo "github.ref_name: ${{ github.ref_name }}"
echo "github.sha: ${{ github.sha }}"
#
#
# Wait until the "Build and push images" workflow succeeds
wait_build_images:
needs: [ci-env]
runs-on: ubuntu-latest
timeout-minutes: 90
outputs:
BUILD_RUN_ID: ${{ steps.wait_build.outputs.BUILD_RUN_ID }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Wait until "Build and push images" workflow is done
id: wait_build
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
gh --version
export SHA=${{ github.event.pull_request.head.sha }}
if [[ ${{ github.event_name }} == "merge_group" ]]; then export SHA="${GITHUB_SHA::8}"; fi
while true; do
STATUS=$(gh run ls -c $SHA -w "Build and push images" --json name,status --jq '.[] | select(.name == "Build and push images") | .status')
echo "CI workflow status: $STATUS"
if [[ $STATUS == "completed" ]]; then
echo "CI workflow is done."
break
fi
if [[ -z "$STATUS" ]]; then
echo "::warning::No CI workflow runs found for this commit"
exit 1
fi
sleep 10
done
# check if the build succeeded
RUN_INFO=($(gh run ls -c $SHA -w "Build and push images" --json name,conclusion,databaseId --jq '.[] | select(.name == "Build and push images") | .conclusion, .databaseId'))
CONCLUSION=${RUN_INFO[@]:0:1}
BUILD_RUN_ID=${RUN_INFO[@]:1:1}
if [[ $CONCLUSION == "success" ]]; then
echo "CI workflow succeeded. (build run_id: ${BUILD_RUN_ID})"
echo "BUILD_RUN_ID=${BUILD_RUN_ID}" >> $GITHUB_OUTPUT
else
echo "::warning:: CI workflow ('Build and push images') fails with conclusion: $CONCLUSION"
exit 1
fi;
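# A sketch of how a downstream zombienet job consumes BUILD_RUN_ID (it mirrors the
# download steps used later in the zombienet workflows; the artifact name is illustrative):
#
#   - uses: actions/download-artifact@v4.1.8
#     with:
#       name: build-test-parachain-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
#       github-token: ${{ secrets.GITHUB_TOKEN }}
#       run-id: ${{ needs.preflight.outputs.BUILD_RUN_ID }}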
name: Zombienet Cumulus
on:
push:
branches:
- master
pull_request:
types: [opened, synchronize, reopened, ready_for_review]
merge_group:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
env:
RUN_IN_CONTAINER: 1
FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: 1
LOCAL_DIR: "./cumulus/zombienet/tests"
GHA_CLUSTER_SERVER_ADDR: "https://kubernetes.default:443"
# only run if we have changes in the [substrate, cumulus, polkadot] directories or this workflow.
jobs:
isdraft:
uses: ./.github/workflows/reusable-isdraft.yml
preflight:
needs: isdraft
uses: ./.github/workflows/zombienet-reusable-preflight.yml
zombienet-cumulus-0001-sync_blocks_from_tip_without_connected_collator:
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_substrate || needs.preflight.outputs.changes_cumulus || needs.preflight.outputs.changes_polkadot }}
runs-on: ${{ needs.preflight.outputs.ZOMBIENET_RUNNER }} # NOTE: should be zombienet-arc-runner (without quotes)
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.ZOMBIENET_IMAGE }}
env:
RELAY_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/polkadot-debug:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
COL_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/test-parachain:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
echo "RELAY_IMAGE: $RELAY_IMAGE"
echo "COL_IMAGE: $COL_IMAGE"
export DEBUG=${{ needs.preflight.outputs.DEBUG }}
/home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh \
--local-dir="$(pwd)/$LOCAL_DIR" \
--concurrency=1 \
--test="0001-sync_blocks_from_tip_without_connected_collator.zndsl"
- name: upload logs
uses: actions/upload-artifact@v4
with:
name: zombienet-logs-${{ github.job }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
zombienet-cumulus-0002-pov_recovery:
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_substrate || needs.preflight.outputs.changes_cumulus || needs.preflight.outputs.changes_polkadot }}
runs-on: ${{ needs.preflight.outputs.ZOMBIENET_RUNNER }} # NOTE: should be zombienet-arc-runner (without quotes)
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.ZOMBIENET_IMAGE }}
env:
RELAY_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/polkadot-debug:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
COL_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/test-parachain:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
echo "RELAY_IMAGE: $RELAY_IMAGE"
echo "COL_IMAGE: $COL_IMAGE"
export DEBUG=${{ needs.preflight.outputs.DEBUG }}
/home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh \
--local-dir="$(pwd)/$LOCAL_DIR" \
--concurrency=1 \
--test="0002-pov_recovery.zndsl"
- name: upload logs
uses: actions/upload-artifact@v4
with:
name: zombienet-logs-${{ github.job }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
zombienet-cumulus-0003-full_node_catching_up:
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_substrate || needs.preflight.outputs.changes_cumulus || needs.preflight.outputs.changes_polkadot }}
runs-on: ${{ needs.preflight.outputs.ZOMBIENET_RUNNER }} # NOTE: should be zombienet-arc-runner (without quotes)
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.ZOMBIENET_IMAGE }}
env:
RELAY_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/polkadot-debug:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
COL_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/test-parachain:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
echo "RELAY_IMAGE: $RELAY_IMAGE"
echo "COL_IMAGE: $COL_IMAGE"
export DEBUG=${{ needs.preflight.outputs.DEBUG }}
/home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh \
--local-dir="$(pwd)/$LOCAL_DIR" \
--concurrency=1 \
--test="0003-full_node_catching_up.zndsl"
- name: upload logs
uses: actions/upload-artifact@v4
with:
name: zombienet-logs-${{ github.job }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
zombienet-cumulus-0004-runtime_upgrade:
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_substrate || needs.preflight.outputs.changes_cumulus || needs.preflight.outputs.changes_polkadot }}
runs-on: ${{ needs.preflight.outputs.ZOMBIENET_RUNNER }} # NOTE: should be zombienet-arc-runner (without quotes)
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.ZOMBIENET_IMAGE }}
env:
RELAY_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/polkadot-debug:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
COL_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/test-parachain:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: actions/download-artifact@v4.1.8
with:
name: build-test-parachain-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ needs.preflight.outputs.BUILD_RUN_ID }}
- name: tar
run: tar -xvf artifacts.tar
- name: script
run: |
echo "RELAY_IMAGE: $RELAY_IMAGE"
echo "COL_IMAGE: $COL_IMAGE"
ls -ltr *
cp ./artifacts/zombienet/wasm_binary_spec_version_incremented.rs.compact.compressed.wasm /tmp/
ls /tmp
export DEBUG=${{ needs.preflight.outputs.DEBUG }}
/home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh \
--local-dir="$(pwd)/$LOCAL_DIR" \
--concurrency=1 \
--test="0004-runtime_upgrade.zndsl"
- name: upload logs
uses: actions/upload-artifact@v4
with:
name: zombienet-logs-${{ github.job }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
zombienet-cumulus-0005-migrate_solo_to_para:
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_substrate || needs.preflight.outputs.changes_cumulus || needs.preflight.outputs.changes_polkadot }}
runs-on: ${{ needs.preflight.outputs.ZOMBIENET_RUNNER }} # NOTE: should be zombienet-arc-runner (without quotes)
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.ZOMBIENET_IMAGE }}
env:
RELAY_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/polkadot-debug:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
COL_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/test-parachain:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
echo "RELAY_IMAGE: $RELAY_IMAGE"
echo "COL_IMAGE: $COL_IMAGE"
export DEBUG=${{ needs.preflight.outputs.DEBUG }}
/home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh \
--local-dir="$(pwd)/$LOCAL_DIR" \
--concurrency=1 \
--test="0005-migrate_solo_to_para.zndsl"
- name: upload logs
uses: actions/upload-artifact@v4
with:
name: zombienet-logs-${{ github.job }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
zombienet-cumulus-0006-rpc_collator_builds_blocks:
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_substrate || needs.preflight.outputs.changes_cumulus || needs.preflight.outputs.changes_polkadot }}
runs-on: ${{ needs.preflight.outputs.ZOMBIENET_RUNNER }} # NOTE: should be zombienet-arc-runner (without quotes)
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.ZOMBIENET_IMAGE }}
env:
RELAY_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/polkadot-debug:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
COL_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/test-parachain:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
echo "RELAY_IMAGE: $RELAY_IMAGE"
echo "COL_IMAGE: $COL_IMAGE"
export DEBUG=${{ needs.preflight.outputs.DEBUG }}
/home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh \
--local-dir="$(pwd)/$LOCAL_DIR" \
--concurrency=1 \
--test="0006-rpc_collator_builds_blocks.zndsl"
- name: upload logs
uses: actions/upload-artifact@v4
with:
name: zombienet-logs-${{ github.job }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
zombienet-cumulus-0007-full_node_warp_sync:
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_substrate || needs.preflight.outputs.changes_cumulus || needs.preflight.outputs.changes_polkadot }}
runs-on: ${{ needs.preflight.outputs.ZOMBIENET_RUNNER }} # NOTE: should be zombienet-arc-runner (without quotes)
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.ZOMBIENET_IMAGE }}
env:
RELAY_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/polkadot-debug:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
COL_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/test-parachain:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
echo "RELAY_IMAGE: $RELAY_IMAGE"
echo "COL_IMAGE: $COL_IMAGE"
export DEBUG=${{ needs.preflight.outputs.DEBUG }}
/home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh \
--local-dir="$(pwd)/$LOCAL_DIR" \
--concurrency=1 \
--test="0007-full_node_warp_sync.zndsl"
- name: upload logs
uses: actions/upload-artifact@v4
with:
name: zombienet-logs-${{ github.job }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
zombienet-cumulus-0008-elastic_authoring:
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_substrate || needs.preflight.outputs.changes_cumulus || needs.preflight.outputs.changes_polkadot }}
runs-on: ${{ needs.preflight.outputs.ZOMBIENET_RUNNER }} # NOTE: should be zombienet-arc-runner (without quotes)
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.ZOMBIENET_IMAGE }}
env:
RELAY_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/polkadot-debug:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
COL_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/test-parachain:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
echo "RELAY_IMAGE: $RELAY_IMAGE"
echo "COL_IMAGE: $COL_IMAGE"
export DEBUG=${{ needs.preflight.outputs.DEBUG }}
/home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh \
--local-dir="$(pwd)/$LOCAL_DIR" \
--concurrency=1 \
--test="0008-elastic_authoring.zndsl"
- name: upload logs
uses: actions/upload-artifact@v4
with:
name: zombienet-logs-${{ github.job }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
zombienet-cumulus-0009-elastic_pov_recovery:
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_substrate || needs.preflight.outputs.changes_cumulus || needs.preflight.outputs.changes_polkadot }}
runs-on: ${{ needs.preflight.outputs.ZOMBIENET_RUNNER }} # NOTE: should be zombienet-arc-runner (without quotes)
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.ZOMBIENET_IMAGE }}
env:
RELAY_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/polkadot-debug:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
COL_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/test-parachain:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
echo "RELAY_IMAGE: $RELAY_IMAGE"
echo "COL_IMAGE: $COL_IMAGE"
export DEBUG=${{ needs.preflight.outputs.DEBUG }}
/home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh \
--local-dir="$(pwd)/$LOCAL_DIR" \
--concurrency=1 \
--test="0009-elastic_pov_recovery.zndsl"
- name: upload logs
uses: actions/upload-artifact@v4
with:
name: zombienet-logs-${{ github.job }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
zombienet-cumulus-0010-elastic_scaling_multiple_block_per_slot:
needs: [preflight]
if: ${{ needs.preflight.outputs.changes_substrate || needs.preflight.outputs.changes_cumulus || needs.preflight.outputs.changes_polkadot }}
runs-on: ${{ needs.preflight.outputs.ZOMBIENET_RUNNER }} # NOTE: should be zombienet-arc-runner (without quotes)
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.ZOMBIENET_IMAGE }}
env:
# sdk tests are looking for POLKADOT_IMAGE
POLKADOT_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/polkadot-debug:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
CUMULUS_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/test-parachain:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
RUST_LOG: ${{ needs.preflight.outputs.RUST_LOG }}
ZOMBIE_PROVIDER: ${{ needs.preflight.outputs.ZOMBIE_PROVIDER }}
# don't retry sdk tests
NEXTEST_RETRIES: 0
steps:
- name: k8s_auth
shell: bash
run: |
. /home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh
k8s_auth
- name: Checkout
uses: actions/checkout@v4
- uses: actions/download-artifact@v4.1.8
with:
name: prepare-cumulus-zombienet-artifacts-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ needs.preflight.outputs.BUILD_RUN_ID }}
- name: tar
run: tar -xvf artifacts.tar
- name: script
run: |
echo "POLKADOT_IMAGE: $POLKADOT_IMAGE"
echo "CUMULUS_IMAGE: $CUMULUS_IMAGE"
ls -ltr ./artifacts
# use spot by default
export X_INFRA_INSTANCE=spot
# we want to use `--no-capture` in zombienet tests.
unset NEXTEST_FAILURE_OUTPUT
unset NEXTEST_SUCCESS_OUTPUT
cargo nextest run --archive-file ./artifacts/cumulus-zombienet-tests.tar.zst --no-capture -- elastic_scaling::elastic_scaling_multiple_blocks_per_slot::elastic_scaling_multiple_block_per_slot
- name: upload logs
uses: actions/upload-artifact@v4
with:
name: zombienet-logs-${{ github.job }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
name: Zombienet Substrate
on:
push:
branches:
- master
pull_request:
types: [opened, synchronize, reopened, ready_for_review]
merge_group:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
env:
RUN_IN_CONTAINER: 1
FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: 1
LOCAL_DIR: "./substrate/zombienet"
GHA_CLUSTER_SERVER_ADDR: "https://kubernetes.default:443"
# DB generated from commit: https://github.com/paritytech/polkadot-sdk/commit/868788a5bff3ef94869bd36432726703fe3b4e96
# TODO: As a workaround for https://github.com/paritytech/polkadot-sdk/issues/2568 the DB was generated in archive mode.
# After the issue is fixed, we should replace it with a pruned version of the DB.
DB_SNAPSHOT: "https://storage.googleapis.com/zombienet-db-snaps/substrate/0001-basic-warp-sync/chains-9677807d738b951e9f6c82e5fd15518eb0ae0419.tgz"
DB_BLOCK_HEIGHT: 56687
jobs:
isdraft:
uses: ./.github/workflows/reusable-isdraft.yml
preflight:
needs: isdraft
uses: ./.github/workflows/zombienet-reusable-preflight.yml
zombienet-substrate-0000-block-building:
needs: [preflight]
# only run if we have changes in the ./substrate directory and the build workflow has already finished with success status.
if: ${{ needs.preflight.outputs.changes_substrate || github.event_name == 'workflow_dispatch' }}
runs-on: ${{ needs.preflight.outputs.ZOMBIENET_RUNNER }} # NOTE: should be zombienet-arc-runner (without quotes)
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.ZOMBIENET_IMAGE }}
env:
ZOMBIENET_INTEGRATION_TEST_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/substrate:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
echo "Img: $ZOMBIENET_INTEGRATION_TEST_IMAGE"
export DEBUG=${{ needs.preflight.outputs.DEBUG }}
/home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh --local-dir="$(pwd)/$LOCAL_DIR/0000-block-building" --test="block-building.zndsl"
- name: upload logs
uses: actions/upload-artifact@v4
with:
name: zombienet-logs-${{ github.job }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
zombienet-substrate-0001-basic-warp-sync:
needs: [preflight]
# only run if we have changes in the ./substrate directory and the build workflow has already finished with success status.
if: ${{ needs.preflight.outputs.changes_substrate || github.event_name == 'workflow_dispatch' }}
runs-on: ${{ needs.preflight.outputs.ZOMBIENET_RUNNER }} # NOTE: should be zombienet-arc-runner (without quotes)
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.ZOMBIENET_IMAGE }}
env:
ZOMBIENET_INTEGRATION_TEST_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/substrate:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
echo "Img: $ZOMBIENET_INTEGRATION_TEST_IMAGE"
export DEBUG=${{ needs.preflight.outputs.DEBUG }}
/home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh --local-dir="$(pwd)/$LOCAL_DIR/0001-basic-warp-sync" --test="test-warp-sync.zndsl"
- name: upload logs
uses: actions/upload-artifact@v4
with:
name: zombienet-logs-${{ github.job }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
zombienet-substrate-0002-validators-warp-sync:
needs: [preflight]
# only run if we have changes in the ./substrate directory and the build workflow has already finished with success status.
if: ${{ needs.preflight.outputs.changes_substrate || github.event_name == 'workflow_dispatch' }}
runs-on: ${{ needs.preflight.outputs.ZOMBIENET_RUNNER }} # NOTE: should be zombienet-arc-runner (without quotes)
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.ZOMBIENET_IMAGE }}
env:
ZOMBIENET_INTEGRATION_TEST_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/substrate:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
echo "Img: $ZOMBIENET_INTEGRATION_TEST_IMAGE"
cp --remove-destination ${LOCAL_DIR}/0001-basic-warp-sync/chain-spec.json ${LOCAL_DIR}/0002-validators-warp-sync
export DEBUG=${{ needs.preflight.outputs.DEBUG }}
/home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh --local-dir="$(pwd)/$LOCAL_DIR/0002-validators-warp-sync" --test="test-validators-warp-sync.zndsl"
- name: upload logs
uses: actions/upload-artifact@v4
with:
name: zombienet-logs-${{ github.job }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
zombienet-substrate-0003-block-building-warp-sync:
needs: [preflight]
# only run if we have changes in the ./substrate directory and the build workflow has already finished with success status.
if: ${{ needs.preflight.outputs.changes_substrate || github.event_name == 'workflow_dispatch' }}
runs-on: ${{ needs.preflight.outputs.ZOMBIENET_RUNNER }} # NOTE: should be zombienet-arc-runner (without quotes)
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.ZOMBIENET_IMAGE }}
env:
ZOMBIENET_INTEGRATION_TEST_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/substrate:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: script
run: |
echo "Img: $ZOMBIENET_INTEGRATION_TEST_IMAGE"
cp --remove-destination ${LOCAL_DIR}/0001-basic-warp-sync/chain-spec.json ${LOCAL_DIR}/0003-block-building-warp-sync
export DEBUG=${{ needs.preflight.outputs.DEBUG }}
/home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh --local-dir="$(pwd)/$LOCAL_DIR/0003-block-building-warp-sync" --test="test-block-building-warp-sync.zndsl"
- name: upload logs
uses: actions/upload-artifact@v4
with:
name: zombienet-logs-${{ github.job }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
ZOMBIENET_IMAGE=docker.io/paritytech/zombienet:v1.3.126
ZOMBIENET_RUNNER=zombienet-arc-runner
PUSHGATEWAY_URL=http://zombienet-prometheus-pushgateway.managed-monitoring:9091/metrics/job/zombie-metrics
DEBUG=zombie,zombie::network-node,zombie::kube::client::logs
ZOMBIE_PROVIDER=k8s
RUST_LOG=info,zombienet_orchestrator=debug
RUN_IN_CI=1
KUBERNETES_CPU_REQUEST=512m
KUBERNETES_MEMORY_REQUEST=1Gi
TEMP_IMAGES_BASE=europe-docker.pkg.dev/parity-ci-2024/temp-images
......@@ -23,13 +23,13 @@
**/node_modules
**/target/
**/wip/*.stderr
**/__pycache__/
/.cargo/config
/.envrc
artifacts
bin/node-template/Cargo.lock
nohup.out
polkadot_argument_parsing
polkadot.*
!docs/sdk/src/polkadot_sdk/polkadot.rs
pwasm-alloc/Cargo.lock
pwasm-libc/Cargo.lock
......@@ -39,3 +39,6 @@ rls*.log
runtime/wasm/target/
substrate.code-workspace
target/
*.scale
justfile
rustc-ice-*
......@@ -21,7 +21,8 @@ workflow:
- if: $CI_COMMIT_BRANCH
variables:
CI_IMAGE: !reference [.ci-unified, variables, CI_IMAGE]
# CI_IMAGE: !reference [ .ci-unified, variables, CI_IMAGE ]
CI_IMAGE: "docker.io/paritytech/ci-unified:bullseye-1.84.1-2025-01-28-v202502131220"
# BUILDAH_IMAGE is defined in group variables
BUILDAH_COMMAND: "buildah --storage-driver overlay2"
RELENG_SCRIPTS_BRANCH: "master"
......@@ -124,18 +125,18 @@ default:
- cat .forklift/config-gitlab.toml > .forklift/config.toml
- >
if [ "$FORKLIFT_BYPASS" != "true" ]; then
echo "FORKLIFT_BYPASS not set";
echo "FORKLIFT_BYPASS not set";
if command -v forklift >/dev/null 2>&1; then
echo "forklift already exists";
echo "forklift already exists";
forklift version
else
echo "forklift does not exist, downloading";
curl --header "PRIVATE-TOKEN: $FL_CI_GROUP_TOKEN" -o forklift -L "${CI_API_V4_URL}/projects/676/packages/generic/forklift/${FL_FORKLIFT_VERSION}/forklift_${FL_FORKLIFT_VERSION}_linux_amd64";
curl --header "PRIVATE-TOKEN: $FL_CI_GROUP_TOKEN" -o forklift -L "${CI_API_V4_URL}/projects/676/packages/generic/forklift/${FL_FORKLIFT_VERSION}/forklift_${FL_FORKLIFT_VERSION}_linux_amd64";
chmod +x forklift;
export PATH=$PATH:$(pwd);
echo ${FL_FORKLIFT_VERSION};
fi
echo "Creating alias cargo='forklift cargo'";
echo "Creating alias cargo='forklift cargo'";
shopt -s expand_aliases;
alias cargo="forklift cargo";
fi
......@@ -224,8 +225,6 @@ include:
- .gitlab/pipeline/test.yml
# build jobs
- .gitlab/pipeline/build.yml
# short-benchmarks jobs
- .gitlab/pipeline/short-benchmarks.yml
# publish jobs
- .gitlab/pipeline/publish.yml
# zombienet jobs
......@@ -268,82 +267,6 @@ remove-cancel-pipeline-message:
PR_NUM: "${CI_COMMIT_REF_NAME}"
trigger:
project: "parity/infrastructure/ci_cd/pipeline-stopper"
# need to copy jobs this way because otherwise gitlab will wait
# for all 3 jobs to finish instead of cancelling if one fails
cancel-pipeline-test-linux-stable1:
extends: .cancel-pipeline-template
needs:
- job: "test-linux-stable 1/3"
cancel-pipeline-test-linux-stable2:
extends: .cancel-pipeline-template
needs:
- job: "test-linux-stable 2/3"
cancel-pipeline-test-linux-stable3:
extends: .cancel-pipeline-template
needs:
- job: "test-linux-stable 3/3"
cancel-pipeline-cargo-check-benches1:
extends: .cancel-pipeline-template
needs:
- job: "cargo-check-benches 1/2"
cancel-pipeline-cargo-check-benches2:
extends: .cancel-pipeline-template
needs:
- job: "cargo-check-benches 2/2"
cancel-pipeline-test-linux-stable-int:
extends: .cancel-pipeline-template
needs:
- job: test-linux-stable-int
cancel-pipeline-cargo-check-each-crate-1:
extends: .cancel-pipeline-template
needs:
- job: "cargo-check-each-crate 1/6"
cancel-pipeline-cargo-check-each-crate-2:
extends: .cancel-pipeline-template
needs:
- job: "cargo-check-each-crate 2/6"
cancel-pipeline-cargo-check-each-crate-3:
extends: .cancel-pipeline-template
needs:
- job: "cargo-check-each-crate 3/6"
cancel-pipeline-cargo-check-each-crate-4:
extends: .cancel-pipeline-template
needs:
- job: "cargo-check-each-crate 4/6"
cancel-pipeline-cargo-check-each-crate-5:
extends: .cancel-pipeline-template
needs:
- job: "cargo-check-each-crate 5/6"
cancel-pipeline-cargo-check-each-crate-6:
extends: .cancel-pipeline-template
needs:
- job: "cargo-check-each-crate 6/6"
cancel-pipeline-cargo-check-each-crate-macos:
extends: .cancel-pipeline-template
needs:
- job: cargo-check-each-crate-macos
cancel-pipeline-check-tracing:
extends: .cancel-pipeline-template
needs:
- job: check-tracing
cancel-pipeline-cargo-clippy:
extends: .cancel-pipeline-template
needs:
- job: cargo-clippy
cancel-pipeline-build-linux-stable:
extends: .cancel-pipeline-template
......@@ -359,43 +282,3 @@ cancel-pipeline-build-linux-substrate:
extends: .cancel-pipeline-template
needs:
- job: build-linux-substrate
cancel-pipeline-test-node-metrics:
extends: .cancel-pipeline-template
needs:
- job: test-node-metrics
cancel-pipeline-test-frame-ui:
extends: .cancel-pipeline-template
needs:
- job: test-frame-ui
cancel-pipeline-quick-benchmarks:
extends: .cancel-pipeline-template
needs:
- job: quick-benchmarks
cancel-pipeline-check-try-runtime:
extends: .cancel-pipeline-template
needs:
- job: check-try-runtime
cancel-pipeline-test-frame-examples-compile-to-wasm:
extends: .cancel-pipeline-template
needs:
- job: test-frame-examples-compile-to-wasm
cancel-pipeline-build-short-benchmark:
extends: .cancel-pipeline-template
needs:
- job: build-short-benchmark
cancel-pipeline-check-runtime-migration-rococo:
extends: .cancel-pipeline-template
needs:
- job: check-runtime-migration-rococo
cancel-pipeline-check-runtime-migration-westend:
extends: .cancel-pipeline-template
needs:
- job: check-runtime-migration-westend
......@@ -83,48 +83,22 @@ build-malus:
- echo "polkadot-test-malus = $(cat ./artifacts/VERSION) (EXTRATAG = $(cat ./artifacts/EXTRATAG))"
- cp -r ./docker/* ./artifacts
build-rustdoc:
build-templates-node:
stage: build
extends:
- .docker-env
- .common-refs
- .run-immediately
variables:
SKIP_WASM_BUILD: 1
RUSTDOCFLAGS: "-Dwarnings --default-theme=ayu --html-in-header ./docs/sdk/assets/header.html --extend-css ./docs/sdk/assets/theme.css --html-after-content ./docs/sdk/assets/after-content.html"
artifacts:
name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}-doc"
when: on_success
expire_in: 1 days
paths:
- ./crate-docs/
- .collect-artifacts
script:
- time cargo doc --all-features --workspace --no-deps
- rm -f ./target/doc/.lock
- mv ./target/doc ./crate-docs
# Inject Simple Analytics (https://www.simpleanalytics.com/) privacy preserving tracker into
# all .html files
- >
inject_simple_analytics() {
local path="$1";
local script_content="<script async defer src=\"https://apisa.parity.io/latest.js\"></script><noscript><img src=\"https://apisa.parity.io/latest.js\" alt=\"\" referrerpolicy=\"no-referrer-when-downgrade\" /></noscript>";
# Function that inject script into the head of an html file using sed.
process_file() {
local file="$1";
echo "Adding Simple Analytics script to $file";
sed -i "s|</head>|$script_content</head>|" "$file";
};
export -f process_file;
# xargs runs process_file in separate shells without access to outer variables.
# make script_content available inside process_file, export it as an env var here.
export script_content;
# Modify .html files in parallel using xargs, otherwise it can take a long time.
find "$path" -name '*.html' | xargs -I {} -P "$(nproc)" bash -c 'process_file "$@"' _ {};
};
inject_simple_analytics "./crate-docs";
- echo "<meta http-equiv=refresh content=0;url=polkadot_sdk_docs/index.html>" > ./crate-docs/index.html
- time cargo build --locked --package parachain-template-node --release
- time cargo build --locked --package minimal-template-node --release
- time cargo build --locked --package solochain-template-node --release
# pack artifacts
- mkdir -p ./artifacts
- mv ./target/release/parachain-template-node ./artifacts/.
- mv ./target/release/minimal-template-node ./artifacts/.
- mv ./target/release/solochain-template-node ./artifacts/.
build-implementers-guide:
stage: build
......@@ -143,18 +117,23 @@ build-implementers-guide:
- mkdir -p artifacts
- mv polkadot/roadmap/implementers-guide/book artifacts/
build-short-benchmark:
build-polkadot-zombienet-tests:
stage: build
extends:
- .docker-env
- .common-refs
- .run-immediately
- .collect-artifacts
needs:
- job: build-linux-stable
artifacts: true
- job: build-linux-stable-cumulus
artifacts: true
script:
- cargo build --profile release --locked --features=runtime-benchmarks,on-chain-release-build --bin polkadot --workspace
- cargo nextest --manifest-path polkadot/zombienet-sdk-tests/Cargo.toml archive --features zombie-metadata --archive-file polkadot-zombienet-tests.tar.zst
- mkdir -p artifacts
- target/release/polkadot --version
- cp ./target/release/polkadot ./artifacts/
- cp polkadot-zombienet-tests.tar.zst ./artifacts
# build jobs from cumulus
......@@ -198,101 +177,6 @@ build-test-parachain:
- mkdir -p ./artifacts/zombienet
- mv ./target/release/wbuild/cumulus-test-runtime/wasm_binary_spec_version_incremented.rs.compact.compressed.wasm ./artifacts/zombienet/.
# build runtime only if files in $RUNTIME_PATH/$RUNTIME_NAME were changed
.build-runtime-template: &build-runtime-template
stage: build
extends:
- .docker-env
- .test-refs-no-trigger-prs-only
- .run-immediately
variables:
RUNTIME_PATH: "parachains/runtimes/assets"
script:
- cd ${RUNTIME_PATH}
- for directory in $(echo */); do
echo "_____Running cargo check for ${directory} ______";
cd ${directory};
pwd;
SKIP_WASM_BUILD=1 cargo check --locked;
cd ..;
done
# DAG: build-runtime-assets -> build-runtime-collectives -> build-runtime-bridge-hubs
# DAG: build-runtime-assets -> build-runtime-collectives -> build-runtime-contracts
# DAG: build-runtime-assets -> build-runtime-coretime
# DAG: build-runtime-assets -> build-runtime-starters -> build-runtime-testing
build-runtime-assets:
<<: *build-runtime-template
variables:
RUNTIME_PATH: "cumulus/parachains/runtimes/assets"
build-runtime-collectives:
<<: *build-runtime-template
variables:
RUNTIME_PATH: "cumulus/parachains/runtimes/collectives"
# this is an artificial job dependency, for pipeline optimization using GitLab's DAGs
needs:
- job: build-runtime-assets
artifacts: false
build-runtime-coretime:
<<: *build-runtime-template
variables:
RUNTIME_PATH: "cumulus/parachains/runtimes/coretime"
# this is an artificial job dependency, for pipeline optimization using GitLab's DAGs
needs:
- job: build-runtime-assets
artifacts: false
build-runtime-bridge-hubs:
<<: *build-runtime-template
variables:
RUNTIME_PATH: "cumulus/parachains/runtimes/bridge-hubs"
# this is an artificial job dependency, for pipeline optimization using GitLab's DAGs
needs:
- job: build-runtime-collectives
artifacts: false
build-runtime-contracts:
<<: *build-runtime-template
variables:
RUNTIME_PATH: "cumulus/parachains/runtimes/contracts"
# this is an artificial job dependency, for pipeline optimization using GitLab's DAGs
needs:
- job: build-runtime-collectives
artifacts: false
build-runtime-starters:
<<: *build-runtime-template
variables:
RUNTIME_PATH: "cumulus/parachains/runtimes/starters"
# this is an artificial job dependency, for pipeline optimization using GitLab's DAGs
needs:
- job: build-runtime-assets
artifacts: false
build-runtime-testing:
<<: *build-runtime-template
variables:
RUNTIME_PATH: "cumulus/parachains/runtimes/testing"
# this is an artificial job dependency, for pipeline optimization using GitLab's DAGs
needs:
- job: build-runtime-starters
artifacts: false
build-short-benchmark-cumulus:
stage: build
extends:
- .docker-env
- .common-refs
- .run-immediately
- .collect-artifacts
script:
- cargo build --profile release --locked --features=runtime-benchmarks,on-chain-release-build -p polkadot-parachain-bin --bin polkadot-parachain --workspace
- mkdir -p artifacts
- target/release/polkadot-parachain --version
- cp ./target/release/polkadot-parachain ./artifacts/
# substrate
build-linux-substrate:
......@@ -329,73 +213,6 @@ build-linux-substrate:
# - printf '\n# building node-template\n\n'
# - ./scripts/ci/node-template-release.sh ./artifacts/substrate/substrate-node-template.tar.gz
build-runtimes-polkavm:
stage: build
extends:
- .docker-env
- .common-refs
- .run-immediately
script:
- SUBSTRATE_RUNTIME_TARGET=riscv cargo check -p minimal-template-runtime
- SUBSTRATE_RUNTIME_TARGET=riscv cargo check -p westend-runtime
- SUBSTRATE_RUNTIME_TARGET=riscv cargo check -p rococo-runtime
- SUBSTRATE_RUNTIME_TARGET=riscv cargo check -p polkadot-test-runtime
.build-subkey:
stage: build
extends:
- .docker-env
- .common-refs
- .run-immediately
# - .collect-artifact
variables:
# this variable gets overridden by "rusty-cachier environment inject", use the value as default
CARGO_TARGET_DIR: "$CI_PROJECT_DIR/target"
before_script:
- mkdir -p ./artifacts/subkey
- !reference [.forklift-cache, before_script]
script:
- cd ./substrate/bin/utils/subkey
- time SKIP_WASM_BUILD=1 cargo build --locked --release
# - cd -
# - mv $CARGO_TARGET_DIR/release/subkey ./artifacts/subkey/.
# - echo -n "Subkey version = "
# - ./artifacts/subkey/subkey --version |
# sed -n -E 's/^subkey ([0-9.]+.*)/\1/p' |
# tee ./artifacts/subkey/VERSION;
# - sha256sum ./artifacts/subkey/subkey | tee ./artifacts/subkey/subkey.sha256
# - cp -r ./scripts/ci/docker/subkey.Dockerfile ./artifacts/subkey/
build-subkey-linux:
extends: .build-subkey
# DAG
needs:
- job: build-malus
artifacts: false
# tbd
# build-subkey-macos:
# extends: .build-subkey
# # duplicating before_script & script sections from .build-subkey hidden job
# # to overwrite rusty-cachier integration as it doesn't work on macos
# before_script:
# # skip timestamp script, the osx bash doesn't support printf %()T
# - !reference [.job-switcher, before_script]
# - mkdir -p ./artifacts/subkey
# script:
# - cd ./bin/utils/subkey
# - SKIP_WASM_BUILD=1 time cargo build --locked --release
# - cd -
# - mv ./target/release/subkey ./artifacts/subkey/.
# - echo -n "Subkey version = "
# - ./artifacts/subkey/subkey --version |
# sed -n -E 's/^subkey ([0-9.]+.*)/\1/p' |
# tee ./artifacts/subkey/VERSION;
# - sha256sum ./artifacts/subkey/subkey | tee ./artifacts/subkey/subkey.sha256
# - cp -r ./scripts/ci/docker/subkey.Dockerfile ./artifacts/subkey/
# after_script: [""]
# tags:
# - osx
# bridges
# we need some non-binary artifacts in our bridges+zombienet image
......