import unittest
from unittest.mock import patch, mock_open, MagicMock, call
import json
import sys
import os
import argparse

# Mock data for runtimes-matrix.json
mock_runtimes_matrix = [
    {
        "name": "dev",
        "package": "kitchensink-runtime",
        "path": "substrate/frame",
        "header": "substrate/HEADER-APACHE2",
        "template": "substrate/.maintain/frame-weight-template.hbs",
        "bench_features": "runtime-benchmarks",
        "bench_flags": "--flag1 --flag2"
    },
    {
        "name": "westend",
        "package": "westend-runtime",
        "path": "polkadot/runtime/westend",
        "header": "polkadot/file_header.txt",
        "template": "polkadot/xcm/pallet-xcm-benchmarks/template.hbs",
        "bench_features": "runtime-benchmarks",
        "bench_flags": "--flag3 --flag4"
    },
    {
        "name": "rococo",
        "package": "rococo-runtime",
        "path": "polkadot/runtime/rococo",
        "header": "polkadot/file_header.txt",
        "template": "polkadot/xcm/pallet-xcm-benchmarks/template.hbs",
        "bench_features": "runtime-benchmarks",
        "bench_flags": ""
    },
    {
        "name": "asset-hub-westend",
        "package": "asset-hub-westend-runtime",
        "path": "cumulus/parachains/runtimes/assets/asset-hub-westend",
        "header": "cumulus/file_header.txt",
        "template": "cumulus/templates/xcm-bench-template.hbs",
        "bench_features": "runtime-benchmarks",
        "bench_flags": "--flag7 --flag8"
    }
]

def get_mock_bench_output(runtime, pallets, output_path, header, bench_flags, template=None):
    return f"frame-omni-bencher v1 benchmark pallet --extrinsic=* " \
        f"--runtime=target/production/wbuild/{runtime}-runtime/{runtime.replace('-', '_')}_runtime.wasm " \
        f"--pallet={pallets} --header={header} " \
        f"--output={output_path} " \
        f"--wasm-execution=compiled " \
        f"--steps=50 --repeat=20 --heap-pages=4096 " \
        f"{f'--template={template} ' if template else ''}" \
        f"--no-storage-info --no-min-squares --no-median-slopes " \
        f"{bench_flags}"

class TestCmd(unittest.TestCase):

    def setUp(self):
        self.patcher1 = patch('builtins.open', new_callable=mock_open, read_data=json.dumps(mock_runtimes_matrix))
        self.patcher2 = patch('json.load', return_value=mock_runtimes_matrix)
        self.patcher3 = patch('argparse.ArgumentParser.parse_known_args')
        self.patcher4 = patch('os.system', return_value=0)
        self.patcher5 = patch('os.popen')
        self.patcher6 = patch('importlib.util.spec_from_file_location', return_value=MagicMock())
        self.patcher7 = patch('importlib.util.module_from_spec', return_value=MagicMock())
        self.patcher8 = patch('cmd.generate_prdoc.main', return_value=0)

        self.mock_open = self.patcher1.start()
        self.mock_json_load = self.patcher2.start()
        self.mock_parse_args = self.patcher3.start()
        self.mock_system = self.patcher4.start()
        self.mock_popen = self.patcher5.start()
        self.mock_spec_from_file_location = self.patcher6.start()
        self.mock_module_from_spec = self.patcher7.start()
        self.mock_generate_prdoc_main = self.patcher8.start()

        # Ensure that cmd.py uses the mock_runtimes_matrix
        import cmd
        cmd.runtimesMatrix = mock_runtimes_matrix

    def tearDown(self):
        self.patcher1.stop()
        self.patcher2.stop()
        self.patcher3.stop()
        self.patcher4.stop()
        self.patcher5.stop()
        self.patcher6.stop()
        self.patcher7.stop()
        self.patcher8.stop()

    def test_bench_command_normal_execution_all_runtimes(self):
        self.mock_parse_args.return_value = (argparse.Namespace(
            command='bench-omni',
            runtime=list(map(lambda x: x['name'], mock_runtimes_matrix)),
            pallet=['pallet_balances'],
            fail_fast=True,
            quiet=False,
            clean=False,
            image=None
        ), [])
        self.mock_popen.return_value.read.side_effect = [
            "pallet_balances\npallet_staking\npallet_something\n",  # Output for dev runtime
            "pallet_balances\npallet_staking\npallet_something\n",  # Output for westend runtime
            "pallet_staking\npallet_something\n",                   # Output for rococo runtime - pallet_balances is not present
            "pallet_balances\npallet_staking\npallet_something\n",  # Output for asset-hub-westend runtime
            "./substrate/frame/balances/Cargo.toml\n",              # Mock manifest path for dev -> pallet_balances
        ]

        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

            expected_calls = [
                # Build calls
                call("forklift cargo build -p kitchensink-runtime --profile production --features=runtime-benchmarks"),
                call("forklift cargo build -p westend-runtime --profile production --features=runtime-benchmarks"),
                call("forklift cargo build -p rococo-runtime --profile production --features=runtime-benchmarks"),
                call("forklift cargo build -p asset-hub-westend-runtime --profile production --features=runtime-benchmarks"),

                call(get_mock_bench_output(
                    runtime='kitchensink',
                    pallets='pallet_balances',
                    output_path='./substrate/frame/balances/src/weights.rs',
                    header=os.path.abspath('substrate/HEADER-APACHE2'),
                    bench_flags='--flag1 --flag2',
                    template="substrate/.maintain/frame-weight-template.hbs"
                )),
                call(get_mock_bench_output(
                    runtime='westend',
                    pallets='pallet_balances',
                    output_path='./polkadot/runtime/westend/src/weights',
                    header=os.path.abspath('polkadot/file_header.txt'),
                    bench_flags='--flag3 --flag4'
                )),
                # skips rococo benchmark
                call(get_mock_bench_output(
                    runtime='asset-hub-westend',
                    pallets='pallet_balances',
                    output_path='./cumulus/parachains/runtimes/assets/asset-hub-westend/src/weights',
                    header=os.path.abspath('cumulus/file_header.txt'),
                    bench_flags='--flag7 --flag8'
                )),
            ]

            self.mock_system.assert_has_calls(expected_calls, any_order=True)

    def test_bench_command_normal_execution(self):
        self.mock_parse_args.return_value = (argparse.Namespace(
            command='bench-omni',
            runtime=['westend'],
            pallet=['pallet_balances', 'pallet_staking'],
            fail_fast=True,
            quiet=False,
            clean=False,
            image=None
        ), [])
        header_path = os.path.abspath('polkadot/file_header.txt')
        self.mock_popen.return_value.read.side_effect = [
            "pallet_balances\npallet_staking\npallet_something\n",  # Output for westend runtime
        ]

        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

            expected_calls = [
                # Build calls
                call("forklift cargo build -p westend-runtime --profile production --features=runtime-benchmarks"),

                # Westend runtime calls
                call(get_mock_bench_output(
                    runtime='westend',
                    pallets='pallet_balances',
                    output_path='./polkadot/runtime/westend/src/weights',
                    header=header_path,
                    bench_flags='--flag3 --flag4'
                )),
                call(get_mock_bench_output(
                    runtime='westend',
                    pallets='pallet_staking',
                    output_path='./polkadot/runtime/westend/src/weights',
                    header=header_path,
                    bench_flags='--flag3 --flag4'
                )),
            ]

            self.mock_system.assert_has_calls(expected_calls, any_order=True)

    def test_bench_command_normal_execution_xcm(self):
        self.mock_parse_args.return_value = (argparse.Namespace(
            command='bench-omni',
            runtime=['westend'],
            pallet=['pallet_xcm_benchmarks::generic'],
            fail_fast=True,
            quiet=False,
            clean=False,
            image=None
        ), [])
        header_path = os.path.abspath('polkadot/file_header.txt')
        self.mock_popen.return_value.read.side_effect = [
            "pallet_balances\npallet_staking\npallet_something\npallet_xcm_benchmarks::generic\n",  # Output for westend runtime
        ]

        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

            expected_calls = [
                # Build calls
                call("forklift cargo build -p westend-runtime --profile production --features=runtime-benchmarks"),

                # Westend runtime calls
                call(get_mock_bench_output(
                    runtime='westend',
                    pallets='pallet_xcm_benchmarks::generic',
                    output_path='./polkadot/runtime/westend/src/weights/xcm',
                    header=header_path,
                    bench_flags='--flag3 --flag4',
                    template="polkadot/xcm/pallet-xcm-benchmarks/template.hbs"
                )),
            ]

            self.mock_system.assert_has_calls(expected_calls, any_order=True)

    def test_bench_command_two_runtimes_two_pallets(self):
        self.mock_parse_args.return_value = (argparse.Namespace(
            command='bench-omni',
            runtime=['westend', 'rococo'],
            pallet=['pallet_balances', 'pallet_staking'],
            fail_fast=True,
            quiet=False,
            clean=False,
            image=None
        ), [])
        self.mock_popen.return_value.read.side_effect = [
            "pallet_staking\npallet_balances\n",  # Output for westend runtime
            "pallet_staking\npallet_balances\n",  # Output for rococo runtime
        ]

        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()
            header_path = os.path.abspath('polkadot/file_header.txt')

            expected_calls = [
                # Build calls
                call("forklift cargo build -p westend-runtime --profile production --features=runtime-benchmarks"),
                call("forklift cargo build -p rococo-runtime --profile production --features=runtime-benchmarks"),

                # Westend runtime calls
                call(get_mock_bench_output(
                    runtime='westend',
                    pallets='pallet_staking',
                    output_path='./polkadot/runtime/westend/src/weights',
                    header=header_path,
                    bench_flags='--flag3 --flag4'
                )),
                call(get_mock_bench_output(
                    runtime='westend',
                    pallets='pallet_balances',
                    output_path='./polkadot/runtime/westend/src/weights',
                    header=header_path,
                    bench_flags='--flag3 --flag4'
                )),

                # Rococo runtime calls
                call(get_mock_bench_output(
                    runtime='rococo',
                    pallets='pallet_staking',
                    output_path='./polkadot/runtime/rococo/src/weights',
                    header=header_path,
                    bench_flags=''
                )),
                call(get_mock_bench_output(
                    runtime='rococo',
                    pallets='pallet_balances',
                    output_path='./polkadot/runtime/rococo/src/weights',
                    header=header_path,
                    bench_flags=''
                )),
            ]

            self.mock_system.assert_has_calls(expected_calls, any_order=True)

    def test_bench_command_one_dev_runtime(self):
        self.mock_parse_args.return_value = (argparse.Namespace(
            command='bench-omni',
            runtime=['dev'],
            pallet=['pallet_balances'],
            fail_fast=True,
            quiet=False,
            clean=False,
            image=None
        ), [])
        manifest_dir = "substrate/frame/kitchensink"
        self.mock_popen.return_value.read.side_effect = [
            "pallet_balances\npallet_something",  # Output for dev runtime
            manifest_dir + "/Cargo.toml"          # Output for manifest path in dev runtime
        ]
        header_path = os.path.abspath('substrate/HEADER-APACHE2')

        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

            expected_calls = [
                # Build calls
                call("forklift cargo build -p kitchensink-runtime --profile production --features=runtime-benchmarks"),

                # Kitchensink (dev) runtime calls
                call(get_mock_bench_output(
                    runtime='kitchensink',
                    pallets='pallet_balances',
                    output_path=manifest_dir + "/src/weights.rs",
                    header=header_path,
                    bench_flags='--flag1 --flag2',
                    template="substrate/.maintain/frame-weight-template.hbs"
                )),
            ]

            self.mock_system.assert_has_calls(expected_calls, any_order=True)

    def test_bench_command_one_cumulus_runtime(self):
        self.mock_parse_args.return_value = (argparse.Namespace(
            command='bench-omni',
            runtime=['asset-hub-westend'],
            pallet=['pallet_assets'],
            fail_fast=True,
            quiet=False,
            clean=False,
            image=None
        ), [])
        self.mock_popen.return_value.read.side_effect = [
            "pallet_assets\n",  # Output for asset-hub-westend runtime
        ]
        header_path = os.path.abspath('cumulus/file_header.txt')

        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

            expected_calls = [
                # Build calls
                call("forklift cargo build -p asset-hub-westend-runtime --profile production --features=runtime-benchmarks"),

                # Asset-hub-westend runtime calls
                call(get_mock_bench_output(
                    runtime='asset-hub-westend',
                    pallets='pallet_assets',
                    output_path='./cumulus/parachains/runtimes/assets/asset-hub-westend/src/weights',
                    header=header_path,
                    bench_flags='--flag7 --flag8'
                )),
            ]

            self.mock_system.assert_has_calls(expected_calls, any_order=True)

    def test_bench_command_one_cumulus_runtime_xcm(self):
        self.mock_parse_args.return_value = (argparse.Namespace(
            command='bench-omni',
            runtime=['asset-hub-westend'],
            pallet=['pallet_xcm_benchmarks::generic', 'pallet_assets'],
            fail_fast=True,
            quiet=False,
            clean=False,
            image=None
        ), [])
        self.mock_popen.return_value.read.side_effect = [
            "pallet_assets\npallet_xcm_benchmarks::generic\n",  # Output for asset-hub-westend runtime
        ]
        header_path = os.path.abspath('cumulus/file_header.txt')

        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

            expected_calls = [
                # Build calls
                call("forklift cargo build -p asset-hub-westend-runtime --profile production --features=runtime-benchmarks"),

                # Asset-hub-westend runtime calls
                call(get_mock_bench_output(
                    runtime='asset-hub-westend',
                    pallets='pallet_xcm_benchmarks::generic',
                    output_path='./cumulus/parachains/runtimes/assets/asset-hub-westend/src/weights/xcm',
                    header=header_path,
                    bench_flags='--flag7 --flag8',
                    template="cumulus/templates/xcm-bench-template.hbs"
                )),
                call(get_mock_bench_output(
                    runtime='asset-hub-westend',
                    pallets='pallet_assets',
                    output_path='./cumulus/parachains/runtimes/assets/asset-hub-westend/src/weights',
                    header=header_path,
                    bench_flags='--flag7 --flag8'
                )),
            ]

            self.mock_system.assert_has_calls(expected_calls, any_order=True)

    @patch('argparse.ArgumentParser.parse_known_args', return_value=(argparse.Namespace(command='fmt'), []))
    @patch('os.system', return_value=0)
    def test_fmt_command(self, mock_system, mock_parse_args):
        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

        mock_system.assert_any_call('cargo +nightly fmt')
        mock_system.assert_any_call('taplo format --config .config/taplo.toml')

    @patch('argparse.ArgumentParser.parse_known_args', return_value=(argparse.Namespace(command='update-ui'), []))
    @patch('os.system', return_value=0)
    def test_update_ui_command(self, mock_system, mock_parse_args):
        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

        mock_system.assert_called_with('sh ./scripts/update-ui-tests.sh')

    @patch('argparse.ArgumentParser.parse_known_args', return_value=(argparse.Namespace(command='prdoc'), []))
    @patch('os.system', return_value=0)
    def test_prdoc_command(self, mock_system, mock_parse_args):
        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

        self.mock_generate_prdoc_main.assert_called_with(mock_parse_args.return_value[0])

if __name__ == '__main__':
    unittest.main()
@@ -237,23 +237,52 @@ fetch_release_artifacts() {
  popd > /dev/null
}

# Fetch deb package from S3. Assumes the ENV are set:
# - RELEASE_ID
# - GITHUB_TOKEN
# - REPO in the form paritytech/polkadot
fetch_debian_package_from_s3() {
  BINARY=$1
  echo "Version    : $NODE_VERSION"
  echo "Repo       : $REPO"
  echo "Binary     : $BINARY"
  echo "Tag        : $VERSION"

  OUTPUT_DIR=${OUTPUT_DIR:-"./release-artifacts/${BINARY}"}
  echo "OUTPUT_DIR : $OUTPUT_DIR"
  URL_BASE=$(get_s3_url_base $BINARY)
  echo "URL_BASE=$URL_BASE"

  URL=$URL_BASE/$VERSION/x86_64-unknown-linux-gnu/${BINARY}_${NODE_VERSION}_amd64.deb

  mkdir -p "$OUTPUT_DIR"
  pushd "$OUTPUT_DIR" > /dev/null

  echo "Fetching deb package..."
  printf "Fetching %s\n" "$URL"
  curl --progress-bar -LO "$URL" || echo "Missing $URL"

  pwd
  ls -al --color
  popd > /dev/null
}

# Fetch the release artifacts like binary and signatures from S3. Assumes the ENV are set:
# inputs: binary (polkadot), target (aarch64-apple-darwin)
fetch_release_artifacts_from_s3() {
  BINARY=$1
  TARGET=$2
  OUTPUT_DIR=${OUTPUT_DIR:-"./release-artifacts/${TARGET}/${BINARY}"}
  echo "OUTPUT_DIR : $OUTPUT_DIR"

  URL_BASE=$(get_s3_url_base $BINARY)
  echo "URL_BASE=$URL_BASE"

  URL_BINARY=$URL_BASE/$VERSION/$TARGET/$BINARY
  URL_SHA=$URL_BASE/$VERSION/$TARGET/$BINARY.sha256
  URL_ASC=$URL_BASE/$VERSION/$TARGET/$BINARY.asc

  # Fetch artifacts
  mkdir -p "$OUTPUT_DIR"
@@ -268,7 +297,6 @@ fetch_release_artifacts_from_s3() {
  pwd
  ls -al --color
  popd > /dev/null
}

# Pass the name of the binary as input, it will
@@ -276,15 +304,26 @@ fetch_release_artifacts_from_s3() {
function get_s3_url_base() {
  name=$1
  case $name in
    polkadot | polkadot-execute-worker | polkadot-prepare-worker )
      printf "https://releases.parity.io/polkadot"
      ;;

    polkadot-parachain)
      printf "https://releases.parity.io/polkadot-parachain"
      ;;

    polkadot-omni-node)
      printf "https://releases.parity.io/polkadot-omni-node"
      ;;

    chain-spec-builder)
      printf "https://releases.parity.io/chain-spec-builder"
      ;;

    frame-omni-bencher)
      printf "https://releases.parity.io/frame-omni-bencher"
      ;;

    *)
      printf "UNSUPPORTED BINARY $name"
      exit 1
      ;;
@@ -299,23 +338,24 @@ function check_sha256() {
}

# Import GPG keys of the release team members
function import_gpg_keys() {
  GPG_KEYSERVER=${GPG_KEYSERVER:-"hkps://keyserver.ubuntu.com"}
  SEC="9D4B2B6EB8F97156D19669A9FF0812D491B96798"
  EGOR="E6FC4D4782EB0FA64A4903CCDB7D3555DD3932D3"
  MORGAN="2E92A9D8B15D7891363D1AE8AF9E6C43F7F8C4CF"
  PARITY_RELEASES="90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE"
  PARITY_RELEASES_SIGN_COMMITS="D8018FBB3F534D866A45998293C5FB5F6A367B51"

  echo "Importing GPG keys from $GPG_KEYSERVER"
  for key in $SEC $EGOR $MORGAN $PARITY_RELEASES $PARITY_RELEASES_SIGN_COMMITS; do
    (
      echo "Importing GPG key $key"
      gpg --no-tty --quiet --keyserver $GPG_KEYSERVER --recv-keys $key
      echo -e "5\ny\n" | gpg --no-tty --command-fd 0 --expert --edit-key $key trust;
    )
  done
  wait
  gpg -k
}

# Check the GPG signature for a given binary
@@ -404,14 +444,10 @@ function find_runtimes() {
# output: none
filter_version_from_input() {
  version=$1
  regex="^(v)?[0-9]+\.[0-9]+\.[0-9]+(-rc[0-9]+)?$"

  if [[ $version =~ $regex ]]; then
    echo $version
  else
    echo "Invalid version: $version"
    exit 1
@@ -461,7 +497,7 @@ function get_polkadot_node_version_from_code() {
validate_stable_tag() {
  tag="$1"
  pattern="^(polkadot-)?stable[0-9]{4}(-[0-9]+)?(-rc[0-9]+)?$"

  if [[ $tag =~ $pattern ]]; then
    echo $tag
@@ -470,3 +506,16 @@ validate_stable_tag() {
    exit 1
  fi
}

# Prepare the docker stable tag from the polkadot stable tag
# input: tag (polkadot-stableYYMM(-X) or polkadot-stableYYMM(-X)-rcX)
# output: stableYYMM(-X) or stableYYMM(-X)-rcX
prepare_docker_stable_tag() {
  tag="$1"
  if [[ "$tag" =~ stable[0-9]{4}(-[0-9]+)?(-rc[0-9]+)? ]]; then
    echo "${BASH_REMATCH[0]}"
  else
    echo "Tag is invalid: $tag"
    exit 1
  fi
}
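
# Illustrative usage of the helper above (hypothetical tags, not from the
# original script); the docker tag is just the stableYYMM portion:
#
#   prepare_docker_stable_tag "polkadot-stable2412-rc1"  # prints "stable2412-rc1"
#   prepare_docker_stable_tag "polkadot-stable2412-1"    # prints "stable2412-1"
#   prepare_docker_stable_tag "v1.2.3"                   # prints "Tag is invalid: v1.2.3", exits 1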
@@ -15,6 +15,7 @@ KNOWN_BAD_GIT_DEPS = {
    'simple-mermaid': ['xcm-docs'],
    # Fix in <https://github.com/paritytech/polkadot-sdk/issues/2922>
    'bandersnatch_vrfs': ['sp-core'],
    'subwasmlib': ['polkadot-zombienet-sdk-tests'],
}

root = sys.argv[1] if len(sys.argv) > 1 else os.getcwd()
@@ -24,7 +25,7 @@ errors = []
def check_dep(dep, used_by):
    if dep.location != DependencyLocation.GIT:
        return

    if used_by in KNOWN_BAD_GIT_DEPS.get(dep.name, []):
        print(f'🤨 Ignoring git dependency {dep.name} in {used_by}')
    else:
#!/usr/bin/env python3

"""
Generate the PrDoc for a Pull Request with a specific number, audience and bump level.

It downloads and parses the patch from the GitHub API to populate the prdoc with all modified crates.
This will delete any prdoc that already exists for the PR if `--force` is passed.

Usage:
    python generate-prdoc.py --pr 1234 --audience node_dev --bump patch
"""

import argparse
import os
import re
import sys
import subprocess
import toml
import yaml
import requests
from github import Github
import whatthepatch
from cargo_workspace import Workspace

# Download the patch and pass the info into `create_prdoc`.
def from_pr_number(n, audience, bump, force):
    print(f"Fetching PR '{n}' from GitHub")
    g = Github()

    repo = g.get_repo("paritytech/polkadot-sdk")
    pr = repo.get_pull(n)

    patch_url = pr.patch_url
    patch = requests.get(patch_url).text

    create_prdoc(n, audience, pr.title, pr.body, patch, bump, force)

def translate_audience(audience):
    aliases = {
        'runtime_dev': 'Runtime Dev',
        'runtime_user': 'Runtime User',
        'node_dev': 'Node Dev',
        'node_operator': 'Node Operator',
        'todo': 'Todo',
    }

    mapped = [aliases.get(a) for a in audience]
    if len(mapped) == 1:
        mapped = mapped[0]

    print(f"Translated audience '{audience}' to '{mapped}'")
    return mapped

def create_prdoc(pr, audience, title, description, patch, bump, force):
    path = f"prdoc/pr_{pr}.prdoc"

    if os.path.exists(path):
        if force:
            print(f"Overwriting existing PrDoc for PR {pr}")
        else:
            print(f"PrDoc already exists for PR {pr}. Use --force to overwrite.")
            sys.exit(1)
    else:
        print(f"No preexisting PrDoc for PR {pr}")

    prdoc = { "title": title, "doc": [{}], "crates": [] }

    audience = translate_audience(audience)
    prdoc["doc"][0]["audience"] = audience
    prdoc["doc"][0]["description"] = description

    workspace = Workspace.from_path(".")

    modified_paths = []
    for diff in whatthepatch.parse_patch(patch):
        new_path = diff.header.new_path
        # Sometimes this lib returns `/dev/null` as the new path...
        if not new_path.startswith("/dev"):
            modified_paths.append(new_path)

    modified_crates = {}
    for p in modified_paths:
        # Go up until we find a Cargo.toml
        p = os.path.join(workspace.path, p)
        while not os.path.exists(os.path.join(p, "Cargo.toml")):
            if p == '/':
                exit(1)
            p = os.path.dirname(p)

        with open(os.path.join(p, "Cargo.toml")) as f:
            manifest = toml.load(f)

        if "package" not in manifest:
            continue

        crate_name = manifest["package"]["name"]
        if workspace.crate_by_name(crate_name).publish:
            modified_crates[crate_name] = True
        else:
            print(f"Skipping unpublished crate: {crate_name}")

    for crate_name in modified_crates.keys():
        entry = { "name": crate_name }

        if bump in ('silent', 'ignore', 'none', 'no change'):
            entry["validate"] = False
        else:
            entry["bump"] = bump

        print(f"Adding crate {entry}")
        prdoc["crates"].append(entry)

    # write the parsed PR documentation back to the file
    with open(path, "w") as f:
        yaml.dump(prdoc, f, sort_keys=False)
    print(f"PrDoc for PR {pr} written to {path}")

# Make the `description` a multiline string instead of escaping \r\n.
def setup_yaml():
    def yaml_multiline_string_presenter(dumper, data):
        if len(data.splitlines()) > 1:
            data = '\n'.join([line.rstrip() for line in data.strip().splitlines()])
            return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
        return dumper.represent_scalar('tag:yaml.org,2002:str', data)

    yaml.add_representer(str, yaml_multiline_string_presenter)
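
# Sketch of the representer's effect (illustrative): after setup_yaml(),
# dumping {"description": "line one\r\nline two"} renders the value as a
# block scalar instead of an escaped single line:
#
#   description: |
#     line one
#     line two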

# parse_args is also used by cmd/cmd.py
# if pr_required is False, then --pr is optional, as it can be derived from the PR comment body
def setup_parser(parser=None, pr_required=True):
    allowed_audiences = ["runtime_dev", "runtime_user", "node_dev", "node_operator", "todo"]
    if parser is None:
        parser = argparse.ArgumentParser()
    parser.add_argument("--pr", type=int, required=pr_required, help="The PR number to generate the PrDoc for.")
    parser.add_argument("--audience", type=str, nargs='*', choices=allowed_audiences, default=["todo"], help="The audience of whom the changes may concern. Example: --audience runtime_dev node_dev")
    parser.add_argument("--bump", type=str, default="major", choices=["patch", "minor", "major", "silent", "ignore", "none"], help="A default bump level for all crates. Example: --bump patch")
    parser.add_argument("--force", action="store_true", help="Whether to overwrite any existing PrDoc.")

    return parser

def snake_to_title(s):
    return ' '.join(word.capitalize() for word in s.split('_'))

def main(args):
    print(f"Args: {args}, force: {args.force}")
    setup_yaml()
    try:
        from_pr_number(args.pr, args.audience, args.bump, args.force)
        return 0
    except Exception as e:
        print(f"Error generating prdoc: {e}")
        return 1

if __name__ == "__main__":
    parser = setup_parser()
    args = parser.parse_args()
    main(args)
requests
cargo-workspace
PyGithub
whatthepatch
pyyaml
toml
#!/usr/bin/env python3
"""
A script to generate READMEs for all public crates,
if they do not already have one.
It relies on functions from the `check-workspace.py` script.
The resulting README is based on a template defined below,
and includes the crate name, description, license,
and optionally - the SDK release version.
# Example
```sh
python3 -m pip install toml
.github/scripts/generate-readmes.py . --sdk-version 1.15.0
```
"""
import os
import toml
import importlib
import argparse
check_workspace = importlib.import_module("check-workspace")
README_TEMPLATE = """<div align="center">
<img src="https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/docs/images/Polkadot_Logo_Horizontal_Pink_BlackOnWhite.png" alt="Polkadot logo" width="200">
# {name}
This crate is part of the [Polkadot SDK](https://github.com/paritytech/polkadot-sdk/).
</div>
## Description
{description}
## Additional Resources
In order to learn about Polkadot SDK, head over to the [Polkadot SDK Developer Documentation](https://paritytech.github.io/polkadot-sdk/master/polkadot_sdk_docs/index.html).
To learn about Polkadot, visit [polkadot.com](https://polkadot.com/).
## License
This crate is licensed with {license}.
"""
VERSION_TEMPLATE = """
## Version
This version of `{name}` is associated with Polkadot {sdk_version} release.
"""
def generate_readme(member, *, workspace_dir, workspace_license, sdk_version):
    print(f"Loading manifest for: {member}")
    manifest = toml.load(os.path.join(workspace_dir, member, "Cargo.toml"))
    if not manifest["package"].get("publish", True):
        print(f"⏩ Skipping un-published crate: {member}")
        return
    if os.path.exists(os.path.join(workspace_dir, member, "README.md")):
        print(f"⏩ Skipping crate with an existing readme: {member}")
        return
    print(f"📝 Generating README for: {member}")

    license = manifest["package"]["license"]
    if isinstance(license, dict):
        if not license.get("workspace", False):
            print(
                f"❌ License for {member} is unexpectedly declared as workspace=false."
            )
            # Skipping this crate as it is not clear what license it should use.
            return
        license = workspace_license

    name = manifest["package"]["name"]
    description = manifest["package"]["description"]
    description = description + "." if not description.endswith(".") else description

    filled_readme = README_TEMPLATE.format(
        name=name, description=description, license=license
    )
    if sdk_version:
        filled_readme += VERSION_TEMPLATE.format(name=name, sdk_version=sdk_version)
    with open(os.path.join(workspace_dir, member, "README.md"), "w") as new_readme:
        new_readme.write(filled_readme)

def parse_args():
    parser = argparse.ArgumentParser(
        description="Generate readmes for published crates."
    )

    parser.add_argument(
        "workspace_dir",
        help="The directory to check",
        metavar="workspace_dir",
        type=str,
        nargs=1,
    )
    parser.add_argument(
        "--sdk-version",
        help="Optional SDK release version",
        metavar="sdk_version",
        type=str,
        nargs=1,
        required=False,
    )

    args = parser.parse_args()
    return (args.workspace_dir[0], args.sdk_version[0] if args.sdk_version else None)

def main():
    (workspace_dir, sdk_version) = parse_args()
    root_manifest = toml.load(os.path.join(workspace_dir, "Cargo.toml"))
    workspace_license = root_manifest["workspace"]["package"]["license"]
    members = check_workspace.get_members(workspace_dir, [])

    for member in members:
        generate_readme(
            member,
            workspace_dir=workspace_dir,
            workspace_license=workspace_license,
            sdk_version=sdk_version,
        )

if __name__ == "__main__":
    main()
#!/usr/bin/env bash
set -e
PRODUCT=$1
VERSION=$2
PROFILE=${PROFILE:-production}
cargo install --version 2.7.0 cargo-deb --locked -q
echo "Using cargo-deb v$(cargo-deb --version)"
echo "Building a Debian package for '$PRODUCT' in '$PROFILE' profile"
cargo deb --profile $PROFILE --no-strip --no-build -p $PRODUCT --deb-version $VERSION
deb=target/debian/${PRODUCT}_*_amd64.deb

cp $deb target/production/
#!/usr/bin/env bash
# This is used to build our binaries:
# - polkadot
# - polkadot-parachain
# - polkadot-omni-node
#
# set -e
BIN=$1
PACKAGE=${2:-$BIN}
PROFILE=${PROFILE:-production}
ARTIFACTS=/artifacts/$BIN
echo "Artifacts will be copied into $ARTIFACTS"
mkdir -p "$ARTIFACTS"
git log --pretty=oneline -n 1
time cargo build --profile $PROFILE --locked --verbose --bin $BIN --package $PACKAGE
echo "Artifact target: $ARTIFACTS"
cp ./target/$PROFILE/$BIN "$ARTIFACTS"
pushd "$ARTIFACTS" > /dev/null
sha256sum "$BIN" | tee "$BIN.sha256"
chmod a+x "$BIN"
VERSION="$($ARTIFACTS/$BIN --version)"
EXTRATAG="$(echo "${VERSION}" |
sed -n -r 's/^'$BIN' ([0-9.]+.*-[0-9a-f]{7,13})-.*$/\1/p')"
EXTRATAG="${VERSION}-${EXTRATAG}-$(cut -c 1-8 $ARTIFACTS/$BIN.sha256)"
echo "$BIN version = ${VERSION} (EXTRATAG = ${EXTRATAG})"
echo -n ${VERSION} > "$ARTIFACTS/VERSION"
echo -n ${EXTRATAG} > "$ARTIFACTS/EXTRATAG"
#!/usr/bin/env bash
# This is used to build our binaries:
# - polkadot
# - polkadot-parachain
# - polkadot-omni-node
# set -e
BIN=$1
PACKAGE=${2:-$BIN}
PROFILE=${PROFILE:-production}
# parity-macos runner needs a path where it can
# write, so make it relative to github workspace.
ARTIFACTS=$GITHUB_WORKSPACE/artifacts/$BIN
VERSION=$(git tag -l --contains HEAD | grep -E "^v.*")
echo "Artifacts will be copied into $ARTIFACTS"
mkdir -p "$ARTIFACTS"
git log --pretty=oneline -n 1
time cargo build --profile $PROFILE --locked --verbose --bin $BIN --package $PACKAGE
echo "Artifact target: $ARTIFACTS"
cp ./target/$PROFILE/$BIN "$ARTIFACTS"
pushd "$ARTIFACTS" > /dev/null
sha256sum "$BIN" | tee "$BIN.sha256"
EXTRATAG="$($ARTIFACTS/$BIN --version |
sed -n -r 's/^'$BIN' ([0-9.]+.*-[0-9a-f]{7,13})-.*$/\1/p')"
EXTRATAG="${VERSION}-${EXTRATAG}-$(cut -c 1-8 $ARTIFACTS/$BIN.sha256)"
echo "$BIN version = ${VERSION} (EXTRATAG = ${EXTRATAG})"
echo -n ${VERSION} > "$ARTIFACTS/VERSION"
echo -n ${EXTRATAG} > "$ARTIFACTS/EXTRATAG"
Origin: Parity
Label: Parity
Codename: release
Architectures: amd64
Components: main
Description: Apt repository for software made by Parity Technologies Ltd.
SignWith: 90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE
Origin: Parity
Label: Parity Staging
Codename: staging
Architectures: amd64
Components: main
Description: Staging distribution for Parity Technologies Ltd. packages
SignWith: 90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE
Origin: Parity
Label: Parity stable2407
Codename: stable2407
Architectures: amd64
Components: main
Description: Apt repository for software made by Parity Technologies Ltd.
SignWith: 90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE
Origin: Parity
Label: Parity stable2409
Codename: stable2409
Architectures: amd64
Components: main
Description: Apt repository for software made by Parity Technologies Ltd.
SignWith: 90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE
Origin: Parity
Label: Parity stable2412
Codename: stable2412
Architectures: amd64
Components: main
Description: Apt repository for software made by Parity Technologies Ltd.
SignWith: 90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE
#!/usr/bin/env bash

# Set the new version by replacing the value of the constant given as pattern
# in the file.
#
# input: pattern, version, file
# output: none
set_version() {
  pattern=$1
  version=$2
  file=$3

  sed -i "s/$pattern/\1\"${version}\"/g" $file
  return 0
}
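
# Illustrative usage (mirrors the call in set_polkadot_parachain_binary_version
# below; the Cargo.toml path is hypothetical):
#
#   set_version "\(^version = \)\".*\"" "1.16.1" "some/crate/Cargo.toml"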

# Commit changes to git with a specific message.
# "|| true" keeps the script from failing with exit code 1
# in case there is nothing to commit.
#
# input: MESSAGE (any message which should be used for the commit)
# output: none
commit_with_message() {
  MESSAGE=$1
  git commit -a -m "$MESSAGE" || true
}

# Return the list of filtered runtimes
# input: none
# output: list of filtered runtimes
get_filtered_runtimes_list() {
  grep_filters=("runtime.*" "test|template|starters|substrate")

  git grep spec_version: | grep .rs: | grep -e "${grep_filters[0]}" | grep "lib.rs" | grep -vE "${grep_filters[1]}" | cut -d: -f1
}

# Sets the provided spec version in the given runtime files
# input: version, list of runtime files
set_spec_versions() {
  NEW_VERSION=$1
  runtimes_list=(${@:2})

  printf "Setting spec_version to $NEW_VERSION\n"

  for f in ${runtimes_list[@]}; do
    printf "  processing $f"
    sed -ri "s/spec_version: [0-9]+_[0-9]+_[0-9]+,/spec_version: $NEW_VERSION,/" $f
  done

  commit_with_message "Bump spec_version to $NEW_VERSION"

  git_show_log 'spec_version'
}

# Displays formatted results of the git log command
# for a given pattern which needs to be found in the logs
# input: pattern, count (optional, default is 10)
git_show_log() {
  PATTERN="$1"
  COUNT=${2:-10}
  git log --pretty=format:"%h %ad | %s%d [%an]" --graph --date=iso-strict | \
    head -n $COUNT | grep -iE "$PATTERN" --color=always -z
}

# Get a spec_version number from the crate version
#
# ## inputs
# - v1.12.0 or 1.12.0
#
# ## output:
# 1_012_000 or 1_012_001 if SUFFIX is set
function get_spec_version() {
  INPUT=$1
  SUFFIX=${SUFFIX:-000} # this variable makes it possible to set a specific runtime version, e.g. 93826; it can be initialised as an environment variable
  [[ $INPUT =~ .*([0-9]+\.[0-9]+\.[0-9]{1,2}).* ]]
  VERSION="${BASH_REMATCH[1]}"
  MATCH="${BASH_REMATCH[0]}"
  if [ -z $MATCH ]; then
    return 1
  else
    SPEC_VERSION="$(sed -e "s/\./_0/g" -e "s/_[^_]*\$/_$SUFFIX/" <<< $VERSION)"
    echo "$SPEC_VERSION"
    return 0
  fi
}
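
# Illustrative examples (following the sed transform above):
#
#   get_spec_version "v1.12.0"            # prints 1_012_000
#   SUFFIX=001 get_spec_version "1.12.0"  # prints 1_012_001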

# Reorganize the prdoc files for the release
#
# input: VERSION (e.g. v1.0.0)
# output: none
reorder_prdocs() {
  VERSION="$1"

  printf "[+] ℹ️ Reordering prdocs:"

  VERSION=$(sed -E 's/^v([0-9]+\.[0-9]+\.[0-9]+).*$/\1/' <<< "$VERSION") # strip the 'v' prefix
  mkdir -p "prdoc/$VERSION"
  mv prdoc/pr_*.prdoc prdoc/$VERSION

  git add -A
  commit_with_message "Reordering prdocs for the release $VERSION"
}

# Set the version of the polkadot-parachain binary to the new
# bumped version and commit the changes.
#
# input: version (e.g. 1.16.0), Cargo.toml file
set_polkadot_parachain_binary_version() {
  bumped_version="$1"
  cargo_toml_file="$2"

  set_version "\(^version = \)\".*\"" $bumped_version $cargo_toml_file

  cargo update --workspace --offline # we need this to update Cargo.lock with the new versions as well

  MESSAGE="Bump versions in: ${cargo_toml_file}"
  commit_with_message "$MESSAGE"
  git_show_log "$MESSAGE"
}

upload_s3_release() {
  alias aws='podman run --rm -it docker.io/paritytech/awscli -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_BUCKET aws'

  product=$1
  version=$2
  target=$3

  echo "Working on product:  $product"
  echo "Working on version:  $version"
  echo "Working on platform: $target"

  URL_BASE=$(get_s3_url_base $product)

  echo "Current content, should be empty on new uploads:"
  aws s3 ls "s3://${URL_BASE}/${version}/${target}" --recursive --human-readable --summarize || true
  echo "Content to be uploaded:"
  artifacts="release-artifacts/$target/$product/"
  ls "$artifacts"
  aws s3 sync --acl public-read "$artifacts" "s3://${URL_BASE}/${version}/${target}"
  echo "Uploaded files:"
  aws s3 ls "s3://${URL_BASE}/${version}/${target}" --recursive --human-readable --summarize
  echo "✅ The release should be at https://${URL_BASE}/${version}/${target}"
}
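
# Illustrative usage (hypothetical values; arguments are product, version, target):
#
#   upload_s3_release polkadot v1.16.0 x86_64-unknown-linux-gnu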

# Upload runtimes artifacts to s3 release bucket
#
# input: version (stable release tag, e.g. polkadot-stable2412 or polkadot-stable2412-rc1)
# output: none
upload_s3_runtimes_release_artifacts() {
  alias aws='podman run --rm -it docker.io/paritytech/awscli -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_BUCKET aws'

  version=$1

  echo "Working on version: $version"

  echo "Current content, should be empty on new uploads:"
  aws s3 ls "s3://releases.parity.io/polkadot/runtimes/${version}/" --recursive --human-readable --summarize || true
  echo "Content to be uploaded:"
  artifacts="artifacts/runtimes/"
  ls "$artifacts"
  aws s3 sync --acl public-read "$artifacts" "s3://releases.parity.io/polkadot/runtimes/${version}/"
  echo "Uploaded files:"
  aws s3 ls "s3://releases.parity.io/polkadot/runtimes/${version}/" --recursive --human-readable --summarize
  echo "✅ The release should be at https://releases.parity.io/polkadot/runtimes/${version}"
}

# Pass the name of the binary as input, it will
# return the s3 base url
function get_s3_url_base() {
  name=$1
  case $name in
    polkadot | polkadot-execute-worker | polkadot-prepare-worker )
      printf "releases.parity.io/polkadot"
      ;;

    polkadot-parachain)
      printf "releases.parity.io/polkadot-parachain"
      ;;

    polkadot-omni-node)
      printf "releases.parity.io/polkadot-omni-node"
      ;;

    chain-spec-builder)
      printf "releases.parity.io/chain-spec-builder"
      ;;

    frame-omni-bencher)
      printf "releases.parity.io/frame-omni-bencher"
      ;;

    *)
      printf "UNSUPPORTED BINARY $name"
      exit 1
      ;;
  esac
}
name: Bench all runtimes

on:
  # schedule:
  #   - cron: '0 1 * * 0' # weekly on Sunday night 01:00 UTC
  workflow_dispatch:
    inputs:
      draft:
        type: boolean
        default: false
        description: "Whether to create a draft PR"

permissions: # allow the action to create a PR
  contents: write
  issues: write
  pull-requests: write
  actions: read

jobs:
  preflight:
    uses: ./.github/workflows/reusable-preflight.yml

  runtime-matrix:
    runs-on: ubuntu-latest
    needs: [preflight]
    timeout-minutes: 30
    outputs:
      runtime: ${{ steps.runtime.outputs.runtime }}
      branch: ${{ steps.branch.outputs.branch }}
      date: ${{ steps.branch.outputs.date }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    name: Extract runtimes from matrix
    steps:
      - uses: actions/checkout@v4
        with:
          ref: master

      - name: Extract runtimes
        id: runtime
        run: |
          RUNTIMES=$(jq '[.[] | select(.package != null)]' .github/workflows/runtimes-matrix.json)
          RUNTIMES=$(echo $RUNTIMES | jq -c .)
          echo "runtime=$RUNTIMES"
          echo "runtime=$RUNTIMES" >> $GITHUB_OUTPUT

      - name: Create branch
        id: branch
        run: |
          DATE=$(date +'%Y-%m-%d-%s')
          BRANCH="update-weights-weekly-$DATE"
          # Fixes "detected dubious ownership" error in the ci
          git config --global --add safe.directory $GITHUB_WORKSPACE
          git checkout -b $BRANCH
          git push --set-upstream origin $BRANCH

          echo "date=$DATE" >> $GITHUB_OUTPUT
          echo "branch=$BRANCH" >> $GITHUB_OUTPUT

  run-frame-omni-bencher:
    needs: [preflight, runtime-matrix]
    runs-on: ${{ needs.preflight.outputs.RUNNER_WEIGHTS }}
    # 24 hours per runtime.
    # At most it takes ~14h for westend to recalculate, but due to limited runners
    # it can sometimes take longer.
    timeout-minutes: 1440
    strategy:
      fail-fast: false # keep running other workflows even if one fails, to see the logs of all possible failures
      matrix:
        runtime: ${{ fromJSON(needs.runtime-matrix.outputs.runtime) }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    env:
      PACKAGE_NAME: ${{ matrix.runtime.package }}
      FLAGS: ${{ matrix.runtime.bench_flags }}
      RUST_LOG: "frame_omni_bencher=info,polkadot_sdk_frame=info"
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ needs.runtime-matrix.outputs.branch }} # checkout always from the initially created branch to avoid conflicts

      - name: script
        id: required
        run: |
          git --version
          # Fixes "detected dubious ownership" error in the ci
          git config --global --add safe.directory $GITHUB_WORKSPACE
          git remote -v
          python3 -m pip install -r .github/scripts/generate-prdoc.requirements.txt
          python3 .github/scripts/cmd/cmd.py bench --runtime ${{ matrix.runtime.name }}
          git add .
          git status

          if [ -f /tmp/cmd/command_output.log ]; then
            CMD_OUTPUT=$(cat /tmp/cmd/command_output.log)
            # export to summary to display in the PR
            echo "$CMD_OUTPUT" >> $GITHUB_STEP_SUMMARY
            # should be multiline, otherwise it captures the first line only
            echo 'cmd_output<<EOF' >> $GITHUB_OUTPUT
            echo "$CMD_OUTPUT" >> $GITHUB_OUTPUT
            echo 'EOF' >> $GITHUB_OUTPUT
          fi

          # Create patch that includes both modifications and new files
          git add -A
          git diff --staged > diff-${{ matrix.runtime.name }}.patch -U0
          git reset

      - name: Upload diff
        uses: actions/upload-artifact@v4
        with:
          name: diff-${{ matrix.runtime.name }}
          path: diff-${{ matrix.runtime.name }}.patch

  apply-diff-commit:
    runs-on: ubuntu-latest
    needs: [runtime-matrix, run-frame-omni-bencher]
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ needs.runtime-matrix.outputs.branch }}

      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: patches

      # needs to be able to trigger CI
      - uses: actions/create-github-app-token@v1
        id: generate_token
        with:
          app-id: ${{ secrets.CMD_BOT_APP_ID }}
          private-key: ${{ secrets.CMD_BOT_APP_KEY }}

      - name: Apply diff and create PR
        env:
          GH_TOKEN: ${{ steps.generate_token.outputs.token }}
          BRANCH: ${{ needs.runtime-matrix.outputs.branch }}
          DATE: ${{ needs.runtime-matrix.outputs.date }}
        run: |
          git --version
          git config user.name "github-actions[bot]"
          git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git status

          # Apply all patches
          for file in patches/diff-*/diff-*.patch; do
            if [ -f "$file" ] && [ -s "$file" ]; then
              echo "Applying $file"
              # using --3way and --ours for conflicts resolution. Requires git 2.47+
              git apply "$file" --unidiff-zero --allow-empty --3way --ours || echo "Failed to apply $file"
            else
              echo "Skipping empty or non-existent patch file: $file"
            fi
          done
          rm -rf patches

          # Get release tags from 1 and 3 months ago
          ONE_MONTH_AGO=$(date -d "1 month ago" +%Y-%m-%d)
          THREE_MONTHS_AGO=$(date -d "3 months ago" +%Y-%m-%d)

          # Get tags with their dates
          ONE_MONTH_INFO=$(git for-each-ref --sort=-creatordate --format '%(refname:short)|%(creatordate:iso-strict-local)' 'refs/tags/polkadot-v*' | awk -v date="$ONE_MONTH_AGO" -F'|' '$2 <= date {print $0; exit}')
          THREE_MONTHS_INFO=$(git for-each-ref --sort=-creatordate --format '%(refname:short)|%(creatordate:iso-strict-local)' 'refs/tags/polkadot-v*' | awk -v date="$THREE_MONTHS_AGO" -F'|' '$2 <= date {print $0; exit}')

          # Split into tag and date
          ONE_MONTH_TAG=$(echo "$ONE_MONTH_INFO" | cut -d'|' -f1)
          ONE_MONTH_DATE=$(echo "$ONE_MONTH_INFO" | cut -d'|' -f2 | cut -d'T' -f1)
          THREE_MONTHS_TAG=$(echo "$THREE_MONTHS_INFO" | cut -d'|' -f1)
          THREE_MONTHS_DATE=$(echo "$THREE_MONTHS_INFO" | cut -d'|' -f2 | cut -d'T' -f1)

          # Base URL for Subweight comparisons
          BASE_URL="https://weights.tasty.limo/compare?repo=polkadot-sdk&threshold=5&path_pattern=.%2F**%2Fweights%2F**%2F*.rs%2C.%2F**%2Fweights.rs&method=asymptotic&ignore_errors=true&unit=time"

          # Generate comparison links
          MASTER_LINK="${BASE_URL}&old=master&new=${BRANCH}"
          ONE_MONTH_LINK="${BASE_URL}&old=${ONE_MONTH_TAG}&new=${BRANCH}"
          THREE_MONTHS_LINK="${BASE_URL}&old=${THREE_MONTHS_TAG}&new=${BRANCH}"

          # Create PR body with all links in a temporary file
          cat > /tmp/pr_body.md << EOF
          Auto-update of all weights for ${DATE}.

          Subweight results:
          - [now vs master](${MASTER_LINK})
          - [now vs ${ONE_MONTH_TAG} (${ONE_MONTH_DATE})](${ONE_MONTH_LINK})
          - [now vs ${THREE_MONTHS_TAG} (${THREE_MONTHS_DATE})](${THREE_MONTHS_LINK})
          EOF

          git add .
          git commit -m "Update all weights weekly for $DATE"
          git push --set-upstream origin "$BRANCH"

          MAYBE_DRAFT=${{ inputs.draft && '--draft' || '' }}

          PR_TITLE="Auto-update of all weights for $DATE"
          gh pr create \
            --title "$PR_TITLE" \
            --head "$BRANCH" \
            --base "master" \
            --reviewer paritytech/ci \
            --reviewer paritytech/release-engineering \
            $MAYBE_DRAFT \
            --label "R0-silent" \
            --body "$(cat /tmp/pr_body.md)"
name: Networking Benchmarks

on:
  push:
    branches:
      - master

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

permissions:
  contents: read

jobs:
  preflight:
    uses: ./.github/workflows/reusable-preflight.yml

  build:
    timeout-minutes: 50
    needs: [preflight]
    runs-on: ${{ needs.preflight.outputs.RUNNER_BENCHMARK }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    strategy:
      fail-fast: false
      matrix:
        features:
          [
            { bench: "notifications_protocol" },
            { bench: "request_response_protocol" },
          ]
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Run Benchmarks
        id: run-benchmarks
        run: |
          mkdir -p ./charts
          forklift cargo bench -p sc-network --bench ${{ matrix.features.bench }} -- --output-format bencher | grep "^test" | tee ./charts/${{ matrix.features.bench }}.txt || echo "Benchmarks failed"
          ls -lsa ./charts

      - name: Upload artifacts
        uses: actions/upload-artifact@v4.3.6
        with:
          name: ${{ matrix.features.bench }}-${{ github.sha }}
          path: ./charts

  publish-benchmarks:
    timeout-minutes: 60
    needs: [build]
    if: github.ref == 'refs/heads/master'
    environment: subsystem-benchmarks
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          ref: gh-pages
          fetch-depth: 0

      - run: git checkout master --

      - name: Download artifacts
        uses: actions/download-artifact@v4.1.8
        with:
          name: notifications_protocol-${{ github.sha }}
          path: ./charts

      - name: Download artifacts
        uses: actions/download-artifact@v4.1.8
        with:
          name: request_response_protocol-${{ github.sha }}
          path: ./charts

      - name: Setup git
        run: |
          # Fixes "detected dubious ownership" error in the ci
          git config --global --add safe.directory '*'
          ls -lsR ./charts

      - uses: actions/create-github-app-token@v1
        id: app-token
        with:
          app-id: ${{ secrets.POLKADOTSDK_GHPAGES_APP_ID }}
          private-key: ${{ secrets.POLKADOTSDK_GHPAGES_APP_KEY }}

      - name: Generate ${{ env.BENCH }}
        env:
          BENCH: notifications_protocol
        uses: benchmark-action/github-action-benchmark@v1
        with:
          tool: "cargo"
          name: ${{ env.BENCH }}
          output-file-path: ./charts/${{ env.BENCH }}.txt
          benchmark-data-dir-path: ./bench/${{ env.BENCH }}
          github-token: ${{ steps.app-token.outputs.token }}
          auto-push: true

      - name: Generate ${{ env.BENCH }}
        env:
          BENCH: request_response_protocol
        uses: benchmark-action/github-action-benchmark@v1
        with:
          tool: "cargo"
          name: ${{ env.BENCH }}
          output-file-path: ./charts/${{ env.BENCH }}.txt
          benchmark-data-dir-path: ./bench/${{ env.BENCH }}
          github-token: ${{ steps.app-token.outputs.token }}
          auto-push: true
name: Subsystem Benchmarks

on:
  push:
    branches:
      - master
  pull_request:
    types: [opened, synchronize, reopened, ready_for_review]
  merge_group:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

permissions:
  contents: read

jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  preflight:
    needs: isdraft
    uses: ./.github/workflows/reusable-preflight.yml

  build:
    timeout-minutes: 80
    needs: [preflight]
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    strategy:
      fail-fast: false
      matrix:
        features:
          [
            {
              name: "polkadot-availability-recovery",
              bench: "availability-recovery-regression-bench",
            },
            {
              name: "polkadot-availability-distribution",
              bench: "availability-distribution-regression-bench",
            },
            {
              name: "polkadot-node-core-approval-voting",
              bench: "approval-voting-regression-bench",
            },
            {
              name: "polkadot-statement-distribution",
              bench: "statement-distribution-regression-bench",
            },
          ]
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check Rust
        run: |
          rustup show
          rustup +nightly show

      - name: Run Benchmarks
        id: run-benchmarks
        run: |
          forklift cargo bench -p ${{ matrix.features.name }} --bench ${{ matrix.features.bench }} --features subsystem-benchmarks || echo "Benchmarks failed"
          ls -lsa ./charts

      - name: Upload artifacts
        uses: actions/upload-artifact@v4.3.6
        with:
          name: ${{ matrix.features.bench }}
          path: ./charts

  publish-benchmarks:
    timeout-minutes: 60
    needs: [build]
    if: github.ref == 'refs/heads/master'
    environment: subsystem-benchmarks
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          ref: gh-pages
          fetch-depth: 0

      - run: git checkout master --

      - name: Download artifacts
        uses: actions/download-artifact@v4.1.8
        with:
          path: ./charts

      - name: Setup git
        run: |
          # Fixes "detected dubious ownership" error in the ci
          git config --global --add safe.directory '*'
          ls -lsR ./charts

      - uses: actions/create-github-app-token@v1
        id: app-token
        with:
          app-id: ${{ secrets.POLKADOTSDK_GHPAGES_APP_ID }}
          private-key: ${{ secrets.POLKADOTSDK_GHPAGES_APP_KEY }}

      - name: Generate ${{ env.BENCH }}
        env:
          BENCH: availability-recovery-regression-bench
        uses: benchmark-action/github-action-benchmark@v1
        with:
          tool: "customSmallerIsBetter"
          name: ${{ env.BENCH }}
          output-file-path: ./charts/${{ env.BENCH }}/${{ env.BENCH }}.json
          benchmark-data-dir-path: ./bench/${{ env.BENCH }}
          github-token: ${{ steps.app-token.outputs.token }}
          auto-push: true

      - name: Generate ${{ env.BENCH }}
        env:
          BENCH: availability-distribution-regression-bench
        uses: benchmark-action/github-action-benchmark@v1
        with:
          tool: "customSmallerIsBetter"
          name: ${{ env.BENCH }}
          output-file-path: ./charts/${{ env.BENCH }}/${{ env.BENCH }}.json
          benchmark-data-dir-path: ./bench/${{ env.BENCH }}
          github-token: ${{ steps.app-token.outputs.token }}
          auto-push: true

      - name: Generate ${{ env.BENCH }}
        env:
          BENCH: approval-voting-regression-bench
        uses: benchmark-action/github-action-benchmark@v1
        with:
          tool: "customSmallerIsBetter"
          name: ${{ env.BENCH }}
          output-file-path: ./charts/${{ env.BENCH }}/${{ env.BENCH }}.json
          benchmark-data-dir-path: ./bench/${{ env.BENCH }}
          github-token: ${{ steps.app-token.outputs.token }}
          auto-push: true

      - name: Generate ${{ env.BENCH }}
        env:
          BENCH: statement-distribution-regression-bench
        uses: benchmark-action/github-action-benchmark@v1
        with:
          tool: "customSmallerIsBetter"
          name: ${{ env.BENCH }}
          output-file-path: ./charts/${{ env.BENCH }}/${{ env.BENCH }}.json
          benchmark-data-dir-path: ./bench/${{ env.BENCH }}
          github-token: ${{ steps.app-token.outputs.token }}
          auto-push: true
name: Build Misc

on:
  push:
    branches:
      - master
  pull_request:
    types: [opened, synchronize, reopened, ready_for_review]
  merge_group:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

permissions:
  contents: read

jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  preflight:
    needs: isdraft
    uses: ./.github/workflows/reusable-preflight.yml

  build-runtimes-polkavm:
    timeout-minutes: 60
    needs: [preflight]
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check Rust
        run: |
          rustup show
          rustup +nightly show

      - name: Build
        env:
          SUBSTRATE_RUNTIME_TARGET: riscv
        id: required
        run: forklift cargo check -p minimal-template-runtime -p westend-runtime -p rococo-runtime -p polkadot-test-runtime

      - name: Stop all workflows if failed
        if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
        uses: ./.github/actions/workflow-stopper
        with:
          app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
          app-key: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_KEY }}

  # As part of our test fixtures we always build the revive-uapi crate with the `unstable-hostfn` feature.
  # This check makes sure that it won't break for downstream users who do not set this feature.
  # It doesn't need to produce working code, so we just use a similar-enough RISC-V target.
  check-revive-stable-uapi-polkavm:
    timeout-minutes: 30
    needs: [preflight]
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check Rust
        run: |
          rustup show
          rustup +nightly show

      - name: Build
        id: required
        run: forklift cargo +nightly check -p pallet-revive-uapi --no-default-features --target riscv64imac-unknown-none-elf -Zbuild-std=core

      - name: Stop all workflows if failed
        if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
        uses: ./.github/actions/workflow-stopper
        with:
          app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
          app-key: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_KEY }}

  build-subkey:
    timeout-minutes: 20
    needs: [preflight]
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Check Rust
        run: |
          rustup show
          rustup +nightly show

      - name: Build
        env:
          SKIP_WASM_BUILD: 1
        id: required
        run: |
          cd ./substrate/bin/utils/subkey
          forklift cargo build --locked --release

      - name: Stop all workflows if failed
        if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
        uses: ./.github/actions/workflow-stopper
        with:
          app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
          app-key: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_KEY }}

  confirm-required-build-misc-jobs-passed:
    runs-on: ubuntu-latest
    name: All build misc jobs passed
    # If any new job gets added, be sure to add it to this array
    needs: [build-runtimes-polkavm, build-subkey]
    if: always() && !cancelled()
    steps:
      - run: |
          tee resultfile <<< '${{ toJSON(needs) }}'
          FAILURES=$(cat resultfile | grep '"result": "failure"' | wc -l)
          if [ $FAILURES -gt 0 ]; then
            echo "### At least one required job failed ❌" >> $GITHUB_STEP_SUMMARY
            exit 1
          else
            echo '### Good job! All the required jobs passed 🚀' >> $GITHUB_STEP_SUMMARY
          fi
name: Build and push ETH-RPC image

on:
  push:
    branches:
      - master
  pull_request:
    types: [opened, synchronize, reopened, ready_for_review]

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  ETH_RPC_IMAGE_NAME: "docker.io/paritypr/eth-rpc"

jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml

  set-variables:
    # This workaround sets the container image for each job using 'set-variables' job output.
    # env variables don't work for PR from forks, so we need to use outputs.
    runs-on: ubuntu-latest
    needs: isdraft
    outputs:
      VERSION: ${{ steps.version.outputs.VERSION }}
    steps:
      - name: Define version
        id: version
        run: |
          export COMMIT_SHA=${{ github.sha }}
          export COMMIT_SHA_SHORT=${COMMIT_SHA:0:8}
          export REF_NAME=${{ github.ref_name }}
          export REF_SLUG=${REF_NAME//\//_}
          VERSION=${REF_SLUG}-${COMMIT_SHA_SHORT}
          echo "VERSION=${REF_SLUG}-${COMMIT_SHA_SHORT}" >> $GITHUB_OUTPUT
          echo "set VERSION=${VERSION}"

  build_docker:
    name: Build docker images
    runs-on: parity-large
    needs: [set-variables]
    env:
      VERSION: ${{ needs.set-variables.outputs.VERSION }}
    steps:
      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Build eth-rpc Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ./substrate/frame/revive/rpc/dockerfiles/eth-rpc/Dockerfile
          push: false
          tags: |
            ${{ env.ETH_RPC_IMAGE_NAME }}:${{ env.VERSION }}

  build_push_docker:
    name: Build and push docker images
    runs-on: parity-large
    if: github.ref == 'refs/heads/master'
    needs: [set-variables]
    env:
      VERSION: ${{ needs.set-variables.outputs.VERSION }}
    steps:
      - name: Check out the repo
        uses: actions/checkout@v4

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.PARITYPR_DOCKERHUB_USERNAME }}
          password: ${{ secrets.PARITYPR_DOCKERHUB_PASSWORD }}

      - name: Build eth-rpc Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ./substrate/frame/revive/rpc/dockerfiles/eth-rpc/Dockerfile
          push: true
          tags: |
            ${{ env.ETH_RPC_IMAGE_NAME }}:${{ env.VERSION }}
# GHA for build-*
name: Build and push images
on:
push:
branches:
- master
pull_request:
types: [opened, synchronize, reopened, ready_for_review]
merge_group:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
env:
COMMIT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
jobs:
#
#
#
isdraft:
uses: ./.github/workflows/reusable-isdraft.yml
preflight:
needs: isdraft
uses: ./.github/workflows/reusable-preflight.yml
### Build ########################
#
#
#
build-linux-stable:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
env:
RUST_TOOLCHAIN: stable
# Enable debug assertions: we run optimized builds for testing,
# but still want debug assertions enabled.
RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: build
run: |
forklift cargo build --locked --profile testnet --features pyroscope,fast-runtime --bin polkadot --bin polkadot-prepare-worker --bin polkadot-execute-worker
ROCOCO_EPOCH_DURATION=10 ./polkadot/scripts/build-only-wasm.sh rococo-runtime $(pwd)/runtimes/rococo-runtime-10/
ROCOCO_EPOCH_DURATION=100 ./polkadot/scripts/build-only-wasm.sh rococo-runtime $(pwd)/runtimes/rococo-runtime-100/
ROCOCO_EPOCH_DURATION=600 ./polkadot/scripts/build-only-wasm.sh rococo-runtime $(pwd)/runtimes/rococo-runtime-600/
pwd
ls -alR runtimes
- name: pack artifacts
run: |
mkdir -p ./artifacts
VERSION="${{ needs.preflight.outputs.SOURCE_REF_SLUG }}" # will be tag or branch name
mv ./target/testnet/polkadot ./artifacts/.
mv ./target/testnet/polkadot-prepare-worker ./artifacts/.
mv ./target/testnet/polkadot-execute-worker ./artifacts/.
mv ./runtimes/ ./artifacts/.
cd artifacts/
sha256sum polkadot | tee polkadot.sha256
shasum -c polkadot.sha256
cd ../
EXTRATAG="${{ needs.preflight.outputs.SOURCE_REF_SLUG }}-${COMMIT_SHA}"
echo "Polkadot version = ${VERSION} (EXTRATAG = ${EXTRATAG})"
echo -n ${VERSION} > ./artifacts/VERSION
echo -n ${EXTRATAG} > ./artifacts/EXTRATAG
echo -n ${GITHUB_RUN_ID} > ./artifacts/BUILD_LINUX_JOB_ID
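# Illustrative parse, assuming output like "polkadot 1.2.3-abcdef123": the first awk
# takes the second field "1.2.3-abcdef123", then splitting on "-" keeps "1.2.3".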
RELEASE_VERSION=$(./artifacts/polkadot -V | awk '{print $2}' | awk -F "-" '{print $1}')
echo -n "v${RELEASE_VERSION}" > ./artifacts/BUILD_RELEASE_VERSION
cp -r docker/* ./artifacts
- name: tar
run: tar -cvf artifacts.tar artifacts
- name: upload artifacts
uses: actions/upload-artifact@v4
with:
name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
path: artifacts.tar
retention-days: 1
#
#
#
build-linux-stable-cumulus:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
env:
RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: build
run: |
echo "___Building a binary, please refrain from using it in production since it goes with the debug assertions.___"
forklift cargo build --release --locked -p polkadot-parachain-bin --bin polkadot-parachain
echo "___Packing the artifacts___"
mkdir -p ./artifacts
mv ./target/release/polkadot-parachain ./artifacts/.
echo "___The VERSION is either a tag name or the curent branch if triggered not by a tag___"
echo ${{ needs.preflight.outputs.SOURCE_REF_SLUG }} | tee ./artifacts/VERSION
- name: tar
run: tar -cvf artifacts.tar artifacts
- name: upload artifacts
uses: actions/upload-artifact@v4
with:
name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
path: artifacts.tar
retention-days: 1
#
#
#
build-test-parachain:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
env:
RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: build
run: |
echo "___Building a binary, please refrain from using it in production since it goes with the debug assertions.___"
forklift cargo build --release --locked -p cumulus-test-service --bin test-parachain
- name: pack artifacts
run: |
echo "___Packing the artifacts___"
mkdir -p ./artifacts
mv ./target/release/test-parachain ./artifacts/.
mkdir -p ./artifacts/zombienet
mv ./target/release/wbuild/cumulus-test-runtime/wasm_binary_spec_version_incremented.rs.compact.compressed.wasm ./artifacts/zombienet/.
- name: tar
run: tar -cvf artifacts.tar artifacts
- name: upload artifacts
uses: actions/upload-artifact@v4
with:
name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
path: artifacts.tar
retention-days: 1
#
#
#
build-test-collators:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: build
run: |
forklift cargo build --locked --profile testnet -p test-parachain-adder-collator
forklift cargo build --locked --profile testnet -p test-parachain-undying-collator
- name: pack artifacts
run: |
mkdir -p ./artifacts
mv ./target/testnet/adder-collator ./artifacts/.
mv ./target/testnet/undying-collator ./artifacts/.
echo -n "${{ needs.preflight.outputs.SOURCE_REF_SLUG }}" > ./artifacts/VERSION
echo -n "${{ needs.preflight.outputs.SOURCE_REF_SLUG }}-${COMMIT_SHA}" > ./artifacts/EXTRATAG
echo "adder-collator version = $(cat ./artifacts/VERSION) (EXTRATAG = $(cat ./artifacts/EXTRATAG))"
echo "undying-collator version = $(cat ./artifacts/VERSION) (EXTRATAG = $(cat ./artifacts/EXTRATAG))"
cp -r ./docker/* ./artifacts
- name: tar
run: tar -cvf artifacts.tar artifacts
- name: upload artifacts
uses: actions/upload-artifact@v4
with:
name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
path: artifacts.tar
retention-days: 1
#
#
#
build-malus:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: build
run: |
forklift cargo build --locked --profile testnet -p polkadot-test-malus --bin malus --bin polkadot-prepare-worker --bin polkadot-execute-worker
- name: pack artifacts
run: |
mkdir -p ./artifacts
mv ./target/testnet/malus ./artifacts/.
mv ./target/testnet/polkadot-execute-worker ./artifacts/.
mv ./target/testnet/polkadot-prepare-worker ./artifacts/.
echo -n "${{ needs.preflight.outputs.SOURCE_REF_SLUG }}" > ./artifacts/VERSION
echo -n "${{ needs.preflight.outputs.SOURCE_REF_SLUG }}-${COMMIT_SHA}" > ./artifacts/EXTRATAG
echo "polkadot-test-malus = $(cat ./artifacts/VERSION) (EXTRATAG = $(cat ./artifacts/EXTRATAG))"
cp -r ./docker/* ./artifacts
- name: tar
run: tar -cvf artifacts.tar artifacts
- name: upload artifacts
uses: actions/upload-artifact@v4
with:
name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
path: artifacts.tar
retention-days: 1
#
#
#
build-linux-substrate:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: build
run: |
mkdir -p ./artifacts/substrate/
WASM_BUILD_NO_COLOR=1 forklift cargo build --locked --release -p staging-node-cli
ls -la target/release/
- name: pack artifacts
shell: bash
run: |
mv target/release/substrate-node ./artifacts/substrate/substrate
echo -n "Substrate version = "
if [[ "${{ github.ref }}" == "refs/tags/"* ]]; then
echo "${{ github.ref_name }}" | tee ./artifacts/substrate/VERSION;
else
./artifacts/substrate/substrate --version |
cut -d ' ' -f 2 | tee ./artifacts/substrate/VERSION;
fi
sha256sum ./artifacts/substrate/substrate | tee ./artifacts/substrate/substrate.sha256
cp -r ./docker/dockerfiles/substrate_injected.Dockerfile ./artifacts/substrate/
- name: tar
run: tar -cvf artifacts.tar artifacts
- name: upload artifacts
uses: actions/upload-artifact@v4
with:
name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
path: artifacts.tar
retention-days: 1
### Build zombienet test artifacts ########################
#
#
#
prepare-bridges-zombienet-artifacts:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: build
run: |
forklift cargo build --locked --profile testnet -p polkadot-test-malus --bin malus --bin polkadot-prepare-worker --bin polkadot-execute-worker
- name: pack artifacts
run: |
mkdir -p ./artifacts/bridges-polkadot-sdk/bridges
cp -r bridges/testing ./artifacts/bridges-polkadot-sdk/bridges/testing
- name: tar
run: tar -cvf artifacts.tar artifacts
- name: upload artifacts
uses: actions/upload-artifact@v4
with:
name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
path: artifacts.tar
retention-days: 1
#
#
#
prepare-polkadot-zombienet-artifacts:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: build
run: |
forklift cargo nextest --manifest-path polkadot/zombienet-sdk-tests/Cargo.toml archive --locked --features zombie-metadata --archive-file polkadot-zombienet-tests.tar.zst
- name: pack artifacts
run: |
mkdir -p artifacts
cp polkadot-zombienet-tests.tar.zst ./artifacts
- name: tar
run: tar -cvf artifacts.tar artifacts
- name: upload artifacts
uses: actions/upload-artifact@v4
with:
name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
path: artifacts.tar
retention-days: 1
#
#
#
prepare-cumulus-zombienet-artifacts:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: build
run: |
forklift cargo nextest --manifest-path cumulus/zombienet/zombienet-sdk/Cargo.toml archive --locked --features zombie-ci --archive-file cumulus-zombienet-tests.tar.zst
- name: pack artifacts
run: |
mkdir -p artifacts
cp cumulus-zombienet-tests.tar.zst ./artifacts
- name: tar
run: tar -cvf artifacts.tar artifacts
- name: upload artifacts
uses: actions/upload-artifact@v4
with:
name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
path: artifacts.tar
retention-days: 1
### Publish ########################
#
#
#
build-push-image-test-parachain:
needs: [preflight, build-test-parachain]
runs-on: ${{ needs.preflight.outputs.RUNNER_DEFAULT }}
timeout-minutes: 60
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: actions/download-artifact@v4.1.8
with:
name: build-test-parachain-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
- name: tar
run: tar -xvf artifacts.tar
- name: build and push image
uses: ./.github/actions/build-push-image
with:
image-name: "europe-docker.pkg.dev/parity-ci-2024/temp-images/test-parachain"
dockerfile: "docker/dockerfiles/test-parachain_injected.Dockerfile"
#
#
#
build-push-image-polkadot-debug:
needs: [preflight, build-linux-stable]
runs-on: ${{ needs.preflight.outputs.RUNNER_DEFAULT }}
timeout-minutes: 60
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: actions/download-artifact@v4.1.8
with:
name: build-linux-stable-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
- name: tar
run: tar -xvf artifacts.tar
- name: build and push image
uses: ./.github/actions/build-push-image
with:
image-name: "europe-docker.pkg.dev/parity-ci-2024/temp-images/polkadot-debug"
dockerfile: "docker/dockerfiles/polkadot/polkadot_injected_debug.Dockerfile"
#
#
#
build-push-image-colander:
needs: [preflight, build-test-collators]
runs-on: ${{ needs.preflight.outputs.RUNNER_DEFAULT }}
timeout-minutes: 60
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: actions/download-artifact@v4.1.8
with:
name: build-test-collators-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
- name: tar
run: tar -xvf artifacts.tar
- name: build and push image
uses: ./.github/actions/build-push-image
with:
image-name: "europe-docker.pkg.dev/parity-ci-2024/temp-images/colander"
dockerfile: "docker/dockerfiles/collator_injected.Dockerfile"
#
#
#
build-push-image-malus:
needs: [preflight, build-malus]
runs-on: ${{ needs.preflight.outputs.RUNNER_DEFAULT }}
timeout-minutes: 60
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: actions/download-artifact@v4.1.8
with:
name: build-malus-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
- name: tar
run: tar -xvf artifacts.tar
- name: build and push image
uses: ./.github/actions/build-push-image
with:
image-name: "europe-docker.pkg.dev/parity-ci-2024/temp-images/malus"
dockerfile: "docker/dockerfiles/malus_injected.Dockerfile"
#
#
#
build-push-image-substrate-pr:
needs: [preflight, build-linux-substrate]
runs-on: ${{ needs.preflight.outputs.RUNNER_DEFAULT }}
timeout-minutes: 60
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: actions/download-artifact@v4.1.8
with:
name: build-linux-substrate-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
- name: tar
run: tar -xvf artifacts.tar
- name: build and push image
uses: ./.github/actions/build-push-image
with:
image-name: "europe-docker.pkg.dev/parity-ci-2024/temp-images/substrate"
dockerfile: "docker/dockerfiles/substrate_injected.Dockerfile"
#
#
#
# Unlike the other images, the bridges+zombienet image is based on the Zombienet image,
# which pulls the required binaries from other freshly built images (polkadot and cumulus).
build-push-image-bridges-zombienet-tests:
needs:
[
preflight,
build-linux-stable,
build-linux-stable-cumulus,
prepare-bridges-zombienet-artifacts,
]
runs-on: ${{ needs.preflight.outputs.RUNNER_DEFAULT }}
timeout-minutes: 60
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: actions/download-artifact@v4.1.8
with:
name: build-linux-stable-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
- name: tar
run: |
tar -xvf artifacts.tar
rm artifacts.tar
- uses: actions/download-artifact@v4.1.8
with:
name: build-linux-stable-cumulus-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
- name: tar
run: |
tar -xvf artifacts.tar
rm artifacts.tar
- uses: actions/download-artifact@v4.1.8
with:
name: prepare-bridges-zombienet-artifacts-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
- name: tar
run: |
tar -xvf artifacts.tar
rm artifacts.tar
- name: build and push image
uses: ./.github/actions/build-push-image
with:
image-name: "europe-docker.pkg.dev/parity-ci-2024/temp-images/bridges-zombienet-tests"
dockerfile: "docker/dockerfiles/bridges_zombienet_tests_injected.Dockerfile"
#
#
#
build-push-image-polkadot-parachain-debug:
needs: [preflight, build-linux-stable-cumulus]
runs-on: ${{ needs.preflight.outputs.RUNNER_DEFAULT }}
timeout-minutes: 60
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: actions/download-artifact@v4.1.8
with:
name: build-linux-stable-cumulus-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
- name: tar
run: tar -xvf artifacts.tar
- name: build and push image
uses: ./.github/actions/build-push-image
with:
image-name: "europe-docker.pkg.dev/parity-ci-2024/temp-images/polkadot-parachain-debug"
dockerfile: "docker/dockerfiles/polkadot-parachain/polkadot-parachain-debug_unsigned_injected.Dockerfile"
name: Check Cargo Check Runtimes
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
on:
pull_request:
types: [opened, synchronize, reopened, ready_for_review]
paths:
- "cumulus/parachains/runtimes/*"
# Jobs in this workflow depend on each other only to limit the peak number of spawned workers.
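# For example, check-runtime-collectives and check-runtime-coretime below both wait on
# check-runtime-assets, and check-runtime-contracts waits on check-runtime-collectives,
# capping how many runners are active at the same time.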
jobs:
isdraft:
uses: ./.github/workflows/reusable-isdraft.yml
preflight:
needs: isdraft
uses: ./.github/workflows/reusable-preflight.yml
check-runtime-assets:
runs-on: ${{ needs.preflight.outputs.RUNNER }}
needs: [preflight]
timeout-minutes: 20
container:
image: ${{ needs.preflight.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Run cargo check
uses: ./.github/actions/cargo-check-runtimes
with:
root: cumulus/parachains/runtimes/assets
check-runtime-collectives:
runs-on: ${{ needs.preflight.outputs.RUNNER }}
needs: [check-runtime-assets, preflight]
timeout-minutes: 20
container:
image: ${{ needs.preflight.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Run cargo check
uses: ./.github/actions/cargo-check-runtimes
with:
root: cumulus/parachains/runtimes/collectives
check-runtime-coretime:
runs-on: ${{ needs.preflight.outputs.RUNNER }}
container:
image: ${{ needs.preflight.outputs.IMAGE }}
needs: [check-runtime-assets, preflight]
timeout-minutes: 20
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Run cargo check
uses: ./.github/actions/cargo-check-runtimes
with:
root: cumulus/parachains/runtimes/coretime
check-runtime-bridge-hubs:
runs-on: ${{ needs.preflight.outputs.RUNNER }}
container:
image: ${{ needs.preflight.outputs.IMAGE }}
needs: [preflight]
timeout-minutes: 20
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Run cargo check
uses: ./.github/actions/cargo-check-runtimes
with:
root: cumulus/parachains/runtimes/bridge-hubs
check-runtime-contracts:
runs-on: ${{ needs.preflight.outputs.RUNNER }}
container:
image: ${{ needs.preflight.outputs.IMAGE }}
needs: [check-runtime-collectives, preflight]
timeout-minutes: 20
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Run cargo check
uses: ./.github/actions/cargo-check-runtimes
with:
root: cumulus/parachains/runtimes/contracts
check-runtime-testing:
runs-on: ${{ needs.preflight.outputs.RUNNER }}
container:
image: ${{ needs.preflight.outputs.IMAGE }}
needs: [preflight]
timeout-minutes: 20
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Run cargo check
uses: ./.github/actions/cargo-check-runtimes
with:
root: cumulus/parachains/runtimes/testing
confirm-required-jobs-passed:
runs-on: ubuntu-latest
name: All check-runtime-* tests passed
# If any new job gets added, be sure to add it to this array
needs:
- check-runtime-assets
- check-runtime-collectives
- check-runtime-coretime
- check-runtime-bridge-hubs
- check-runtime-contracts
- check-runtime-testing
if: always() && !cancelled()
steps:
- run: |
tee resultfile <<< '${{ toJSON(needs) }}'
FAILURES=$(cat resultfile | grep '"result": "failure"' | wc -l)
if [ $FAILURES -gt 0 ]; then
echo "### At least one required job failed ❌" >> $GITHUB_STEP_SUMMARY
exit 1
else
echo '### Good job! All the required jobs passed 🚀' >> $GITHUB_STEP_SUMMARY
fi
name: Check Features
on:
pull_request:
types: [opened, synchronize, reopened, ready_for_review]
jobs:
check-features:
runs-on: ubuntu-latest
steps:
- name: Fetch latest code
uses: actions/checkout@v4
- name: Check
uses: hack-ink/cargo-featalign-action@bea88a864d6ca7d0c53c26f1391ce1d431dc7f34 # v0.1.1
with:
crate: templates/parachain/runtime/
features: std,runtime-benchmarks,try-runtime
ignore: sc-executor
default-std: true
@@ -5,7 +5,7 @@ on:
    branches:
      - master
  pull_request:
-    types: [ opened, synchronize, reopened, ready_for_review, labeled ]
+    types: [opened, synchronize, reopened, ready_for_review]
  merge_group:
concurrency:
@@ -16,70 +16,110 @@ env:
  ARTIFACTS_NAME: frame-omni-bencher-artifacts
jobs:
-  changes:
-    # TODO: remove once migration is complete or this workflow is fully stable
-    if: contains(github.event.label.name, 'GHA-migration')
-    permissions:
-      pull-requests: read
-    uses: ./.github/workflows/reusable-check-changed-files.yml
-  set-image:
-    # GitHub Actions allows using 'env' in a container context.
-    # However, env variables don't work for forks: https://github.com/orgs/community/discussions/44322
-    # This workaround sets the container image for each job using 'set-image' job output.
-    runs-on: ubuntu-latest
-    needs: changes
-    if: ${{ needs.changes.outputs.rust }}
-    outputs:
-      IMAGE: ${{ steps.set_image.outputs.IMAGE }}
+  isdraft:
+    uses: ./.github/workflows/reusable-isdraft.yml
+  preflight:
+    needs: isdraft
+    uses: ./.github/workflows/reusable-preflight.yml
+  quick-benchmarks-omni:
+    runs-on: ${{ needs.preflight.outputs.RUNNER_BENCHMARK }}
+    needs: [preflight]
+    if: ${{ needs.preflight.outputs.changes_rust }}
+    env:
+      RUSTFLAGS: "-C debug-assertions"
+      RUST_BACKTRACE: "full"
+      WASM_BUILD_NO_COLOR: 1
+      WASM_BUILD_RUSTFLAGS: "-C debug-assertions"
+      RUST_LOG: "frame_omni_bencher=info,polkadot_sdk_frame=info"
+    timeout-minutes: 30
+    container:
+      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
-      - id: set_image
-        run: cat .github/env >> $GITHUB_OUTPUT
+      - name: script
+        id: required
+        run: |
+          forklift cargo build --locked --quiet --release -p asset-hub-westend-runtime --features runtime-benchmarks
+          forklift cargo run --locked --release -p frame-omni-bencher --quiet -- v1 benchmark pallet --runtime target/release/wbuild/asset-hub-westend-runtime/asset_hub_westend_runtime.compact.compressed.wasm --all --steps 2 --repeat 1 --quiet
+      - name: Stop all workflows if failed
+        if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
+        uses: ./.github/actions/workflow-stopper
+        with:
+          app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
+          app-key: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_KEY }}
+  runtime-matrix:
+    runs-on: ubuntu-latest
+    needs: [preflight]
+    if: ${{ needs.preflight.outputs.changes_rust }}
+    timeout-minutes: 30
+    outputs:
+      runtime: ${{ steps.runtime.outputs.runtime }}
+    container:
+      image: ${{ needs.preflight.outputs.IMAGE }}
+    name: Extract runtimes from matrix
+    steps:
+      - uses: actions/checkout@v4
+      - id: runtime
+        run: |
+          RUNTIMES=$(jq '[.[] | select(.package != null)]' .github/workflows/runtimes-matrix.json)
+          RUNTIMES=$(echo $RUNTIMES | jq -c .)
+          echo "runtime=$RUNTIMES"
+          echo "runtime=$RUNTIMES" >> $GITHUB_OUTPUT
  run-frame-omni-bencher:
-    runs-on: arc-runners-polkadot-sdk-beefy
-    needs: [ set-image, changes ] # , build-frame-omni-bencher ]
-    if: ${{ needs.changes.outputs.rust }}
+    runs-on: ${{ needs.preflight.outputs.RUNNER_BENCHMARK }}
+    needs: [preflight, runtime-matrix]
+    if: ${{ needs.preflight.outputs.changes_rust }}
    timeout-minutes: 30
    strategy:
      fail-fast: false # keep running other workflows even if one fails, to see the logs of all possible failures
      matrix:
-        runtime:
-          [
-            westend-runtime,
-            rococo-runtime,
-            asset-hub-rococo-runtime,
-            asset-hub-westend-runtime,
-            bridge-hub-rococo-runtime,
-            bridge-hub-westend-runtime,
-            collectives-westend-runtime,
-            coretime-rococo-runtime,
-            coretime-westend-runtime,
-            people-rococo-runtime,
-            people-westend-runtime,
-            glutton-westend-runtime,
-          ]
+        runtime: ${{ fromJSON(needs.runtime-matrix.outputs.runtime) }}
    container:
-      image: ${{ needs.set-image.outputs.IMAGE }}
+      image: ${{ needs.preflight.outputs.IMAGE }}
    env:
-      PACKAGE_NAME: ${{ matrix.runtime }}
+      PACKAGE_NAME: ${{ matrix.runtime.package }}
+      FLAGS: ${{ matrix.runtime.bench_flags }}
+      RUST_LOG: "frame_omni_bencher=info,polkadot_sdk_frame=info"
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: script
+        id: required
        run: |
          RUNTIME_BLOB_NAME=$(echo $PACKAGE_NAME | sed 's/-/_/g').compact.compressed.wasm
          RUNTIME_BLOB_PATH=./target/release/wbuild/$PACKAGE_NAME/$RUNTIME_BLOB_NAME
-          forklift cargo build --release --locked -p $PACKAGE_NAME -p frame-omni-bencher --features runtime-benchmarks
+          forklift cargo build --release --locked -p $PACKAGE_NAME -p frame-omni-bencher --features=${{ matrix.runtime.bench_features }} --quiet
          echo "Running short benchmarking for PACKAGE_NAME=$PACKAGE_NAME and RUNTIME_BLOB_PATH=$RUNTIME_BLOB_PATH"
          ls -lrt $RUNTIME_BLOB_PATH
-          ./target/release/frame-omni-bencher v1 benchmark pallet --runtime $RUNTIME_BLOB_PATH --all --steps 2 --repeat 1
+          cmd="./target/release/frame-omni-bencher v1 benchmark pallet --runtime $RUNTIME_BLOB_PATH --all --steps 2 --repeat 1 $FLAGS"
+          echo "Running command: $cmd"
+          eval "$cmd"
+      - name: Stop all workflows if failed
+        if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
+        uses: ./.github/actions/workflow-stopper
+        with:
+          app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
+          app-key: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_KEY }}
  confirm-frame-omni-benchers-passed:
    runs-on: ubuntu-latest
    name: All benchmarks passed
-    needs: run-frame-omni-bencher
+    needs: [quick-benchmarks-omni, run-frame-omni-bencher]
+    if: always() && !cancelled()
    steps:
-      - run: echo '### Good job! All the benchmarks passed 🚀' >> $GITHUB_STEP_SUMMARY
+      - run: |
+          tee resultfile <<< '${{ toJSON(needs) }}'
+          FAILURES=$(cat resultfile | grep '"result": "failure"' | wc -l)
+          if [ $FAILURES -gt 0 ]; then
+            echo "### At least one required job failed ❌" >> $GITHUB_STEP_SUMMARY
+            exit 1
+          else
+            echo '### Good job! All the required jobs passed 🚀' >> $GITHUB_STEP_SUMMARY
+          fi
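# A minimal sketch of the shape each runtimes-matrix.json entry is assumed to have,
# based only on the fields this workflow reads (.package, .bench_features,
# .bench_flags); the values below are placeholders, not real entries:
#   { "package": "westend-runtime", "bench_features": "runtime-benchmarks", "bench_flags": "" }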