From 9be3e0b6c13581e48e3765440663ec695302fbf2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 02:57:01 +0000 Subject: [PATCH 01/39] Bump thiserror from 1.0.57 to 1.0.58 Bumps [thiserror](https://github.com/dtolnay/thiserror) from 1.0.57 to 1.0.58. - [Release notes](https://github.com/dtolnay/thiserror/releases) - [Commits](https://github.com/dtolnay/thiserror/compare/1.0.57...1.0.58) --- updated-dependencies: - dependency-name: thiserror dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- Cargo.lock | 10 +++++----- Cargo.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4b2bc3bbf..13ca4de16 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9364,9 +9364,9 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.57" +version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" +checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297" dependencies = [ "thiserror-impl", ] @@ -9393,9 +9393,9 @@ dependencies = [ [[package]] name = "thiserror-impl" -version = "1.0.57" +version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" +checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", @@ -9806,7 +9806,7 @@ version = "1.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" dependencies = [ - "cfg-if 0.1.10", + "cfg-if 1.0.0", "digest 0.10.7", "rand", "static_assertions", diff --git a/Cargo.toml b/Cargo.toml index 78953c9ae..f866e6827 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -92,4 +92,4 
@@ log = { version = "0.4.20", default-features = false } quote = { version = "1.0.33" } serde = { version = "1.0.197", default-features = false } serde_json = { version = "1.0.114", default-features = false } -thiserror = { version = "1.0.48" } +thiserror = { version = "1.0.58" } -- GitLab From d9c5e410847410c4c9196e812dd4f9e507ea0f6f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 02:56:35 +0000 Subject: [PATCH 02/39] Bump scale-info from 2.10.0 to 2.11.0 Bumps [scale-info](https://github.com/paritytech/scale-info) from 2.10.0 to 2.11.0. - [Release notes](https://github.com/paritytech/scale-info/releases) - [Changelog](https://github.com/paritytech/scale-info/blob/master/CHANGELOG.md) - [Commits](https://github.com/paritytech/scale-info/compare/v2.10.0...v2.11.0) --- updated-dependencies: - dependency-name: scale-info dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- Cargo.lock | 4 ++-- bin/runtime-common/Cargo.toml | 2 +- modules/beefy/Cargo.toml | 2 +- modules/grandpa/Cargo.toml | 2 +- modules/messages/Cargo.toml | 2 +- modules/parachains/Cargo.toml | 2 +- modules/relayers/Cargo.toml | 2 +- modules/xcm-bridge-hub-router/Cargo.toml | 2 +- modules/xcm-bridge-hub/Cargo.toml | 2 +- primitives/beefy/Cargo.toml | 2 +- primitives/chain-asset-hub-rococo/Cargo.toml | 2 +- primitives/chain-asset-hub-westend/Cargo.toml | 2 +- primitives/chain-polkadot-bulletin/Cargo.toml | 2 +- primitives/header-chain/Cargo.toml | 2 +- primitives/messages/Cargo.toml | 2 +- primitives/parachains/Cargo.toml | 2 +- primitives/polkadot-core/Cargo.toml | 2 +- primitives/relayers/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- primitives/xcm-bridge-hub-router/Cargo.toml | 2 +- relays/client-bridge-hub-kusama/Cargo.toml | 2 +- relays/client-bridge-hub-polkadot/Cargo.toml | 2 +- relays/client-bridge-hub-rococo/Cargo.toml | 2 +- 
relays/client-bridge-hub-westend/Cargo.toml | 2 +- relays/client-kusama/Cargo.toml | 2 +- relays/client-polkadot-bulletin/Cargo.toml | 2 +- relays/client-polkadot/Cargo.toml | 2 +- relays/client-rococo/Cargo.toml | 2 +- relays/client-substrate/Cargo.toml | 2 +- relays/client-westend/Cargo.toml | 2 +- 30 files changed, 31 insertions(+), 31 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 13ca4de16..203348af7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7443,9 +7443,9 @@ dependencies = [ [[package]] name = "scale-info" -version = "2.10.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f7d66a1128282b7ef025a8ead62a4a9fcf017382ec53b8ffbf4d7bf77bd3c60" +checksum = "2ef2175c2907e7c8bc0a9c3f86aeb5ec1f3b275300ad58a44d0c3ae379a5e52e" dependencies = [ "bitvec", "cfg-if 1.0.0", diff --git a/bin/runtime-common/Cargo.toml b/bin/runtime-common/Cargo.toml index af0c658a0..835a8cf13 100644 --- a/bin/runtime-common/Cargo.toml +++ b/bin/runtime-common/Cargo.toml @@ -14,7 +14,7 @@ workspace = true codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["derive"] } hash-db = { version = "0.16.0", default-features = false } log = { workspace = true } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } static_assertions = { version = "1.1", optional = true } # Bridge dependencies diff --git a/modules/beefy/Cargo.toml b/modules/beefy/Cargo.toml index 30c91feb5..4ead33c44 100644 --- a/modules/beefy/Cargo.toml +++ b/modules/beefy/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } log = { workspace = true } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } 
serde = { optional = true, workspace = true } # Bridge Dependencies diff --git a/modules/grandpa/Cargo.toml b/modules/grandpa/Cargo.toml index 1a5bfeff1..eb7ad6a27 100644 --- a/modules/grandpa/Cargo.toml +++ b/modules/grandpa/Cargo.toml @@ -15,7 +15,7 @@ workspace = true codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } finality-grandpa = { version = "0.16.2", default-features = false } log = { workspace = true } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } # Bridge Dependencies diff --git a/modules/messages/Cargo.toml b/modules/messages/Cargo.toml index f6b1e7120..55b48cc03 100644 --- a/modules/messages/Cargo.toml +++ b/modules/messages/Cargo.toml @@ -13,7 +13,7 @@ workspace = true codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } log = { workspace = true } num-traits = { version = "0.2", default-features = false } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } # Bridge dependencies diff --git a/modules/parachains/Cargo.toml b/modules/parachains/Cargo.toml index 2011f2cbb..2b8445b79 100644 --- a/modules/parachains/Cargo.toml +++ b/modules/parachains/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } log = { workspace = true } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } # Bridge Dependencies diff --git a/modules/relayers/Cargo.toml b/modules/relayers/Cargo.toml index 5e93e3196..972b4c33a 100644 --- a/modules/relayers/Cargo.toml +++ b/modules/relayers/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { 
package = "parity-scale-codec", version = "3.1.5", default-features = false } log = { workspace = true } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } # Bridge dependencies diff --git a/modules/xcm-bridge-hub-router/Cargo.toml b/modules/xcm-bridge-hub-router/Cargo.toml index 280eeac94..926b704bd 100644 --- a/modules/xcm-bridge-hub-router/Cargo.toml +++ b/modules/xcm-bridge-hub-router/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } log = { workspace = true } -scale-info = { version = "2.10.0", default-features = false, features = ["bit-vec", "derive", "serde"] } +scale-info = { version = "2.11.0", default-features = false, features = ["bit-vec", "derive", "serde"] } # Bridge dependencies diff --git a/modules/xcm-bridge-hub/Cargo.toml b/modules/xcm-bridge-hub/Cargo.toml index aaa114941..4d5d01234 100644 --- a/modules/xcm-bridge-hub/Cargo.toml +++ b/modules/xcm-bridge-hub/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } log = { workspace = true } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } # Bridge Dependencies bp-messages = { path = "../../primitives/messages", default-features = false } diff --git a/primitives/beefy/Cargo.toml b/primitives/beefy/Cargo.toml index 4785f8297..b6bb26986 100644 --- a/primitives/beefy/Cargo.toml +++ b/primitives/beefy/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["derive", "bit-vec"] } -scale-info = { version = "2.10.0", default-features = false, features = ["bit-vec", "derive"] } 
+scale-info = { version = "2.11.0", default-features = false, features = ["bit-vec", "derive"] } serde = { default-features = false, features = ["alloc", "derive"], workspace = true } # Bridge Dependencies diff --git a/primitives/chain-asset-hub-rococo/Cargo.toml b/primitives/chain-asset-hub-rococo/Cargo.toml index ad0eb39f9..08be719bf 100644 --- a/primitives/chain-asset-hub-rococo/Cargo.toml +++ b/primitives/chain-asset-hub-rococo/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } # Substrate Dependencies frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } diff --git a/primitives/chain-asset-hub-westend/Cargo.toml b/primitives/chain-asset-hub-westend/Cargo.toml index 15b17f4d0..d8245c7a8 100644 --- a/primitives/chain-asset-hub-westend/Cargo.toml +++ b/primitives/chain-asset-hub-westend/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } # Substrate Dependencies frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } diff --git a/primitives/chain-polkadot-bulletin/Cargo.toml b/primitives/chain-polkadot-bulletin/Cargo.toml index 86f05624d..797a893b2 100644 --- a/primitives/chain-polkadot-bulletin/Cargo.toml +++ b/primitives/chain-polkadot-bulletin/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["derive"] } 
-scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } # Bridge Dependencies diff --git a/primitives/header-chain/Cargo.toml b/primitives/header-chain/Cargo.toml index 7167c4104..6e9178f3c 100644 --- a/primitives/header-chain/Cargo.toml +++ b/primitives/header-chain/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } finality-grandpa = { version = "0.16.2", default-features = false } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } serde = { features = ["alloc", "derive"], workspace = true } # Bridge dependencies diff --git a/primitives/messages/Cargo.toml b/primitives/messages/Cargo.toml index d121b6931..282ef93d3 100644 --- a/primitives/messages/Cargo.toml +++ b/primitives/messages/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["bit-vec", "derive"] } -scale-info = { version = "2.10.0", default-features = false, features = ["bit-vec", "derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["bit-vec", "derive"] } serde = { features = ["alloc", "derive"], workspace = true } # Bridge dependencies diff --git a/primitives/parachains/Cargo.toml b/primitives/parachains/Cargo.toml index e62ae6a8d..3e148d528 100644 --- a/primitives/parachains/Cargo.toml +++ b/primitives/parachains/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["derive"] } impl-trait-for-tuples = "0.2" -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", 
default-features = false, features = ["derive"] } # Bridge dependencies diff --git a/primitives/polkadot-core/Cargo.toml b/primitives/polkadot-core/Cargo.toml index c28f3f2e3..dd3912429 100644 --- a/primitives/polkadot-core/Cargo.toml +++ b/primitives/polkadot-core/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["derive"] } parity-util-mem = { version = "0.12.0", optional = true } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } serde = { default-features = false, features = ["derive"], optional = true, workspace = true } # Bridge Dependencies diff --git a/primitives/relayers/Cargo.toml b/primitives/relayers/Cargo.toml index 19aed6b03..3ee433974 100644 --- a/primitives/relayers/Cargo.toml +++ b/primitives/relayers/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["bit-vec", "derive"] } -scale-info = { version = "2.10.0", default-features = false, features = ["bit-vec", "derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["bit-vec", "derive"] } # Bridge Dependencies diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index d7cef6a10..b6ed0bc06 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -15,7 +15,7 @@ hash-db = { version = "0.16.0", default-features = false } impl-trait-for-tuples = "0.2.2" log = { workspace = true } num-traits = { version = "0.2", default-features = false } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } serde = { features = ["alloc", "derive"], workspace = true } # Substrate Dependencies diff 
--git a/primitives/xcm-bridge-hub-router/Cargo.toml b/primitives/xcm-bridge-hub-router/Cargo.toml index a9f584e2a..c7bae8443 100644 --- a/primitives/xcm-bridge-hub-router/Cargo.toml +++ b/primitives/xcm-bridge-hub-router/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["bit-vec", "derive"] } -scale-info = { version = "2.10.0", default-features = false, features = ["bit-vec", "derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["bit-vec", "derive"] } # Substrate Dependencies sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } diff --git a/relays/client-bridge-hub-kusama/Cargo.toml b/relays/client-bridge-hub-kusama/Cargo.toml index 30177443c..6e41bb3fc 100644 --- a/relays/client-bridge-hub-kusama/Cargo.toml +++ b/relays/client-bridge-hub-kusama/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } #relay-substrate-client = { path = "../client-substrate" } subxt = { version = "0.32.1", default-features = false, features = ["native"] } diff --git a/relays/client-bridge-hub-polkadot/Cargo.toml b/relays/client-bridge-hub-polkadot/Cargo.toml index a85e2d685..f5fc69d02 100644 --- a/relays/client-bridge-hub-polkadot/Cargo.toml +++ b/relays/client-bridge-hub-polkadot/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", 
default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relays/client-bridge-hub-rococo/Cargo.toml b/relays/client-bridge-hub-rococo/Cargo.toml index ea59240fd..efccfa5fb 100644 --- a/relays/client-bridge-hub-rococo/Cargo.toml +++ b/relays/client-bridge-hub-rococo/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relays/client-bridge-hub-westend/Cargo.toml b/relays/client-bridge-hub-westend/Cargo.toml index 18b4f3230..188839e2d 100644 --- a/relays/client-bridge-hub-westend/Cargo.toml +++ b/relays/client-bridge-hub-westend/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relays/client-kusama/Cargo.toml b/relays/client-kusama/Cargo.toml index 3fa32aa48..6d3a71b38 100644 --- a/relays/client-kusama/Cargo.toml +++ b/relays/client-kusama/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies diff --git 
a/relays/client-polkadot-bulletin/Cargo.toml b/relays/client-polkadot-bulletin/Cargo.toml index b0eafc963..2160a35fb 100644 --- a/relays/client-polkadot-bulletin/Cargo.toml +++ b/relays/client-polkadot-bulletin/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relays/client-polkadot/Cargo.toml b/relays/client-polkadot/Cargo.toml index 52c836e14..daa3423bb 100644 --- a/relays/client-polkadot/Cargo.toml +++ b/relays/client-polkadot/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relays/client-rococo/Cargo.toml b/relays/client-rococo/Cargo.toml index ba546396f..7a6b7d3ba 100644 --- a/relays/client-rococo/Cargo.toml +++ b/relays/client-rococo/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relays/client-substrate/Cargo.toml b/relays/client-substrate/Cargo.toml index 7ff2e2f45..ea02a0ad2 100644 --- a/relays/client-substrate/Cargo.toml +++ 
b/relays/client-substrate/Cargo.toml @@ -17,7 +17,7 @@ jsonrpsee = { version = "0.17", features = ["macros", "ws-client"] } log = { workspace = true } num-traits = "0.2" rand = "0.8" -scale-info = { version = "2.10.0", features = ["derive"] } +scale-info = { version = "2.11.0", features = ["derive"] } tokio = { version = "1.36", features = ["rt-multi-thread"] } thiserror = { workspace = true } diff --git a/relays/client-westend/Cargo.toml b/relays/client-westend/Cargo.toml index 1933a1f41..2c9aacc16 100644 --- a/relays/client-westend/Cargo.toml +++ b/relays/client-westend/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies -- GitLab From 61e865bc7170200caebce90199844103aafa9e8c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 02:56:13 +0000 Subject: [PATCH 03/39] Bump anyhow from 1.0.80 to 1.0.81 Bumps [anyhow](https://github.com/dtolnay/anyhow) from 1.0.80 to 1.0.81. - [Release notes](https://github.com/dtolnay/anyhow/releases) - [Commits](https://github.com/dtolnay/anyhow/compare/1.0.80...1.0.81) --- updated-dependencies: - dependency-name: anyhow dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 203348af7..0f005b9c6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -219,9 +219,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.80" +version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1" +checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247" [[package]] name = "approx" -- GitLab From 62372e7455697bb0352f259875e9f19d40f24965 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 02:55:47 +0000 Subject: [PATCH 04/39] Bump async-trait from 0.1.77 to 0.1.78 Bumps [async-trait](https://github.com/dtolnay/async-trait) from 0.1.77 to 0.1.78. - [Release notes](https://github.com/dtolnay/async-trait/releases) - [Commits](https://github.com/dtolnay/async-trait/compare/0.1.77...0.1.78) --- updated-dependencies: - dependency-name: async-trait dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0f005b9c6..347da03ae 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -761,9 +761,9 @@ checksum = "ecc7ab41815b3c653ccd2978ec3255c81349336702dfdf62ee6f7069b12a3aae" [[package]] name = "async-trait" -version = "0.1.77" +version = "0.1.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" +checksum = "461abc97219de0eaaf81fe3ef974a540158f3d079c2ab200f891f1a2ef201e85" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", -- GitLab From 9cb8a2cae83ba954251453edfbbbae852d9fdebc Mon Sep 17 00:00:00 2001 From: Svyatoslav Nikolsky Date: Tue, 19 Mar 2024 12:13:29 +0300 Subject: [PATCH 05/39] fixed dependency in runtime-codegen (#2886) --- tools/runtime-codegen/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/runtime-codegen/Cargo.toml b/tools/runtime-codegen/Cargo.toml index de281f55a..24fe717f3 100644 --- a/tools/runtime-codegen/Cargo.toml +++ b/tools/runtime-codegen/Cargo.toml @@ -15,7 +15,7 @@ clap = { version = "4.4.6", features = ["derive", "cargo"] } codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["derive"] } color-eyre = "0.6.1" proc-macro2 = "1.0.56" -quote = { workspace = true } +quote = { version = "1.0.33" } subxt-codegen = { git = "https://github.com/paritytech/subxt", branch = "master", default-features = false, features = ["fetch-metadata"] } wasm-loader = { git = "https://github.com/chevdor/subwasm", branch = "master" } wasm-testbed = { git = "https://github.com/chevdor/subwasm", branch = "master" } -- GitLab From 2a76cbbb94259b5f656b9899b8eb01bbd7a53a0b Mon Sep 17 00:00:00 2001 From: Serban Iorga Date: Wed, 20 Mar 2024 09:45:43 +0100 Subject: [PATCH 06/39] Move generic CLI logic to different crate (#2885) * Move generic CLI logic to separate 
crate * Move and rename `CliChain` trait definition Move it to `relay-substrate-client` * Move generic cli logic to substrate-relay-helper * Fix docs warnings --- Cargo.lock | 3 + Cargo.toml | 2 +- ..._kusama_messages_to_bridge_hub_polkadot.rs | 7 +- ..._polkadot_messages_to_bridge_hub_kusama.rs | 7 +- .../kusama_headers_to_bridge_hub_polkadot.rs | 2 +- ...usama_parachains_to_bridge_hub_polkadot.rs | 6 +- .../polkadot_headers_to_bridge_hub_kusama.rs | 2 +- ...olkadot_parachains_to_bridge_hub_kusama.rs | 6 +- ..._polkadot_messages_to_polkadot_bulletin.rs | 7 +- ...bulletin_headers_to_bridge_hub_polkadot.rs | 10 +- ...ulletin_messages_to_bridge_hub_polkadot.rs | 7 +- .../polkadot_headers_to_polkadot_bulletin.rs | 8 +- ...olkadot_parachains_to_polkadot_bulletin.rs | 4 +- ..._hub_rococo_messages_to_rococo_bulletin.rs | 7 +- .../src/bridges/rococo_bulletin/mod.rs | 9 +- ...o_bulletin_headers_to_bridge_hub_rococo.rs | 9 +- ..._bulletin_messages_to_bridge_hub_rococo.rs | 7 +- .../rococo_headers_to_rococo_bulletin.rs | 7 +- .../rococo_parachains_to_rococo_bulletin.rs | 2 +- ...b_rococo_messages_to_bridge_hub_westend.rs | 7 +- ...b_westend_messages_to_bridge_hub_rococo.rs | 7 +- .../rococo_headers_to_bridge_hub_westend.rs | 8 +- ...rococo_parachains_to_bridge_hub_westend.rs | 6 +- .../westend_headers_to_bridge_hub_rococo.rs | 8 +- ...westend_parachains_to_bridge_hub_rococo.rs | 6 +- relays/bin-substrate/src/chains/kusama.rs | 32 -- relays/bin-substrate/src/chains/mod.rs | 23 - relays/bin-substrate/src/chains/polkadot.rs | 32 -- .../src/chains/polkadot_bulletin.rs | 26 - relays/bin-substrate/src/chains/rococo.rs | 32 -- relays/bin-substrate/src/chains/westend.rs | 32 -- relays/bin-substrate/src/cli/chain_schema.rs | 238 +-------- .../src/cli/detect_equivocations.rs | 65 +-- relays/bin-substrate/src/cli/init_bridge.rs | 158 ++---- relays/bin-substrate/src/cli/mod.rs | 208 +------- relays/bin-substrate/src/cli/relay_headers.rs | 60 +-- .../mod.rs => 
relay_headers_and_messages.rs} | 354 ++----------- .../bin-substrate/src/cli/relay_messages.rs | 88 +--- .../bin-substrate/src/cli/relay_parachains.rs | 78 +-- relays/bin-substrate/src/main.rs | 1 - relays/client-bridge-hub-kusama/src/lib.rs | 10 +- relays/client-bridge-hub-polkadot/src/lib.rs | 10 +- relays/client-bridge-hub-rococo/src/lib.rs | 10 +- relays/client-bridge-hub-westend/src/lib.rs | 10 +- relays/client-kusama/src/lib.rs | 10 +- relays/client-polkadot-bulletin/src/lib.rs | 10 +- relays/client-polkadot/src/lib.rs | 10 +- relays/client-rococo/src/lib.rs | 10 +- relays/client-substrate/src/chain.rs | 11 + relays/client-substrate/src/lib.rs | 6 +- relays/client-westend/src/lib.rs | 10 +- relays/lib-substrate-relay/Cargo.toml | 7 +- .../src/cli/bridge.rs | 37 +- .../src/cli/chain_schema.rs | 250 +++++++++ .../src/cli/detect_equivocations.rs | 65 +++ .../src/cli/init_bridge.rs | 85 +++ relays/lib-substrate-relay/src/cli/mod.rs | 192 +++++++ .../src/cli/relay_headers.rs | 76 +++ .../src/cli/relay_headers_and_messages/mod.rs | 484 ++++++++++++++++++ .../parachain_to_parachain.rs | 54 +- .../relay_to_parachain.rs | 42 +- .../relay_to_relay.rs | 24 +- .../src/cli/relay_messages.rs | 89 ++++ .../src/cli/relay_parachains.rs | 91 ++++ relays/lib-substrate-relay/src/lib.rs | 1 + 65 files changed, 1748 insertions(+), 1437 deletions(-) delete mode 100644 relays/bin-substrate/src/chains/kusama.rs delete mode 100644 relays/bin-substrate/src/chains/mod.rs delete mode 100644 relays/bin-substrate/src/chains/polkadot.rs delete mode 100644 relays/bin-substrate/src/chains/polkadot_bulletin.rs delete mode 100644 relays/bin-substrate/src/chains/rococo.rs delete mode 100644 relays/bin-substrate/src/chains/westend.rs rename relays/bin-substrate/src/cli/{relay_headers_and_messages/mod.rs => relay_headers_and_messages.rs} (52%) rename relays/{bin-substrate => lib-substrate-relay}/src/cli/bridge.rs (81%) create mode 100644 relays/lib-substrate-relay/src/cli/chain_schema.rs create 
mode 100644 relays/lib-substrate-relay/src/cli/detect_equivocations.rs create mode 100644 relays/lib-substrate-relay/src/cli/init_bridge.rs create mode 100644 relays/lib-substrate-relay/src/cli/mod.rs create mode 100644 relays/lib-substrate-relay/src/cli/relay_headers.rs create mode 100644 relays/lib-substrate-relay/src/cli/relay_headers_and_messages/mod.rs rename relays/{bin-substrate => lib-substrate-relay}/src/cli/relay_headers_and_messages/parachain_to_parachain.rs (82%) rename relays/{bin-substrate => lib-substrate-relay}/src/cli/relay_headers_and_messages/relay_to_parachain.rs (88%) rename relays/{bin-substrate => lib-substrate-relay}/src/cli/relay_headers_and_messages/relay_to_relay.rs (91%) create mode 100644 relays/lib-substrate-relay/src/cli/relay_messages.rs create mode 100644 relays/lib-substrate-relay/src/cli/relay_parachains.rs diff --git a/Cargo.lock b/Cargo.lock index 347da03ae..5ae38806b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9109,6 +9109,7 @@ dependencies = [ "pallet-transaction-payment", "parachains-relay", "parity-scale-codec", + "rbtag", "relay-bridge-hub-rococo-client", "relay-bridge-hub-westend-client", "relay-rococo-client", @@ -9117,6 +9118,8 @@ dependencies = [ "sp-consensus-grandpa", "sp-core", "sp-runtime", + "structopt", + "strum 0.26.2", "thiserror", ] diff --git a/Cargo.toml b/Cargo.toml index f866e6827..b71199959 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -53,7 +53,7 @@ members = [ "relays/lib-substrate-relay", "relays/messages", "relays/parachains", - "relays/utils", + "relays/utils" ] # Setup clippy lints as `polkadot-sdk`, diff --git a/relays/bin-substrate/src/bridges/kusama_polkadot/bridge_hub_kusama_messages_to_bridge_hub_polkadot.rs b/relays/bin-substrate/src/bridges/kusama_polkadot/bridge_hub_kusama_messages_to_bridge_hub_polkadot.rs index e57302315..fc239ca1e 100644 --- a/relays/bin-substrate/src/bridges/kusama_polkadot/bridge_hub_kusama_messages_to_bridge_hub_polkadot.rs +++ 
b/relays/bin-substrate/src/bridges/kusama_polkadot/bridge_hub_kusama_messages_to_bridge_hub_polkadot.rs @@ -16,10 +16,13 @@ //! BridgeHubKusama-to-BridgeHubPolkadot messages sync entrypoint. -use crate::cli::bridge::{CliBridgeBase, MessagesCliBridge}; use relay_bridge_hub_kusama_client::BridgeHubKusama; use relay_bridge_hub_polkadot_client::BridgeHubPolkadot; -use substrate_relay_helper::{messages_lane::SubstrateMessageLane, UtilityPalletBatchCallBuilder}; +use substrate_relay_helper::{ + cli::bridge::{CliBridgeBase, MessagesCliBridge}, + messages_lane::SubstrateMessageLane, + UtilityPalletBatchCallBuilder, +}; /// BridgeHubKusama-to-BridgeHubPolkadot messages bridge. pub struct BridgeHubKusamaToBridgeHubPolkadotMessagesCliBridge {} diff --git a/relays/bin-substrate/src/bridges/kusama_polkadot/bridge_hub_polkadot_messages_to_bridge_hub_kusama.rs b/relays/bin-substrate/src/bridges/kusama_polkadot/bridge_hub_polkadot_messages_to_bridge_hub_kusama.rs index 0a1b21cd1..8d8e5e0c3 100644 --- a/relays/bin-substrate/src/bridges/kusama_polkadot/bridge_hub_polkadot_messages_to_bridge_hub_kusama.rs +++ b/relays/bin-substrate/src/bridges/kusama_polkadot/bridge_hub_polkadot_messages_to_bridge_hub_kusama.rs @@ -16,10 +16,13 @@ //! BridgeHubPolkadot-to-BridgeHubKusama messages sync entrypoint. -use crate::cli::bridge::{CliBridgeBase, MessagesCliBridge}; use relay_bridge_hub_kusama_client::BridgeHubKusama; use relay_bridge_hub_polkadot_client::BridgeHubPolkadot; -use substrate_relay_helper::{messages_lane::SubstrateMessageLane, UtilityPalletBatchCallBuilder}; +use substrate_relay_helper::{ + cli::bridge::{CliBridgeBase, MessagesCliBridge}, + messages_lane::SubstrateMessageLane, + UtilityPalletBatchCallBuilder, +}; /// BridgeHubPolkadot-to-BridgeHubKusama messages bridge. 
pub struct BridgeHubPolkadotToBridgeHubKusamaMessagesCliBridge {} diff --git a/relays/bin-substrate/src/bridges/kusama_polkadot/kusama_headers_to_bridge_hub_polkadot.rs b/relays/bin-substrate/src/bridges/kusama_polkadot/kusama_headers_to_bridge_hub_polkadot.rs index dafb5f568..196a22cd7 100644 --- a/relays/bin-substrate/src/bridges/kusama_polkadot/kusama_headers_to_bridge_hub_polkadot.rs +++ b/relays/bin-substrate/src/bridges/kusama_polkadot/kusama_headers_to_bridge_hub_polkadot.rs @@ -16,7 +16,7 @@ //! Kusama-to-BridgeHubPolkadot headers sync entrypoint. -use crate::cli::bridge::{ +use substrate_relay_helper::cli::bridge::{ CliBridgeBase, RelayToRelayEquivocationDetectionCliBridge, RelayToRelayHeadersCliBridge, }; diff --git a/relays/bin-substrate/src/bridges/kusama_polkadot/kusama_parachains_to_bridge_hub_polkadot.rs b/relays/bin-substrate/src/bridges/kusama_polkadot/kusama_parachains_to_bridge_hub_polkadot.rs index 9b76cdbfa..b39b97008 100644 --- a/relays/bin-substrate/src/bridges/kusama_polkadot/kusama_parachains_to_bridge_hub_polkadot.rs +++ b/relays/bin-substrate/src/bridges/kusama_polkadot/kusama_parachains_to_bridge_hub_polkadot.rs @@ -16,11 +16,11 @@ //! Kusama-to-BridgeHubPolkadot parachains sync entrypoint. -use crate::cli::bridge::{CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge}; use bp_polkadot_core::parachains::{ParaHash, ParaHeadsProof, ParaId}; use relay_substrate_client::{CallOf, HeaderIdOf}; -use substrate_relay_helper::parachains::{ - SubmitParachainHeadsCallBuilder, SubstrateParachainsPipeline, +use substrate_relay_helper::{ + cli::bridge::{CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge}, + parachains::{SubmitParachainHeadsCallBuilder, SubstrateParachainsPipeline}, }; /// Kusama-to-BridgeHubPolkadot parachain sync description. 
diff --git a/relays/bin-substrate/src/bridges/kusama_polkadot/polkadot_headers_to_bridge_hub_kusama.rs b/relays/bin-substrate/src/bridges/kusama_polkadot/polkadot_headers_to_bridge_hub_kusama.rs index 019afab0b..d96326a28 100644 --- a/relays/bin-substrate/src/bridges/kusama_polkadot/polkadot_headers_to_bridge_hub_kusama.rs +++ b/relays/bin-substrate/src/bridges/kusama_polkadot/polkadot_headers_to_bridge_hub_kusama.rs @@ -16,7 +16,7 @@ //! Polkadot-to-KusamaBridgeHub headers sync entrypoint. -use crate::cli::bridge::{ +use substrate_relay_helper::cli::bridge::{ CliBridgeBase, RelayToRelayEquivocationDetectionCliBridge, RelayToRelayHeadersCliBridge, }; diff --git a/relays/bin-substrate/src/bridges/kusama_polkadot/polkadot_parachains_to_bridge_hub_kusama.rs b/relays/bin-substrate/src/bridges/kusama_polkadot/polkadot_parachains_to_bridge_hub_kusama.rs index 439eddaa1..25ce53cb5 100644 --- a/relays/bin-substrate/src/bridges/kusama_polkadot/polkadot_parachains_to_bridge_hub_kusama.rs +++ b/relays/bin-substrate/src/bridges/kusama_polkadot/polkadot_parachains_to_bridge_hub_kusama.rs @@ -16,11 +16,11 @@ //! Polkadot-to-BridgeHubKusama parachains sync entrypoint. -use crate::cli::bridge::{CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge}; use bp_polkadot_core::parachains::{ParaHash, ParaHeadsProof, ParaId}; use relay_substrate_client::{CallOf, HeaderIdOf}; -use substrate_relay_helper::parachains::{ - SubmitParachainHeadsCallBuilder, SubstrateParachainsPipeline, +use substrate_relay_helper::{ + cli::bridge::{CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge}, + parachains::{SubmitParachainHeadsCallBuilder, SubstrateParachainsPipeline}, }; /// Polkadot-to-BridgeHubKusama parachain sync description. 
diff --git a/relays/bin-substrate/src/bridges/polkadot_bulletin/bridge_hub_polkadot_messages_to_polkadot_bulletin.rs b/relays/bin-substrate/src/bridges/polkadot_bulletin/bridge_hub_polkadot_messages_to_polkadot_bulletin.rs index ba177271d..8114d2329 100644 --- a/relays/bin-substrate/src/bridges/polkadot_bulletin/bridge_hub_polkadot_messages_to_polkadot_bulletin.rs +++ b/relays/bin-substrate/src/bridges/polkadot_bulletin/bridge_hub_polkadot_messages_to_polkadot_bulletin.rs @@ -16,10 +16,13 @@ //! BridgeHubPolkadot-to-PolkadotBulletin messages sync entrypoint. -use crate::cli::bridge::{CliBridgeBase, MessagesCliBridge}; use relay_bridge_hub_polkadot_client::BridgeHubPolkadot; use relay_polkadot_bulletin_client::PolkadotBulletin; -use substrate_relay_helper::{messages_lane::SubstrateMessageLane, UtilityPalletBatchCallBuilder}; +use substrate_relay_helper::{ + cli::bridge::{CliBridgeBase, MessagesCliBridge}, + messages_lane::SubstrateMessageLane, + UtilityPalletBatchCallBuilder, +}; /// BridgeHubPolkadot-to-PolkadotBulletin messages bridge. pub struct BridgeHubPolkadotToPolkadotBulletinMessagesCliBridge {} diff --git a/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_bulletin_headers_to_bridge_hub_polkadot.rs b/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_bulletin_headers_to_bridge_hub_polkadot.rs index 7019a6b55..eb63785d3 100644 --- a/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_bulletin_headers_to_bridge_hub_polkadot.rs +++ b/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_bulletin_headers_to_bridge_hub_polkadot.rs @@ -16,11 +16,6 @@ //! PolkadotBulletin-to-BridgeHubPolkadot headers sync entrypoint. 
-use crate::cli::bridge::{ - CliBridgeBase, MessagesCliBridge, RelayToRelayEquivocationDetectionCliBridge, - RelayToRelayHeadersCliBridge, -}; - use async_trait::async_trait; use substrate_relay_helper::{ equivocation::SubstrateEquivocationDetectionPipeline, @@ -28,6 +23,11 @@ use substrate_relay_helper::{ finality_base::{engine::Grandpa as GrandpaFinalityEngine, SubstrateFinalityPipeline}, }; +use substrate_relay_helper::cli::bridge::{ + CliBridgeBase, MessagesCliBridge, RelayToRelayEquivocationDetectionCliBridge, + RelayToRelayHeadersCliBridge, +}; + /// Description of `PolkadotBulletin` -> `PolkadotBridgeHub` finalized headers bridge. #[derive(Clone, Debug)] pub struct PolkadotBulletinFinalityToBridgeHubPolkadot; diff --git a/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_bulletin_messages_to_bridge_hub_polkadot.rs b/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_bulletin_messages_to_bridge_hub_polkadot.rs index 1b5f3e5f6..1c04f8788 100644 --- a/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_bulletin_messages_to_bridge_hub_polkadot.rs +++ b/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_bulletin_messages_to_bridge_hub_polkadot.rs @@ -16,10 +16,13 @@ //! PolkadotBulletin-to-BridgeHubPolkadot messages sync entrypoint. -use crate::cli::bridge::{CliBridgeBase, MessagesCliBridge}; use relay_bridge_hub_polkadot_client::BridgeHubPolkadot; use relay_polkadot_bulletin_client::PolkadotBulletin; -use substrate_relay_helper::{messages_lane::SubstrateMessageLane, UtilityPalletBatchCallBuilder}; +use substrate_relay_helper::{ + cli::bridge::{CliBridgeBase, MessagesCliBridge}, + messages_lane::SubstrateMessageLane, + UtilityPalletBatchCallBuilder, +}; /// PolkadotBulletin-to-BridgeHubPolkadot messages bridge. 
pub struct PolkadotBulletinToBridgeHubPolkadotMessagesCliBridge {} diff --git a/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_headers_to_polkadot_bulletin.rs b/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_headers_to_polkadot_bulletin.rs index 897c2ac88..7996d1613 100644 --- a/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_headers_to_polkadot_bulletin.rs +++ b/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_headers_to_polkadot_bulletin.rs @@ -16,10 +16,6 @@ //! Polkadot-to-PolkadotBulletin headers sync entrypoint. -use crate::cli::bridge::{ - CliBridgeBase, RelayToRelayEquivocationDetectionCliBridge, RelayToRelayHeadersCliBridge, -}; - use async_trait::async_trait; use substrate_relay_helper::{ equivocation::SubstrateEquivocationDetectionPipeline, @@ -27,6 +23,10 @@ use substrate_relay_helper::{ finality_base::{engine::Grandpa as GrandpaFinalityEngine, SubstrateFinalityPipeline}, }; +use substrate_relay_helper::cli::bridge::{ + CliBridgeBase, RelayToRelayEquivocationDetectionCliBridge, RelayToRelayHeadersCliBridge, +}; + /// Description of Polkadot -> `PolkadotBulletin` finalized headers bridge. #[derive(Clone, Debug)] pub struct PolkadotFinalityToPolkadotBulletin; diff --git a/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_parachains_to_polkadot_bulletin.rs b/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_parachains_to_polkadot_bulletin.rs index 674c84adb..0bfce11ba 100644 --- a/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_parachains_to_polkadot_bulletin.rs +++ b/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_parachains_to_polkadot_bulletin.rs @@ -16,7 +16,9 @@ //! Polkadot-to-PolkadotBulletin parachains sync entrypoint. 
-use crate::cli::bridge::{CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge}; +use substrate_relay_helper::cli::bridge::{ + CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge, +}; use bp_polkadot_core::parachains::{ParaHash, ParaHeadsProof, ParaId}; use bp_runtime::Chain; diff --git a/relays/bin-substrate/src/bridges/rococo_bulletin/bridge_hub_rococo_messages_to_rococo_bulletin.rs b/relays/bin-substrate/src/bridges/rococo_bulletin/bridge_hub_rococo_messages_to_rococo_bulletin.rs index a2de83831..b8e95556b 100644 --- a/relays/bin-substrate/src/bridges/rococo_bulletin/bridge_hub_rococo_messages_to_rococo_bulletin.rs +++ b/relays/bin-substrate/src/bridges/rococo_bulletin/bridge_hub_rococo_messages_to_rococo_bulletin.rs @@ -17,9 +17,12 @@ //! BridgeHubRococo-to-RococoBulletin messages sync entrypoint. use super::BridgeHubRococoAsBridgeHubPolkadot; -use crate::cli::bridge::{CliBridgeBase, MessagesCliBridge}; use relay_polkadot_bulletin_client::PolkadotBulletin as RococoBulletin; -use substrate_relay_helper::{messages_lane::SubstrateMessageLane, UtilityPalletBatchCallBuilder}; +use substrate_relay_helper::{ + cli::bridge::{CliBridgeBase, MessagesCliBridge}, + messages_lane::SubstrateMessageLane, + UtilityPalletBatchCallBuilder, +}; /// BridgeHubRococo-to-RococoBulletin messages bridge. pub struct BridgeHubRococoToRococoBulletinMessagesCliBridge {} diff --git a/relays/bin-substrate/src/bridges/rococo_bulletin/mod.rs b/relays/bin-substrate/src/bridges/rococo_bulletin/mod.rs index 2d7b5aec1..738fea8c5 100644 --- a/relays/bin-substrate/src/bridges/rococo_bulletin/mod.rs +++ b/relays/bin-substrate/src/bridges/rococo_bulletin/mod.rs @@ -16,8 +16,6 @@ //! Declaration of all bridges between Rococo Bulletin Chain and Rococo Bridge Hub. 
-use crate::cli::CliChain; - use bp_messages::MessageNonce; use bp_runtime::{ AccountIdOf, BalanceOf, BlockNumberOf, ChainId, HashOf, HasherOf, HeaderOf, NonceOf, @@ -25,7 +23,8 @@ use bp_runtime::{ }; use frame_support::pallet_prelude::Weight; use relay_substrate_client::{ - Error as SubstrateError, SignParam, SimpleRuntimeVersion, UnsignedTransaction, + ChainWithRuntimeVersion, Error as SubstrateError, SignParam, SimpleRuntimeVersion, + UnsignedTransaction, }; use sp_core::storage::StorageKey; use std::time::Duration; @@ -127,7 +126,7 @@ impl relay_substrate_client::ChainWithTransactions for RococoAsPolkadot { } } -impl CliChain for RococoAsPolkadot { +impl ChainWithRuntimeVersion for RococoAsPolkadot { const RUNTIME_VERSION: Option = None; } @@ -232,7 +231,7 @@ impl relay_substrate_client::ChainWithMessages for BridgeHubRococoAsBridgeHubPol relay_bridge_hub_polkadot_client::BridgeHubPolkadot::FROM_CHAIN_MESSAGE_DETAILS_METHOD; } -impl CliChain for BridgeHubRococoAsBridgeHubPolkadot { +impl ChainWithRuntimeVersion for BridgeHubRococoAsBridgeHubPolkadot { const RUNTIME_VERSION: Option = Some(SimpleRuntimeVersion { spec_version: 1_003_000, transaction_version: 3 }); } diff --git a/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_bulletin_headers_to_bridge_hub_rococo.rs b/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_bulletin_headers_to_bridge_hub_rococo.rs index e897cd859..0d54fd210 100644 --- a/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_bulletin_headers_to_bridge_hub_rococo.rs +++ b/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_bulletin_headers_to_bridge_hub_rococo.rs @@ -17,10 +17,6 @@ //! RococoBulletin-to-BridgeHubRococo headers sync entrypoint. 
use super::BridgeHubRococoAsBridgeHubPolkadot; -use crate::cli::bridge::{ - CliBridgeBase, MessagesCliBridge, RelayToRelayEquivocationDetectionCliBridge, - RelayToRelayHeadersCliBridge, -}; use async_trait::async_trait; use substrate_relay_helper::{ @@ -29,6 +25,11 @@ use substrate_relay_helper::{ finality_base::{engine::Grandpa as GrandpaFinalityEngine, SubstrateFinalityPipeline}, }; +use substrate_relay_helper::cli::bridge::{ + CliBridgeBase, MessagesCliBridge, RelayToRelayEquivocationDetectionCliBridge, + RelayToRelayHeadersCliBridge, +}; + /// Description of `RococoBulletin` -> `RococoBridgeHub` finalized headers bridge. #[derive(Clone, Debug)] pub struct RococoBulletinFinalityToBridgeHubRococo; diff --git a/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_bulletin_messages_to_bridge_hub_rococo.rs b/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_bulletin_messages_to_bridge_hub_rococo.rs index 856be9cf6..d192ec038 100644 --- a/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_bulletin_messages_to_bridge_hub_rococo.rs +++ b/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_bulletin_messages_to_bridge_hub_rococo.rs @@ -17,9 +17,12 @@ //! RococoBulletin-to-BridgeHubRococo messages sync entrypoint. use super::BridgeHubRococoAsBridgeHubPolkadot; -use crate::cli::bridge::{CliBridgeBase, MessagesCliBridge}; use relay_polkadot_bulletin_client::PolkadotBulletin as RococoBulletin; -use substrate_relay_helper::{messages_lane::SubstrateMessageLane, UtilityPalletBatchCallBuilder}; +use substrate_relay_helper::{ + cli::bridge::{CliBridgeBase, MessagesCliBridge}, + messages_lane::SubstrateMessageLane, + UtilityPalletBatchCallBuilder, +}; /// RococoBulletin-to-BridgeHubRococo messages bridge. 
pub struct RococoBulletinToBridgeHubRococoMessagesCliBridge {} diff --git a/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_headers_to_rococo_bulletin.rs b/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_headers_to_rococo_bulletin.rs index 8a4b44eec..45c890267 100644 --- a/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_headers_to_rococo_bulletin.rs +++ b/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_headers_to_rococo_bulletin.rs @@ -17,9 +17,6 @@ //! Rococo-to-RococoBulletin headers sync entrypoint. use super::RococoAsPolkadot; -use crate::cli::bridge::{ - CliBridgeBase, RelayToRelayEquivocationDetectionCliBridge, RelayToRelayHeadersCliBridge, -}; use async_trait::async_trait; use substrate_relay_helper::{ @@ -28,6 +25,10 @@ use substrate_relay_helper::{ finality_base::{engine::Grandpa as GrandpaFinalityEngine, SubstrateFinalityPipeline}, }; +use substrate_relay_helper::cli::bridge::{ + CliBridgeBase, RelayToRelayEquivocationDetectionCliBridge, RelayToRelayHeadersCliBridge, +}; + /// Description of Rococo -> `RococoBulletin` finalized headers bridge. #[derive(Clone, Debug)] pub struct RococoFinalityToRococoBulletin; diff --git a/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_parachains_to_rococo_bulletin.rs b/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_parachains_to_rococo_bulletin.rs index ee44bad52..d14a133d2 100644 --- a/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_parachains_to_rococo_bulletin.rs +++ b/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_parachains_to_rococo_bulletin.rs @@ -17,12 +17,12 @@ //! Rococo-to-RococoBulletin parachains sync entrypoint. 
use super::{BridgeHubRococoAsBridgeHubPolkadot, RococoAsPolkadot}; -use crate::cli::bridge::{CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge}; use bp_polkadot_core::parachains::{ParaHash, ParaHeadsProof, ParaId}; use bp_runtime::Chain; use relay_substrate_client::{CallOf, HeaderIdOf}; use substrate_relay_helper::{ + cli::bridge::{CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge}, messages_lane::MessagesRelayLimits, parachains::{SubmitParachainHeadsCallBuilder, SubstrateParachainsPipeline}, }; diff --git a/relays/bin-substrate/src/bridges/rococo_westend/bridge_hub_rococo_messages_to_bridge_hub_westend.rs b/relays/bin-substrate/src/bridges/rococo_westend/bridge_hub_rococo_messages_to_bridge_hub_westend.rs index cbf122a2d..ec6b07d98 100644 --- a/relays/bin-substrate/src/bridges/rococo_westend/bridge_hub_rococo_messages_to_bridge_hub_westend.rs +++ b/relays/bin-substrate/src/bridges/rococo_westend/bridge_hub_rococo_messages_to_bridge_hub_westend.rs @@ -16,10 +16,13 @@ //! BridgeHubRococo-to-BridgeHubWestend messages sync entrypoint. 
-use crate::cli::bridge::{CliBridgeBase, MessagesCliBridge}; use relay_bridge_hub_rococo_client::BridgeHubRococo; use relay_bridge_hub_westend_client::BridgeHubWestend; -use substrate_relay_helper::{messages_lane::SubstrateMessageLane, UtilityPalletBatchCallBuilder}; +use substrate_relay_helper::{ + cli::bridge::{CliBridgeBase, MessagesCliBridge}, + messages_lane::SubstrateMessageLane, + UtilityPalletBatchCallBuilder, +}; pub struct BridgeHubRococoToBridgeHubWestendMessagesCliBridge {} diff --git a/relays/bin-substrate/src/bridges/rococo_westend/bridge_hub_westend_messages_to_bridge_hub_rococo.rs b/relays/bin-substrate/src/bridges/rococo_westend/bridge_hub_westend_messages_to_bridge_hub_rococo.rs index bb823981b..4e978cd83 100644 --- a/relays/bin-substrate/src/bridges/rococo_westend/bridge_hub_westend_messages_to_bridge_hub_rococo.rs +++ b/relays/bin-substrate/src/bridges/rococo_westend/bridge_hub_westend_messages_to_bridge_hub_rococo.rs @@ -16,10 +16,13 @@ //! BridgeHubWestend-to-BridgeHubRococo messages sync entrypoint. -use crate::cli::bridge::{CliBridgeBase, MessagesCliBridge}; use relay_bridge_hub_rococo_client::BridgeHubRococo; use relay_bridge_hub_westend_client::BridgeHubWestend; -use substrate_relay_helper::{messages_lane::SubstrateMessageLane, UtilityPalletBatchCallBuilder}; +use substrate_relay_helper::{ + cli::bridge::{CliBridgeBase, MessagesCliBridge}, + messages_lane::SubstrateMessageLane, + UtilityPalletBatchCallBuilder, +}; pub struct BridgeHubWestendToBridgeHubRococoMessagesCliBridge {} diff --git a/relays/bin-substrate/src/bridges/rococo_westend/rococo_headers_to_bridge_hub_westend.rs b/relays/bin-substrate/src/bridges/rococo_westend/rococo_headers_to_bridge_hub_westend.rs index 6e6661d54..bf30a87bf 100644 --- a/relays/bin-substrate/src/bridges/rococo_westend/rococo_headers_to_bridge_hub_westend.rs +++ b/relays/bin-substrate/src/bridges/rococo_westend/rococo_headers_to_bridge_hub_westend.rs @@ -16,10 +16,6 @@ //! 
Rococo-to-Westend bridge hubs headers sync entrypoint. -use crate::cli::bridge::{ - CliBridgeBase, RelayToRelayEquivocationDetectionCliBridge, RelayToRelayHeadersCliBridge, -}; - use async_trait::async_trait; use substrate_relay_helper::{ equivocation::SubstrateEquivocationDetectionPipeline, @@ -27,6 +23,10 @@ use substrate_relay_helper::{ finality_base::{engine::Grandpa as GrandpaFinalityEngine, SubstrateFinalityPipeline}, }; +use substrate_relay_helper::cli::bridge::{ + CliBridgeBase, RelayToRelayEquivocationDetectionCliBridge, RelayToRelayHeadersCliBridge, +}; + /// Description of Rococo -> Westend finalized headers bridge. #[derive(Clone, Debug)] pub struct RococoFinalityToBridgeHubWestend; diff --git a/relays/bin-substrate/src/bridges/rococo_westend/rococo_parachains_to_bridge_hub_westend.rs b/relays/bin-substrate/src/bridges/rococo_westend/rococo_parachains_to_bridge_hub_westend.rs index 16b646233..31de8c4d1 100644 --- a/relays/bin-substrate/src/bridges/rococo_westend/rococo_parachains_to_bridge_hub_westend.rs +++ b/relays/bin-substrate/src/bridges/rococo_westend/rococo_parachains_to_bridge_hub_westend.rs @@ -16,11 +16,11 @@ //! Westend-to-Rococo parachains sync entrypoint. -use crate::cli::bridge::{CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge}; use bp_polkadot_core::parachains::{ParaHash, ParaHeadsProof, ParaId}; use relay_substrate_client::{CallOf, HeaderIdOf}; -use substrate_relay_helper::parachains::{ - SubmitParachainHeadsCallBuilder, SubstrateParachainsPipeline, +use substrate_relay_helper::{ + cli::bridge::{CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge}, + parachains::{SubmitParachainHeadsCallBuilder, SubstrateParachainsPipeline}, }; /// BridgeHub-to-BridgeHub parachain sync description. 
diff --git a/relays/bin-substrate/src/bridges/rococo_westend/westend_headers_to_bridge_hub_rococo.rs b/relays/bin-substrate/src/bridges/rococo_westend/westend_headers_to_bridge_hub_rococo.rs index 6f4ebb84a..4a1419f06 100644 --- a/relays/bin-substrate/src/bridges/rococo_westend/westend_headers_to_bridge_hub_rococo.rs +++ b/relays/bin-substrate/src/bridges/rococo_westend/westend_headers_to_bridge_hub_rococo.rs @@ -16,10 +16,6 @@ //! Westend-to-Rococo bridge hubs headers sync entrypoint. -use crate::cli::bridge::{ - CliBridgeBase, RelayToRelayEquivocationDetectionCliBridge, RelayToRelayHeadersCliBridge, -}; - use async_trait::async_trait; use substrate_relay_helper::{ equivocation::SubstrateEquivocationDetectionPipeline, @@ -27,6 +23,10 @@ use substrate_relay_helper::{ finality_base::{engine::Grandpa as GrandpaFinalityEngine, SubstrateFinalityPipeline}, }; +use substrate_relay_helper::cli::bridge::{ + CliBridgeBase, RelayToRelayEquivocationDetectionCliBridge, RelayToRelayHeadersCliBridge, +}; + /// Description of Westend -> Rococo finalized headers bridge. #[derive(Clone, Debug)] pub struct WestendFinalityToBridgeHubRococo; diff --git a/relays/bin-substrate/src/bridges/rococo_westend/westend_parachains_to_bridge_hub_rococo.rs b/relays/bin-substrate/src/bridges/rococo_westend/westend_parachains_to_bridge_hub_rococo.rs index dac915dc3..fc6f65328 100644 --- a/relays/bin-substrate/src/bridges/rococo_westend/westend_parachains_to_bridge_hub_rococo.rs +++ b/relays/bin-substrate/src/bridges/rococo_westend/westend_parachains_to_bridge_hub_rococo.rs @@ -16,11 +16,11 @@ //! Rococo-to-Westend parachains sync entrypoint. 
-use crate::cli::bridge::{CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge}; use bp_polkadot_core::parachains::{ParaHash, ParaHeadsProof, ParaId}; use relay_substrate_client::{CallOf, HeaderIdOf}; -use substrate_relay_helper::parachains::{ - SubmitParachainHeadsCallBuilder, SubstrateParachainsPipeline, +use substrate_relay_helper::{ + cli::bridge::{CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge}, + parachains::{SubmitParachainHeadsCallBuilder, SubstrateParachainsPipeline}, }; /// BridgeHub-to-BridgeHub parachain sync description. diff --git a/relays/bin-substrate/src/chains/kusama.rs b/relays/bin-substrate/src/chains/kusama.rs deleted file mode 100644 index 80ffdfed0..000000000 --- a/relays/bin-substrate/src/chains/kusama.rs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Kusama + Kusama parachains specification for CLI. 
- -use crate::cli::CliChain; -use relay_bridge_hub_kusama_client::BridgeHubKusama; -use relay_kusama_client::Kusama; -use relay_substrate_client::SimpleRuntimeVersion; - -impl CliChain for Kusama { - const RUNTIME_VERSION: Option = - Some(SimpleRuntimeVersion { spec_version: 1_001_002, transaction_version: 25 }); -} - -impl CliChain for BridgeHubKusama { - const RUNTIME_VERSION: Option = - Some(SimpleRuntimeVersion { spec_version: 1_001_000, transaction_version: 4 }); -} diff --git a/relays/bin-substrate/src/chains/mod.rs b/relays/bin-substrate/src/chains/mod.rs deleted file mode 100644 index ab15a9e67..000000000 --- a/relays/bin-substrate/src/chains/mod.rs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Chain-specific relayer configuration. - -mod kusama; -mod polkadot; -mod polkadot_bulletin; -mod rococo; -mod westend; diff --git a/relays/bin-substrate/src/chains/polkadot.rs b/relays/bin-substrate/src/chains/polkadot.rs deleted file mode 100644 index 1f1c60791..000000000 --- a/relays/bin-substrate/src/chains/polkadot.rs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Polkadot + Polkadot parachains specification for CLI. - -use crate::cli::CliChain; -use relay_bridge_hub_polkadot_client::BridgeHubPolkadot; -use relay_polkadot_client::Polkadot; -use relay_substrate_client::SimpleRuntimeVersion; - -impl CliChain for Polkadot { - const RUNTIME_VERSION: Option = - Some(SimpleRuntimeVersion { spec_version: 1_001_002, transaction_version: 25 }); -} - -impl CliChain for BridgeHubPolkadot { - const RUNTIME_VERSION: Option = - Some(SimpleRuntimeVersion { spec_version: 1_001_000, transaction_version: 3 }); -} diff --git a/relays/bin-substrate/src/chains/polkadot_bulletin.rs b/relays/bin-substrate/src/chains/polkadot_bulletin.rs deleted file mode 100644 index ee7edbd9f..000000000 --- a/relays/bin-substrate/src/chains/polkadot_bulletin.rs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2022 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Polkadot + Polkadot parachains specification for CLI. - -use crate::cli::CliChain; -use relay_polkadot_bulletin_client::PolkadotBulletin; -use relay_substrate_client::SimpleRuntimeVersion; - -impl CliChain for PolkadotBulletin { - const RUNTIME_VERSION: Option = - Some(SimpleRuntimeVersion { spec_version: 100, transaction_version: 1 }); -} diff --git a/relays/bin-substrate/src/chains/rococo.rs b/relays/bin-substrate/src/chains/rococo.rs deleted file mode 100644 index 0640447fd..000000000 --- a/relays/bin-substrate/src/chains/rococo.rs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Rococo + Rococo parachains specification for CLI. 
- -use crate::cli::CliChain; -use relay_bridge_hub_rococo_client::BridgeHubRococo; -use relay_rococo_client::Rococo; -use relay_substrate_client::SimpleRuntimeVersion; - -impl CliChain for Rococo { - const RUNTIME_VERSION: Option = - Some(SimpleRuntimeVersion { spec_version: 1_008_000, transaction_version: 24 }); -} - -impl CliChain for BridgeHubRococo { - const RUNTIME_VERSION: Option = - Some(SimpleRuntimeVersion { spec_version: 1_008_000, transaction_version: 4 }); -} diff --git a/relays/bin-substrate/src/chains/westend.rs b/relays/bin-substrate/src/chains/westend.rs deleted file mode 100644 index 41f5fc4e9..000000000 --- a/relays/bin-substrate/src/chains/westend.rs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Westend chain specification for CLI. 
- -use crate::cli::CliChain; -use relay_bridge_hub_westend_client::BridgeHubWestend; -use relay_substrate_client::SimpleRuntimeVersion; -use relay_westend_client::Westend; - -impl CliChain for Westend { - const RUNTIME_VERSION: Option = - Some(SimpleRuntimeVersion { spec_version: 1_008_000, transaction_version: 24 }); -} - -impl CliChain for BridgeHubWestend { - const RUNTIME_VERSION: Option = - Some(SimpleRuntimeVersion { spec_version: 1_008_000, transaction_version: 4 }); -} diff --git a/relays/bin-substrate/src/cli/chain_schema.rs b/relays/bin-substrate/src/cli/chain_schema.rs index 65559397a..4422332a5 100644 --- a/relays/bin-substrate/src/cli/chain_schema.rs +++ b/relays/bin-substrate/src/cli/chain_schema.rs @@ -12,248 +12,12 @@ // GNU General Public License for more details. // You should have received a copy of the GNU General Public License - // along with Parity Bridges Common. If not, see . -use relay_substrate_client::{AccountKeyPairOf, ChainWithTransactions}; -use structopt::StructOpt; -use strum::{EnumString, VariantNames}; - -use crate::cli::CliChain; -pub use relay_substrate_client::{ChainRuntimeVersion, SimpleRuntimeVersion}; -use substrate_relay_helper::TransactionParams; - -#[doc = "Runtime version params."] -#[derive(StructOpt, Debug, PartialEq, Eq, Clone, Copy, EnumString, VariantNames)] -pub enum RuntimeVersionType { - /// Auto query version from chain - Auto, - /// Custom `spec_version` and `transaction_version` - Custom, - /// Read version from bundle dependencies directly. - Bundle, -} - -/// Create chain-specific set of runtime version parameters. -#[macro_export] -macro_rules! declare_chain_runtime_version_params_cli_schema { - ($chain:ident, $chain_prefix:ident) => { - bp_runtime::paste::item! 
{ - #[doc = $chain " runtime version params."] - #[derive(StructOpt, Debug, PartialEq, Eq, Clone, Copy)] - pub struct [<$chain RuntimeVersionParams>] { - #[doc = "The type of runtime version for chain " $chain] - #[structopt(long, default_value = "Bundle")] - pub [<$chain_prefix _version_mode>]: RuntimeVersionType, - #[doc = "The custom sepc_version for chain " $chain] - #[structopt(long)] - pub [<$chain_prefix _spec_version>]: Option, - #[doc = "The custom transaction_version for chain " $chain] - #[structopt(long)] - pub [<$chain_prefix _transaction_version>]: Option, - } - - impl [<$chain RuntimeVersionParams>] { - /// Converts self into `ChainRuntimeVersion`. - pub fn into_runtime_version( - self, - bundle_runtime_version: Option, - ) -> anyhow::Result { - Ok(match self.[<$chain_prefix _version_mode>] { - RuntimeVersionType::Auto => ChainRuntimeVersion::Auto, - RuntimeVersionType::Custom => { - let custom_spec_version = self.[<$chain_prefix _spec_version>] - .ok_or_else(|| anyhow::Error::msg(format!("The {}-spec-version is required when choose custom mode", stringify!($chain_prefix))))?; - let custom_transaction_version = self.[<$chain_prefix _transaction_version>] - .ok_or_else(|| anyhow::Error::msg(format!("The {}-transaction-version is required when choose custom mode", stringify!($chain_prefix))))?; - ChainRuntimeVersion::Custom( - SimpleRuntimeVersion { - spec_version: custom_spec_version, - transaction_version: custom_transaction_version - } - ) - }, - RuntimeVersionType::Bundle => match bundle_runtime_version { - Some(runtime_version) => ChainRuntimeVersion::Custom(runtime_version), - None => { - return Err(anyhow::format_err!("Cannot use bundled runtime version of {}: it is not known to the relay", stringify!($chain_prefix))); - } - }, - }) - } - } - } - }; -} - -/// Create chain-specific set of runtime version parameters. -#[macro_export] -macro_rules! 
declare_chain_connection_params_cli_schema { - ($chain:ident, $chain_prefix:ident) => { - bp_runtime::paste::item! { - #[doc = $chain " connection params."] - #[derive(StructOpt, Debug, PartialEq, Eq, Clone)] - pub struct [<$chain ConnectionParams>] { - #[doc = "Connect to " $chain " node at given host."] - #[structopt(long, default_value = "127.0.0.1")] - pub [<$chain_prefix _host>]: String, - #[doc = "Connect to " $chain " node websocket server at given port."] - #[structopt(long, default_value = "9944")] - pub [<$chain_prefix _port>]: u16, - #[doc = "Use secure websocket connection."] - #[structopt(long)] - pub [<$chain_prefix _secure>]: bool, - #[doc = "Custom runtime version"] - #[structopt(flatten)] - pub [<$chain_prefix _runtime_version>]: [<$chain RuntimeVersionParams>], - } - - impl [<$chain ConnectionParams>] { - /// Convert connection params into Substrate client. - #[allow(dead_code)] - pub async fn into_client( - self, - ) -> anyhow::Result> { - let chain_runtime_version = self - .[<$chain_prefix _runtime_version>] - .into_runtime_version(Chain::RUNTIME_VERSION)?; - Ok(relay_substrate_client::Client::new(relay_substrate_client::ConnectionParams { - host: self.[<$chain_prefix _host>], - port: self.[<$chain_prefix _port>], - secure: self.[<$chain_prefix _secure>], - chain_runtime_version, - }) - .await - ) - } - } - } - }; -} - -/// Create chain-specific set of signing parameters. -#[macro_export] -macro_rules! declare_chain_signing_params_cli_schema { - ($chain:ident, $chain_prefix:ident) => { - bp_runtime::paste::item! 
{ - #[doc = $chain " signing params."] - #[derive(StructOpt, Debug, PartialEq, Eq, Clone)] - pub struct [<$chain SigningParams>] { - #[doc = "The SURI of secret key to use when transactions are submitted to the " $chain " node."] - #[structopt(long)] - pub [<$chain_prefix _signer>]: Option, - #[doc = "The password for the SURI of secret key to use when transactions are submitted to the " $chain " node."] - #[structopt(long)] - pub [<$chain_prefix _signer_password>]: Option, - - #[doc = "Path to the file, that contains SURI of secret key to use when transactions are submitted to the " $chain " node. Can be overridden with " $chain_prefix "_signer option."] - #[structopt(long)] - pub [<$chain_prefix _signer_file>]: Option, - #[doc = "Path to the file, that password for the SURI of secret key to use when transactions are submitted to the " $chain " node. Can be overridden with " $chain_prefix "_signer_password option."] - #[structopt(long)] - pub [<$chain_prefix _signer_password_file>]: Option, - - #[doc = "Transactions mortality period, in blocks. MUST be a power of two in [4; 65536] range. MAY NOT be larger than `BlockHashCount` parameter of the chain system module."] - #[structopt(long)] - pub [<$chain_prefix _transactions_mortality>]: Option, - } - - impl [<$chain SigningParams>] { - /// Return transactions mortality. - #[allow(dead_code)] - pub fn transactions_mortality(&self) -> anyhow::Result> { - self.[<$chain_prefix _transactions_mortality>] - .map(|transactions_mortality| { - if !(4..=65536).contains(&transactions_mortality) - || !transactions_mortality.is_power_of_two() - { - Err(anyhow::format_err!( - "Transactions mortality {} is not a power of two in a [4; 65536] range", - transactions_mortality, - )) - } else { - Ok(transactions_mortality) - } - }) - .transpose() - } - - /// Parse signing params into chain-specific KeyPair. 
- #[allow(dead_code)] - pub fn to_keypair(&self) -> anyhow::Result> { - let suri = match (self.[<$chain_prefix _signer>].as_ref(), self.[<$chain_prefix _signer_file>].as_ref()) { - (Some(suri), _) => suri.to_owned(), - (None, Some(suri_file)) => std::fs::read_to_string(suri_file) - .map_err(|err| anyhow::format_err!( - "Failed to read SURI from file {:?}: {}", - suri_file, - err, - ))?, - (None, None) => return Err(anyhow::format_err!( - "One of options must be specified: '{}' or '{}'", - stringify!([<$chain_prefix _signer>]), - stringify!([<$chain_prefix _signer_file>]), - )), - }; - - let suri_password = match ( - self.[<$chain_prefix _signer_password>].as_ref(), - self.[<$chain_prefix _signer_password_file>].as_ref(), - ) { - (Some(suri_password), _) => Some(suri_password.to_owned()), - (None, Some(suri_password_file)) => std::fs::read_to_string(suri_password_file) - .map(Some) - .map_err(|err| anyhow::format_err!( - "Failed to read SURI password from file {:?}: {}", - suri_password_file, - err, - ))?, - _ => None, - }; - - use sp_core::crypto::Pair; - - AccountKeyPairOf::::from_string( - &suri, - suri_password.as_deref() - ).map_err(|e| anyhow::format_err!("{:?}", e)) - } - - /// Return transaction parameters. - #[allow(dead_code)] - pub fn transaction_params( - &self, - ) -> anyhow::Result>> { - Ok(TransactionParams { - mortality: self.transactions_mortality()?, - signer: self.to_keypair::()?, - }) - } - } - } - }; -} - -/// Create chain-specific set of configuration objects: connection parameters, -/// signing parameters and bridge initialization parameters. -#[macro_export] -macro_rules! 
declare_chain_cli_schema { - ($chain:ident, $chain_prefix:ident) => { - $crate::declare_chain_runtime_version_params_cli_schema!($chain, $chain_prefix); - $crate::declare_chain_connection_params_cli_schema!($chain, $chain_prefix); - $crate::declare_chain_signing_params_cli_schema!($chain, $chain_prefix); - }; -} - -declare_chain_cli_schema!(Source, source); -declare_chain_cli_schema!(Target, target); -declare_chain_cli_schema!(Relaychain, relaychain); -declare_chain_cli_schema!(Parachain, parachain); - #[cfg(test)] mod tests { - use super::*; use sp_core::Pair; + use substrate_relay_helper::cli::chain_schema::TargetSigningParams; #[test] fn reads_suri_from_file() { diff --git a/relays/bin-substrate/src/cli/detect_equivocations.rs b/relays/bin-substrate/src/cli/detect_equivocations.rs index a8f1ed35f..7717b5015 100644 --- a/relays/bin-substrate/src/cli/detect_equivocations.rs +++ b/relays/bin-substrate/src/cli/detect_equivocations.rs @@ -14,25 +14,23 @@ // You should have received a copy of the GNU General Public License // along with Parity Bridges Common. If not, see . 
-use crate::{ - bridges::{ - kusama_polkadot::{ - kusama_headers_to_bridge_hub_polkadot::KusamaToBridgeHubPolkadotCliBridge, - polkadot_headers_to_bridge_hub_kusama::PolkadotToBridgeHubKusamaCliBridge, - }, - rococo_westend::{ - rococo_headers_to_bridge_hub_westend::RococoToBridgeHubWestendCliBridge, - westend_headers_to_bridge_hub_rococo::WestendToBridgeHubRococoCliBridge, - }, +use crate::bridges::{ + kusama_polkadot::{ + kusama_headers_to_bridge_hub_polkadot::KusamaToBridgeHubPolkadotCliBridge, + polkadot_headers_to_bridge_hub_kusama::PolkadotToBridgeHubKusamaCliBridge, + }, + rococo_westend::{ + rococo_headers_to_bridge_hub_westend::RococoToBridgeHubWestendCliBridge, + westend_headers_to_bridge_hub_rococo::WestendToBridgeHubRococoCliBridge, }, - cli::{bridge::*, chain_schema::*, PrometheusParams}, }; -use async_trait::async_trait; -use relay_substrate_client::ChainWithTransactions; use structopt::StructOpt; use strum::{EnumString, VariantNames}; -use substrate_relay_helper::{equivocation, equivocation::SubstrateEquivocationDetectionPipeline}; + +use substrate_relay_helper::cli::detect_equivocations::{ + DetectEquivocationsParams, EquivocationsDetector, +}; /// Start equivocation detection loop. 
#[derive(StructOpt)] @@ -40,13 +38,7 @@ pub struct DetectEquivocations { #[structopt(possible_values = DetectEquivocationsBridge::VARIANTS, case_insensitive = true)] bridge: DetectEquivocationsBridge, #[structopt(flatten)] - source: SourceConnectionParams, - #[structopt(flatten)] - source_sign: SourceSigningParams, - #[structopt(flatten)] - target: TargetConnectionParams, - #[structopt(flatten)] - prometheus_params: PrometheusParams, + params: DetectEquivocationsParams, } #[derive(Debug, EnumString, VariantNames)] @@ -59,29 +51,6 @@ pub enum DetectEquivocationsBridge { WestendToBridgeHubRococo, } -#[async_trait] -trait EquivocationsDetector: RelayToRelayEquivocationDetectionCliBridge -where - Self::Source: ChainWithTransactions, -{ - async fn start(data: DetectEquivocations) -> anyhow::Result<()> { - let source_client = data.source.into_client::().await?; - Self::Equivocation::start_relay_guards( - &source_client, - source_client.can_start_version_guard(), - ) - .await?; - - equivocation::run::( - source_client, - data.target.into_client::().await?, - data.source_sign.transaction_params::()?, - data.prometheus_params.into_metrics_params()?, - ) - .await - } -} - impl EquivocationsDetector for KusamaToBridgeHubPolkadotCliBridge {} impl EquivocationsDetector for PolkadotToBridgeHubKusamaCliBridge {} impl EquivocationsDetector for RococoToBridgeHubWestendCliBridge {} @@ -92,13 +61,13 @@ impl DetectEquivocations { pub async fn run(self) -> anyhow::Result<()> { match self.bridge { DetectEquivocationsBridge::KusamaToBridgeHubPolkadot => - KusamaToBridgeHubPolkadotCliBridge::start(self), + KusamaToBridgeHubPolkadotCliBridge::start(self.params), DetectEquivocationsBridge::PolkadotToBridgeHubKusama => - PolkadotToBridgeHubKusamaCliBridge::start(self), + PolkadotToBridgeHubKusamaCliBridge::start(self.params), DetectEquivocationsBridge::RococoToBridgeHubWestend => - RococoToBridgeHubWestendCliBridge::start(self), + RococoToBridgeHubWestendCliBridge::start(self.params), 
DetectEquivocationsBridge::WestendToBridgeHubRococo => - WestendToBridgeHubRococoCliBridge::start(self), + WestendToBridgeHubRococoCliBridge::start(self.params), } .await } diff --git a/relays/bin-substrate/src/cli/init_bridge.rs b/relays/bin-substrate/src/cli/init_bridge.rs index 0b2f9aa7e..441487b35 100644 --- a/relays/bin-substrate/src/cli/init_bridge.rs +++ b/relays/bin-substrate/src/cli/init_bridge.rs @@ -14,107 +14,31 @@ // You should have received a copy of the GNU General Public License // along with Parity Bridges Common. If not, see . -use async_trait::async_trait; -use codec::Encode; - -use crate::{ - bridges::{ - kusama_polkadot::{ - kusama_headers_to_bridge_hub_polkadot::KusamaToBridgeHubPolkadotCliBridge, - polkadot_headers_to_bridge_hub_kusama::PolkadotToBridgeHubKusamaCliBridge, - }, - polkadot_bulletin::{ - polkadot_bulletin_headers_to_bridge_hub_polkadot::PolkadotBulletinToBridgeHubPolkadotCliBridge, - polkadot_headers_to_polkadot_bulletin::PolkadotToPolkadotBulletinCliBridge, - }, - rococo_bulletin::{ - rococo_bulletin_headers_to_bridge_hub_rococo::RococoBulletinToBridgeHubRococoCliBridge, - rococo_headers_to_rococo_bulletin::RococoToRococoBulletinCliBridge, - }, - rococo_westend::{ - rococo_headers_to_bridge_hub_westend::RococoToBridgeHubWestendCliBridge, - westend_headers_to_bridge_hub_rococo::WestendToBridgeHubRococoCliBridge, - }, +use crate::bridges::{ + kusama_polkadot::{ + kusama_headers_to_bridge_hub_polkadot::KusamaToBridgeHubPolkadotCliBridge, + polkadot_headers_to_bridge_hub_kusama::PolkadotToBridgeHubKusamaCliBridge, + }, + polkadot_bulletin::{ + polkadot_bulletin_headers_to_bridge_hub_polkadot::PolkadotBulletinToBridgeHubPolkadotCliBridge, + polkadot_headers_to_polkadot_bulletin::PolkadotToPolkadotBulletinCliBridge, + }, + rococo_bulletin::{ + rococo_bulletin_headers_to_bridge_hub_rococo::RococoBulletinToBridgeHubRococoCliBridge, + rococo_headers_to_rococo_bulletin::RococoToRococoBulletinCliBridge, + }, + rococo_westend::{ + 
rococo_headers_to_bridge_hub_westend::RococoToBridgeHubWestendCliBridge, + westend_headers_to_bridge_hub_rococo::WestendToBridgeHubRococoCliBridge, }, - cli::{bridge::CliBridgeBase, chain_schema::*}, }; -use bp_runtime::Chain as ChainBase; -use relay_substrate_client::{AccountKeyPairOf, Chain, UnsignedTransaction}; -use sp_core::Pair; +use relay_substrate_client::Chain; use structopt::StructOpt; use strum::{EnumString, VariantNames}; -use substrate_relay_helper::finality_base::engine::{Engine, Grandpa as GrandpaFinalityEngine}; - -/// Initialize bridge pallet. -#[derive(StructOpt)] -pub struct InitBridge { - /// A bridge instance to initialize. - #[structopt(possible_values = InitBridgeName::VARIANTS, case_insensitive = true)] - bridge: InitBridgeName, - #[structopt(flatten)] - source: SourceConnectionParams, - #[structopt(flatten)] - target: TargetConnectionParams, - #[structopt(flatten)] - target_sign: TargetSigningParams, - /// Generates all required data, but does not submit extrinsic - #[structopt(long)] - dry_run: bool, -} - -#[derive(Debug, EnumString, VariantNames)] -#[strum(serialize_all = "kebab_case")] -/// Bridge to initialize. -pub enum InitBridgeName { - KusamaToBridgeHubPolkadot, - PolkadotToBridgeHubKusama, - PolkadotToPolkadotBulletin, - PolkadotBulletinToBridgeHubPolkadot, - RococoToRococoBulletin, - RococoBulletinToBridgeHubRococo, - RococoToBridgeHubWestend, - WestendToBridgeHubRococo, -} - -#[async_trait] -trait BridgeInitializer: CliBridgeBase -where - ::AccountId: From< as Pair>::Public>, -{ - type Engine: Engine; - - /// Get the encoded call to init the bridge. - fn encode_init_bridge( - init_data: >::InitializationData, - ) -> ::Call; - - /// Initialize the bridge. 
- async fn init_bridge(data: InitBridge) -> anyhow::Result<()> { - let source_client = data.source.into_client::().await?; - let target_client = data.target.into_client::().await?; - let target_sign = data.target_sign.to_keypair::()?; - let dry_run = data.dry_run; - - substrate_relay_helper::finality::initialize::initialize::( - source_client, - target_client.clone(), - target_sign, - move |transaction_nonce, initialization_data| { - let call = Self::encode_init_bridge(initialization_data); - log::info!( - target: "bridge", - "Initialize bridge call encoded as hex string: {:?}", - format!("0x{}", hex::encode(call.encode())) - ); - Ok(UnsignedTransaction::new(call.into(), transaction_nonce)) - }, - dry_run, - ) - .await; - - Ok(()) - } -} +use substrate_relay_helper::{ + cli::init_bridge::{BridgeInitializer, InitBridgeParams}, + finality_base::engine::{Engine, Grandpa as GrandpaFinalityEngine}, +}; impl BridgeInitializer for RococoToBridgeHubWestendCliBridge { type Engine = GrandpaFinalityEngine; @@ -225,26 +149,50 @@ impl BridgeInitializer for RococoBulletinToBridgeHubRococoCliBridge { } } +/// Initialize bridge pallet. +#[derive(StructOpt)] +pub struct InitBridge { + /// A bridge instance to initialize. + #[structopt(possible_values = InitBridgeName::VARIANTS, case_insensitive = true)] + bridge: InitBridgeName, + #[structopt(flatten)] + params: InitBridgeParams, +} + +#[derive(Debug, EnumString, VariantNames)] +#[strum(serialize_all = "kebab_case")] +/// Bridge to initialize. +pub enum InitBridgeName { + KusamaToBridgeHubPolkadot, + PolkadotToBridgeHubKusama, + PolkadotToPolkadotBulletin, + PolkadotBulletinToBridgeHubPolkadot, + RococoToRococoBulletin, + RococoBulletinToBridgeHubRococo, + RococoToBridgeHubWestend, + WestendToBridgeHubRococo, +} + impl InitBridge { /// Run the command. 
pub async fn run(self) -> anyhow::Result<()> { match self.bridge { InitBridgeName::KusamaToBridgeHubPolkadot => - KusamaToBridgeHubPolkadotCliBridge::init_bridge(self), + KusamaToBridgeHubPolkadotCliBridge::init_bridge(self.params), InitBridgeName::PolkadotToBridgeHubKusama => - PolkadotToBridgeHubKusamaCliBridge::init_bridge(self), + PolkadotToBridgeHubKusamaCliBridge::init_bridge(self.params), InitBridgeName::PolkadotToPolkadotBulletin => - PolkadotToPolkadotBulletinCliBridge::init_bridge(self), + PolkadotToPolkadotBulletinCliBridge::init_bridge(self.params), InitBridgeName::PolkadotBulletinToBridgeHubPolkadot => - PolkadotBulletinToBridgeHubPolkadotCliBridge::init_bridge(self), + PolkadotBulletinToBridgeHubPolkadotCliBridge::init_bridge(self.params), InitBridgeName::RococoToRococoBulletin => - RococoToRococoBulletinCliBridge::init_bridge(self), + RococoToRococoBulletinCliBridge::init_bridge(self.params), InitBridgeName::RococoBulletinToBridgeHubRococo => - RococoBulletinToBridgeHubRococoCliBridge::init_bridge(self), + RococoBulletinToBridgeHubRococoCliBridge::init_bridge(self.params), InitBridgeName::RococoToBridgeHubWestend => - RococoToBridgeHubWestendCliBridge::init_bridge(self), + RococoToBridgeHubWestendCliBridge::init_bridge(self.params), InitBridgeName::WestendToBridgeHubRococo => - WestendToBridgeHubRococoCliBridge::init_bridge(self), + WestendToBridgeHubRococoCliBridge::init_bridge(self.params), } .await } diff --git a/relays/bin-substrate/src/cli/mod.rs b/relays/bin-substrate/src/cli/mod.rs index 6d799023c..504058894 100644 --- a/relays/bin-substrate/src/cli/mod.rs +++ b/relays/bin-substrate/src/cli/mod.rs @@ -17,18 +17,10 @@ //! Deal with CLI args of substrate-to-substrate relay. 
use async_std::prelude::*; -use codec::{Decode, Encode}; use futures::{select, FutureExt}; -use rbtag::BuildInfo; use signal_hook::consts::*; use signal_hook_async_std::Signals; -use structopt::{clap::arg_enum, StructOpt}; -use strum::{EnumString, VariantNames}; - -use bp_messages::LaneId; -use relay_substrate_client::SimpleRuntimeVersion; - -pub(crate) mod bridge; +use structopt::StructOpt; mod chain_schema; mod detect_equivocations; @@ -50,11 +42,17 @@ pub fn parse_args() -> Command { #[derive(StructOpt)] #[structopt(about = "Substrate-to-Substrate relay")] pub enum Command { + /// Initialize on-chain bridge pallet with current header data. + /// + /// Sends initialization transaction to bootstrap the bridge with current finalized block data. + InitBridge(init_bridge::InitBridge), /// Start headers relay between two chains. /// /// The on-chain bridge component should have been already initialized with /// `init-bridge` sub-command. RelayHeaders(relay_headers::RelayHeaders), + /// Relay parachain heads. + RelayParachains(relay_parachains::RelayParachains), /// Start messages relay between two chains. /// /// Ties up to `Messages` pallets on both chains and starts relaying messages. @@ -67,12 +65,6 @@ pub enum Command { /// the message relays - i.e. when there are messages or confirmations that needs to be /// relayed between chains. RelayHeadersAndMessages(Box), - /// Initialize on-chain bridge pallet with current header data. - /// - /// Sends initialization transaction to bootstrap the bridge with current finalized block data. - InitBridge(init_bridge::InitBridge), - /// Relay parachain heads. - RelayParachains(relay_parachains::RelayParachains), /// Detect and report equivocations. 
/// /// Parses the source chain headers that were synchronized with the target chain looking for @@ -86,10 +78,10 @@ impl Command { use relay_utils::initialize::{initialize_logger, initialize_relay}; match self { + Self::InitBridge(_) | Self::RelayHeaders(_) | Self::RelayMessages(_) | - Self::RelayHeadersAndMessages(_) | - Self::InitBridge(_) => { + Self::RelayHeadersAndMessages(_) => { initialize_relay(); }, _ => { @@ -101,11 +93,11 @@ impl Command { /// Run the command. async fn do_run(self) -> anyhow::Result<()> { match self { + Self::InitBridge(arg) => arg.run().await?, Self::RelayHeaders(arg) => arg.run().await?, + Self::RelayParachains(arg) => arg.run().await?, Self::RelayMessages(arg) => arg.run().await?, Self::RelayHeadersAndMessages(arg) => arg.run().await?, - Self::InitBridge(arg) => arg.run().await?, - Self::RelayParachains(arg) => arg.run().await?, Self::DetectEquivocations(arg) => arg.run().await?, } Ok(()) @@ -137,181 +129,3 @@ impl Command { } } } - -arg_enum! { - #[derive(Debug)] - /// The origin to use when dispatching the message on the target chain. - /// - /// - `Target` uses account existing on the target chain (requires target private key). - /// - `Origin` uses account derived from the source-chain account. - pub enum Origins { - Target, - Source, - } -} - -/// Bridge-supported network definition. -/// -/// Used to abstract away CLI commands. -pub trait CliChain: relay_substrate_client::Chain { - /// Current version of the chain runtime, known to relay. - /// - /// can be `None` if relay is not going to submit transactions to that chain. - const RUNTIME_VERSION: Option; -} - -/// Lane id. 
-#[derive(Debug, Clone, PartialEq, Eq)] -pub struct HexLaneId(pub [u8; 4]); - -impl From for LaneId { - fn from(lane_id: HexLaneId) -> LaneId { - LaneId(lane_id.0) - } -} - -impl std::str::FromStr for HexLaneId { - type Err = hex::FromHexError; - - fn from_str(s: &str) -> Result { - let mut lane_id = [0u8; 4]; - hex::decode_to_slice(s, &mut lane_id)?; - Ok(HexLaneId(lane_id)) - } -} - -/// Nicer formatting for raw bytes vectors. -#[derive(Default, Encode, Decode, PartialEq, Eq)] -pub struct HexBytes(pub Vec); - -impl std::str::FromStr for HexBytes { - type Err = hex::FromHexError; - - fn from_str(s: &str) -> Result { - Ok(Self(hex::decode(s)?)) - } -} - -impl std::fmt::Debug for HexBytes { - fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { - write!(fmt, "0x{self}") - } -} - -impl std::fmt::Display for HexBytes { - fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { - write!(fmt, "{}", hex::encode(&self.0)) - } -} - -/// Prometheus metrics params. -#[derive(Clone, Debug, PartialEq, StructOpt)] -pub struct PrometheusParams { - /// Do not expose a Prometheus metric endpoint. - #[structopt(long)] - pub no_prometheus: bool, - /// Expose Prometheus endpoint at given interface. - #[structopt(long, default_value = "127.0.0.1")] - pub prometheus_host: String, - /// Expose Prometheus endpoint at given port. - #[structopt(long, default_value = "9616")] - pub prometheus_port: u16, -} - -/// Struct to get git commit info and build time. -#[derive(BuildInfo)] -struct SubstrateRelayBuildInfo; - -impl SubstrateRelayBuildInfo { - /// Get git commit in form ``. 
- pub fn get_git_commit() -> String { - // on gitlab we use images without git installed, so we can't use `rbtag` there - // locally we don't have `CI_*` env variables, so we can't rely on them - // => we are using `CI_*` env variables or else `rbtag` - let maybe_sha_from_ci = option_env!("CI_COMMIT_SHORT_SHA"); - maybe_sha_from_ci - .map(|short_sha| { - // we assume that on CI the copy is always clean - format!("{short_sha}-clean") - }) - .unwrap_or_else(|| SubstrateRelayBuildInfo.get_build_commit().into()) - } -} - -impl PrometheusParams { - /// Tries to convert CLI metrics params into metrics params, used by the relay. - pub fn into_metrics_params(self) -> anyhow::Result { - let metrics_address = if !self.no_prometheus { - Some(relay_utils::metrics::MetricsAddress { - host: self.prometheus_host, - port: self.prometheus_port, - }) - } else { - None - }; - - let relay_version = option_env!("CARGO_PKG_VERSION").unwrap_or("unknown"); - let relay_commit = SubstrateRelayBuildInfo::get_git_commit(); - relay_utils::metrics::MetricsParams::new( - metrics_address, - relay_version.into(), - relay_commit, - ) - .map_err(|e| anyhow::format_err!("{:?}", e)) - } -} - -/// Either explicit or maximal allowed value. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum ExplicitOrMaximal { - /// User has explicitly specified argument value. - Explicit(V), - /// Maximal allowed value for this argument. - Maximal, -} - -impl std::str::FromStr for ExplicitOrMaximal -where - V::Err: std::fmt::Debug, -{ - type Err = String; - - fn from_str(s: &str) -> Result { - if s.to_lowercase() == "max" { - return Ok(ExplicitOrMaximal::Maximal) - } - - V::from_str(s) - .map(ExplicitOrMaximal::Explicit) - .map_err(|e| format!("Failed to parse '{e:?}'. 
Expected 'max' or explicit value")) - } -} - -#[doc = "Runtime version params."] -#[derive(StructOpt, Debug, PartialEq, Eq, Clone, Copy, EnumString, VariantNames)] -pub enum RuntimeVersionType { - /// Auto query version from chain - Auto, - /// Custom `spec_version` and `transaction_version` - Custom, - /// Read version from bundle dependencies directly. - Bundle, -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn hex_bytes_display_matches_from_str_for_clap() { - // given - let hex = HexBytes(vec![1, 2, 3, 4]); - let display = format!("{hex}"); - - // when - let hex2: HexBytes = display.parse().unwrap(); - - // then - assert_eq!(hex.0, hex2.0); - } -} diff --git a/relays/bin-substrate/src/cli/relay_headers.rs b/relays/bin-substrate/src/cli/relay_headers.rs index a3b5c4c9f..e244d0e9a 100644 --- a/relays/bin-substrate/src/cli/relay_headers.rs +++ b/relays/bin-substrate/src/cli/relay_headers.rs @@ -14,7 +14,6 @@ // You should have received a copy of the GNU General Public License // along with Parity Bridges Common. If not, see . -use async_trait::async_trait; use structopt::StructOpt; use strum::{EnumString, VariantNames}; @@ -32,10 +31,8 @@ use crate::bridges::{ rococo_headers_to_rococo_bulletin::RococoToRococoBulletinCliBridge, }, }; -use relay_utils::metrics::{GlobalMetrics, StandaloneMetric}; -use substrate_relay_helper::finality::SubstrateFinalitySyncPipeline; -use crate::cli::{bridge::*, chain_schema::*, PrometheusParams}; +use substrate_relay_helper::cli::relay_headers::{HeadersRelayer, RelayHeadersParams}; /// Start headers relayer process. #[derive(StructOpt)] @@ -43,18 +40,8 @@ pub struct RelayHeaders { /// A bridge instance to relay headers for. #[structopt(possible_values = RelayHeadersBridge::VARIANTS, case_insensitive = true)] bridge: RelayHeadersBridge, - /// If passed, only mandatory headers (headers that are changing the GRANDPA authorities set) - /// are relayed. 
- #[structopt(long)] - only_mandatory_headers: bool, #[structopt(flatten)] - source: SourceConnectionParams, - #[structopt(flatten)] - target: TargetConnectionParams, - #[structopt(flatten)] - target_sign: TargetSigningParams, - #[structopt(flatten)] - prometheus_params: PrometheusParams, + params: RelayHeadersParams, } #[derive(Debug, EnumString, VariantNames)] @@ -69,37 +56,6 @@ pub enum RelayHeadersBridge { RococoBulletinToBridgeHubRococo, } -#[async_trait] -trait HeadersRelayer: RelayToRelayHeadersCliBridge { - /// Relay headers. - async fn relay_headers(data: RelayHeaders) -> anyhow::Result<()> { - let source_client = data.source.into_client::().await?; - let target_client = data.target.into_client::().await?; - let target_transactions_mortality = data.target_sign.target_transactions_mortality; - let target_sign = data.target_sign.to_keypair::()?; - - let metrics_params: relay_utils::metrics::MetricsParams = - data.prometheus_params.into_metrics_params()?; - GlobalMetrics::new()?.register_and_spawn(&metrics_params.registry)?; - - let target_transactions_params = substrate_relay_helper::TransactionParams { - signer: target_sign, - mortality: target_transactions_mortality, - }; - Self::Finality::start_relay_guards(&target_client, target_client.can_start_version_guard()) - .await?; - - substrate_relay_helper::finality::run::( - source_client, - target_client, - data.only_mandatory_headers, - target_transactions_params, - metrics_params, - ) - .await - } -} - impl HeadersRelayer for KusamaToBridgeHubPolkadotCliBridge {} impl HeadersRelayer for PolkadotToBridgeHubKusamaCliBridge {} impl HeadersRelayer for PolkadotToPolkadotBulletinCliBridge {} @@ -112,17 +68,17 @@ impl RelayHeaders { pub async fn run(self) -> anyhow::Result<()> { match self.bridge { RelayHeadersBridge::KusamaToBridgeHubPolkadot => - KusamaToBridgeHubPolkadotCliBridge::relay_headers(self), + KusamaToBridgeHubPolkadotCliBridge::relay_headers(self.params), RelayHeadersBridge::PolkadotToBridgeHubKusama 
=> - PolkadotToBridgeHubKusamaCliBridge::relay_headers(self), + PolkadotToBridgeHubKusamaCliBridge::relay_headers(self.params), RelayHeadersBridge::PolkadotToPolkadotBulletin => - PolkadotToPolkadotBulletinCliBridge::relay_headers(self), + PolkadotToPolkadotBulletinCliBridge::relay_headers(self.params), RelayHeadersBridge::PolkadotBulletinToBridgeHubPolkadot => - PolkadotBulletinToBridgeHubPolkadotCliBridge::relay_headers(self), + PolkadotBulletinToBridgeHubPolkadotCliBridge::relay_headers(self.params), RelayHeadersBridge::RococoToRococoBulletin => - RococoToRococoBulletinCliBridge::relay_headers(self), + RococoToRococoBulletinCliBridge::relay_headers(self.params), RelayHeadersBridge::RococoBulletinToBridgeHubRococo => - RococoBulletinToBridgeHubRococoCliBridge::relay_headers(self), + RococoBulletinToBridgeHubRococoCliBridge::relay_headers(self.params), } .await } diff --git a/relays/bin-substrate/src/cli/relay_headers_and_messages/mod.rs b/relays/bin-substrate/src/cli/relay_headers_and_messages.rs similarity index 52% rename from relays/bin-substrate/src/cli/relay_headers_and_messages/mod.rs rename to relays/bin-substrate/src/cli/relay_headers_and_messages.rs index c445bdddc..229661748 100644 --- a/relays/bin-substrate/src/cli/relay_headers_and_messages/mod.rs +++ b/relays/bin-substrate/src/cli/relay_headers_and_messages.rs @@ -23,176 +23,48 @@ //! `declare_chain_to_parachain_bridge_schema` for the bridge. //! 3) declare a new struct for the added bridge and implement the `Full2WayBridge` trait for it. 
-#[macro_use] -mod parachain_to_parachain; -#[macro_use] -mod relay_to_relay; -#[macro_use] -mod relay_to_parachain; - use async_trait::async_trait; -use std::{marker::PhantomData, sync::Arc}; use structopt::StructOpt; -use futures::{FutureExt, TryFutureExt}; -use relay_to_parachain::*; - -use crate::{ - bridges::{ - kusama_polkadot::{ - kusama_parachains_to_bridge_hub_polkadot::BridgeHubKusamaToBridgeHubPolkadotCliBridge, - polkadot_parachains_to_bridge_hub_kusama::BridgeHubPolkadotToBridgeHubKusamaCliBridge, - }, - polkadot_bulletin::{ - polkadot_bulletin_headers_to_bridge_hub_polkadot::PolkadotBulletinToBridgeHubPolkadotCliBridge, - polkadot_parachains_to_polkadot_bulletin::PolkadotToPolkadotBulletinCliBridge, - }, - rococo_bulletin::{ - rococo_bulletin_headers_to_bridge_hub_rococo::RococoBulletinToBridgeHubRococoCliBridge, - rococo_parachains_to_rococo_bulletin::RococoToRococoBulletinCliBridge, - BridgeHubRococoAsBridgeHubPolkadot, - }, - rococo_westend::{ - rococo_parachains_to_bridge_hub_westend::BridgeHubRococoToBridgeHubWestendCliBridge, - westend_parachains_to_bridge_hub_rococo::BridgeHubWestendToBridgeHubRococoCliBridge, - }, +use crate::bridges::{ + kusama_polkadot::{ + kusama_parachains_to_bridge_hub_polkadot::BridgeHubKusamaToBridgeHubPolkadotCliBridge, + polkadot_parachains_to_bridge_hub_kusama::BridgeHubPolkadotToBridgeHubKusamaCliBridge, + }, + polkadot_bulletin::{ + polkadot_bulletin_headers_to_bridge_hub_polkadot::PolkadotBulletinToBridgeHubPolkadotCliBridge, + polkadot_parachains_to_polkadot_bulletin::PolkadotToPolkadotBulletinCliBridge, }, + rococo_bulletin::{ + rococo_bulletin_headers_to_bridge_hub_rococo::RococoBulletinToBridgeHubRococoCliBridge, + rococo_parachains_to_rococo_bulletin::RococoToRococoBulletinCliBridge, + BridgeHubRococoAsBridgeHubPolkadot, + }, + rococo_westend::{ + rococo_parachains_to_bridge_hub_westend::BridgeHubRococoToBridgeHubWestendCliBridge, + 
westend_parachains_to_bridge_hub_rococo::BridgeHubWestendToBridgeHubRococoCliBridge, + }, +}; +use relay_substrate_client::{ + AccountKeyPairOf, ChainRuntimeVersion, ChainWithRuntimeVersion, ChainWithTransactions, + Parachain, SimpleRuntimeVersion, +}; +use substrate_relay_helper::{ cli::{ bridge::{ CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge, RelayToRelayHeadersCliBridge, }, chain_schema::*, - relay_headers_and_messages::parachain_to_parachain::ParachainToParachainBridge, - CliChain, HexLaneId, PrometheusParams, + relay_headers_and_messages::{ + parachain_to_parachain::ParachainToParachainBridge, relay_to_parachain::*, + BridgeEndCommonParams, Full2WayBridge, Full2WayBridgeCommonParams, + HeadersAndMessagesSharedParams, + }, }, - declare_chain_cli_schema, + declare_chain_cli_schema, declare_parachain_to_parachain_bridge_schema, + declare_relay_to_parachain_bridge_schema, TransactionParams, }; -use bp_messages::LaneId; -use bp_runtime::BalanceOf; -use relay_substrate_client::{ - AccountIdOf, AccountKeyPairOf, Chain, ChainWithBalances, ChainWithMessages, - ChainWithTransactions, Client, Parachain, -}; -use relay_utils::metrics::MetricsParams; -use sp_core::Pair; -use substrate_relay_helper::{ - messages_lane::{MessagesRelayLimits, MessagesRelayParams}, - on_demand::OnDemandRelay, - TaggedAccount, TransactionParams, -}; - -/// Parameters that have the same names across all bridges. -#[derive(Debug, PartialEq, StructOpt)] -pub struct HeadersAndMessagesSharedParams { - /// Hex-encoded lane identifiers that should be served by the complex relay. - #[structopt(long, default_value = "00000000")] - pub lane: Vec, - /// If passed, only mandatory headers (headers that are changing the GRANDPA authorities set) - /// are relayed. - #[structopt(long)] - pub only_mandatory_headers: bool, - #[structopt(flatten)] - pub prometheus_params: PrometheusParams, -} - -/// Bridge parameters, shared by all bridge types. 
-pub struct Full2WayBridgeCommonParams< - Left: ChainWithTransactions + CliChain, - Right: ChainWithTransactions + CliChain, -> { - /// Shared parameters. - pub shared: HeadersAndMessagesSharedParams, - /// Parameters of the left chain. - pub left: BridgeEndCommonParams, - /// Parameters of the right chain. - pub right: BridgeEndCommonParams, - - /// Common metric parameters. - pub metrics_params: MetricsParams, -} - -impl - Full2WayBridgeCommonParams -{ - /// Creates new bridge parameters from its components. - pub fn new>( - shared: HeadersAndMessagesSharedParams, - left: BridgeEndCommonParams, - right: BridgeEndCommonParams, - ) -> anyhow::Result { - // Create metrics registry. - let metrics_params = shared.prometheus_params.clone().into_metrics_params()?; - let metrics_params = relay_utils::relay_metrics(metrics_params).into_params(); - - Ok(Self { shared, left, right, metrics_params }) - } -} - -/// Parameters that are associated with one side of the bridge. -pub struct BridgeEndCommonParams { - /// Chain client. - pub client: Client, - /// Params used for sending transactions to the chain. - pub tx_params: TransactionParams>, - /// Accounts, which balances are exposed as metrics by the relay process. - pub accounts: Vec>>, -} - -/// All data of the bidirectional complex relay. -struct FullBridge< - 'a, - Source: ChainWithTransactions + CliChain, - Target: ChainWithTransactions + CliChain, - Bridge: MessagesCliBridge, -> { - source: &'a mut BridgeEndCommonParams, - target: &'a mut BridgeEndCommonParams, - metrics_params: &'a MetricsParams, - _phantom_data: PhantomData, -} - -impl< - 'a, - Source: ChainWithTransactions + CliChain, - Target: ChainWithTransactions + CliChain, - Bridge: MessagesCliBridge, - > FullBridge<'a, Source, Target, Bridge> -where - AccountIdOf: From< as Pair>::Public>, - AccountIdOf: From< as Pair>::Public>, - BalanceOf: TryFrom> + Into, -{ - /// Construct complex relay given it components. 
- fn new( - source: &'a mut BridgeEndCommonParams, - target: &'a mut BridgeEndCommonParams, - metrics_params: &'a MetricsParams, - ) -> Self { - Self { source, target, metrics_params, _phantom_data: Default::default() } - } - - /// Returns message relay parameters. - fn messages_relay_params( - &self, - source_to_target_headers_relay: Arc>, - target_to_source_headers_relay: Arc>, - lane_id: LaneId, - maybe_limits: Option, - ) -> MessagesRelayParams { - MessagesRelayParams { - source_client: self.source.client.clone(), - source_transaction_params: self.source.tx_params.clone(), - target_client: self.target.client.clone(), - target_transaction_params: self.target.tx_params.clone(), - source_to_target_headers_relay: Some(source_to_target_headers_relay), - target_to_source_headers_relay: Some(target_to_source_headers_relay), - lane_id, - limits: maybe_limits, - metrics_params: self.metrics_params.clone().disable(), - } - } -} // All supported chains. declare_chain_cli_schema!(Rococo, rococo); @@ -247,171 +119,6 @@ declare_parachain_to_parachain_bridge_schema!(BridgeHubKusama, Kusama, BridgeHub declare_relay_to_parachain_bridge_schema!(PolkadotBulletin, BridgeHubPolkadot, Polkadot); declare_relay_to_parachain_bridge_schema!(RococoBulletin, BridgeHubRococo, Rococo); -/// Base portion of the bidirectional complex relay. -/// -/// This main purpose of extracting this trait is that in different relays the implementation -/// of `start_on_demand_headers_relayers` method will be different. But the number of -/// implementations is limited to relay <> relay, parachain <> relay and parachain <> parachain. -/// This trait allows us to reuse these implementations in different bridges. -#[async_trait] -trait Full2WayBridgeBase: Sized + Send + Sync { - /// The CLI params for the bridge. - type Params; - /// The left relay chain. - type Left: ChainWithTransactions + CliChain; - /// The right destination chain (it can be a relay or a parachain). 
- type Right: ChainWithTransactions + CliChain; - - /// Reference to common relay parameters. - fn common(&self) -> &Full2WayBridgeCommonParams; - - /// Mutable reference to common relay parameters. - fn mut_common(&mut self) -> &mut Full2WayBridgeCommonParams; - - /// Start on-demand headers relays. - async fn start_on_demand_headers_relayers( - &mut self, - ) -> anyhow::Result<( - Arc>, - Arc>, - )>; -} - -/// Bidirectional complex relay. -#[async_trait] -trait Full2WayBridge: Sized + Sync -where - AccountIdOf: From< as Pair>::Public>, - AccountIdOf: From< as Pair>::Public>, - BalanceOf: TryFrom> + Into, - BalanceOf: TryFrom> + Into, -{ - /// Base portion of the bidirectional complex relay. - type Base: Full2WayBridgeBase; - - /// The left relay chain. - type Left: ChainWithTransactions + ChainWithBalances + ChainWithMessages + CliChain; - /// The right relay chain. - type Right: ChainWithTransactions + ChainWithBalances + ChainWithMessages + CliChain; - - /// Left to Right bridge. - type L2R: MessagesCliBridge; - /// Right to Left bridge - type R2L: MessagesCliBridge; - - /// Construct new bridge. - fn new(params: ::Params) -> anyhow::Result; - - /// Reference to the base relay portion. - fn base(&self) -> &Self::Base; - - /// Mutable reference to the base relay portion. - fn mut_base(&mut self) -> &mut Self::Base; - - /// Creates and returns Left to Right complex relay. - fn left_to_right(&mut self) -> FullBridge { - let common = self.mut_base().mut_common(); - FullBridge::<_, _, Self::L2R>::new( - &mut common.left, - &mut common.right, - &common.metrics_params, - ) - } - - /// Creates and returns Right to Left complex relay. - fn right_to_left(&mut self) -> FullBridge { - let common = self.mut_base().mut_common(); - FullBridge::<_, _, Self::R2L>::new( - &mut common.right, - &mut common.left, - &common.metrics_params, - ) - } - - /// Start complex relay. - async fn run(&mut self) -> anyhow::Result<()> { - // Register standalone metrics. 
- { - let common = self.mut_base().mut_common(); - common.left.accounts.push(TaggedAccount::Messages { - id: common.left.tx_params.signer.public().into(), - bridged_chain: Self::Right::NAME.to_string(), - }); - common.right.accounts.push(TaggedAccount::Messages { - id: common.right.tx_params.signer.public().into(), - bridged_chain: Self::Left::NAME.to_string(), - }); - } - - // start on-demand header relays - let (left_to_right_on_demand_headers, right_to_left_on_demand_headers) = - self.mut_base().start_on_demand_headers_relayers().await?; - - // add balance-related metrics - let lanes = self - .base() - .common() - .shared - .lane - .iter() - .cloned() - .map(Into::into) - .collect::>(); - { - let common = self.mut_base().mut_common(); - substrate_relay_helper::messages_metrics::add_relay_balances_metrics::<_, Self::Right>( - common.left.client.clone(), - &common.metrics_params, - &common.left.accounts, - &lanes, - ) - .await?; - substrate_relay_helper::messages_metrics::add_relay_balances_metrics::<_, Self::Left>( - common.right.client.clone(), - &common.metrics_params, - &common.right.accounts, - &lanes, - ) - .await?; - } - - // Need 2x capacity since we consider both directions for each lane - let mut message_relays = Vec::with_capacity(lanes.len() * 2); - for lane in lanes { - let left_to_right_messages = substrate_relay_helper::messages_lane::run::< - ::MessagesLane, - >(self.left_to_right().messages_relay_params( - left_to_right_on_demand_headers.clone(), - right_to_left_on_demand_headers.clone(), - lane, - Self::L2R::maybe_messages_limits(), - )) - .map_err(|e| anyhow::format_err!("{}", e)) - .boxed(); - message_relays.push(left_to_right_messages); - - let right_to_left_messages = substrate_relay_helper::messages_lane::run::< - ::MessagesLane, - >(self.right_to_left().messages_relay_params( - right_to_left_on_demand_headers.clone(), - left_to_right_on_demand_headers.clone(), - lane, - Self::R2L::maybe_messages_limits(), - )) - .map_err(|e| 
anyhow::format_err!("{}", e)) - .boxed(); - message_relays.push(right_to_left_messages); - } - - relay_utils::relay_metrics(self.base().common().metrics_params.clone()) - .expose() - .await - .map_err(|e| anyhow::format_err!("{}", e))?; - - futures::future::select_all(message_relays).await.0 - } -} - /// BridgeHubRococo <> BridgeHubWestend complex relay. pub struct BridgeHubRococoBridgeHubWestendFull2WayBridge { base: ::Base, @@ -556,6 +263,7 @@ impl RelayHeadersAndMessages { #[cfg(test)] mod tests { use super::*; + use substrate_relay_helper::cli::{HexLaneId, PrometheusParams}; #[test] fn should_parse_parachain_to_parachain_options() { diff --git a/relays/bin-substrate/src/cli/relay_messages.rs b/relays/bin-substrate/src/cli/relay_messages.rs index b20725b53..92b98f4d9 100644 --- a/relays/bin-substrate/src/cli/relay_messages.rs +++ b/relays/bin-substrate/src/cli/relay_messages.rs @@ -14,10 +14,8 @@ // You should have received a copy of the GNU General Public License // along with Parity Bridges Common. If not, see . -use async_trait::async_trait; -use sp_core::Pair; use structopt::StructOpt; -use strum::VariantNames; +use strum::{EnumString, VariantNames}; use crate::bridges::{ kusama_polkadot::{ @@ -37,10 +35,21 @@ use crate::bridges::{ bridge_hub_westend_messages_to_bridge_hub_rococo::BridgeHubWestendToBridgeHubRococoMessagesCliBridge, }, }; -use relay_substrate_client::{AccountIdOf, AccountKeyPairOf, BalanceOf, ChainWithTransactions}; -use substrate_relay_helper::{messages_lane::MessagesRelayParams, TransactionParams}; +use substrate_relay_helper::cli::relay_messages::{MessagesRelayer, RelayMessagesParams}; -use crate::cli::{bridge::*, chain_schema::*, CliChain, HexLaneId, PrometheusParams}; +#[derive(Debug, PartialEq, Eq, EnumString, VariantNames)] +#[strum(serialize_all = "kebab_case")] +/// Supported full bridges (headers + messages). 
+pub enum FullBridge { + BridgeHubRococoToBridgeHubWestend, + BridgeHubWestendToBridgeHubRococo, + BridgeHubKusamaToBridgeHubPolkadot, + BridgeHubPolkadotToBridgeHubKusama, + PolkadotBulletinToBridgeHubPolkadot, + BridgeHubPolkadotToPolkadotBulletin, + RococoBulletinToBridgeHubRococo, + BridgeHubRococoToRococoBulletin, +} /// Start messages relayer process. #[derive(StructOpt)] @@ -48,57 +57,8 @@ pub struct RelayMessages { /// A bridge instance to relay messages for. #[structopt(possible_values = FullBridge::VARIANTS, case_insensitive = true)] bridge: FullBridge, - /// Hex-encoded lane id that should be served by the relay. Defaults to `00000000`. - #[structopt(long, default_value = "00000000")] - lane: HexLaneId, - #[structopt(flatten)] - source: SourceConnectionParams, - #[structopt(flatten)] - source_sign: SourceSigningParams, - #[structopt(flatten)] - target: TargetConnectionParams, - #[structopt(flatten)] - target_sign: TargetSigningParams, #[structopt(flatten)] - prometheus_params: PrometheusParams, -} - -#[async_trait] -trait MessagesRelayer: MessagesCliBridge -where - Self::Source: ChainWithTransactions + CliChain, - AccountIdOf: From< as Pair>::Public>, - AccountIdOf: From< as Pair>::Public>, - BalanceOf: TryFrom>, -{ - async fn relay_messages(data: RelayMessages) -> anyhow::Result<()> { - let source_client = data.source.into_client::().await?; - let source_sign = data.source_sign.to_keypair::()?; - let source_transactions_mortality = data.source_sign.transactions_mortality()?; - let target_client = data.target.into_client::().await?; - let target_sign = data.target_sign.to_keypair::()?; - let target_transactions_mortality = data.target_sign.transactions_mortality()?; - - substrate_relay_helper::messages_lane::run::(MessagesRelayParams { - source_client, - source_transaction_params: TransactionParams { - signer: source_sign, - mortality: source_transactions_mortality, - }, - target_client, - target_transaction_params: TransactionParams { - signer: 
target_sign, - mortality: target_transactions_mortality, - }, - source_to_target_headers_relay: None, - target_to_source_headers_relay: None, - lane_id: data.lane.into(), - limits: Self::maybe_messages_limits(), - metrics_params: data.prometheus_params.into_metrics_params()?, - }) - .await - .map_err(|e| anyhow::format_err!("{}", e)) - } + params: RelayMessagesParams, } impl MessagesRelayer for BridgeHubRococoToBridgeHubWestendMessagesCliBridge {} @@ -115,21 +75,21 @@ impl RelayMessages { pub async fn run(self) -> anyhow::Result<()> { match self.bridge { FullBridge::BridgeHubRococoToBridgeHubWestend => - BridgeHubRococoToBridgeHubWestendMessagesCliBridge::relay_messages(self), + BridgeHubRococoToBridgeHubWestendMessagesCliBridge::relay_messages(self.params), FullBridge::BridgeHubWestendToBridgeHubRococo => - BridgeHubWestendToBridgeHubRococoMessagesCliBridge::relay_messages(self), + BridgeHubWestendToBridgeHubRococoMessagesCliBridge::relay_messages(self.params), FullBridge::BridgeHubKusamaToBridgeHubPolkadot => - BridgeHubKusamaToBridgeHubPolkadotMessagesCliBridge::relay_messages(self), + BridgeHubKusamaToBridgeHubPolkadotMessagesCliBridge::relay_messages(self.params), FullBridge::BridgeHubPolkadotToBridgeHubKusama => - BridgeHubPolkadotToBridgeHubKusamaMessagesCliBridge::relay_messages(self), + BridgeHubPolkadotToBridgeHubKusamaMessagesCliBridge::relay_messages(self.params), FullBridge::PolkadotBulletinToBridgeHubPolkadot => - PolkadotBulletinToBridgeHubPolkadotMessagesCliBridge::relay_messages(self), + PolkadotBulletinToBridgeHubPolkadotMessagesCliBridge::relay_messages(self.params), FullBridge::BridgeHubPolkadotToPolkadotBulletin => - BridgeHubPolkadotToPolkadotBulletinMessagesCliBridge::relay_messages(self), + BridgeHubPolkadotToPolkadotBulletinMessagesCliBridge::relay_messages(self.params), FullBridge::RococoBulletinToBridgeHubRococo => - RococoBulletinToBridgeHubRococoMessagesCliBridge::relay_messages(self), + 
RococoBulletinToBridgeHubRococoMessagesCliBridge::relay_messages(self.params), FullBridge::BridgeHubRococoToRococoBulletin => - BridgeHubRococoToRococoBulletinMessagesCliBridge::relay_messages(self), + BridgeHubRococoToRococoBulletinMessagesCliBridge::relay_messages(self.params), } .await } diff --git a/relays/bin-substrate/src/cli/relay_parachains.rs b/relays/bin-substrate/src/cli/relay_parachains.rs index dc1c5ad36..65382d1ca 100644 --- a/relays/bin-substrate/src/cli/relay_parachains.rs +++ b/relays/bin-substrate/src/cli/relay_parachains.rs @@ -26,24 +26,9 @@ use crate::bridges::{ westend_parachains_to_bridge_hub_rococo::BridgeHubWestendToBridgeHubRococoCliBridge, }, }; -use async_std::sync::Mutex; -use async_trait::async_trait; -use parachains_relay::parachains_loop::{AvailableHeader, SourceClient, TargetClient}; -use relay_substrate_client::Parachain; -use relay_utils::metrics::{GlobalMetrics, StandaloneMetric}; -use std::sync::Arc; use structopt::StructOpt; use strum::{EnumString, VariantNames}; -use substrate_relay_helper::{ - parachains::{source::ParachainsSource, target::ParachainsTarget, ParachainsPipelineAdapter}, - TransactionParams, -}; - -use crate::cli::{ - bridge::{CliBridgeBase, ParachainToRelayHeadersCliBridge}, - chain_schema::*, - PrometheusParams, -}; +use substrate_relay_helper::cli::relay_parachains::{ParachainsRelayer, RelayParachainsParams}; /// Start parachain heads relayer process. #[derive(StructOpt)] @@ -52,13 +37,7 @@ pub struct RelayParachains { #[structopt(possible_values = RelayParachainsBridge::VARIANTS, case_insensitive = true)] bridge: RelayParachainsBridge, #[structopt(flatten)] - source: SourceConnectionParams, - #[structopt(flatten)] - target: TargetConnectionParams, - #[structopt(flatten)] - target_sign: TargetSigningParams, - #[structopt(flatten)] - prometheus_params: PrometheusParams, + params: RelayParachainsParams, } /// Parachain heads relay bridge. 
@@ -73,47 +52,6 @@ pub enum RelayParachainsBridge { WestendToBridgeHubRococo, } -#[async_trait] -trait ParachainsRelayer: ParachainToRelayHeadersCliBridge -where - ParachainsSource: - SourceClient>, - ParachainsTarget: - TargetClient>, - ::Source: Parachain, -{ - async fn relay_parachains(data: RelayParachains) -> anyhow::Result<()> { - let source_client = data.source.into_client::().await?; - let source_client = ParachainsSource::::new( - source_client, - Arc::new(Mutex::new(AvailableHeader::Missing)), - ); - - let target_transaction_params = TransactionParams { - signer: data.target_sign.to_keypair::()?, - mortality: data.target_sign.target_transactions_mortality, - }; - let target_client = data.target.into_client::().await?; - let target_client = ParachainsTarget::::new( - target_client.clone(), - target_transaction_params, - ); - - let metrics_params: relay_utils::metrics::MetricsParams = - data.prometheus_params.into_metrics_params()?; - GlobalMetrics::new()?.register_and_spawn(&metrics_params.registry)?; - - parachains_relay::parachains_loop::run( - source_client, - target_client, - metrics_params, - futures::future::pending(), - ) - .await - .map_err(|e| anyhow::format_err!("{}", e)) - } -} - impl ParachainsRelayer for BridgeHubRococoToBridgeHubWestendCliBridge {} impl ParachainsRelayer for BridgeHubWestendToBridgeHubRococoCliBridge {} impl ParachainsRelayer for BridgeHubKusamaToBridgeHubPolkadotCliBridge {} @@ -126,17 +64,17 @@ impl RelayParachains { pub async fn run(self) -> anyhow::Result<()> { match self.bridge { RelayParachainsBridge::RococoToBridgeHubWestend => - BridgeHubRococoToBridgeHubWestendCliBridge::relay_parachains(self), + BridgeHubRococoToBridgeHubWestendCliBridge::relay_parachains(self.params), RelayParachainsBridge::WestendToBridgeHubRococo => - BridgeHubWestendToBridgeHubRococoCliBridge::relay_parachains(self), + BridgeHubWestendToBridgeHubRococoCliBridge::relay_parachains(self.params), RelayParachainsBridge::KusamaToBridgeHubPolkadot => - 
BridgeHubKusamaToBridgeHubPolkadotCliBridge::relay_parachains(self), + BridgeHubKusamaToBridgeHubPolkadotCliBridge::relay_parachains(self.params), RelayParachainsBridge::PolkadotToBridgeHubKusama => - BridgeHubPolkadotToBridgeHubKusamaCliBridge::relay_parachains(self), + BridgeHubPolkadotToBridgeHubKusamaCliBridge::relay_parachains(self.params), RelayParachainsBridge::PolkadotToPolkadotBulletin => - PolkadotToPolkadotBulletinCliBridge::relay_parachains(self), + PolkadotToPolkadotBulletinCliBridge::relay_parachains(self.params), RelayParachainsBridge::RococoToRococoBulletin => - RococoToRococoBulletinCliBridge::relay_parachains(self), + RococoToRococoBulletinCliBridge::relay_parachains(self.params), } .await } diff --git a/relays/bin-substrate/src/main.rs b/relays/bin-substrate/src/main.rs index 33a423b07..214bfa60e 100644 --- a/relays/bin-substrate/src/main.rs +++ b/relays/bin-substrate/src/main.rs @@ -19,7 +19,6 @@ #![warn(missing_docs)] mod bridges; -mod chains; mod cli; fn main() { diff --git a/relays/client-bridge-hub-kusama/src/lib.rs b/relays/client-bridge-hub-kusama/src/lib.rs index 4ad6d2e2f..80f621dee 100644 --- a/relays/client-bridge-hub-kusama/src/lib.rs +++ b/relays/client-bridge-hub-kusama/src/lib.rs @@ -23,8 +23,9 @@ use bp_polkadot::SuffixedCommonTransactionExtensionExt; use codec::Encode; use relay_substrate_client::{ calls::UtilityCall as MockUtilityCall, Chain, ChainWithBalances, ChainWithMessages, - ChainWithTransactions, ChainWithUtilityPallet, Error as SubstrateError, - MockedRuntimeUtilityPallet, SignParam, UnderlyingChainProvider, UnsignedTransaction, + ChainWithRuntimeVersion, ChainWithTransactions, ChainWithUtilityPallet, + Error as SubstrateError, MockedRuntimeUtilityPallet, SignParam, SimpleRuntimeVersion, + UnderlyingChainProvider, UnsignedTransaction, }; use sp_core::{storage::StorageKey, Pair}; use sp_runtime::{generic::SignedPayload, traits::IdentifyAccount}; @@ -120,3 +121,8 @@ impl ChainWithMessages for BridgeHubKusama { const 
FROM_CHAIN_MESSAGE_DETAILS_METHOD: &'static str = bp_bridge_hub_kusama::FROM_BRIDGE_HUB_KUSAMA_MESSAGE_DETAILS_METHOD; } + +impl ChainWithRuntimeVersion for BridgeHubKusama { + const RUNTIME_VERSION: Option = + Some(SimpleRuntimeVersion { spec_version: 1_001_000, transaction_version: 4 }); +} diff --git a/relays/client-bridge-hub-polkadot/src/lib.rs b/relays/client-bridge-hub-polkadot/src/lib.rs index 8ac656921..ed147e92d 100644 --- a/relays/client-bridge-hub-polkadot/src/lib.rs +++ b/relays/client-bridge-hub-polkadot/src/lib.rs @@ -23,8 +23,9 @@ use bp_polkadot_core::SuffixedCommonTransactionExtensionExt; use codec::Encode; use relay_substrate_client::{ calls::UtilityCall as MockUtilityCall, Chain, ChainWithBalances, ChainWithMessages, - ChainWithTransactions, ChainWithUtilityPallet, Error as SubstrateError, - MockedRuntimeUtilityPallet, SignParam, UnderlyingChainProvider, UnsignedTransaction, + ChainWithRuntimeVersion, ChainWithTransactions, ChainWithUtilityPallet, + Error as SubstrateError, MockedRuntimeUtilityPallet, SignParam, SimpleRuntimeVersion, + UnderlyingChainProvider, UnsignedTransaction, }; use sp_core::{storage::StorageKey, Pair}; use sp_runtime::{generic::SignedPayload, traits::IdentifyAccount}; @@ -124,3 +125,8 @@ impl ChainWithMessages for BridgeHubPolkadot { const FROM_CHAIN_MESSAGE_DETAILS_METHOD: &'static str = bp_bridge_hub_polkadot::FROM_BRIDGE_HUB_POLKADOT_MESSAGE_DETAILS_METHOD; } + +impl ChainWithRuntimeVersion for BridgeHubPolkadot { + const RUNTIME_VERSION: Option = + Some(SimpleRuntimeVersion { spec_version: 1_001_000, transaction_version: 3 }); +} diff --git a/relays/client-bridge-hub-rococo/src/lib.rs b/relays/client-bridge-hub-rococo/src/lib.rs index 7ec584534..169135646 100644 --- a/relays/client-bridge-hub-rococo/src/lib.rs +++ b/relays/client-bridge-hub-rococo/src/lib.rs @@ -23,8 +23,9 @@ use bp_polkadot_core::SuffixedCommonTransactionExtensionExt; use codec::Encode; use relay_substrate_client::{ calls::UtilityCall as 
MockUtilityCall, Chain, ChainWithBalances, ChainWithMessages, - ChainWithTransactions, ChainWithUtilityPallet, Error as SubstrateError, - MockedRuntimeUtilityPallet, SignParam, UnderlyingChainProvider, UnsignedTransaction, + ChainWithRuntimeVersion, ChainWithTransactions, ChainWithUtilityPallet, + Error as SubstrateError, MockedRuntimeUtilityPallet, SignParam, SimpleRuntimeVersion, + UnderlyingChainProvider, UnsignedTransaction, }; use sp_core::{storage::StorageKey, Pair}; use sp_runtime::{generic::SignedPayload, traits::IdentifyAccount}; @@ -122,3 +123,8 @@ impl ChainWithMessages for BridgeHubRococo { const FROM_CHAIN_MESSAGE_DETAILS_METHOD: &'static str = bp_bridge_hub_rococo::FROM_BRIDGE_HUB_ROCOCO_MESSAGE_DETAILS_METHOD; } + +impl ChainWithRuntimeVersion for BridgeHubRococo { + const RUNTIME_VERSION: Option = + Some(SimpleRuntimeVersion { spec_version: 1_008_000, transaction_version: 4 }); +} diff --git a/relays/client-bridge-hub-westend/src/lib.rs b/relays/client-bridge-hub-westend/src/lib.rs index c6bf1b45d..5e4462f86 100644 --- a/relays/client-bridge-hub-westend/src/lib.rs +++ b/relays/client-bridge-hub-westend/src/lib.rs @@ -23,8 +23,9 @@ use bp_polkadot_core::SuffixedCommonTransactionExtensionExt; use codec::Encode; use relay_substrate_client::{ calls::UtilityCall as MockUtilityCall, Chain, ChainWithBalances, ChainWithMessages, - ChainWithTransactions, ChainWithUtilityPallet, Error as SubstrateError, - MockedRuntimeUtilityPallet, SignParam, UnderlyingChainProvider, UnsignedTransaction, + ChainWithRuntimeVersion, ChainWithTransactions, ChainWithUtilityPallet, + Error as SubstrateError, MockedRuntimeUtilityPallet, SignParam, SimpleRuntimeVersion, + UnderlyingChainProvider, UnsignedTransaction, }; use sp_core::{storage::StorageKey, Pair}; use sp_runtime::{generic::SignedPayload, traits::IdentifyAccount}; @@ -120,3 +121,8 @@ impl ChainWithMessages for BridgeHubWestend { const FROM_CHAIN_MESSAGE_DETAILS_METHOD: &'static str = 
bp_bridge_hub_westend::FROM_BRIDGE_HUB_WESTEND_MESSAGE_DETAILS_METHOD; } + +impl ChainWithRuntimeVersion for BridgeHubWestend { + const RUNTIME_VERSION: Option = + Some(SimpleRuntimeVersion { spec_version: 1_008_000, transaction_version: 4 }); +} diff --git a/relays/client-kusama/src/lib.rs b/relays/client-kusama/src/lib.rs index 0f412284e..a953a383e 100644 --- a/relays/client-kusama/src/lib.rs +++ b/relays/client-kusama/src/lib.rs @@ -22,8 +22,9 @@ use bp_kusama::{AccountInfoStorageMapKeyProvider, KUSAMA_SYNCED_HEADERS_GRANDPA_ use bp_polkadot_core::SuffixedCommonTransactionExtensionExt; use codec::Encode; use relay_substrate_client::{ - Chain, ChainWithBalances, ChainWithGrandpa, ChainWithTransactions, Error as SubstrateError, - RelayChain, SignParam, UnderlyingChainProvider, UnsignedTransaction, + Chain, ChainWithBalances, ChainWithGrandpa, ChainWithRuntimeVersion, ChainWithTransactions, + Error as SubstrateError, RelayChain, SignParam, SimpleRuntimeVersion, UnderlyingChainProvider, + UnsignedTransaction, }; use sp_core::{storage::StorageKey, Pair}; use sp_runtime::{generic::SignedPayload, traits::IdentifyAccount, MultiAddress}; @@ -114,3 +115,8 @@ impl ChainWithTransactions for Kusama { )) } } + +impl ChainWithRuntimeVersion for Kusama { + const RUNTIME_VERSION: Option = + Some(SimpleRuntimeVersion { spec_version: 1_001_002, transaction_version: 25 }); +} diff --git a/relays/client-polkadot-bulletin/src/lib.rs b/relays/client-polkadot-bulletin/src/lib.rs index b5bbeda80..a6cdd8ee6 100644 --- a/relays/client-polkadot-bulletin/src/lib.rs +++ b/relays/client-polkadot-bulletin/src/lib.rs @@ -21,8 +21,9 @@ mod codegen_runtime; use bp_polkadot_bulletin::POLKADOT_BULLETIN_SYNCED_HEADERS_GRANDPA_INFO_METHOD; use codec::Encode; use relay_substrate_client::{ - Chain, ChainWithBalances, ChainWithGrandpa, ChainWithMessages, ChainWithTransactions, - Error as SubstrateError, SignParam, UnderlyingChainProvider, UnsignedTransaction, + Chain, ChainWithBalances, 
ChainWithGrandpa, ChainWithMessages, ChainWithRuntimeVersion, + ChainWithTransactions, Error as SubstrateError, SignParam, SimpleRuntimeVersion, + UnderlyingChainProvider, UnsignedTransaction, }; use sp_core::{storage::StorageKey, Pair}; use sp_runtime::{generic::SignedPayload, traits::IdentifyAccount, MultiAddress}; @@ -131,3 +132,8 @@ impl ChainWithTransactions for PolkadotBulletin { )) } } + +impl ChainWithRuntimeVersion for PolkadotBulletin { + const RUNTIME_VERSION: Option = + Some(SimpleRuntimeVersion { spec_version: 100, transaction_version: 1 }); +} diff --git a/relays/client-polkadot/src/lib.rs b/relays/client-polkadot/src/lib.rs index 638e01d1b..af75f0dbb 100644 --- a/relays/client-polkadot/src/lib.rs +++ b/relays/client-polkadot/src/lib.rs @@ -22,8 +22,9 @@ use bp_polkadot::{AccountInfoStorageMapKeyProvider, POLKADOT_SYNCED_HEADERS_GRAN use bp_polkadot_core::SuffixedCommonTransactionExtensionExt; use codec::Encode; use relay_substrate_client::{ - Chain, ChainWithBalances, ChainWithGrandpa, ChainWithTransactions, Error as SubstrateError, - RelayChain, SignParam, UnderlyingChainProvider, UnsignedTransaction, + Chain, ChainWithBalances, ChainWithGrandpa, ChainWithRuntimeVersion, ChainWithTransactions, + Error as SubstrateError, RelayChain, SignParam, SimpleRuntimeVersion, UnderlyingChainProvider, + UnsignedTransaction, }; use sp_core::{storage::StorageKey, Pair}; use sp_runtime::{generic::SignedPayload, traits::IdentifyAccount, MultiAddress}; @@ -114,3 +115,8 @@ impl ChainWithTransactions for Polkadot { )) } } + +impl ChainWithRuntimeVersion for Polkadot { + const RUNTIME_VERSION: Option = + Some(SimpleRuntimeVersion { spec_version: 1_001_002, transaction_version: 25 }); +} diff --git a/relays/client-rococo/src/lib.rs b/relays/client-rococo/src/lib.rs index 7843af241..5fad80d5c 100644 --- a/relays/client-rococo/src/lib.rs +++ b/relays/client-rococo/src/lib.rs @@ -22,8 +22,9 @@ use bp_polkadot_core::SuffixedCommonTransactionExtensionExt; use 
bp_rococo::ROCOCO_SYNCED_HEADERS_GRANDPA_INFO_METHOD; use codec::Encode; use relay_substrate_client::{ - Chain, ChainWithBalances, ChainWithGrandpa, ChainWithTransactions, Error as SubstrateError, - RelayChain, SignParam, UnderlyingChainProvider, UnsignedTransaction, + Chain, ChainWithBalances, ChainWithGrandpa, ChainWithRuntimeVersion, ChainWithTransactions, + Error as SubstrateError, RelayChain, SignParam, SimpleRuntimeVersion, UnderlyingChainProvider, + UnsignedTransaction, }; use sp_core::{storage::StorageKey, Pair}; use sp_runtime::{generic::SignedPayload, traits::IdentifyAccount, MultiAddress}; @@ -114,3 +115,8 @@ impl ChainWithTransactions for Rococo { )) } } + +impl ChainWithRuntimeVersion for Rococo { + const RUNTIME_VERSION: Option = + Some(SimpleRuntimeVersion { spec_version: 1_008_000, transaction_version: 24 }); +} diff --git a/relays/client-substrate/src/chain.rs b/relays/client-substrate/src/chain.rs index 4f9467ec5..2aba5f567 100644 --- a/relays/client-substrate/src/chain.rs +++ b/relays/client-substrate/src/chain.rs @@ -16,6 +16,7 @@ use crate::calls::UtilityCall; +use crate::SimpleRuntimeVersion; use bp_header_chain::ChainWithGrandpa as ChainWithGrandpaBase; use bp_messages::ChainWithMessages as ChainWithMessagesBase; use bp_runtime::{ @@ -58,6 +59,16 @@ pub trait Chain: ChainBase + Clone { type Call: Clone + Codec + Debug + Send + Sync; } +/// Bridge-supported network definition. +/// +/// Used to abstract away CLI commands. +pub trait ChainWithRuntimeVersion: Chain { + /// Current version of the chain runtime, known to relay. + /// + /// can be `None` if relay is not going to submit transactions to that chain. + const RUNTIME_VERSION: Option; +} + /// Substrate-based relay chain that supports parachains. /// /// We assume that the parachains are supported using `runtime_parachains::paras` pallet. 
diff --git a/relays/client-substrate/src/lib.rs b/relays/client-substrate/src/lib.rs index 84c2ad10c..6c62b8e1c 100644 --- a/relays/client-substrate/src/lib.rs +++ b/relays/client-substrate/src/lib.rs @@ -35,9 +35,9 @@ use std::time::Duration; pub use crate::{ chain::{ AccountKeyPairOf, BlockWithJustification, CallOf, Chain, ChainWithBalances, - ChainWithGrandpa, ChainWithMessages, ChainWithTransactions, ChainWithUtilityPallet, - FullRuntimeUtilityPallet, MockedRuntimeUtilityPallet, Parachain, RelayChain, SignParam, - TransactionStatusOf, UnsignedTransaction, UtilityPallet, + ChainWithGrandpa, ChainWithMessages, ChainWithRuntimeVersion, ChainWithTransactions, + ChainWithUtilityPallet, FullRuntimeUtilityPallet, MockedRuntimeUtilityPallet, Parachain, + RelayChain, SignParam, TransactionStatusOf, UnsignedTransaction, UtilityPallet, }, client::{ is_ancient_block, ChainRuntimeVersion, Client, OpaqueGrandpaAuthoritiesSet, diff --git a/relays/client-westend/src/lib.rs b/relays/client-westend/src/lib.rs index 8067f67a2..737c6c085 100644 --- a/relays/client-westend/src/lib.rs +++ b/relays/client-westend/src/lib.rs @@ -22,8 +22,9 @@ use bp_polkadot_core::SuffixedCommonTransactionExtensionExt; use bp_westend::WESTEND_SYNCED_HEADERS_GRANDPA_INFO_METHOD; use codec::Encode; use relay_substrate_client::{ - Chain, ChainWithBalances, ChainWithGrandpa, ChainWithTransactions, Error as SubstrateError, - RelayChain, SignParam, UnderlyingChainProvider, UnsignedTransaction, + Chain, ChainWithBalances, ChainWithGrandpa, ChainWithRuntimeVersion, ChainWithTransactions, + Error as SubstrateError, RelayChain, SignParam, SimpleRuntimeVersion, UnderlyingChainProvider, + UnsignedTransaction, }; use sp_core::{storage::StorageKey, Pair}; use sp_runtime::{generic::SignedPayload, traits::IdentifyAccount, MultiAddress}; @@ -114,3 +115,8 @@ impl ChainWithTransactions for Westend { )) } } + +impl ChainWithRuntimeVersion for Westend { + const RUNTIME_VERSION: Option = + Some(SimpleRuntimeVersion { 
spec_version: 1_008_000, transaction_version: 24 }); +} diff --git a/relays/lib-substrate-relay/Cargo.toml b/relays/lib-substrate-relay/Cargo.toml index 161548ac4..d85e2762a 100644 --- a/relays/lib-substrate-relay/Cargo.toml +++ b/relays/lib-substrate-relay/Cargo.toml @@ -10,14 +10,17 @@ workspace = true [dependencies] anyhow = "1.0" -thiserror = { workspace = true } async-std = "1.9.0" async-trait = "0.1" codec = { package = "parity-scale-codec", version = "3.1.5" } futures = "0.3.30" hex = "0.4" -num-traits = "0.2" log = { workspace = true } +num-traits = "0.2" +rbtag = "0.3" +structopt = "0.3" +strum = { version = "0.26.2", features = ["derive"] } +thiserror = { workspace = true } # Bridge dependencies diff --git a/relays/bin-substrate/src/cli/bridge.rs b/relays/lib-substrate-relay/src/cli/bridge.rs similarity index 81% rename from relays/bin-substrate/src/cli/bridge.rs rename to relays/lib-substrate-relay/src/cli/bridge.rs index 9457dfa5c..316f59a2b 100644 --- a/relays/bin-substrate/src/cli/bridge.rs +++ b/relays/lib-substrate-relay/src/cli/bridge.rs @@ -14,38 +14,26 @@ // You should have received a copy of the GNU General Public License // along with Parity Bridges Common. If not, see . -use crate::cli::CliChain; -use pallet_bridge_parachains::{RelayBlockHash, RelayBlockHasher, RelayBlockNumber}; -use relay_substrate_client::{Chain, ChainWithTransactions, Parachain, RelayChain}; -use strum::{EnumString, VariantNames}; -use substrate_relay_helper::{ +//! Basic traits for exposing bridges in the CLI. + +use crate::{ equivocation::SubstrateEquivocationDetectionPipeline, finality::SubstrateFinalitySyncPipeline, messages_lane::{MessagesRelayLimits, SubstrateMessageLane}, parachains::SubstrateParachainsPipeline, }; - -#[derive(Debug, PartialEq, Eq, EnumString, VariantNames)] -#[strum(serialize_all = "kebab_case")] -/// Supported full bridges (headers + messages). 
-pub enum FullBridge { - BridgeHubRococoToBridgeHubWestend, - BridgeHubWestendToBridgeHubRococo, - BridgeHubKusamaToBridgeHubPolkadot, - BridgeHubPolkadotToBridgeHubKusama, - PolkadotBulletinToBridgeHubPolkadot, - BridgeHubPolkadotToPolkadotBulletin, - RococoBulletinToBridgeHubRococo, - BridgeHubRococoToRococoBulletin, -} +use pallet_bridge_parachains::{RelayBlockHash, RelayBlockHasher, RelayBlockNumber}; +use relay_substrate_client::{ + Chain, ChainWithRuntimeVersion, ChainWithTransactions, Parachain, RelayChain, +}; /// Minimal bridge representation that can be used from the CLI. /// It connects a source chain to a target chain. pub trait CliBridgeBase: Sized { /// The source chain. - type Source: Chain + CliChain; + type Source: Chain + ChainWithRuntimeVersion; /// The target chain. - type Target: ChainWithTransactions + CliChain; + type Target: ChainWithTransactions + ChainWithRuntimeVersion; } /// Bridge representation that can be used from the CLI for relaying headers @@ -60,6 +48,7 @@ pub trait RelayToRelayHeadersCliBridge: CliBridgeBase { /// Convenience trait that adds bounds to `CliBridgeBase`. pub trait RelayToRelayEquivocationDetectionCliBridgeBase: CliBridgeBase { + /// The source chain with extra bounds. type BoundedSource: ChainWithTransactions; } @@ -89,10 +78,10 @@ pub trait ParachainToRelayHeadersCliBridge: CliBridgeBase where Self::Source: Parachain, { - // The `CliBridgeBase` type represents the parachain in this situation. - // We need to add an extra type for the relay chain. + /// The `CliBridgeBase` type represents the parachain in this situation. + /// We need to add an extra type for the relay chain. type SourceRelay: Chain - + CliChain + + ChainWithRuntimeVersion + RelayChain; /// Finality proofs synchronization pipeline (source parachain -> target). 
type ParachainFinality: SubstrateParachainsPipeline< diff --git a/relays/lib-substrate-relay/src/cli/chain_schema.rs b/relays/lib-substrate-relay/src/cli/chain_schema.rs new file mode 100644 index 000000000..c5b802173 --- /dev/null +++ b/relays/lib-substrate-relay/src/cli/chain_schema.rs @@ -0,0 +1,250 @@ +// Copyright 2019-2022 Parity Technologies (UK) Ltd. +// This file is part of Parity Bridges Common. + +// Parity Bridges Common is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity Bridges Common is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity Bridges Common. If not, see . + +//! Primitives related to chain CLI options. + +use relay_substrate_client::{AccountKeyPairOf, ChainWithTransactions}; +use structopt::StructOpt; +use strum::{EnumString, VariantNames}; + +use relay_substrate_client::{ChainRuntimeVersion, ChainWithRuntimeVersion, SimpleRuntimeVersion}; + +use crate::TransactionParams; + +#[doc = "Runtime version params."] +#[derive(StructOpt, Debug, PartialEq, Eq, Clone, Copy, EnumString, VariantNames)] +pub enum RuntimeVersionType { + /// Auto query version from chain + Auto, + /// Custom `spec_version` and `transaction_version` + Custom, + /// Read version from bundle dependencies directly. + Bundle, +} + +/// Create chain-specific set of runtime version parameters. +#[macro_export] +macro_rules! declare_chain_runtime_version_params_cli_schema { + ($chain:ident, $chain_prefix:ident) => { + bp_runtime::paste::item! 
{ + #[doc = $chain " runtime version params."] + #[derive(StructOpt, Debug, PartialEq, Eq, Clone, Copy)] + pub struct [<$chain RuntimeVersionParams>] { + #[doc = "The type of runtime version for chain " $chain] + #[structopt(long, default_value = "Bundle")] + pub [<$chain_prefix _version_mode>]: RuntimeVersionType, + #[doc = "The custom sepc_version for chain " $chain] + #[structopt(long)] + pub [<$chain_prefix _spec_version>]: Option, + #[doc = "The custom transaction_version for chain " $chain] + #[structopt(long)] + pub [<$chain_prefix _transaction_version>]: Option, + } + + impl [<$chain RuntimeVersionParams>] { + /// Converts self into `ChainRuntimeVersion`. + pub fn into_runtime_version( + self, + bundle_runtime_version: Option, + ) -> anyhow::Result { + Ok(match self.[<$chain_prefix _version_mode>] { + RuntimeVersionType::Auto => ChainRuntimeVersion::Auto, + RuntimeVersionType::Custom => { + let custom_spec_version = self.[<$chain_prefix _spec_version>] + .ok_or_else(|| anyhow::Error::msg(format!("The {}-spec-version is required when choose custom mode", stringify!($chain_prefix))))?; + let custom_transaction_version = self.[<$chain_prefix _transaction_version>] + .ok_or_else(|| anyhow::Error::msg(format!("The {}-transaction-version is required when choose custom mode", stringify!($chain_prefix))))?; + ChainRuntimeVersion::Custom( + SimpleRuntimeVersion { + spec_version: custom_spec_version, + transaction_version: custom_transaction_version + } + ) + }, + RuntimeVersionType::Bundle => match bundle_runtime_version { + Some(runtime_version) => ChainRuntimeVersion::Custom(runtime_version), + None => { + return Err(anyhow::format_err!("Cannot use bundled runtime version of {}: it is not known to the relay", stringify!($chain_prefix))); + } + }, + }) + } + } + } + }; +} + +/// Create chain-specific set of runtime version parameters. +#[macro_export] +macro_rules! 
declare_chain_connection_params_cli_schema { + ($chain:ident, $chain_prefix:ident) => { + bp_runtime::paste::item! { + #[doc = $chain " connection params."] + #[derive(StructOpt, Debug, PartialEq, Eq, Clone)] + pub struct [<$chain ConnectionParams>] { + #[doc = "Connect to " $chain " node at given host."] + #[structopt(long, default_value = "127.0.0.1")] + pub [<$chain_prefix _host>]: String, + #[doc = "Connect to " $chain " node websocket server at given port."] + #[structopt(long, default_value = "9944")] + pub [<$chain_prefix _port>]: u16, + #[doc = "Use secure websocket connection."] + #[structopt(long)] + pub [<$chain_prefix _secure>]: bool, + #[doc = "Custom runtime version"] + #[structopt(flatten)] + pub [<$chain_prefix _runtime_version>]: [<$chain RuntimeVersionParams>], + } + + impl [<$chain ConnectionParams>] { + /// Convert connection params into Substrate client. + #[allow(dead_code)] + pub async fn into_client( + self, + ) -> anyhow::Result> { + let chain_runtime_version = self + .[<$chain_prefix _runtime_version>] + .into_runtime_version(Chain::RUNTIME_VERSION)?; + Ok(relay_substrate_client::Client::new(relay_substrate_client::ConnectionParams { + host: self.[<$chain_prefix _host>], + port: self.[<$chain_prefix _port>], + secure: self.[<$chain_prefix _secure>], + chain_runtime_version, + }) + .await + ) + } + } + } + }; +} + +/// Create chain-specific set of signing parameters. +#[macro_export] +macro_rules! declare_chain_signing_params_cli_schema { + ($chain:ident, $chain_prefix:ident) => { + bp_runtime::paste::item! 
{ + #[doc = $chain " signing params."] + #[derive(StructOpt, Debug, PartialEq, Eq, Clone)] + pub struct [<$chain SigningParams>] { + #[doc = "The SURI of secret key to use when transactions are submitted to the " $chain " node."] + #[structopt(long)] + pub [<$chain_prefix _signer>]: Option, + #[doc = "The password for the SURI of secret key to use when transactions are submitted to the " $chain " node."] + #[structopt(long)] + pub [<$chain_prefix _signer_password>]: Option, + + #[doc = "Path to the file, that contains SURI of secret key to use when transactions are submitted to the " $chain " node. Can be overridden with " $chain_prefix "_signer option."] + #[structopt(long)] + pub [<$chain_prefix _signer_file>]: Option, + #[doc = "Path to the file, that password for the SURI of secret key to use when transactions are submitted to the " $chain " node. Can be overridden with " $chain_prefix "_signer_password option."] + #[structopt(long)] + pub [<$chain_prefix _signer_password_file>]: Option, + + #[doc = "Transactions mortality period, in blocks. MUST be a power of two in [4; 65536] range. MAY NOT be larger than `BlockHashCount` parameter of the chain system module."] + #[structopt(long)] + pub [<$chain_prefix _transactions_mortality>]: Option, + } + + impl [<$chain SigningParams>] { + /// Return transactions mortality. + #[allow(dead_code)] + pub fn transactions_mortality(&self) -> anyhow::Result> { + self.[<$chain_prefix _transactions_mortality>] + .map(|transactions_mortality| { + if !(4..=65536).contains(&transactions_mortality) + || !transactions_mortality.is_power_of_two() + { + Err(anyhow::format_err!( + "Transactions mortality {} is not a power of two in a [4; 65536] range", + transactions_mortality, + )) + } else { + Ok(transactions_mortality) + } + }) + .transpose() + } + + /// Parse signing params into chain-specific KeyPair. 
+ #[allow(dead_code)] + pub fn to_keypair(&self) -> anyhow::Result> { + let suri = match (self.[<$chain_prefix _signer>].as_ref(), self.[<$chain_prefix _signer_file>].as_ref()) { + (Some(suri), _) => suri.to_owned(), + (None, Some(suri_file)) => std::fs::read_to_string(suri_file) + .map_err(|err| anyhow::format_err!( + "Failed to read SURI from file {:?}: {}", + suri_file, + err, + ))?, + (None, None) => return Err(anyhow::format_err!( + "One of options must be specified: '{}' or '{}'", + stringify!([<$chain_prefix _signer>]), + stringify!([<$chain_prefix _signer_file>]), + )), + }; + + let suri_password = match ( + self.[<$chain_prefix _signer_password>].as_ref(), + self.[<$chain_prefix _signer_password_file>].as_ref(), + ) { + (Some(suri_password), _) => Some(suri_password.to_owned()), + (None, Some(suri_password_file)) => std::fs::read_to_string(suri_password_file) + .map(Some) + .map_err(|err| anyhow::format_err!( + "Failed to read SURI password from file {:?}: {}", + suri_password_file, + err, + ))?, + _ => None, + }; + + use sp_core::crypto::Pair; + + AccountKeyPairOf::::from_string( + &suri, + suri_password.as_deref() + ).map_err(|e| anyhow::format_err!("{:?}", e)) + } + + /// Return transaction parameters. + #[allow(dead_code)] + pub fn transaction_params( + &self, + ) -> anyhow::Result>> { + Ok(TransactionParams { + mortality: self.transactions_mortality()?, + signer: self.to_keypair::()?, + }) + } + } + } + }; +} + +/// Create chain-specific set of configuration objects: connection parameters, +/// signing parameters and bridge initialization parameters. +#[macro_export] +macro_rules! 
declare_chain_cli_schema { + ($chain:ident, $chain_prefix:ident) => { + $crate::declare_chain_runtime_version_params_cli_schema!($chain, $chain_prefix); + $crate::declare_chain_connection_params_cli_schema!($chain, $chain_prefix); + $crate::declare_chain_signing_params_cli_schema!($chain, $chain_prefix); + }; +} + +declare_chain_cli_schema!(Source, source); +declare_chain_cli_schema!(Target, target); diff --git a/relays/lib-substrate-relay/src/cli/detect_equivocations.rs b/relays/lib-substrate-relay/src/cli/detect_equivocations.rs new file mode 100644 index 000000000..b98e41b2a --- /dev/null +++ b/relays/lib-substrate-relay/src/cli/detect_equivocations.rs @@ -0,0 +1,65 @@ +// Copyright 2019-2023 Parity Technologies (UK) Ltd. +// This file is part of Parity Bridges Common. + +// Parity Bridges Common is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity Bridges Common is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity Bridges Common. If not, see . + +//! Primitives for exposing the equivocation detection functionality in the CLI. + +use crate::{ + cli::{bridge::*, chain_schema::*, PrometheusParams}, + equivocation, + equivocation::SubstrateEquivocationDetectionPipeline, +}; + +use async_trait::async_trait; +use relay_substrate_client::ChainWithTransactions; +use structopt::StructOpt; + +/// Start equivocation detection loop. 
+#[derive(StructOpt)] +pub struct DetectEquivocationsParams { + #[structopt(flatten)] + source: SourceConnectionParams, + #[structopt(flatten)] + source_sign: SourceSigningParams, + #[structopt(flatten)] + target: TargetConnectionParams, + #[structopt(flatten)] + prometheus_params: PrometheusParams, +} + +/// Trait used for starting the equivocation detection loop between 2 chains. +#[async_trait] +pub trait EquivocationsDetector: RelayToRelayEquivocationDetectionCliBridge +where + Self::Source: ChainWithTransactions, +{ + /// Start the equivocation detection loop. + async fn start(data: DetectEquivocationsParams) -> anyhow::Result<()> { + let source_client = data.source.into_client::().await?; + Self::Equivocation::start_relay_guards( + &source_client, + source_client.can_start_version_guard(), + ) + .await?; + + equivocation::run::( + source_client, + data.target.into_client::().await?, + data.source_sign.transaction_params::()?, + data.prometheus_params.into_metrics_params()?, + ) + .await + } +} diff --git a/relays/lib-substrate-relay/src/cli/init_bridge.rs b/relays/lib-substrate-relay/src/cli/init_bridge.rs new file mode 100644 index 000000000..bf7c86437 --- /dev/null +++ b/relays/lib-substrate-relay/src/cli/init_bridge.rs @@ -0,0 +1,85 @@ +// Copyright 2019-2021 Parity Technologies (UK) Ltd. +// This file is part of Parity Bridges Common. + +// Parity Bridges Common is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity Bridges Common is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +// You should have received a copy of the GNU General Public License +// along with Parity Bridges Common. If not, see . + +//! Primitives for exposing the bridge initialization functionality in the CLI. + +use async_trait::async_trait; +use codec::Encode; + +use crate::{ + cli::{bridge::CliBridgeBase, chain_schema::*}, + finality_base::engine::Engine, +}; +use bp_runtime::Chain as ChainBase; +use relay_substrate_client::{AccountKeyPairOf, Chain, UnsignedTransaction}; +use sp_core::Pair; +use structopt::StructOpt; + +/// Bridge initialization params. +#[derive(StructOpt)] +pub struct InitBridgeParams { + #[structopt(flatten)] + source: SourceConnectionParams, + #[structopt(flatten)] + target: TargetConnectionParams, + #[structopt(flatten)] + target_sign: TargetSigningParams, + /// Generates all required data, but does not submit extrinsic + #[structopt(long)] + dry_run: bool, +} + +/// Trait used for bridge initializing. +#[async_trait] +pub trait BridgeInitializer: CliBridgeBase +where + ::AccountId: From< as Pair>::Public>, +{ + /// The finality engine used by the source chain. + type Engine: Engine; + + /// Get the encoded call to init the bridge. + fn encode_init_bridge( + init_data: >::InitializationData, + ) -> ::Call; + + /// Initialize the bridge. 
+ async fn init_bridge(data: InitBridgeParams) -> anyhow::Result<()> { + let source_client = data.source.into_client::().await?; + let target_client = data.target.into_client::().await?; + let target_sign = data.target_sign.to_keypair::()?; + let dry_run = data.dry_run; + + crate::finality::initialize::initialize::( + source_client, + target_client.clone(), + target_sign, + move |transaction_nonce, initialization_data| { + let call = Self::encode_init_bridge(initialization_data); + log::info!( + target: "bridge", + "Initialize bridge call encoded as hex string: {:?}", + format!("0x{}", hex::encode(call.encode())) + ); + Ok(UnsignedTransaction::new(call.into(), transaction_nonce)) + }, + dry_run, + ) + .await; + + Ok(()) + } +} diff --git a/relays/lib-substrate-relay/src/cli/mod.rs b/relays/lib-substrate-relay/src/cli/mod.rs new file mode 100644 index 000000000..0dd0d5474 --- /dev/null +++ b/relays/lib-substrate-relay/src/cli/mod.rs @@ -0,0 +1,192 @@ +// Copyright 2019-2021 Parity Technologies (UK) Ltd. +// This file is part of Parity Bridges Common. + +// Parity Bridges Common is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity Bridges Common is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity Bridges Common. If not, see . + +//! Deal with CLI args of substrate-to-substrate relay. 
+ +use codec::{Decode, Encode}; +use rbtag::BuildInfo; +use structopt::StructOpt; +use strum::{EnumString, VariantNames}; + +use bp_messages::LaneId; + +pub mod bridge; +pub mod chain_schema; +pub mod detect_equivocations; +pub mod init_bridge; +pub mod relay_headers; +pub mod relay_headers_and_messages; +pub mod relay_messages; +pub mod relay_parachains; + +/// The target that will be used when publishing logs related to this pallet. +pub const LOG_TARGET: &str = "bridge"; + +/// Lane id. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct HexLaneId(pub [u8; 4]); + +impl From for LaneId { + fn from(lane_id: HexLaneId) -> LaneId { + LaneId(lane_id.0) + } +} + +impl std::str::FromStr for HexLaneId { + type Err = hex::FromHexError; + + fn from_str(s: &str) -> Result { + let mut lane_id = [0u8; 4]; + hex::decode_to_slice(s, &mut lane_id)?; + Ok(HexLaneId(lane_id)) + } +} + +/// Nicer formatting for raw bytes vectors. +#[derive(Default, Encode, Decode, PartialEq, Eq)] +pub struct HexBytes(pub Vec); + +impl std::str::FromStr for HexBytes { + type Err = hex::FromHexError; + + fn from_str(s: &str) -> Result { + Ok(Self(hex::decode(s)?)) + } +} + +impl std::fmt::Debug for HexBytes { + fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(fmt, "0x{self}") + } +} + +impl std::fmt::Display for HexBytes { + fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(fmt, "{}", hex::encode(&self.0)) + } +} + +/// Prometheus metrics params. +#[derive(Clone, Debug, PartialEq, StructOpt)] +pub struct PrometheusParams { + /// Do not expose a Prometheus metric endpoint. + #[structopt(long)] + pub no_prometheus: bool, + /// Expose Prometheus endpoint at given interface. + #[structopt(long, default_value = "127.0.0.1")] + pub prometheus_host: String, + /// Expose Prometheus endpoint at given port. + #[structopt(long, default_value = "9616")] + pub prometheus_port: u16, +} + +/// Struct to get git commit info and build time. 
+#[derive(BuildInfo)] +struct SubstrateRelayBuildInfo; + +impl SubstrateRelayBuildInfo { + /// Get git commit in form ``. + pub fn get_git_commit() -> String { + // on gitlab we use images without git installed, so we can't use `rbtag` there + // locally we don't have `CI_*` env variables, so we can't rely on them + // => we are using `CI_*` env variables or else `rbtag` + let maybe_sha_from_ci = option_env!("CI_COMMIT_SHORT_SHA"); + maybe_sha_from_ci + .map(|short_sha| { + // we assume that on CI the copy is always clean + format!("{short_sha}-clean") + }) + .unwrap_or_else(|| SubstrateRelayBuildInfo.get_build_commit().into()) + } +} + +impl PrometheusParams { + /// Tries to convert CLI metrics params into metrics params, used by the relay. + pub fn into_metrics_params(self) -> anyhow::Result { + let metrics_address = if !self.no_prometheus { + Some(relay_utils::metrics::MetricsAddress { + host: self.prometheus_host, + port: self.prometheus_port, + }) + } else { + None + }; + + let relay_version = option_env!("CARGO_PKG_VERSION").unwrap_or("unknown"); + let relay_commit = SubstrateRelayBuildInfo::get_git_commit(); + relay_utils::metrics::MetricsParams::new( + metrics_address, + relay_version.into(), + relay_commit, + ) + .map_err(|e| anyhow::format_err!("{:?}", e)) + } +} + +/// Either explicit or maximal allowed value. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ExplicitOrMaximal { + /// User has explicitly specified argument value. + Explicit(V), + /// Maximal allowed value for this argument. + Maximal, +} + +impl std::str::FromStr for ExplicitOrMaximal +where + V::Err: std::fmt::Debug, +{ + type Err = String; + + fn from_str(s: &str) -> Result { + if s.to_lowercase() == "max" { + return Ok(ExplicitOrMaximal::Maximal) + } + + V::from_str(s) + .map(ExplicitOrMaximal::Explicit) + .map_err(|e| format!("Failed to parse '{e:?}'. 
Expected 'max' or explicit value")) + } +} + +#[doc = "Runtime version params."] +#[derive(StructOpt, Debug, PartialEq, Eq, Clone, Copy, EnumString, VariantNames)] +pub enum RuntimeVersionType { + /// Auto query version from chain + Auto, + /// Custom `spec_version` and `transaction_version` + Custom, + /// Read version from bundle dependencies directly. + Bundle, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn hex_bytes_display_matches_from_str_for_clap() { + // given + let hex = HexBytes(vec![1, 2, 3, 4]); + let display = format!("{hex}"); + + // when + let hex2: HexBytes = display.parse().unwrap(); + + // then + assert_eq!(hex.0, hex2.0); + } +} diff --git a/relays/lib-substrate-relay/src/cli/relay_headers.rs b/relays/lib-substrate-relay/src/cli/relay_headers.rs new file mode 100644 index 000000000..90558ed46 --- /dev/null +++ b/relays/lib-substrate-relay/src/cli/relay_headers.rs @@ -0,0 +1,76 @@ +// Copyright 2019-2021 Parity Technologies (UK) Ltd. +// This file is part of Parity Bridges Common. + +// Parity Bridges Common is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity Bridges Common is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity Bridges Common. If not, see . + +//! Primitives for exposing the headers relaying functionality in the CLI. 
+ +use async_trait::async_trait; +use structopt::StructOpt; + +use relay_utils::metrics::{GlobalMetrics, StandaloneMetric}; + +use crate::{ + cli::{bridge::*, chain_schema::*, PrometheusParams}, + finality::SubstrateFinalitySyncPipeline, +}; + +/// Chain headers relaying params. +#[derive(StructOpt)] +pub struct RelayHeadersParams { + /// If passed, only mandatory headers (headers that are changing the GRANDPA authorities set) + /// are relayed. + #[structopt(long)] + only_mandatory_headers: bool, + #[structopt(flatten)] + source: SourceConnectionParams, + #[structopt(flatten)] + target: TargetConnectionParams, + #[structopt(flatten)] + target_sign: TargetSigningParams, + #[structopt(flatten)] + prometheus_params: PrometheusParams, +} + +/// Trait used for relaying headers between 2 chains. +#[async_trait] +pub trait HeadersRelayer: RelayToRelayHeadersCliBridge { + /// Relay headers. + async fn relay_headers(data: RelayHeadersParams) -> anyhow::Result<()> { + let source_client = data.source.into_client::().await?; + let target_client = data.target.into_client::().await?; + let target_transactions_mortality = data.target_sign.target_transactions_mortality; + let target_sign = data.target_sign.to_keypair::()?; + + let metrics_params: relay_utils::metrics::MetricsParams = + data.prometheus_params.into_metrics_params()?; + GlobalMetrics::new()?.register_and_spawn(&metrics_params.registry)?; + + let target_transactions_params = crate::TransactionParams { + signer: target_sign, + mortality: target_transactions_mortality, + }; + Self::Finality::start_relay_guards(&target_client, target_client.can_start_version_guard()) + .await?; + + crate::finality::run::( + source_client, + target_client, + data.only_mandatory_headers, + target_transactions_params, + metrics_params, + ) + .await + } +} diff --git a/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/mod.rs b/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/mod.rs new file mode 100644 index 
000000000..d404f714b --- /dev/null +++ b/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/mod.rs @@ -0,0 +1,484 @@ +// Copyright 2019-2022 Parity Technologies (UK) Ltd. +// This file is part of Parity Bridges Common. + +// Parity Bridges Common is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity Bridges Common is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity Bridges Common. If not, see . + +//! Complex 2-ways headers+messages relays support. +//! +//! To add new complex relay between `ChainA` and `ChainB`, you must: +//! +//! 1) ensure that there's a `declare_chain_cli_schema!(...)` for both chains. +//! 2) add `declare_chain_to_chain_bridge_schema!(...)` or +//! `declare_chain_to_parachain_bridge_schema` for the bridge. +//! 3) declare a new struct for the added bridge and implement the `Full2WayBridge` trait for it. 
+ +#[macro_use] +pub mod parachain_to_parachain; +#[macro_use] +pub mod relay_to_relay; +#[macro_use] +pub mod relay_to_parachain; + +use async_trait::async_trait; +use std::{marker::PhantomData, sync::Arc}; +use structopt::StructOpt; + +use futures::{FutureExt, TryFutureExt}; + +use crate::{ + cli::{bridge::MessagesCliBridge, HexLaneId, PrometheusParams}, + messages_lane::{MessagesRelayLimits, MessagesRelayParams}, + on_demand::OnDemandRelay, + TaggedAccount, TransactionParams, +}; +use bp_messages::LaneId; +use bp_runtime::BalanceOf; +use relay_substrate_client::{ + AccountIdOf, AccountKeyPairOf, Chain, ChainWithBalances, ChainWithMessages, + ChainWithRuntimeVersion, ChainWithTransactions, Client, +}; +use relay_utils::metrics::MetricsParams; +use sp_core::Pair; + +/// Parameters that have the same names across all bridges. +#[derive(Debug, PartialEq, StructOpt)] +pub struct HeadersAndMessagesSharedParams { + /// Hex-encoded lane identifiers that should be served by the complex relay. + #[structopt(long, default_value = "00000000")] + pub lane: Vec, + /// If passed, only mandatory headers (headers that are changing the GRANDPA authorities set) + /// are relayed. + #[structopt(long)] + pub only_mandatory_headers: bool, + #[structopt(flatten)] + /// Prometheus metrics params. + pub prometheus_params: PrometheusParams, +} + +/// Bridge parameters, shared by all bridge types. +pub struct Full2WayBridgeCommonParams< + Left: ChainWithTransactions + ChainWithRuntimeVersion, + Right: ChainWithTransactions + ChainWithRuntimeVersion, +> { + /// Shared parameters. + pub shared: HeadersAndMessagesSharedParams, + /// Parameters of the left chain. + pub left: BridgeEndCommonParams, + /// Parameters of the right chain. + pub right: BridgeEndCommonParams, + + /// Common metric parameters. 
+ pub metrics_params: MetricsParams, +} + +impl< + Left: ChainWithTransactions + ChainWithRuntimeVersion, + Right: ChainWithTransactions + ChainWithRuntimeVersion, + > Full2WayBridgeCommonParams +{ + /// Creates new bridge parameters from its components. + pub fn new>( + shared: HeadersAndMessagesSharedParams, + left: BridgeEndCommonParams, + right: BridgeEndCommonParams, + ) -> anyhow::Result { + // Create metrics registry. + let metrics_params = shared.prometheus_params.clone().into_metrics_params()?; + let metrics_params = relay_utils::relay_metrics(metrics_params).into_params(); + + Ok(Self { shared, left, right, metrics_params }) + } +} + +/// Parameters that are associated with one side of the bridge. +pub struct BridgeEndCommonParams { + /// Chain client. + pub client: Client, + /// Params used for sending transactions to the chain. + pub tx_params: TransactionParams>, + /// Accounts, which balances are exposed as metrics by the relay process. + pub accounts: Vec>>, +} + +/// All data of the bidirectional complex relay. +pub struct FullBridge< + 'a, + Source: ChainWithTransactions + ChainWithRuntimeVersion, + Target: ChainWithTransactions + ChainWithRuntimeVersion, + Bridge: MessagesCliBridge, +> { + source: &'a mut BridgeEndCommonParams, + target: &'a mut BridgeEndCommonParams, + metrics_params: &'a MetricsParams, + _phantom_data: PhantomData, +} + +impl< + 'a, + Source: ChainWithTransactions + ChainWithRuntimeVersion, + Target: ChainWithTransactions + ChainWithRuntimeVersion, + Bridge: MessagesCliBridge, + > FullBridge<'a, Source, Target, Bridge> +where + AccountIdOf: From< as Pair>::Public>, + AccountIdOf: From< as Pair>::Public>, + BalanceOf: TryFrom> + Into, +{ + /// Construct complex relay given it components. 
+ fn new( + source: &'a mut BridgeEndCommonParams, + target: &'a mut BridgeEndCommonParams, + metrics_params: &'a MetricsParams, + ) -> Self { + Self { source, target, metrics_params, _phantom_data: Default::default() } + } + + /// Returns message relay parameters. + fn messages_relay_params( + &self, + source_to_target_headers_relay: Arc>, + target_to_source_headers_relay: Arc>, + lane_id: LaneId, + maybe_limits: Option, + ) -> MessagesRelayParams { + MessagesRelayParams { + source_client: self.source.client.clone(), + source_transaction_params: self.source.tx_params.clone(), + target_client: self.target.client.clone(), + target_transaction_params: self.target.tx_params.clone(), + source_to_target_headers_relay: Some(source_to_target_headers_relay), + target_to_source_headers_relay: Some(target_to_source_headers_relay), + lane_id, + limits: maybe_limits, + metrics_params: self.metrics_params.clone().disable(), + } + } +} + +/// Base portion of the bidirectional complex relay. +/// +/// This main purpose of extracting this trait is that in different relays the implementation +/// of `start_on_demand_headers_relayers` method will be different. But the number of +/// implementations is limited to relay <> relay, parachain <> relay and parachain <> parachain. +/// This trait allows us to reuse these implementations in different bridges. +#[async_trait] +pub trait Full2WayBridgeBase: Sized + Send + Sync { + /// The CLI params for the bridge. + type Params; + /// The left relay chain. + type Left: ChainWithTransactions + ChainWithRuntimeVersion; + /// The right destination chain (it can be a relay or a parachain). + type Right: ChainWithTransactions + ChainWithRuntimeVersion; + + /// Reference to common relay parameters. + fn common(&self) -> &Full2WayBridgeCommonParams; + + /// Mutable reference to common relay parameters. + fn mut_common(&mut self) -> &mut Full2WayBridgeCommonParams; + + /// Start on-demand headers relays. 
+ async fn start_on_demand_headers_relayers( + &mut self, + ) -> anyhow::Result<( + Arc>, + Arc>, + )>; +} + +/// Bidirectional complex relay. +#[async_trait] +pub trait Full2WayBridge: Sized + Sync +where + AccountIdOf: From< as Pair>::Public>, + AccountIdOf: From< as Pair>::Public>, + BalanceOf: TryFrom> + Into, + BalanceOf: TryFrom> + Into, +{ + /// Base portion of the bidirectional complex relay. + type Base: Full2WayBridgeBase; + + /// The left relay chain. + type Left: ChainWithTransactions + + ChainWithBalances + + ChainWithMessages + + ChainWithRuntimeVersion; + /// The right relay chain. + type Right: ChainWithTransactions + + ChainWithBalances + + ChainWithMessages + + ChainWithRuntimeVersion; + + /// Left to Right bridge. + type L2R: MessagesCliBridge; + /// Right to Left bridge + type R2L: MessagesCliBridge; + + /// Construct new bridge. + fn new(params: ::Params) -> anyhow::Result; + + /// Reference to the base relay portion. + fn base(&self) -> &Self::Base; + + /// Mutable reference to the base relay portion. + fn mut_base(&mut self) -> &mut Self::Base; + + /// Creates and returns Left to Right complex relay. + fn left_to_right(&mut self) -> FullBridge { + let common = self.mut_base().mut_common(); + FullBridge::<_, _, Self::L2R>::new( + &mut common.left, + &mut common.right, + &common.metrics_params, + ) + } + + /// Creates and returns Right to Left complex relay. + fn right_to_left(&mut self) -> FullBridge { + let common = self.mut_base().mut_common(); + FullBridge::<_, _, Self::R2L>::new( + &mut common.right, + &mut common.left, + &common.metrics_params, + ) + } + + /// Start complex relay. + async fn run(&mut self) -> anyhow::Result<()> { + // Register standalone metrics. 
+ { + let common = self.mut_base().mut_common(); + common.left.accounts.push(TaggedAccount::Messages { + id: common.left.tx_params.signer.public().into(), + bridged_chain: Self::Right::NAME.to_string(), + }); + common.right.accounts.push(TaggedAccount::Messages { + id: common.right.tx_params.signer.public().into(), + bridged_chain: Self::Left::NAME.to_string(), + }); + } + + // start on-demand header relays + let (left_to_right_on_demand_headers, right_to_left_on_demand_headers) = + self.mut_base().start_on_demand_headers_relayers().await?; + + // add balance-related metrics + let lanes = self + .base() + .common() + .shared + .lane + .iter() + .cloned() + .map(Into::into) + .collect::>(); + { + let common = self.mut_base().mut_common(); + crate::messages_metrics::add_relay_balances_metrics::<_, Self::Right>( + common.left.client.clone(), + &common.metrics_params, + &common.left.accounts, + &lanes, + ) + .await?; + crate::messages_metrics::add_relay_balances_metrics::<_, Self::Left>( + common.right.client.clone(), + &common.metrics_params, + &common.right.accounts, + &lanes, + ) + .await?; + } + + // Need 2x capacity since we consider both directions for each lane + let mut message_relays = Vec::with_capacity(lanes.len() * 2); + for lane in lanes { + let left_to_right_messages = crate::messages_lane::run::< + ::MessagesLane, + >(self.left_to_right().messages_relay_params( + left_to_right_on_demand_headers.clone(), + right_to_left_on_demand_headers.clone(), + lane, + Self::L2R::maybe_messages_limits(), + )) + .map_err(|e| anyhow::format_err!("{}", e)) + .boxed(); + message_relays.push(left_to_right_messages); + + let right_to_left_messages = crate::messages_lane::run::< + ::MessagesLane, + >(self.right_to_left().messages_relay_params( + right_to_left_on_demand_headers.clone(), + left_to_right_on_demand_headers.clone(), + lane, + Self::R2L::maybe_messages_limits(), + )) + .map_err(|e| anyhow::format_err!("{}", e)) + .boxed(); + 
message_relays.push(right_to_left_messages); + } + + relay_utils::relay_metrics(self.base().common().metrics_params.clone()) + .expose() + .await + .map_err(|e| anyhow::format_err!("{}", e))?; + + futures::future::select_all(message_relays).await.0 + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{cli::chain_schema::RuntimeVersionType, declare_chain_cli_schema}; + + use relay_substrate_client::{ChainRuntimeVersion, Parachain, SimpleRuntimeVersion}; + + #[test] + // We need `#[allow(dead_code)]` because some of the methods generated by the macros + // are not used. + #[allow(dead_code)] + fn should_parse_parachain_to_parachain_options() { + // Chains. + declare_chain_cli_schema!(Kusama, kusama); + declare_chain_cli_schema!(BridgeHubKusama, bridge_hub_kusama); + declare_chain_cli_schema!(Polkadot, polkadot); + declare_chain_cli_schema!(BridgeHubPolkadot, bridge_hub_polkadot); + // Means to override signers of different layer transactions. + declare_chain_cli_schema!( + KusamaHeadersToBridgeHubPolkadot, + kusama_headers_to_bridge_hub_polkadot + ); + declare_chain_cli_schema!( + KusamaParachainsToBridgeHubPolkadot, + kusama_parachains_to_bridge_hub_polkadot + ); + declare_chain_cli_schema!( + PolkadotHeadersToBridgeHubKusama, + polkadot_headers_to_bridge_hub_kusama + ); + declare_chain_cli_schema!( + PolkadotParachainsToBridgeHubKusama, + polkadot_parachains_to_bridge_hub_kusama + ); + // Bridges. 
+ declare_parachain_to_parachain_bridge_schema!( + BridgeHubKusama, + Kusama, + BridgeHubPolkadot, + Polkadot + ); + + let res = BridgeHubKusamaBridgeHubPolkadotHeadersAndMessages::from_iter(vec![ + "bridge-hub-kusama-bridge-hub-polkadot-headers-and-messages", + "--bridge-hub-kusama-host", + "bridge-hub-kusama-node-collator1", + "--bridge-hub-kusama-port", + "9944", + "--bridge-hub-kusama-signer", + "//Iden", + "--bridge-hub-kusama-transactions-mortality", + "64", + "--kusama-host", + "kusama-alice", + "--kusama-port", + "9944", + "--bridge-hub-polkadot-host", + "bridge-hub-polkadot-collator1", + "--bridge-hub-polkadot-port", + "9944", + "--bridge-hub-polkadot-signer", + "//George", + "--bridge-hub-polkadot-transactions-mortality", + "64", + "--polkadot-host", + "polkadot-alice", + "--polkadot-port", + "9944", + "--lane", + "00000000", + "--prometheus-host", + "0.0.0.0", + ]); + + // then + assert_eq!( + res, + BridgeHubKusamaBridgeHubPolkadotHeadersAndMessages { + shared: HeadersAndMessagesSharedParams { + lane: vec![HexLaneId([0x00, 0x00, 0x00, 0x00])], + only_mandatory_headers: false, + prometheus_params: PrometheusParams { + no_prometheus: false, + prometheus_host: "0.0.0.0".into(), + prometheus_port: 9616, + }, + }, + left: BridgeHubKusamaConnectionParams { + bridge_hub_kusama_host: "bridge-hub-kusama-node-collator1".into(), + bridge_hub_kusama_port: 9944, + bridge_hub_kusama_secure: false, + bridge_hub_kusama_runtime_version: BridgeHubKusamaRuntimeVersionParams { + bridge_hub_kusama_version_mode: RuntimeVersionType::Bundle, + bridge_hub_kusama_spec_version: None, + bridge_hub_kusama_transaction_version: None, + }, + }, + left_sign: BridgeHubKusamaSigningParams { + bridge_hub_kusama_signer: Some("//Iden".into()), + bridge_hub_kusama_signer_password: None, + bridge_hub_kusama_signer_file: None, + bridge_hub_kusama_signer_password_file: None, + bridge_hub_kusama_transactions_mortality: Some(64), + }, + left_relay: KusamaConnectionParams { + kusama_host: 
"kusama-alice".into(), + kusama_port: 9944, + kusama_secure: false, + kusama_runtime_version: KusamaRuntimeVersionParams { + kusama_version_mode: RuntimeVersionType::Bundle, + kusama_spec_version: None, + kusama_transaction_version: None, + }, + }, + right: BridgeHubPolkadotConnectionParams { + bridge_hub_polkadot_host: "bridge-hub-polkadot-collator1".into(), + bridge_hub_polkadot_port: 9944, + bridge_hub_polkadot_secure: false, + bridge_hub_polkadot_runtime_version: BridgeHubPolkadotRuntimeVersionParams { + bridge_hub_polkadot_version_mode: RuntimeVersionType::Bundle, + bridge_hub_polkadot_spec_version: None, + bridge_hub_polkadot_transaction_version: None, + }, + }, + right_sign: BridgeHubPolkadotSigningParams { + bridge_hub_polkadot_signer: Some("//George".into()), + bridge_hub_polkadot_signer_password: None, + bridge_hub_polkadot_signer_file: None, + bridge_hub_polkadot_signer_password_file: None, + bridge_hub_polkadot_transactions_mortality: Some(64), + }, + right_relay: PolkadotConnectionParams { + polkadot_host: "polkadot-alice".into(), + polkadot_port: 9944, + polkadot_secure: false, + polkadot_runtime_version: PolkadotRuntimeVersionParams { + polkadot_version_mode: RuntimeVersionType::Bundle, + polkadot_spec_version: None, + polkadot_transaction_version: None, + }, + }, + } + ); + } +} diff --git a/relays/bin-substrate/src/cli/relay_headers_and_messages/parachain_to_parachain.rs b/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/parachain_to_parachain.rs similarity index 82% rename from relays/bin-substrate/src/cli/relay_headers_and_messages/parachain_to_parachain.rs rename to relays/lib-substrate-relay/src/cli/relay_headers_and_messages/parachain_to_parachain.rs index 32ba6b3dd..76accfa29 100644 --- a/relays/bin-substrate/src/cli/relay_headers_and_messages/parachain_to_parachain.rs +++ b/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/parachain_to_parachain.rs @@ -14,26 +14,28 @@ // You should have received a copy of the GNU 
General Public License // along with Parity Bridges Common. If not, see . +//! Parachain to parachain relayer CLI primitives. + use async_trait::async_trait; use std::sync::Arc; -use crate::cli::{ - bridge::{CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge}, - relay_headers_and_messages::{Full2WayBridgeBase, Full2WayBridgeCommonParams}, - CliChain, +use crate::{ + cli::{ + bridge::{CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge}, + relay_headers_and_messages::{Full2WayBridgeBase, Full2WayBridgeCommonParams}, + }, + finality::SubstrateFinalitySyncPipeline, + on_demand::{ + headers::OnDemandHeadersRelay, parachains::OnDemandParachainsRelay, OnDemandRelay, + }, }; use bp_polkadot_core::parachains::ParaHash; use pallet_bridge_parachains::{RelayBlockHash, RelayBlockHasher, RelayBlockNumber}; use relay_substrate_client::{ - AccountIdOf, AccountKeyPairOf, Chain, ChainWithTransactions, Client, Parachain, + AccountIdOf, AccountKeyPairOf, Chain, ChainWithRuntimeVersion, ChainWithTransactions, Client, + Parachain, }; use sp_core::Pair; -use substrate_relay_helper::{ - finality::SubstrateFinalitySyncPipeline, - on_demand::{ - headers::OnDemandHeadersRelay, parachains::OnDemandParachainsRelay, OnDemandRelay, - }, -}; /// A base relay between two parachain from different consensus systems. /// @@ -55,6 +57,8 @@ pub struct ParachainToParachainBridge< pub right_relay: Client<::SourceRelay>, } +/// Create set of configuration objects specific to parachain-to-parachain relayer. +#[macro_export] macro_rules! declare_parachain_to_parachain_bridge_schema { // left-parachain, relay-chain-of-left-parachain, right-parachain, relay-chain-of-right-parachain ($left_parachain:ident, $left_chain:ident, $right_parachain:ident, $right_chain:ident) => { @@ -87,20 +91,20 @@ macro_rules! 
declare_parachain_to_parachain_bridge_schema { impl [<$left_parachain $right_parachain HeadersAndMessages>] { async fn into_bridge< - Left: ChainWithTransactions + CliChain + Parachain, - LeftRelay: CliChain, - Right: ChainWithTransactions + CliChain + Parachain, - RightRelay: CliChain, - L2R: CliBridgeBase + Left: ChainWithTransactions + ChainWithRuntimeVersion + Parachain, + LeftRelay: ChainWithRuntimeVersion, + Right: ChainWithTransactions + ChainWithRuntimeVersion + Parachain, + RightRelay: ChainWithRuntimeVersion, + L2R: $crate::cli::bridge::CliBridgeBase + MessagesCliBridge - + ParachainToRelayHeadersCliBridge, - R2L: CliBridgeBase + + $crate::cli::bridge::ParachainToRelayHeadersCliBridge, + R2L: $crate::cli::bridge::CliBridgeBase + MessagesCliBridge - + ParachainToRelayHeadersCliBridge, + + $crate::cli::bridge::ParachainToRelayHeadersCliBridge, >( self, - ) -> anyhow::Result> { - Ok(ParachainToParachainBridge { + ) -> anyhow::Result<$crate::cli::relay_headers_and_messages::parachain_to_parachain::ParachainToParachainBridge> { + Ok($crate::cli::relay_headers_and_messages::parachain_to_parachain::ParachainToParachainBridge { common: Full2WayBridgeCommonParams::new::( self.shared, BridgeEndCommonParams { @@ -125,12 +129,12 @@ macro_rules! 
declare_parachain_to_parachain_bridge_schema { #[async_trait] impl< - Left: Chain + ChainWithTransactions + CliChain + Parachain, - Right: Chain + ChainWithTransactions + CliChain + Parachain, + Left: Chain + ChainWithTransactions + ChainWithRuntimeVersion + Parachain, + Right: Chain + ChainWithTransactions + ChainWithRuntimeVersion + Parachain, LeftRelay: Chain - + CliChain, + + ChainWithRuntimeVersion, RightRelay: Chain - + CliChain, + + ChainWithRuntimeVersion, L2R: CliBridgeBase + MessagesCliBridge + ParachainToRelayHeadersCliBridge, diff --git a/relays/bin-substrate/src/cli/relay_headers_and_messages/relay_to_parachain.rs b/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/relay_to_parachain.rs similarity index 88% rename from relays/bin-substrate/src/cli/relay_headers_and_messages/relay_to_parachain.rs rename to relays/lib-substrate-relay/src/cli/relay_headers_and_messages/relay_to_parachain.rs index fd885b6ea..b75ac3e60 100644 --- a/relays/bin-substrate/src/cli/relay_headers_and_messages/relay_to_parachain.rs +++ b/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/relay_to_parachain.rs @@ -14,29 +14,31 @@ // You should have received a copy of the GNU General Public License // along with Parity Bridges Common. If not, see . +//! Relay chain to parachain relayer CLI primitives. 
+ use async_trait::async_trait; use std::sync::Arc; -use crate::cli::{ - bridge::{ - CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge, - RelayToRelayHeadersCliBridge, +use crate::{ + cli::{ + bridge::{ + CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge, + RelayToRelayHeadersCliBridge, + }, + relay_headers_and_messages::{Full2WayBridgeBase, Full2WayBridgeCommonParams}, + }, + finality::SubstrateFinalitySyncPipeline, + on_demand::{ + headers::OnDemandHeadersRelay, parachains::OnDemandParachainsRelay, OnDemandRelay, }, - relay_headers_and_messages::{Full2WayBridgeBase, Full2WayBridgeCommonParams}, - CliChain, }; use bp_polkadot_core::parachains::ParaHash; use pallet_bridge_parachains::{RelayBlockHash, RelayBlockHasher, RelayBlockNumber}; use relay_substrate_client::{ - AccountIdOf, AccountKeyPairOf, Chain, ChainWithTransactions, Client, Parachain, + AccountIdOf, AccountKeyPairOf, Chain, ChainWithRuntimeVersion, ChainWithTransactions, Client, + Parachain, }; use sp_core::Pair; -use substrate_relay_helper::{ - finality::SubstrateFinalitySyncPipeline, - on_demand::{ - headers::OnDemandHeadersRelay, parachains::OnDemandParachainsRelay, OnDemandRelay, - }, -}; /// A base relay between standalone (relay) chain and a parachain from another consensus system. /// @@ -55,6 +57,8 @@ pub struct RelayToParachainBridge< pub right_relay: Client<::SourceRelay>, } +/// Create set of configuration objects specific to relay-to-parachain relayer. +#[macro_export] macro_rules! declare_relay_to_parachain_bridge_schema { // chain, parachain, relay-chain-of-parachain ($left_chain:ident, $right_parachain:ident, $right_chain:ident) => { @@ -84,9 +88,9 @@ macro_rules! 
declare_relay_to_parachain_bridge_schema { impl [<$left_chain $right_parachain HeadersAndMessages>] { async fn into_bridge< - Left: ChainWithTransactions + CliChain, - Right: ChainWithTransactions + CliChain + Parachain, - RightRelay: CliChain, + Left: ChainWithTransactions + ChainWithRuntimeVersion, + Right: ChainWithTransactions + ChainWithRuntimeVersion + Parachain, + RightRelay: ChainWithRuntimeVersion, L2R: CliBridgeBase + MessagesCliBridge + RelayToRelayHeadersCliBridge, R2L: CliBridgeBase + MessagesCliBridge @@ -118,10 +122,10 @@ macro_rules! declare_relay_to_parachain_bridge_schema { #[async_trait] impl< - Left: ChainWithTransactions + CliChain, - Right: Chain + ChainWithTransactions + CliChain + Parachain, + Left: ChainWithTransactions + ChainWithRuntimeVersion, + Right: Chain + ChainWithTransactions + ChainWithRuntimeVersion + Parachain, RightRelay: Chain - + CliChain, + + ChainWithRuntimeVersion, L2R: CliBridgeBase + MessagesCliBridge + RelayToRelayHeadersCliBridge, diff --git a/relays/bin-substrate/src/cli/relay_headers_and_messages/relay_to_relay.rs b/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/relay_to_relay.rs similarity index 91% rename from relays/bin-substrate/src/cli/relay_headers_and_messages/relay_to_relay.rs rename to relays/lib-substrate-relay/src/cli/relay_headers_and_messages/relay_to_relay.rs index 11425035d..b397ff50a 100644 --- a/relays/bin-substrate/src/cli/relay_headers_and_messages/relay_to_relay.rs +++ b/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/relay_to_relay.rs @@ -18,20 +18,23 @@ // future #![allow(unused_macros)] +//! Relay chain to Relay chain relayer CLI primitives. 
+ use async_trait::async_trait; use std::sync::Arc; -use crate::cli::{ - bridge::{CliBridgeBase, MessagesCliBridge, RelayToRelayHeadersCliBridge}, - relay_headers_and_messages::{Full2WayBridgeBase, Full2WayBridgeCommonParams}, - CliChain, -}; -use relay_substrate_client::{AccountIdOf, AccountKeyPairOf, ChainWithTransactions}; -use sp_core::Pair; -use substrate_relay_helper::{ +use crate::{ + cli::{ + bridge::{CliBridgeBase, MessagesCliBridge, RelayToRelayHeadersCliBridge}, + relay_headers_and_messages::{Full2WayBridgeBase, Full2WayBridgeCommonParams}, + }, finality::SubstrateFinalitySyncPipeline, on_demand::{headers::OnDemandHeadersRelay, OnDemandRelay}, }; +use relay_substrate_client::{ + AccountIdOf, AccountKeyPairOf, ChainWithRuntimeVersion, ChainWithTransactions, +}; +use sp_core::Pair; /// A base relay between two standalone (relay) chains. /// @@ -45,6 +48,7 @@ pub struct RelayToRelayBridge< Full2WayBridgeCommonParams<::Target, ::Target>, } +/// Create set of configuration objects specific to relay-to-relay relayer. macro_rules! declare_relay_to_relay_bridge_schema { ($left_chain:ident, $right_chain:ident) => { bp_runtime::paste::item! { @@ -101,8 +105,8 @@ macro_rules! declare_relay_to_relay_bridge_schema { #[async_trait] impl< - Left: ChainWithTransactions + CliChain, - Right: ChainWithTransactions + CliChain, + Left: ChainWithTransactions + ChainWithRuntimeVersion, + Right: ChainWithTransactions + ChainWithRuntimeVersion, L2R: CliBridgeBase + MessagesCliBridge + RelayToRelayHeadersCliBridge, diff --git a/relays/lib-substrate-relay/src/cli/relay_messages.rs b/relays/lib-substrate-relay/src/cli/relay_messages.rs new file mode 100644 index 000000000..b672bd4f9 --- /dev/null +++ b/relays/lib-substrate-relay/src/cli/relay_messages.rs @@ -0,0 +1,89 @@ +// Copyright 2019-2021 Parity Technologies (UK) Ltd. +// This file is part of Parity Bridges Common. 
+ +// Parity Bridges Common is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity Bridges Common is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity Bridges Common. If not, see . + +//! Primitives for exposing the messages relaying functionality in the CLI. + +use crate::{ + cli::{bridge::*, chain_schema::*, HexLaneId, PrometheusParams}, + messages_lane::MessagesRelayParams, + TransactionParams, +}; + +use async_trait::async_trait; +use sp_core::Pair; +use structopt::StructOpt; + +use relay_substrate_client::{ + AccountIdOf, AccountKeyPairOf, BalanceOf, ChainWithRuntimeVersion, ChainWithTransactions, +}; + +/// Messages relaying params. +#[derive(StructOpt)] +pub struct RelayMessagesParams { + /// Hex-encoded lane id that should be served by the relay. Defaults to `00000000`. + #[structopt(long, default_value = "00000000")] + lane: HexLaneId, + #[structopt(flatten)] + source: SourceConnectionParams, + #[structopt(flatten)] + source_sign: SourceSigningParams, + #[structopt(flatten)] + target: TargetConnectionParams, + #[structopt(flatten)] + target_sign: TargetSigningParams, + #[structopt(flatten)] + prometheus_params: PrometheusParams, +} + +/// Trait used for relaying messages between 2 chains. +#[async_trait] +pub trait MessagesRelayer: MessagesCliBridge +where + Self::Source: ChainWithTransactions + ChainWithRuntimeVersion, + AccountIdOf: From< as Pair>::Public>, + AccountIdOf: From< as Pair>::Public>, + BalanceOf: TryFrom>, +{ + /// Start relaying messages. 
+ async fn relay_messages(data: RelayMessagesParams) -> anyhow::Result<()> { + let source_client = data.source.into_client::().await?; + let source_sign = data.source_sign.to_keypair::()?; + let source_transactions_mortality = data.source_sign.transactions_mortality()?; + let target_client = data.target.into_client::().await?; + let target_sign = data.target_sign.to_keypair::()?; + let target_transactions_mortality = data.target_sign.transactions_mortality()?; + + crate::messages_lane::run::(MessagesRelayParams { + source_client, + source_transaction_params: TransactionParams { + signer: source_sign, + mortality: source_transactions_mortality, + }, + target_client, + target_transaction_params: TransactionParams { + signer: target_sign, + mortality: target_transactions_mortality, + }, + source_to_target_headers_relay: None, + target_to_source_headers_relay: None, + lane_id: data.lane.into(), + limits: Self::maybe_messages_limits(), + metrics_params: data.prometheus_params.into_metrics_params()?, + }) + .await + .map_err(|e| anyhow::format_err!("{}", e)) + } +} diff --git a/relays/lib-substrate-relay/src/cli/relay_parachains.rs b/relays/lib-substrate-relay/src/cli/relay_parachains.rs new file mode 100644 index 000000000..e5a523494 --- /dev/null +++ b/relays/lib-substrate-relay/src/cli/relay_parachains.rs @@ -0,0 +1,91 @@ +// Copyright 2019-2021 Parity Technologies (UK) Ltd. +// This file is part of Parity Bridges Common. + +// Parity Bridges Common is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity Bridges Common is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +// You should have received a copy of the GNU General Public License +// along with Parity Bridges Common. If not, see . + +//! Primitives for exposing the parachains finality relaying functionality in the CLI. + +use async_std::sync::Mutex; +use async_trait::async_trait; +use parachains_relay::parachains_loop::{AvailableHeader, SourceClient, TargetClient}; +use relay_substrate_client::Parachain; +use relay_utils::metrics::{GlobalMetrics, StandaloneMetric}; +use std::sync::Arc; +use structopt::StructOpt; + +use crate::{ + cli::{ + bridge::{CliBridgeBase, ParachainToRelayHeadersCliBridge}, + chain_schema::*, + PrometheusParams, + }, + parachains::{source::ParachainsSource, target::ParachainsTarget, ParachainsPipelineAdapter}, + TransactionParams, +}; + +/// Parachains heads relaying params. +#[derive(StructOpt)] +pub struct RelayParachainsParams { + #[structopt(flatten)] + source: SourceConnectionParams, + #[structopt(flatten)] + target: TargetConnectionParams, + #[structopt(flatten)] + target_sign: TargetSigningParams, + #[structopt(flatten)] + prometheus_params: PrometheusParams, +} + +/// Trait used for relaying parachains finality between 2 chains. +#[async_trait] +pub trait ParachainsRelayer: ParachainToRelayHeadersCliBridge +where + ParachainsSource: + SourceClient>, + ParachainsTarget: + TargetClient>, + ::Source: Parachain, +{ + /// Start relaying parachains finality. 
+ async fn relay_parachains(data: RelayParachainsParams) -> anyhow::Result<()> { + let source_client = data.source.into_client::().await?; + let source_client = ParachainsSource::::new( + source_client, + Arc::new(Mutex::new(AvailableHeader::Missing)), + ); + + let target_transaction_params = TransactionParams { + signer: data.target_sign.to_keypair::()?, + mortality: data.target_sign.target_transactions_mortality, + }; + let target_client = data.target.into_client::().await?; + let target_client = ParachainsTarget::::new( + target_client.clone(), + target_transaction_params, + ); + + let metrics_params: relay_utils::metrics::MetricsParams = + data.prometheus_params.into_metrics_params()?; + GlobalMetrics::new()?.register_and_spawn(&metrics_params.registry)?; + + parachains_relay::parachains_loop::run( + source_client, + target_client, + metrics_params, + futures::future::pending(), + ) + .await + .map_err(|e| anyhow::format_err!("{}", e)) + } +} diff --git a/relays/lib-substrate-relay/src/lib.rs b/relays/lib-substrate-relay/src/lib.rs index 6e6203866..b90453ae0 100644 --- a/relays/lib-substrate-relay/src/lib.rs +++ b/relays/lib-substrate-relay/src/lib.rs @@ -22,6 +22,7 @@ use relay_substrate_client::{Chain, ChainWithUtilityPallet, UtilityPallet}; use std::marker::PhantomData; +pub mod cli; pub mod equivocation; pub mod error; pub mod finality; -- GitLab From 28c459be266a031606b6b06e5d0681945a7a7890 Mon Sep 17 00:00:00 2001 From: Serban Iorga Date: Mon, 25 Mar 2024 08:50:57 +0100 Subject: [PATCH 07/39] Backport changes from `polkadot-sdk/master` (#2887) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add two new zombienet tests for bridges (manual run) (#3072) extracted useful code from #2982 This PR: - adds test 2 for Rococo <> Westend bridge: checks that relayer doesn't submit any extra headers while there are no any messages; - adds test 3 for Rococo <> Westend bridge: checks that relayer doesn't submit any extra 
headers when there are messages; - fixes most of comments from #2439 (like: log names, ability to run specify test number when calling `run-tests.sh`). Right now of all our tests, only test 2 is working (until BHs will be upgraded to use async backing), so you can test it with `./bridges/zombienet/run-tests.sh --test 2` locally. (cherry picked from commit 2e6067d768a84e780258aa4580116f7180e24290) * [cumulus] Improved check for sane bridge fees calculations (#3175) - [x] change constants when CI fails (should fail :) ) On the AssetHubRococo: 1701175800126 -> 1700929825257 = 0.15 % decreased. ``` Feb 02 12:59:05.520 ERROR bridges::estimate: `bridging::XcmBridgeHubRouterBaseFee` actual value: 1701175800126 for runtime: statemine-1006000 (statemine-0.tx14.au1) Feb 02 13:02:40.647 ERROR bridges::estimate: `bridging::XcmBridgeHubRouterBaseFee` actual value: 1700929825257 for runtime: statemine-1006000 (statemine-0.tx14.au1) ``` On the AssetHubWestend: 2116038876326 -> 1641718372993 = 22.4 % decreased. ``` Feb 02 12:56:00.880 ERROR bridges::estimate: `bridging::XcmBridgeHubRouterBaseFee` actual value: 2116038876326 for runtime: westmint-1006000 (westmint-0.tx14.au1) Feb 02 13:04:42.515 ERROR bridges::estimate: `bridging::XcmBridgeHubRouterBaseFee` actual value: 1641718372993 for runtime: westmint-1006000 (westmint-0.tx14.au1) ``` (cherry picked from commit 74b597fcaf143d8dd7f8d40e59f51065514f21d7) * Enable async backing on all testnet system chains (#2949) Built on top of https://github.com/paritytech/polkadot-sdk/pull/2826/ which was a trial run. 
Guide: https://github.com/w3f/polkadot-wiki/blob/master/docs/maintain/maintain-guides-async-backing.md --------- Signed-off-by: georgepisaltu Co-authored-by: Branislav Kontur Co-authored-by: Dónal Murray Co-authored-by: Dmitry Sinyavin Co-authored-by: s0me0ne-unkn0wn <48632512+s0me0ne-unkn0wn@users.noreply.github.com> Co-authored-by: Svyatoslav Nikolsky Co-authored-by: Bastian Köcher Co-authored-by: georgepisaltu <52418509+georgepisaltu@users.noreply.github.com> (cherry picked from commit 700d5f85b768fe1867660938aa5edfcf4b26f632) * Introduce submit_finality_proof_ex call to bridges GRANDPA pallet (#3225) backport of https://github.com/paritytech/parity-bridges-common/pull/2821 (see detailed description there) (cherry picked from commit a462207158360b162228d9877fed7b9ca1f23fc2) * Bridge zombienet tests refactoring (#3260) Related to https://github.com/paritytech/polkadot-sdk/issues/3242 Reorganizing the bridge zombienet tests in order to: - separate the environment spawning from the actual tests - offer better control over the tests and some possibility to orchestrate them as opposed to running everything from the zndsl file Only rewrote the asset transfer test using this new "framework". The old logic and old tests weren't functionally modified or deleted. The plan is to get feedback on this approach first and if this is agreed upon, migrate the other 2 tests later in separate PRs and also do other improvements later. (cherry picked from commit dfc8e4696c6edfb76ccb05f469a221ebb5b270ff) * Bridges: add test 0002 to CI (#3310) Bridges: add test 0002 to CI (cherry picked from commit 1b66bb51b52d3e6cacf155bd3e038b6ef44ac5da) * Bridge zombienet tests - move all test scripts to the same folder (#3333) Related to https://github.com/paritytech/polkadot-sdk/issues/3242 (cherry picked from commit 5fc7622cb312f2d32ec8365012ee0a49622db8c8) * Lift dependencies to the workspace (Part 2/x) (#3366) Lifting some more dependencies to the workspace. 
Just using the most-often updated ones for now. It can be reproduced locally. ```sh $ zepter transpose dependency lift-to-workspace --ignore-errors syn quote thiserror "regex:^serde.*" $ zepter transpose dependency lift-to-workspace --version-resolver=highest syn quote thiserror "regex:^serde.*" --fix $ taplo format --config .config/taplo.toml ``` --------- Signed-off-by: Oliver Tale-Yazdi (cherry picked from commit e89d0fca351de0712f104c55fe45ed124b5c6968) * Add support for BHP local and BHK local (#3443) Related to https://github.com/paritytech/polkadot-sdk/issues/3400 Extracting small parts of https://github.com/paritytech/polkadot-sdk/pull/3429 into separate PR: - Add support for BHP local and BHK local - Increase the timeout for the bridge zomienet tests (cherry picked from commit e4b6b8cd7973633f86d1b92a56abf2a946b7be84) * Bridge zombienet tests: move all "framework" files under one folder (#3462) Related to https://github.com/paritytech/polkadot-sdk/issues/3400 Moving all bridges testing "framework" files under one folder in order to be able to download the entire folder when we want to add tests in other repos No significant functional changes (cherry picked from commit 6fc1d41d4487b9164451cd8214674ce195ab06a0) * Bridge zombienet tests: Check amount received at destination (#3490) Related to https://github.com/paritytech/polkadot-sdk/issues/3475 (cherry picked from commit 2cdda0e62dd3088d2fd09cea627059674070c277) * FRAME: Create `TransactionExtension` as a replacement for `SignedExtension` (#2280) Closes #2160 First part of [Extrinsic Horizon](https://github.com/paritytech/polkadot-sdk/issues/2415) Introduces a new trait `TransactionExtension` to replace `SignedExtension`. Introduce the idea of transactions which obey the runtime's extensions and have according Extension data (né Extra data) yet do not have hard-coded signatures. 
Deprecate the terminology of "Unsigned" when used for transactions/extrinsics owing to there now being "proper" unsigned transactions which obey the extension framework and "old-style" unsigned which do not. Instead we have __*General*__ for the former and __*Bare*__ for the latter. (Ultimately, the latter will be phased out as a type of transaction, and Bare will only be used for Inherents.) Types of extrinsic are now therefore: - Bare (no hardcoded signature, no Extra data; used to be known as "Unsigned") - Bare transactions (deprecated): Gossiped, validated with `ValidateUnsigned` (deprecated) and the `_bare_compat` bits of `TransactionExtension` (deprecated). - Inherents: Not gossiped, validated with `ProvideInherent`. - Extended (Extra data): Gossiped, validated via `TransactionExtension`. - Signed transactions (with a hardcoded signature). - General transactions (without a hardcoded signature). `TransactionExtension` differs from `SignedExtension` because: - A signature on the underlying transaction may validly not be present. - It may alter the origin during validation. - `pre_dispatch` is renamed to `prepare` and need not contain the checks present in `validate`. - `validate` and `prepare` is passed an `Origin` rather than a `AccountId`. - `validate` may pass arbitrary information into `prepare` via a new user-specifiable type `Val`. - `AdditionalSigned`/`additional_signed` is renamed to `Implicit`/`implicit`. It is encoded *for the entire transaction* and passed in to each extension as a new argument to `validate`. This facilitates the ability of extensions to acts as underlying crypto. There is a new `DispatchTransaction` trait which contains only default function impls and is impl'ed for any `TransactionExtension` impler. It provides several utility functions which reduce some of the tedium from using `TransactionExtension` (indeed, none of its regular functions should now need to be called directly). 
Three transaction version discriminator ("versions") are now permissible: - 0b000000100: Bare (used to be called "Unsigned"): contains Signature or Extra (extension data). After bare transactions are no longer supported, this will strictly identify an Inherents only. - 0b100000100: Old-school "Signed" Transaction: contains Signature and Extra (extension data). - 0b010000100: New-school "General" Transaction: contains Extra (extension data), but no Signature. For the New-school General Transaction, it becomes trivial for authors to publish extensions to the mechanism for authorizing an Origin, e.g. through new kinds of key-signing schemes, ZK proofs, pallet state, mutations over pre-authenticated origins or any combination of the above. Wrap your `SignedExtension`s in `AsTransactionExtension`. This should be accompanied by renaming your aggregate type in line with the new terminology. E.g. Before: ```rust /// The SignedExtension to the basic transaction logic. pub type SignedExtra = ( /* snip */ MySpecialSignedExtension, ); /// Unchecked extrinsic type as expected by this runtime. pub type UncheckedExtrinsic = generic::UncheckedExtrinsic; ``` After: ```rust /// The extension to the basic transaction logic. pub type TxExtension = ( /* snip */ AsTransactionExtension, ); /// Unchecked extrinsic type as expected by this runtime. pub type UncheckedExtrinsic = generic::UncheckedExtrinsic; ``` You'll also need to alter any transaction building logic to add a `.into()` to make the conversion happen. E.g. 
Before: ```rust fn construct_extrinsic( /* snip */ ) -> UncheckedExtrinsic { let extra: SignedExtra = ( /* snip */ MySpecialSignedExtension::new(/* snip */), ); let payload = SignedPayload::new(call.clone(), extra.clone()).unwrap(); let signature = payload.using_encoded(|e| sender.sign(e)); UncheckedExtrinsic::new_signed( /* snip */ Signature::Sr25519(signature), extra, ) } ``` After: ```rust fn construct_extrinsic( /* snip */ ) -> UncheckedExtrinsic { let tx_ext: TxExtension = ( /* snip */ MySpecialSignedExtension::new(/* snip */).into(), ); let payload = SignedPayload::new(call.clone(), tx_ext.clone()).unwrap(); let signature = payload.using_encoded(|e| sender.sign(e)); UncheckedExtrinsic::new_signed( /* snip */ Signature::Sr25519(signature), tx_ext, ) } ``` Most `SignedExtension`s can be trivially converted to become a `TransactionExtension`. There are a few things to know. - Instead of a single trait like `SignedExtension`, you should now implement two traits individually: `TransactionExtensionBase` and `TransactionExtension`. - Weights are now a thing and must be provided via the new function `fn weight`. This trait takes care of anything which is not dependent on types specific to your runtime, most notably `Call`. - `AdditionalSigned`/`additional_signed` is renamed to `Implicit`/`implicit`. - Weight must be returned by implementing the `weight` function. If your extension is associated with a pallet, you'll probably want to do this via the pallet's existing benchmarking infrastructure. Generally: - `pre_dispatch` is now `prepare` and you *should not reexecute the `validate` functionality in there*! - You don't get an account ID any more; you get an origin instead. If you need to presume an account ID, then you can use the trait function `AsSystemOriginSigner::as_system_origin_signer`. - You get an additional ticket, similar to `Pre`, called `Val`. This defines data which is passed from `validate` into `prepare`. 
This is important since you should not be duplicating logic from `validate` to `prepare`, you need a way of passing your working from the former into the latter. This is it. - This trait takes two type parameters: `Call` and `Context`. `Call` is the runtime call type which used to be an associated type; you can just move it to become a type parameter for your trait impl. `Context` is not currently used and you can safely implement over it as an unbounded type. - There's no `AccountId` associated type any more. Just remove it. Regarding `validate`: - You get three new parameters in `validate`; all can be ignored when migrating from `SignedExtension`. - `validate` returns a tuple on success; the second item in the tuple is the new ticket type `Self::Val` which gets passed in to `prepare`. If you use any information extracted during `validate` (off-chain and on-chain, non-mutating) in `prepare` (on-chain, mutating) then you can pass it through with this. For the tuple's last item, just return the `origin` argument. Regarding `prepare`: - This is renamed from `pre_dispatch`, but there is one change: - FUNCTIONALITY TO VALIDATE THE TRANSACTION NEED NOT BE DUPLICATED FROM `validate`!! - (This is different to `SignedExtension` which was required to run the same checks in `pre_dispatch` as in `validate`.) Regarding `post_dispatch`: - Since there are no unsigned transactions handled by `TransactionExtension`, `Pre` is always defined, so the first parameter is `Self::Pre` rather than `Option`. If you make use of `SignedExtension::validate_unsigned` or `SignedExtension::pre_dispatch_unsigned`, then: - Just use the regular versions of these functions instead. - Have your logic execute in the case that the `origin` is `None`. - Ensure your transaction creation logic creates a General Transaction rather than a Bare Transaction; this means having to include all `TransactionExtension`s' data. 
- `ValidateUnsigned` can still be used (for now) if you need to be able to construct transactions which contain none of the extension data, however these will be phased out in stage 2 of the Transactions Horizon, so you should consider moving to an extension-centric design. - [x] Introduce `CheckSignature` impl of `TransactionExtension` to ensure it's possible to have crypto be done wholly in a `TransactionExtension`. - [x] Deprecate `SignedExtension` and move all uses in codebase to `TransactionExtension`. - [x] `ChargeTransactionPayment` - [x] `DummyExtension` - [x] `ChargeAssetTxPayment` (asset-tx-payment) - [x] `ChargeAssetTxPayment` (asset-conversion-tx-payment) - [x] `CheckWeight` - [x] `CheckTxVersion` - [x] `CheckSpecVersion` - [x] `CheckNonce` - [x] `CheckNonZeroSender` - [x] `CheckMortality` - [x] `CheckGenesis` - [x] `CheckOnlySudoAccount` - [x] `WatchDummy` - [x] `PrevalidateAttests` - [x] `GenericSignedExtension` - [x] `SignedExtension` (chain-polkadot-bulletin) - [x] `RefundSignedExtensionAdapter` - [x] Implement `fn weight` across the board. - [ ] Go through all pre-existing extensions which assume an account signer and explicitly handle the possibility of another kind of origin. - [x] `CheckNonce` should probably succeed in the case of a non-account origin. - [x] `CheckNonZeroSender` should succeed in the case of a non-account origin. - [x] `ChargeTransactionPayment` and family should fail in the case of a non-account origin. - [ ] - [x] Fix any broken tests. --------- Signed-off-by: georgepisaltu Signed-off-by: Alexandru Vasile Signed-off-by: dependabot[bot] Signed-off-by: Oliver Tale-Yazdi Signed-off-by: Alexandru Gheorghe Signed-off-by: Andrei Sandu Co-authored-by: Nikhil Gupta <17176722+gupnik@users.noreply.github.com> Co-authored-by: georgepisaltu <52418509+georgepisaltu@users.noreply.github.com> Co-authored-by: Chevdor Co-authored-by: Bastian Köcher Co-authored-by: Maciej Co-authored-by: Javier Viola Co-authored-by: Marcin S. 
Co-authored-by: Tsvetomir Dimitrov Co-authored-by: Javier Bullrich Co-authored-by: Koute Co-authored-by: Adrian Catangiu Co-authored-by: Vladimir Istyufeev Co-authored-by: Ross Bulat Co-authored-by: Gonçalo Pestana Co-authored-by: Liam Aharon Co-authored-by: Svyatoslav Nikolsky Co-authored-by: André Silva <123550+andresilva@users.noreply.github.com> Co-authored-by: Oliver Tale-Yazdi Co-authored-by: s0me0ne-unkn0wn <48632512+s0me0ne-unkn0wn@users.noreply.github.com> Co-authored-by: ordian Co-authored-by: Sebastian Kunert Co-authored-by: Aaro Altonen <48052676+altonen@users.noreply.github.com> Co-authored-by: Dmitry Markin Co-authored-by: Alexandru Vasile <60601340+lexnv@users.noreply.github.com> Co-authored-by: Alexander Samusev <41779041+alvicsam@users.noreply.github.com> Co-authored-by: Julian Eager Co-authored-by: Michal Kucharczyk <1728078+michalkucharczyk@users.noreply.github.com> Co-authored-by: Davide Galassi Co-authored-by: Dónal Murray Co-authored-by: yjh Co-authored-by: Tom Mi Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Will | Paradox | ParaNodes.io <79228812+paradox-tt@users.noreply.github.com> Co-authored-by: Bastian Köcher Co-authored-by: Joshy Orndorff Co-authored-by: Joshy Orndorff Co-authored-by: PG Herveou Co-authored-by: Alexander Theißen Co-authored-by: Kian Paimani <5588131+kianenigma@users.noreply.github.com> Co-authored-by: Juan Girini Co-authored-by: bader y Co-authored-by: James Wilson Co-authored-by: joe petrowski <25483142+joepetrowski@users.noreply.github.com> Co-authored-by: asynchronous rob Co-authored-by: Parth Co-authored-by: Andrew Jones Co-authored-by: Jonathan Udd Co-authored-by: Serban Iorga Co-authored-by: Egor_P Co-authored-by: Branislav Kontur Co-authored-by: Evgeny Snitko Co-authored-by: Just van Stam Co-authored-by: Francisco Aguirre Co-authored-by: gupnik Co-authored-by: dzmitry-lahoda Co-authored-by: zhiqiangxu <652732310@qq.com> Co-authored-by: Nazar Mokrynskyi 
Co-authored-by: Anwesh Co-authored-by: cheme Co-authored-by: Sam Johnson Co-authored-by: kianenigma Co-authored-by: Jegor Sidorenko <5252494+jsidorenko@users.noreply.github.com> Co-authored-by: Muharem Co-authored-by: joepetrowski Co-authored-by: Alexandru Gheorghe <49718502+alexggh@users.noreply.github.com> Co-authored-by: Gabriel Facco de Arruda Co-authored-by: Squirrel Co-authored-by: Andrei Sandu <54316454+sandreim@users.noreply.github.com> Co-authored-by: georgepisaltu Co-authored-by: command-bot <> (cherry picked from commit fd5f9292f500652e1d4792b09fb8ac60e1268ce4) * Revert "FRAME: Create `TransactionExtension` as a replacement for `SignedExtension` (#2280)" (#3665) This PR reverts #2280 which introduced `TransactionExtension` to replace `SignedExtension`. As a result of the discussion [here](https://github.com/paritytech/polkadot-sdk/pull/3623#issuecomment-1986789700), the changes will be reverted for now with plans to reintroduce the concept in the future. --------- Signed-off-by: georgepisaltu (cherry picked from commit bbd51ce867967f71657b901f1a956ad4f75d352e) * Increase timeout for assertions (#3680) Prevents timeouts in ci like https://gitlab.parity.io/parity/mirrors/polkadot-sdk/-/jobs/5516019 (cherry picked from commit c4c9257386036a9e27e7ee001fe8eadb80958cc0) * Removes `as [disambiguation_path]` from `derive_impl` usage (#3652) Step in https://github.com/paritytech/polkadot-sdk/issues/171 This PR removes `as [disambiguation_path]` syntax from `derive_impl` usage across the polkadot-sdk as introduced in https://github.com/paritytech/polkadot-sdk/pull/3505 (cherry picked from commit 7099f6e1b1fa3c8cd894693902263d9ed0e38978) * Fix typo (#3691) (cherry picked from commit 6b1179f13b4815685769c9f523720ec9ed0e2ff4) * Bridge zombienet tests: remove unneeded accounts (#3700) Bridge zombienet tests: remove unneeded accounts (cherry picked from commit 0c6c837f689a287583508506e342ba07687e8d26) * Fix typos (#3753) (cherry picked from commit 
7241a8db7b3496816503c6058dae67f66c666b00) * Update polkadot-sdk refs * Fix dependency conflicts * Fix build * cargo fmt * Fix spellcheck test --------- Co-authored-by: Svyatoslav Nikolsky Co-authored-by: Branislav Kontur Co-authored-by: Marcin S Co-authored-by: Oliver Tale-Yazdi Co-authored-by: Gavin Wood Co-authored-by: georgepisaltu <52418509+georgepisaltu@users.noreply.github.com> Co-authored-by: Javier Viola <363911+pepoviola@users.noreply.github.com> Co-authored-by: gupnik Co-authored-by: jokess123 <163112061+jokess123@users.noreply.github.com> Co-authored-by: slicejoke <163888128+slicejoke@users.noreply.github.com> --- .config/lingua.dic | 2 +- Cargo.lock | 355 ++++---- bin/runtime-common/Cargo.toml | 1 - bin/runtime-common/src/lib.rs | 72 +- bin/runtime-common/src/mock.rs | 7 +- bin/runtime-common/src/priority_calculator.rs | 13 +- .../src/refund_relayer_extension.rs | 156 ++-- modules/grandpa/README.md | 2 +- modules/grandpa/src/mock.rs | 2 +- modules/messages/src/mock.rs | 4 +- modules/parachains/src/mock.rs | 2 +- modules/relayers/src/mock.rs | 4 +- modules/xcm-bridge-hub-router/src/mock.rs | 2 +- modules/xcm-bridge-hub/src/mock.rs | 4 +- .../chain-bridge-hub-cumulus/src/lib.rs | 4 +- primitives/chain-bridge-hub-rococo/src/lib.rs | 2 +- primitives/chain-kusama/src/lib.rs | 4 +- primitives/chain-polkadot-bulletin/src/lib.rs | 51 +- primitives/chain-polkadot/src/lib.rs | 4 +- primitives/chain-rococo/src/lib.rs | 4 +- primitives/chain-westend/src/lib.rs | 4 +- primitives/polkadot-core/Cargo.toml | 2 +- primitives/polkadot-core/src/lib.rs | 37 +- primitives/runtime/src/extensions.rs | 135 ++-- relays/bin-substrate/Cargo.toml | 2 +- relays/client-bridge-hub-kusama/src/lib.rs | 9 +- relays/client-bridge-hub-polkadot/src/lib.rs | 9 +- relays/client-bridge-hub-rococo/src/lib.rs | 9 +- relays/client-bridge-hub-westend/src/lib.rs | 9 +- relays/client-kusama/src/lib.rs | 6 +- relays/client-polkadot-bulletin/src/lib.rs | 8 +- relays/client-polkadot/src/lib.rs | 6 +- 
relays/client-rococo/src/lib.rs | 6 +- relays/client-substrate/Cargo.toml | 2 +- relays/client-westend/src/lib.rs | 6 +- relays/equivocation/Cargo.toml | 2 +- relays/finality/Cargo.toml | 2 +- relays/lib-substrate-relay/Cargo.toml | 2 +- relays/messages/Cargo.toml | 2 +- relays/parachains/Cargo.toml | 2 +- relays/utils/Cargo.toml | 2 +- {zombienet => testing}/README.md | 2 +- .../bridge_hub_rococo_local_network.toml | 88 ++ .../bridge_hub_westend_local_network.toml | 88 ++ .../rococo-westend/bridges_rococo_westend.sh | 401 +++++++++ testing/environments/rococo-westend/helper.sh | 3 + .../rococo-westend/rococo-init.zndsl | 8 + .../environments/rococo-westend/rococo.zndsl | 7 + testing/environments/rococo-westend/spawn.sh | 70 ++ .../rococo-westend/start_relayer.sh | 23 + .../rococo-westend/westend-init.zndsl | 7 + .../environments/rococo-westend/westend.zndsl | 6 + .../best-finalized-header-at-bridged-chain.js | 2 +- .../js-helpers}/chains/rococo-at-westend.js | 0 .../js-helpers}/chains/westend-at-rococo.js | 0 .../native-assets-balance-increased.js | 7 +- ...only-mandatory-headers-synced-when-idle.js | 6 +- .../only-required-headers-synced-when-idle.js | 0 .../framework/js-helpers}/relayer-rewards.js | 2 +- .../framework/js-helpers}/utils.js | 2 +- .../js-helpers}/wait-hrmp-channel-opened.js | 2 +- .../js-helpers}/wrapped-assets-balance.js | 2 +- testing/framework/utils/bridges.sh | 309 +++++++ testing/framework/utils/common.sh | 45 ++ .../utils/generate_hex_encoded_call/index.js | 165 ++++ .../package-lock.json | 759 ++++++++++++++++++ .../generate_hex_encoded_call/package.json | 11 + testing/framework/utils/zombienet.sh | 39 + testing/run-new-test.sh | 48 ++ {zombienet => testing}/run-tests.sh | 23 +- .../scripts/invoke-script.sh | 2 +- .../scripts/start-relayer.sh | 2 +- {zombienet => testing}/scripts/sync-exit.sh | 0 .../roc-reaches-westend.zndsl | 12 + testing/tests/0001-asset-transfer/run.sh | 25 + .../wnd-reaches-rococo.zndsl | 12 + 
.../wroc-reaches-rococo.zndsl | 10 + .../wwnd-reaches-westend.zndsl | 10 + .../rococo-to-westend.zndsl | 8 + .../run.sh | 35 + .../westend-to-rococo.zndsl | 7 + ...ynced-while-active-rococo-to-westend.zndsl | 6 +- ...ynced-while-active-westend-to-rococo.zndsl | 6 +- ...set-transfer-works-rococo-to-westend.zndsl | 39 - ...set-transfer-works-westend-to-rococo.zndsl | 39 - ...-synced-while-idle-rococo-to-westend.zndsl | 26 - ...-synced-while-idle-westend-to-rococo.zndsl | 26 - 87 files changed, 2665 insertions(+), 682 deletions(-) rename {zombienet => testing}/README.md (94%) create mode 100644 testing/environments/rococo-westend/bridge_hub_rococo_local_network.toml create mode 100644 testing/environments/rococo-westend/bridge_hub_westend_local_network.toml create mode 100755 testing/environments/rococo-westend/bridges_rococo_westend.sh create mode 100755 testing/environments/rococo-westend/helper.sh create mode 100644 testing/environments/rococo-westend/rococo-init.zndsl create mode 100644 testing/environments/rococo-westend/rococo.zndsl create mode 100755 testing/environments/rococo-westend/spawn.sh create mode 100755 testing/environments/rococo-westend/start_relayer.sh create mode 100644 testing/environments/rococo-westend/westend-init.zndsl create mode 100644 testing/environments/rococo-westend/westend.zndsl rename {zombienet/helpers => testing/framework/js-helpers}/best-finalized-header-at-bridged-chain.js (94%) rename {zombienet/helpers => testing/framework/js-helpers}/chains/rococo-at-westend.js (100%) rename {zombienet/helpers => testing/framework/js-helpers}/chains/westend-at-rococo.js (100%) rename {zombienet/helpers => testing/framework/js-helpers}/native-assets-balance-increased.js (74%) rename {zombienet/helpers => testing/framework/js-helpers}/only-mandatory-headers-synced-when-idle.js (88%) rename {zombienet/helpers => testing/framework/js-helpers}/only-required-headers-synced-when-idle.js (100%) rename {zombienet/helpers => 
testing/framework/js-helpers}/relayer-rewards.js (93%) rename {zombienet/helpers => testing/framework/js-helpers}/utils.js (98%) rename {zombienet/helpers => testing/framework/js-helpers}/wait-hrmp-channel-opened.js (91%) rename {zombienet/helpers => testing/framework/js-helpers}/wrapped-assets-balance.js (93%) create mode 100755 testing/framework/utils/bridges.sh create mode 100644 testing/framework/utils/common.sh create mode 100644 testing/framework/utils/generate_hex_encoded_call/index.js create mode 100644 testing/framework/utils/generate_hex_encoded_call/package-lock.json create mode 100644 testing/framework/utils/generate_hex_encoded_call/package.json create mode 100644 testing/framework/utils/zombienet.sh create mode 100755 testing/run-new-test.sh rename {zombienet => testing}/run-tests.sh (77%) rename {zombienet => testing}/scripts/invoke-script.sh (62%) rename {zombienet => testing}/scripts/start-relayer.sh (63%) rename {zombienet => testing}/scripts/sync-exit.sh (100%) create mode 100644 testing/tests/0001-asset-transfer/roc-reaches-westend.zndsl create mode 100755 testing/tests/0001-asset-transfer/run.sh create mode 100644 testing/tests/0001-asset-transfer/wnd-reaches-rococo.zndsl create mode 100644 testing/tests/0001-asset-transfer/wroc-reaches-rococo.zndsl create mode 100644 testing/tests/0001-asset-transfer/wwnd-reaches-westend.zndsl create mode 100644 testing/tests/0002-mandatory-headers-synced-while-idle/rococo-to-westend.zndsl create mode 100755 testing/tests/0002-mandatory-headers-synced-while-idle/run.sh create mode 100644 testing/tests/0002-mandatory-headers-synced-while-idle/westend-to-rococo.zndsl rename {zombienet => testing}/tests/0003-required-headers-synced-while-active-rococo-to-westend.zndsl (77%) rename {zombienet => testing}/tests/0003-required-headers-synced-while-active-westend-to-rococo.zndsl (77%) delete mode 100644 zombienet/tests/0001-asset-transfer-works-rococo-to-westend.zndsl delete mode 100644 
zombienet/tests/0001-asset-transfer-works-westend-to-rococo.zndsl delete mode 100644 zombienet/tests/0002-mandatory-headers-synced-while-idle-rococo-to-westend.zndsl delete mode 100644 zombienet/tests/0002-mandatory-headers-synced-while-idle-westend-to-rococo.zndsl diff --git a/.config/lingua.dic b/.config/lingua.dic index 46ed64175..0ef7f9bef 100644 --- a/.config/lingua.dic +++ b/.config/lingua.dic @@ -84,7 +84,7 @@ SS58Prefix STALL_SYNC_TIMEOUT SURI ServiceFactory/MS -TransactionExtension +SignedExtension Stringified Submitter1 S|N diff --git a/Cargo.lock b/Cargo.lock index 5ae38806b..fc835cbcb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -243,7 +243,7 @@ dependencies = [ "proc-macro-error", "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -767,7 +767,7 @@ checksum = "461abc97219de0eaaf81fe3ef974a540158f3d079c2ab200f891f1a2ef201e85" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -909,7 +909,7 @@ dependencies = [ [[package]] name = "binary-merkle-tree" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "hash-db", "log", @@ -930,19 +930,31 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93f2635620bf0b9d4576eb7bb9a38a55df78bd1205d26fa994b25911a69f212f" dependencies = [ - "bitcoin_hashes", - "rand", - "rand_core 0.6.4", - "serde", - "unicode-normalization", + "bitcoin_hashes 0.11.0", ] +[[package]] +name = "bitcoin-internals" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9425c3bf7089c983facbae04de54513cce73b41c7f9ff8c845b54e7bc64ebbfb" + [[package]] name = "bitcoin_hashes" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"90064b8dee6815a6470d60bad07bbbaee885c0e12d04177138fa3291a01b7bc4" +[[package]] +name = "bitcoin_hashes" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1930a4dabfebb8d7d9992db18ebe3ae2876f0a305fab206fd168df931ede293b" +dependencies = [ + "bitcoin-internals", + "hex-conservative", +] + [[package]] name = "bitflags" version = "1.3.2" @@ -2028,7 +2040,7 @@ checksum = "83fdaf97f4804dcebfa5862639bc9ce4121e82140bec2a987ac5140294865b5b" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -2089,7 +2101,7 @@ dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", "strsim 0.10.0", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -2111,7 +2123,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core 0.20.3", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -2273,7 +2285,7 @@ checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -2313,7 +2325,7 @@ dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", "regex", - "syn 2.0.52", + "syn 2.0.53", "termcolor", "toml 0.8.11", "walkdir", @@ -2581,7 +2593,7 @@ dependencies = [ "fs-err", "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -2737,7 +2749,7 @@ checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" [[package]] name = "frame-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "frame-support", "frame-support-procedural", @@ -2785,7 +2797,7 @@ dependencies = [ [[package]] name = "frame-support" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "aquamarine", "array-bytes 6.1.0", @@ -2826,7 +2838,7 @@ dependencies = [ [[package]] name = "frame-support-procedural" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "Inflector", "cfg-expr", @@ -2839,35 +2851,35 @@ dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", "sp-crypto-hashing", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] name = "frame-support-procedural-tools" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "frame-support-procedural-tools-derive", "proc-macro-crate 3.1.0", "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] name = "frame-support-procedural-tools-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] name = "frame-system" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "cfg-if 1.0.0", "docify", @@ -2968,7 +2980,7 @@ checksum = 
"87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -3241,6 +3253,12 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +[[package]] +name = "hex-conservative" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30ed443af458ccb6d81c1e7e661545f94d3176752fb1df2f543b902a1e0f51e2" + [[package]] name = "hex-literal" version = "0.4.1" @@ -3877,7 +3895,7 @@ dependencies = [ "proc-macro-crate 3.1.0", "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -4603,7 +4621,7 @@ dependencies = [ "macro_magic_core", "macro_magic_macros", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -4617,7 +4635,7 @@ dependencies = [ "macro_magic_core_macros", "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -4628,7 +4646,7 @@ checksum = "9ea73aa640dc01d62a590d48c0c3521ed739d53b27f919b25c3551e233481654" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -4639,7 +4657,7 @@ checksum = "ef9d79ae96aaba821963320eb2b6e34d17df1e5a83d8a1985c29cc5be59577b3" dependencies = [ "macro_magic_core", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -5216,7 +5234,7 @@ dependencies = [ [[package]] name = "pallet-authorship" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "frame-support", "frame-system", @@ -5230,7 +5248,7 @@ dependencies = [ [[package]] name = "pallet-balances" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "docify", "frame-benchmarking", @@ -5246,7 +5264,7 @@ dependencies = [ [[package]] name = "pallet-beefy" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "frame-support", "frame-system", @@ -5266,7 +5284,7 @@ dependencies = [ [[package]] name = "pallet-beefy-mmr" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "array-bytes 6.1.0", "binary-merkle-tree", @@ -5401,7 +5419,7 @@ dependencies = [ [[package]] name = "pallet-grandpa" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "frame-benchmarking", "frame-support", @@ -5424,7 +5442,7 @@ dependencies = [ [[package]] name = "pallet-mmr" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "frame-benchmarking", "frame-support", @@ -5442,7 +5460,7 @@ dependencies = [ [[package]] name = "pallet-session" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "frame-support", "frame-system", @@ -5464,7 +5482,7 @@ dependencies = [ [[package]] name = "pallet-timestamp" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "docify", "frame-benchmarking", @@ -5484,9 +5502,8 @@ dependencies = [ [[package]] name = "pallet-transaction-payment" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ - "frame-benchmarking", "frame-support", "frame-system", "parity-scale-codec", @@ -5501,7 +5518,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc-runtime-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "pallet-transaction-payment", "parity-scale-codec", @@ -5513,7 +5530,7 @@ dependencies = [ [[package]] name = "pallet-utility" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "frame-benchmarking", "frame-support", @@ -5585,6 +5602,19 @@ dependencies = [ "sp-core", ] +[[package]] +name = "parity-bip39" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4e69bf016dc406eff7d53a7d3f7cf1c2e72c82b9088aac1118591e36dd2cd3e9" +dependencies = [ + "bitcoin_hashes 0.13.0", + "rand", + "rand_core 0.6.4", + "serde", + "unicode-normalization", +] + [[package]] name = "parity-scale-codec" version = "3.6.4" @@ -5782,7 +5812,7 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -5828,7 +5858,7 @@ checksum = "e3d7ddaed09e0eb771a79ab0fd64609ba0afb0a8366421957936ad14cbd13630" [[package]] name = "polkadot-core-primitives" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "parity-scale-codec", "scale-info", @@ -5840,7 +5870,7 @@ dependencies = [ [[package]] name = "polkadot-parachain-primitives" version = "6.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "bounded-collections", "derive_more", @@ -5857,7 +5887,7 @@ dependencies = [ [[package]] name = "polkadot-primitives" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "bitvec", "hex-literal", @@ -5930,7 +5960,7 @@ dependencies = [ "polkavm-common", "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -5940,7 +5970,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ba81f7b5faac81e528eb6158a6f3c9e0bb1008e0ffa19653bc8dea925ecb429" dependencies = [ "polkavm-derive-impl", - 
"syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -6123,7 +6153,7 @@ checksum = "9b698b0b09d40e9b7c1a47b132d66a8b54bcd20583d9b6d06e4535e383b4405c" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -6478,7 +6508,7 @@ checksum = "68bf53dad9b6086826722cdc99140793afd9f62faa14a1ad07eb4f955e7a7216" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -7053,7 +7083,7 @@ dependencies = [ [[package]] name = "sc-allocator" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "log", "sp-core", @@ -7064,7 +7094,7 @@ dependencies = [ [[package]] name = "sc-chain-spec" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "array-bytes 6.1.0", "docify", @@ -7090,18 +7120,18 @@ dependencies = [ [[package]] name = "sc-chain-spec-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "proc-macro-crate 3.1.0", "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] name = "sc-client-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "fnv", "futures", @@ -7128,7 +7158,7 @@ dependencies = [ [[package]] name = 
"sc-consensus" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "async-trait", "futures", @@ -7153,7 +7183,7 @@ dependencies = [ [[package]] name = "sc-executor" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "parity-scale-codec", "parking_lot 0.12.1", @@ -7176,7 +7206,7 @@ dependencies = [ [[package]] name = "sc-executor-common" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "polkavm", "sc-allocator", @@ -7189,7 +7219,7 @@ dependencies = [ [[package]] name = "sc-executor-polkavm" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "log", "polkavm", @@ -7200,7 +7230,7 @@ dependencies = [ [[package]] name = "sc-executor-wasmtime" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "anyhow", "cfg-if 1.0.0", @@ -7218,7 +7248,7 @@ dependencies = [ [[package]] name = "sc-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "array-bytes 4.2.0", "arrayvec 0.7.4", @@ -7247,7 +7277,7 @@ dependencies = [ [[package]] name = "sc-network" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "array-bytes 6.1.0", "async-channel", @@ -7290,7 +7320,7 @@ dependencies = [ [[package]] name = "sc-network-common" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "async-trait", "bitflags 1.3.2", @@ -7307,7 +7337,7 @@ dependencies = [ [[package]] name = "sc-rpc-api" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "jsonrpsee 0.22.2", "parity-scale-codec", @@ -7327,7 +7357,7 @@ dependencies = [ [[package]] name = "sc-telemetry" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "chrono", "futures", @@ -7346,7 +7376,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ 
"async-trait", "futures", @@ -7362,7 +7392,7 @@ dependencies = [ [[package]] name = "sc-utils" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "async-channel", "futures", @@ -7655,7 +7685,7 @@ checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -7998,7 +8028,7 @@ dependencies = [ [[package]] name = "sp-api" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "hash-db", "log", @@ -8020,7 +8050,7 @@ dependencies = [ [[package]] name = "sp-api-proc-macro" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "Inflector", "blake2 0.10.6", @@ -8028,13 +8058,13 @@ dependencies = [ "proc-macro-crate 3.1.0", "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] name = "sp-application-crypto" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "parity-scale-codec", "scale-info", @@ -8047,8 +8077,9 @@ dependencies = [ [[package]] name = "sp-arithmetic" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" 
+source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ + "docify", "integer-sqrt", "num-traits", "parity-scale-codec", @@ -8079,20 +8110,19 @@ dependencies = [ [[package]] name = "sp-authority-discovery" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "parity-scale-codec", "scale-info", "sp-api", "sp-application-crypto", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] [[package]] name = "sp-blockchain" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "futures", "log", @@ -8110,7 +8140,7 @@ dependencies = [ [[package]] name = "sp-consensus" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "async-trait", "futures", @@ -8125,7 +8155,7 @@ dependencies = [ [[package]] name = "sp-consensus-beefy" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "lazy_static", "parity-scale-codec", @@ -8139,14 +8169,13 @@ dependencies = [ "sp-keystore", "sp-mmr-primitives", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "strum 0.24.1", ] [[package]] name = 
"sp-consensus-grandpa" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "finality-grandpa", "log", @@ -8158,29 +8187,26 @@ dependencies = [ "sp-core", "sp-keystore", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] [[package]] name = "sp-consensus-slots" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "sp-timestamp", ] [[package]] name = "sp-core" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "array-bytes 6.1.0", "bandersnatch_vrfs", - "bip39", "bitflags 1.3.2", "blake2 0.10.6", "bounded-collections", @@ -8196,6 +8222,7 @@ dependencies = [ "libsecp256k1", "log", "merlin", + "parity-bip39", "parity-scale-codec", "parking_lot 0.12.1", "paste", @@ -8238,7 +8265,7 @@ dependencies = [ [[package]] name = "sp-crypto-ec-utils" version = "0.10.0" -source = "git+https://github.com/paritytech/polkadot-sdk#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "ark-bls12-377", "ark-bls12-377-ext", @@ -8253,13 +8280,12 @@ dependencies = [ "ark-ed-on-bls12-381-bandersnatch-ext", "ark-scale", "sp-runtime-interface 24.0.0 
(git+https://github.com/paritytech/polkadot-sdk)", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk)", ] [[package]] name = "sp-crypto-hashing" version = "0.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "blake2b_simd", "byteorder", @@ -8272,17 +8298,17 @@ dependencies = [ [[package]] name = "sp-crypto-hashing-proc-macro" version = "0.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "quote 1.0.35", "sp-crypto-hashing", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] name = "sp-database" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "kvdb", "parking_lot 0.12.1", @@ -8291,74 +8317,70 @@ dependencies = [ [[package]] name = "sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] name = "sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] name = 
"sp-externalities" version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "environmental", "parity-scale-codec", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] [[package]] name = "sp-externalities" version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "environmental", "parity-scale-codec", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk)", "sp-storage 19.0.0 (git+https://github.com/paritytech/polkadot-sdk)", ] [[package]] name = "sp-genesis-builder" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "serde_json", "sp-api", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] [[package]] name = "sp-inherents" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "async-trait", "impl-trait-for-tuples", "parity-scale-codec", "scale-info", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "thiserror", ] [[package]] name = "sp-io" version = "30.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "bytes", "ed25519-dalek", @@ -8384,7 +8406,7 @@ dependencies = [ [[package]] name = "sp-keyring" version = "31.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "sp-core", "sp-runtime", @@ -8394,7 +8416,7 @@ dependencies = [ [[package]] name = "sp-keystore" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "parity-scale-codec", "parking_lot 0.12.1", @@ -8405,7 +8427,7 @@ dependencies = [ [[package]] name = "sp-maybe-compressed-blob" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "thiserror", "zstd 0.12.4", @@ -8414,30 +8436,28 @@ dependencies = [ [[package]] name = "sp-metadata-ir" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "frame-metadata 16.0.0", "parity-scale-codec", "scale-info", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] [[package]] name = "sp-mixnet" version = "0.4.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "parity-scale-codec", "scale-info", "sp-api", "sp-application-crypto", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] [[package]] name = "sp-mmr-primitives" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "ckb-merkle-mountain-range 0.5.2", "log", @@ -8448,14 +8468,13 @@ dependencies = [ "sp-core", "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "thiserror", ] [[package]] name = "sp-panic-handler" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "backtrace", "lazy_static", @@ -8465,7 +8484,7 @@ dependencies = [ [[package]] name = "sp-rpc" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "rustc-hash", "serde", @@ -8475,7 +8494,7 @@ dependencies = [ [[package]] name = "sp-runtime" version = "31.0.1" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" 
dependencies = [ "docify", "either", @@ -8494,13 +8513,12 @@ dependencies = [ "sp-io", "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "sp-weights", - "tuplex", ] [[package]] name = "sp-runtime-interface" version = "24.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "bytes", "impl-trait-for-tuples", @@ -8519,7 +8537,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface" version = "24.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "bytes", "impl-trait-for-tuples", @@ -8538,33 +8556,33 @@ dependencies = [ [[package]] name = "sp-runtime-interface-proc-macro" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "Inflector", "expander", "proc-macro-crate 3.1.0", "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] name = "sp-runtime-interface-proc-macro" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "Inflector", "expander", "proc-macro-crate 3.1.0", "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] name = "sp-session" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "parity-scale-codec", "scale-info", @@ -8573,13 +8591,12 @@ dependencies = [ "sp-keystore", "sp-runtime", "sp-staking", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] [[package]] name = "sp-staking" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", @@ -8587,13 +8604,12 @@ dependencies = [ "serde", "sp-core", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] [[package]] name = "sp-state-machine" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "hash-db", "log", @@ -8604,7 +8620,6 @@ dependencies = [ "sp-core", "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "sp-panic-handler", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "sp-trie", "thiserror", "tracing", @@ -8614,7 +8629,7 @@ dependencies = [ [[package]] name = "sp-statement-store" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "aes-gcm 0.10.2", "curve25519-dalek 4.1.1", @@ -8631,7 +8646,6 @@ dependencies = [ "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "sp-runtime", "sp-runtime-interface 24.0.0 
(git+https://github.com/paritytech/polkadot-sdk?branch=master)", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "thiserror", "x25519-dalek 2.0.0", ] @@ -8645,59 +8659,55 @@ checksum = "53458e3c57df53698b3401ec0934bea8e8cfce034816873c0b0abbd83d7bac0d" [[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" [[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" [[package]] name = "sp-storage" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "impl-serde", "parity-scale-codec", "ref-cast", "serde", "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] [[package]] name = "sp-storage" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "impl-serde", "parity-scale-codec", "ref-cast", "serde", "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk)", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk)", ] [[package]] name = "sp-timestamp" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "async-trait", "parity-scale-codec", "sp-inherents", "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "thiserror", ] [[package]] name = "sp-tracing" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "parity-scale-codec", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "tracing", "tracing-core", "tracing-subscriber", @@ -8706,10 +8716,9 @@ dependencies = [ [[package]] name = "sp-tracing" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "parity-scale-codec", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk)", "tracing", "tracing-core", "tracing-subscriber", @@ -8718,7 +8727,7 @@ dependencies = [ [[package]] name = "sp-trie" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "ahash 0.8.7", "hash-db", @@ -8732,7 +8741,6 @@ dependencies = [ "schnellru", "sp-core", "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "thiserror", "tracing", "trie-db", @@ -8742,7 +8750,7 @@ dependencies = [ [[package]] name = "sp-version" version = "29.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "impl-serde", "parity-scale-codec", @@ -8759,44 +8767,42 @@ dependencies = [ [[package]] name = "sp-version-proc-macro" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "parity-scale-codec", "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] name = "sp-wasm-interface" version = "20.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "anyhow", "impl-trait-for-tuples", "log", "parity-scale-codec", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "wasmtime", ] [[package]] name = "sp-wasm-interface" version = "20.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "anyhow", "impl-trait-for-tuples", "log", "parity-scale-codec", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk)", "wasmtime", ] [[package]] name = "sp-weights" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "bounded-collections", "parity-scale-codec", @@ -8805,7 +8811,6 @@ dependencies = [ "smallvec", 
"sp-arithmetic", "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] [[package]] @@ -8854,7 +8859,7 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "staging-xcm" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "array-bytes 6.1.0", "bounded-collections", @@ -8872,7 +8877,7 @@ dependencies = [ [[package]] name = "staging-xcm-builder" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "frame-support", "frame-system", @@ -8894,7 +8899,7 @@ dependencies = [ [[package]] name = "staging-xcm-executor" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "environmental", "frame-benchmarking", @@ -8995,13 +9000,13 @@ dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", "rustversion", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] name = "substrate-bip39" version = "0.4.7" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "hmac 0.12.1", "pbkdf2", @@ -9013,7 +9018,7 @@ dependencies = [ [[package]] name = "substrate-prometheus-endpoint" version = "0.17.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "hyper", "log", @@ -9189,7 +9194,7 @@ dependencies = [ "quote 1.0.35", "scale-info", "subxt-metadata", - "syn 2.0.52", + "syn 2.0.53", "thiserror", "tokio", ] @@ -9220,7 +9225,7 @@ dependencies = [ "darling 0.20.3", "proc-macro-error", "subxt-codegen", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -9260,9 +9265,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.52" +version = "2.0.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07" +checksum = "7383cd0e49fff4b6b90ca5670bfd3e9d6a733b3f90c686605aa7eec8c4996032" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", @@ -9402,7 +9407,7 @@ checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -9498,7 +9503,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -9651,7 +9656,7 @@ checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -9797,12 +9802,6 @@ version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4f195fd851901624eee5a58c4bb2b4f06399148fcd0ed336e6f1cb60a9881df" -[[package]] -name = "tuplex" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "676ac81d5454c4dcf37955d34fa8626ede3490f744b86ca14a7b90168d2a08aa" - [[package]] name = "twox-hash" version = "1.6.3" @@ -10051,7 +10050,7 @@ dependencies = [ 
"once_cell", "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", "wasm-bindgen-shared", ] @@ -10085,7 +10084,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -10803,12 +10802,12 @@ dependencies = [ [[package]] name = "xcm-procedural" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#1ead59773e2dab336d2b54295419bbc3fe7c687f" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" dependencies = [ "Inflector", "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -10857,7 +10856,7 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -10877,7 +10876,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] diff --git a/bin/runtime-common/Cargo.toml b/bin/runtime-common/Cargo.toml index 835a8cf13..e3c05d1be 100644 --- a/bin/runtime-common/Cargo.toml +++ b/bin/runtime-common/Cargo.toml @@ -93,7 +93,6 @@ runtime-benchmarks = [ "pallet-bridge-messages/runtime-benchmarks", "pallet-bridge-parachains/runtime-benchmarks", "pallet-bridge-relayers/runtime-benchmarks", - "pallet-transaction-payment/runtime-benchmarks", "pallet-utility/runtime-benchmarks", "sp-runtime/runtime-benchmarks", "xcm-builder/runtime-benchmarks", diff --git a/bin/runtime-common/src/lib.rs b/bin/runtime-common/src/lib.rs index 035077408..2722f6f1c 100644 --- a/bin/runtime-common/src/lib.rs +++ b/bin/runtime-common/src/lib.rs @@ -105,48 +105,43 @@ macro_rules! 
generate_bridge_reject_obsolete_headers_and_messages { ($call:ty, $account_id:ty, $($filter_call:ty),*) => { #[derive(Clone, codec::Decode, Default, codec::Encode, Eq, PartialEq, sp_runtime::RuntimeDebug, scale_info::TypeInfo)] pub struct BridgeRejectObsoleteHeadersAndMessages; - impl sp_runtime::traits::TransactionExtensionBase for BridgeRejectObsoleteHeadersAndMessages { + impl sp_runtime::traits::SignedExtension for BridgeRejectObsoleteHeadersAndMessages { const IDENTIFIER: &'static str = "BridgeRejectObsoleteHeadersAndMessages"; - type Implicit = (); - } - impl sp_runtime::traits::TransactionExtension<$call, Context> for BridgeRejectObsoleteHeadersAndMessages { + type AccountId = $account_id; + type Call = $call; + type AdditionalSigned = (); type Pre = (); - type Val = (); + + fn additional_signed(&self) -> sp_std::result::Result< + (), + sp_runtime::transaction_validity::TransactionValidityError, + > { + Ok(()) + } fn validate( &self, - origin: <$call as sp_runtime::traits::Dispatchable>::RuntimeOrigin, - call: &$call, - _info: &sp_runtime::traits::DispatchInfoOf<$call>, + _who: &Self::AccountId, + call: &Self::Call, + _info: &sp_runtime::traits::DispatchInfoOf, _len: usize, - _context: &mut Context, - _self_implicit: Self::Implicit, - _inherited_implication: &impl codec::Encode, - ) -> Result< - ( - sp_runtime::transaction_validity::ValidTransaction, - Self::Val, - <$call as sp_runtime::traits::Dispatchable>::RuntimeOrigin, - ), sp_runtime::transaction_validity::TransactionValidityError - > { - let tx_validity = sp_runtime::transaction_validity::ValidTransaction::default(); + ) -> sp_runtime::transaction_validity::TransactionValidity { + let valid = sp_runtime::transaction_validity::ValidTransaction::default(); $( - let call_filter_validity = <$filter_call as $crate::BridgeRuntimeFilterCall<$call>>::validate(call)?; - let tx_validity = tx_validity.combine_with(call_filter_validity); + let valid = valid + .combine_with(<$filter_call as 
$crate::BridgeRuntimeFilterCall<$call>>::validate(call)?); )* - Ok((tx_validity, (), origin)) + Ok(valid) } - fn prepare( + fn pre_dispatch( self, - _val: Self::Val, - _origin: &<$call as sp_runtime::traits::Dispatchable>::RuntimeOrigin, - _call: &$call, - _info: &sp_runtime::traits::DispatchInfoOf<$call>, - _len: usize, - _context: &Context, + who: &Self::AccountId, + call: &Self::Call, + info: &sp_runtime::traits::DispatchInfoOf, + len: usize, ) -> Result { - Ok(()) + self.validate(who, call, info, len).map(drop) } } }; @@ -155,14 +150,12 @@ macro_rules! generate_bridge_reject_obsolete_headers_and_messages { #[cfg(test)] mod tests { use crate::BridgeRuntimeFilterCall; - use codec::Encode; - use frame_support::assert_err; + use frame_support::{assert_err, assert_ok}; use sp_runtime::{ - traits::DispatchTransaction, + traits::SignedExtension, transaction_validity::{InvalidTransaction, TransactionValidity, ValidTransaction}, }; - #[derive(Encode)] pub struct MockCall { data: u32, } @@ -213,20 +206,17 @@ mod tests { ); assert_err!( - BridgeRejectObsoleteHeadersAndMessages.validate_only((), &MockCall { data: 1 }, &(), 0), + BridgeRejectObsoleteHeadersAndMessages.validate(&(), &MockCall { data: 1 }, &(), 0), InvalidTransaction::Custom(1) ); assert_err!( - BridgeRejectObsoleteHeadersAndMessages.validate_only((), &MockCall { data: 2 }, &(), 0), + BridgeRejectObsoleteHeadersAndMessages.validate(&(), &MockCall { data: 2 }, &(), 0), InvalidTransaction::Custom(2) ); - assert_eq!( - BridgeRejectObsoleteHeadersAndMessages - .validate_only((), &MockCall { data: 3 }, &(), 0) - .unwrap() - .0, + assert_ok!( + BridgeRejectObsoleteHeadersAndMessages.validate(&(), &MockCall { data: 3 }, &(), 0), ValidTransaction { priority: 3, ..Default::default() } ) } diff --git a/bin/runtime-common/src/mock.rs b/bin/runtime-common/src/mock.rs index f147f1404..deee4524e 100644 --- a/bin/runtime-common/src/mock.rs +++ b/bin/runtime-common/src/mock.rs @@ -141,7 +141,7 @@ parameter_types! 
{ pub const ReserveId: [u8; 8] = *b"brdgrlrs"; } -#[derive_impl(frame_system::config_preludes::TestDefaultConfig as frame_system::DefaultConfig)] +#[derive_impl(frame_system::config_preludes::TestDefaultConfig)] impl frame_system::Config for TestRuntime { type Hash = ThisChainHash; type Hashing = ThisChainHasher; @@ -158,13 +158,13 @@ impl pallet_utility::Config for TestRuntime { type WeightInfo = (); } -#[derive_impl(pallet_balances::config_preludes::TestDefaultConfig as pallet_balances::DefaultConfig)] +#[derive_impl(pallet_balances::config_preludes::TestDefaultConfig)] impl pallet_balances::Config for TestRuntime { type ReserveIdentifier = [u8; 8]; type AccountStore = System; } -#[derive_impl(pallet_transaction_payment::config_preludes::TestDefaultConfig as pallet_transaction_payment::DefaultConfig)] +#[derive_impl(pallet_transaction_payment::config_preludes::TestDefaultConfig)] impl pallet_transaction_payment::Config for TestRuntime { type OnChargeTransaction = pallet_transaction_payment::CurrencyAdapter; type OperationalFeeMultiplier = ConstU8<5>; @@ -177,6 +177,7 @@ impl pallet_transaction_payment::Config for TestRuntime { MinimumMultiplier, MaximumMultiplier, >; + type RuntimeEvent = RuntimeEvent; } impl pallet_bridge_grandpa::Config for TestRuntime { diff --git a/bin/runtime-common/src/priority_calculator.rs b/bin/runtime-common/src/priority_calculator.rs index 0c5301833..c2737128e 100644 --- a/bin/runtime-common/src/priority_calculator.rs +++ b/bin/runtime-common/src/priority_calculator.rs @@ -128,7 +128,7 @@ mod integrity_tests { Runtime::RuntimeCall: Dispatchable, BalanceOf: Send + Sync + FixedPointOperand, { - // esimate priority of transaction that delivers one message and has large tip + // estimate priority of transaction that delivers one message and has large tip let maximal_messages_in_delivery_transaction = Runtime::MaxUnconfirmedMessagesAtInboundLane::get(); let small_with_tip_priority = @@ -169,15 +169,12 @@ mod integrity_tests { // nodes to 
the proof (x0.5 because we expect some nodes to be reused) let estimated_message_size = 512; // let's say all our messages have the same dispatch weight - let estimated_message_dispatch_weight = >::WeightInfo::message_dispatch_weight( - estimated_message_size - ); + let estimated_message_dispatch_weight = + Runtime::WeightInfo::message_dispatch_weight(estimated_message_size); // messages proof argument size is (for every message) messages size + some additional // trie nodes. Some of them are reused by different messages, so let's take 2/3 of default // "overhead" constant - let messages_proof_size = >::WeightInfo::expected_extra_storage_proof_size() + let messages_proof_size = Runtime::WeightInfo::expected_extra_storage_proof_size() .saturating_mul(2) .saturating_div(3) .saturating_add(estimated_message_size) @@ -185,7 +182,7 @@ mod integrity_tests { // finally we are able to estimate transaction size and weight let transaction_size = base_tx_size.saturating_add(messages_proof_size); - let transaction_weight = >::WeightInfo::receive_messages_proof_weight( + let transaction_weight = Runtime::WeightInfo::receive_messages_proof_weight( &PreComputedSize(transaction_size as _), messages as _, estimated_message_dispatch_weight.saturating_mul(messages), diff --git a/bin/runtime-common/src/refund_relayer_extension.rs b/bin/runtime-common/src/refund_relayer_extension.rs index b912f8445..8e901d728 100644 --- a/bin/runtime-common/src/refund_relayer_extension.rs +++ b/bin/runtime-common/src/refund_relayer_extension.rs @@ -16,7 +16,7 @@ //! Signed extension that refunds relayer if he has delivered some new messages. //! It also refunds transaction cost if the transaction is an `utility.batchAll()` -//! with calls that are: delivering new messsage and all necessary underlying headers +//! with calls that are: delivering new message and all necessary underlying headers //! (parachain or relay chain). 
use crate::messages_call_ext::{ @@ -48,12 +48,9 @@ use pallet_transaction_payment::{Config as TransactionPaymentConfig, OnChargeTra use pallet_utility::{Call as UtilityCall, Config as UtilityConfig, Pallet as UtilityPallet}; use scale_info::TypeInfo; use sp_runtime::{ - traits::{ - AsSystemOriginSigner, DispatchInfoOf, Dispatchable, Get, PostDispatchInfoOf, - TransactionExtension, TransactionExtensionBase, ValidateResult, Zero, - }, + traits::{DispatchInfoOf, Dispatchable, Get, PostDispatchInfoOf, SignedExtension, Zero}, transaction_validity::{ - InvalidTransaction, TransactionPriority, TransactionValidityError, ValidTransactionBuilder, + TransactionPriority, TransactionValidity, TransactionValidityError, ValidTransactionBuilder, }, DispatchResult, FixedPointOperand, RuntimeDebug, }; @@ -242,8 +239,8 @@ pub enum RelayerAccountAction { Slash(AccountId, RewardsAccountParams), } -/// Everything common among our refund transaction extensions. -pub trait RefundTransactionExtension: +/// Everything common among our refund signed extensions. +pub trait RefundSignedExtension: 'static + Clone + Codec + sp_std::fmt::Debug + Default + Eq + PartialEq + Send + Sync + TypeInfo where >::BridgedChain: @@ -459,8 +456,8 @@ where } } -/// Adapter that allow implementing `sp_runtime::traits::TransactionExtension` for any -/// `RefundTransactionExtension`. +/// Adapter that allow implementing `sp_runtime::traits::SignedExtension` for any +/// `RefundSignedExtension`. 
#[derive( DefaultNoBound, CloneNoBound, @@ -471,13 +468,12 @@ where RuntimeDebugNoBound, TypeInfo, )] -pub struct RefundTransactionExtensionAdapter(T) +pub struct RefundSignedExtensionAdapter(T) where >::BridgedChain: Chain; -impl TransactionExtensionBase - for RefundTransactionExtensionAdapter +impl SignedExtension for RefundSignedExtensionAdapter where >::BridgedChain: Chain, @@ -487,35 +483,22 @@ where + MessagesCallSubType::Instance>, { const IDENTIFIER: &'static str = T::Id::STR; - type Implicit = (); -} - -impl TransactionExtension, Context> - for RefundTransactionExtensionAdapter -where - >::BridgedChain: - Chain, - CallOf: Dispatchable - + IsSubType, T::Runtime>> - + GrandpaCallSubType - + MessagesCallSubType::Instance>, - as Dispatchable>::RuntimeOrigin: - AsSystemOriginSigner> + Clone, -{ + type AccountId = AccountIdOf; + type Call = CallOf; + type AdditionalSigned = (); type Pre = Option>>; - type Val = Option; + + fn additional_signed(&self) -> Result<(), TransactionValidityError> { + Ok(()) + } fn validate( &self, - origin: as Dispatchable>::RuntimeOrigin, - call: &CallOf, - _info: &DispatchInfoOf>, + who: &Self::AccountId, + call: &Self::Call, + _info: &DispatchInfoOf, _len: usize, - _context: &mut Context, - _self_implicit: Self::Implicit, - _inherited_implication: &impl Encode, - ) -> ValidateResult> { - let who = origin.as_system_origin_signer().ok_or(InvalidTransaction::BadSigner)?; + ) -> TransactionValidity { // this is the only relevant line of code for the `pre_dispatch` // // we're not calling `validate` from `pre_dispatch` directly because of performance @@ -528,12 +511,12 @@ where // we only boost priority of presumably correct message delivery transactions let bundled_messages = match T::bundled_messages_for_priority_boost(parsed_call.as_ref()) { Some(bundled_messages) => bundled_messages, - None => return Ok((Default::default(), parsed_call, origin)), + None => return Ok(Default::default()), }; // we only boost priority if relayer has 
staked required balance if !RelayersPallet::::is_registration_active(who) { - return Ok((Default::default(), parsed_call, origin)) + return Ok(Default::default()) } // compute priority boost @@ -552,21 +535,20 @@ where priority_boost, ); - let validity = valid_transaction.build()?; - Ok((validity, parsed_call, origin)) + valid_transaction.build() } - fn prepare( + fn pre_dispatch( self, - val: Self::Val, - origin: & as Dispatchable>::RuntimeOrigin, - _call: &CallOf, - _info: &DispatchInfoOf>, + who: &Self::AccountId, + call: &Self::Call, + _info: &DispatchInfoOf, _len: usize, - _context: &Context, ) -> Result { - let who = origin.as_system_origin_signer().ok_or(InvalidTransaction::BadSigner)?; - Ok(val.map(|call_info| { + // this is a relevant piece of `validate` that we need here (in `pre_dispatch`) + let parsed_call = T::parse_and_check_for_obsolete_call(call)?; + + Ok(parsed_call.map(|call_info| { log::trace!( target: "runtime::bridge", "{} via {:?} parsed bridge transaction in pre-dispatch: {:?}", @@ -579,14 +561,13 @@ where } fn post_dispatch( - pre: Self::Pre, - info: &DispatchInfoOf>, - post_info: &PostDispatchInfoOf>, + pre: Option, + info: &DispatchInfoOf, + post_info: &PostDispatchInfoOf, len: usize, result: &DispatchResult, - _context: &Context, ) -> Result<(), TransactionValidityError> { - let call_result = T::analyze_call_result(Some(pre), info, post_info, len, result); + let call_result = T::analyze_call_result(pre, info, post_info, len, result); match call_result { RelayerAccountAction::None => (), @@ -614,7 +595,7 @@ where } } -/// Transaction extension that refunds a relayer for new messages coming from a parachain. +/// Signed extension that refunds a relayer for new messages coming from a parachain. /// /// Also refunds relayer for successful finality delivery if it comes in batch (`utility.batchAll`) /// with message delivery transaction. 
Batch may deliver either both relay chain header and @@ -655,7 +636,7 @@ pub struct RefundBridgedParachainMessages, ); -impl RefundTransactionExtension +impl RefundSignedExtension for RefundBridgedParachainMessages where Self: 'static + Send + Sync, @@ -749,13 +730,13 @@ where } } -/// Transaction extension that refunds a relayer for new messages coming from a standalone (GRANDPA) +/// Signed extension that refunds a relayer for new messages coming from a standalone (GRANDPA) /// chain. /// /// Also refunds relayer for successful finality delivery if it comes in batch (`utility.batchAll`) /// with message delivery transaction. Batch may deliver either both relay chain header and -/// parachain head, or just parachain head. Corresponding headers must be used in messages proof -/// verification. +/// parachain head, or just parachain head. Corresponding headers must be used in messages +/// proof verification. /// /// Extension does not refund transaction tip due to security reasons. #[derive( @@ -790,7 +771,7 @@ pub struct RefundBridgedGrandpaMessages, ); -impl RefundTransactionExtension +impl RefundSignedExtension for RefundBridgedGrandpaMessages where Self: 'static + Send + Sync, @@ -888,8 +869,8 @@ mod tests { Call as ParachainsCall, Pallet as ParachainsPallet, RelayBlockHash, }; use sp_runtime::{ - traits::{ConstU64, DispatchTransaction, Header as HeaderT}, - transaction_validity::{InvalidTransaction, TransactionValidity, ValidTransaction}, + traits::{ConstU64, Header as HeaderT}, + transaction_validity::{InvalidTransaction, ValidTransaction}, DispatchError, }; @@ -918,7 +899,7 @@ mod tests { ConstU64<1>, StrTestExtension, >; - type TestGrandpaExtension = RefundTransactionExtensionAdapter; + type TestGrandpaExtension = RefundSignedExtensionAdapter; type TestExtensionProvider = RefundBridgedParachainMessages< TestRuntime, DefaultRefundableParachainId<(), TestParachain>, @@ -927,7 +908,7 @@ mod tests { ConstU64<1>, StrTestExtension, >; - type TestExtension = 
RefundTransactionExtensionAdapter; + type TestExtension = RefundSignedExtensionAdapter; fn initial_balance_of_relayer_account_at_this_chain() -> ThisChainBalance { let test_stake: ThisChainBalance = TestStake::get(); @@ -1426,28 +1407,14 @@ mod tests { fn run_validate(call: RuntimeCall) -> TransactionValidity { let extension: TestExtension = - RefundTransactionExtensionAdapter(RefundBridgedParachainMessages(PhantomData)); - extension - .validate_only( - Some(relayer_account_at_this_chain()).into(), - &call, - &DispatchInfo::default(), - 0, - ) - .map(|res| res.0) + RefundSignedExtensionAdapter(RefundBridgedParachainMessages(PhantomData)); + extension.validate(&relayer_account_at_this_chain(), &call, &DispatchInfo::default(), 0) } fn run_grandpa_validate(call: RuntimeCall) -> TransactionValidity { let extension: TestGrandpaExtension = - RefundTransactionExtensionAdapter(RefundBridgedGrandpaMessages(PhantomData)); - extension - .validate_only( - Some(relayer_account_at_this_chain()).into(), - &call, - &DispatchInfo::default(), - 0, - ) - .map(|res| res.0) + RefundSignedExtensionAdapter(RefundBridgedGrandpaMessages(PhantomData)); + extension.validate(&relayer_account_at_this_chain(), &call, &DispatchInfo::default(), 0) } fn run_validate_ignore_priority(call: RuntimeCall) -> TransactionValidity { @@ -1461,30 +1428,16 @@ mod tests { call: RuntimeCall, ) -> Result>, TransactionValidityError> { let extension: TestExtension = - RefundTransactionExtensionAdapter(RefundBridgedParachainMessages(PhantomData)); - extension - .validate_and_prepare( - Some(relayer_account_at_this_chain()).into(), - &call, - &DispatchInfo::default(), - 0, - ) - .map(|(pre, _)| pre) + RefundSignedExtensionAdapter(RefundBridgedParachainMessages(PhantomData)); + extension.pre_dispatch(&relayer_account_at_this_chain(), &call, &DispatchInfo::default(), 0) } fn run_grandpa_pre_dispatch( call: RuntimeCall, ) -> Result>, TransactionValidityError> { let extension: TestGrandpaExtension = - 
RefundTransactionExtensionAdapter(RefundBridgedGrandpaMessages(PhantomData)); - extension - .validate_and_prepare( - Some(relayer_account_at_this_chain()).into(), - &call, - &DispatchInfo::default(), - 0, - ) - .map(|(pre, _)| pre) + RefundSignedExtensionAdapter(RefundBridgedGrandpaMessages(PhantomData)); + extension.pre_dispatch(&relayer_account_at_this_chain(), &call, &DispatchInfo::default(), 0) } fn dispatch_info() -> DispatchInfo { @@ -1507,12 +1460,11 @@ mod tests { dispatch_result: DispatchResult, ) { let post_dispatch_result = TestExtension::post_dispatch( - pre_dispatch_data, + Some(pre_dispatch_data), &dispatch_info(), &post_dispatch_info(), 1024, &dispatch_result, - &(), ); assert_eq!(post_dispatch_result, Ok(())); } diff --git a/modules/grandpa/README.md b/modules/grandpa/README.md index 43ee5c316..992bd2cc4 100644 --- a/modules/grandpa/README.md +++ b/modules/grandpa/README.md @@ -27,7 +27,7 @@ for provided header. There are two main things in GRANDPA that help building light clients: - there's no need to import all headers of the bridged chain. Light client may import finalized headers or just - some of finalized headders that it consider useful. While the validators set stays the same, the client may + some of finalized headers that it consider useful. While the validators set stays the same, the client may import any header that is finalized by this set; - when validators set changes, the GRANDPA gadget adds next set to the header. So light client doesn't need to diff --git a/modules/grandpa/src/mock.rs b/modules/grandpa/src/mock.rs index e41e89341..4318d663a 100644 --- a/modules/grandpa/src/mock.rs +++ b/modules/grandpa/src/mock.rs @@ -42,7 +42,7 @@ construct_runtime! 
{ } } -#[derive_impl(frame_system::config_preludes::TestDefaultConfig as frame_system::DefaultConfig)] +#[derive_impl(frame_system::config_preludes::TestDefaultConfig)] impl frame_system::Config for TestRuntime { type Block = Block; } diff --git a/modules/messages/src/mock.rs b/modules/messages/src/mock.rs index af9212053..ec63f15b9 100644 --- a/modules/messages/src/mock.rs +++ b/modules/messages/src/mock.rs @@ -77,14 +77,14 @@ frame_support::construct_runtime! { pub type DbWeight = RocksDbWeight; -#[derive_impl(frame_system::config_preludes::TestDefaultConfig as frame_system::DefaultConfig)] +#[derive_impl(frame_system::config_preludes::TestDefaultConfig)] impl frame_system::Config for TestRuntime { type Block = Block; type AccountData = pallet_balances::AccountData; type DbWeight = DbWeight; } -#[derive_impl(pallet_balances::config_preludes::TestDefaultConfig as pallet_balances::DefaultConfig)] +#[derive_impl(pallet_balances::config_preludes::TestDefaultConfig)] impl pallet_balances::Config for TestRuntime { type ReserveIdentifier = [u8; 8]; type AccountStore = System; diff --git a/modules/parachains/src/mock.rs b/modules/parachains/src/mock.rs index 143f11d98..3af3fd3e7 100644 --- a/modules/parachains/src/mock.rs +++ b/modules/parachains/src/mock.rs @@ -161,7 +161,7 @@ construct_runtime! { } } -#[derive_impl(frame_system::config_preludes::TestDefaultConfig as frame_system::DefaultConfig)] +#[derive_impl(frame_system::config_preludes::TestDefaultConfig)] impl frame_system::Config for TestRuntime { type Block = Block; } diff --git a/modules/relayers/src/mock.rs b/modules/relayers/src/mock.rs index 667b10e5c..312478789 100644 --- a/modules/relayers/src/mock.rs +++ b/modules/relayers/src/mock.rs @@ -59,14 +59,14 @@ parameter_types! 
{ pub const Lease: BlockNumber = 8; } -#[derive_impl(frame_system::config_preludes::TestDefaultConfig as frame_system::DefaultConfig)] +#[derive_impl(frame_system::config_preludes::TestDefaultConfig)] impl frame_system::Config for TestRuntime { type Block = Block; type AccountData = pallet_balances::AccountData; type DbWeight = DbWeight; } -#[derive_impl(pallet_balances::config_preludes::TestDefaultConfig as pallet_balances::DefaultConfig)] +#[derive_impl(pallet_balances::config_preludes::TestDefaultConfig)] impl pallet_balances::Config for TestRuntime { type ReserveIdentifier = [u8; 8]; type AccountStore = System; diff --git a/modules/xcm-bridge-hub-router/src/mock.rs b/modules/xcm-bridge-hub-router/src/mock.rs index 6dbfba5f6..54e10966d 100644 --- a/modules/xcm-bridge-hub-router/src/mock.rs +++ b/modules/xcm-bridge-hub-router/src/mock.rs @@ -64,7 +64,7 @@ parameter_types! { pub UnknownXcmVersionLocation: Location = Location::new(2, [GlobalConsensus(BridgedNetworkId::get()), Parachain(9999)]); } -#[derive_impl(frame_system::config_preludes::TestDefaultConfig as frame_system::DefaultConfig)] +#[derive_impl(frame_system::config_preludes::TestDefaultConfig)] impl frame_system::Config for TestRuntime { type Block = Block; } diff --git a/modules/xcm-bridge-hub/src/mock.rs b/modules/xcm-bridge-hub/src/mock.rs index e40e1f9fb..4c09bce56 100644 --- a/modules/xcm-bridge-hub/src/mock.rs +++ b/modules/xcm-bridge-hub/src/mock.rs @@ -64,7 +64,7 @@ parameter_types! 
{ pub const ExistentialDeposit: Balance = 1; } -#[derive_impl(frame_system::config_preludes::TestDefaultConfig as frame_system::DefaultConfig)] +#[derive_impl(frame_system::config_preludes::TestDefaultConfig)] impl frame_system::Config for TestRuntime { type AccountId = AccountId; type AccountData = pallet_balances::AccountData; @@ -72,7 +72,7 @@ impl frame_system::Config for TestRuntime { type Lookup = IdentityLookup; } -#[derive_impl(pallet_balances::config_preludes::TestDefaultConfig as pallet_balances::DefaultConfig)] +#[derive_impl(pallet_balances::config_preludes::TestDefaultConfig)] impl pallet_balances::Config for TestRuntime { type AccountStore = System; } diff --git a/primitives/chain-bridge-hub-cumulus/src/lib.rs b/primitives/chain-bridge-hub-cumulus/src/lib.rs index f186f6427..c49aa4b85 100644 --- a/primitives/chain-bridge-hub-cumulus/src/lib.rs +++ b/primitives/chain-bridge-hub-cumulus/src/lib.rs @@ -26,7 +26,7 @@ pub use bp_polkadot_core::{ }; use bp_messages::*; -use bp_polkadot_core::SuffixedCommonTransactionExtension; +use bp_polkadot_core::SuffixedCommonSignedExtension; use bp_runtime::extensions::{ BridgeRejectObsoleteHeadersAndMessages, RefundBridgedParachainMessagesSchema, }; @@ -164,7 +164,7 @@ pub const MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX: MessageNonce = 1024; pub const MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX: MessageNonce = 4096; /// Signed extension that is used by all bridge hubs. -pub type TransactionExtension = SuffixedCommonTransactionExtension<( +pub type SignedExtension = SuffixedCommonSignedExtension<( BridgeRejectObsoleteHeadersAndMessages, RefundBridgedParachainMessagesSchema, )>; diff --git a/primitives/chain-bridge-hub-rococo/src/lib.rs b/primitives/chain-bridge-hub-rococo/src/lib.rs index 992ef1bd7..c4e697fbe 100644 --- a/primitives/chain-bridge-hub-rococo/src/lib.rs +++ b/primitives/chain-bridge-hub-rococo/src/lib.rs @@ -107,5 +107,5 @@ frame_support::parameter_types! 
{ /// Transaction fee that is paid at the Rococo BridgeHub for delivering single outbound message confirmation. /// (initially was calculated by test `BridgeHubRococo::can_calculate_fee_for_complex_message_confirmation_transaction` + `33%`) - pub const BridgeHubRococoBaseConfirmationFeeInRocs: u128 = 5_380_904_835; + pub const BridgeHubRococoBaseConfirmationFeeInRocs: u128 = 5_380_829_647; } diff --git a/primitives/chain-kusama/src/lib.rs b/primitives/chain-kusama/src/lib.rs index 253a1010e..e3b4d0520 100644 --- a/primitives/chain-kusama/src/lib.rs +++ b/primitives/chain-kusama/src/lib.rs @@ -59,8 +59,8 @@ impl ChainWithGrandpa for Kusama { const AVERAGE_HEADER_SIZE: u32 = AVERAGE_HEADER_SIZE; } -// The TransactionExtension used by Kusama. -pub use bp_polkadot_core::CommonTransactionExtension as TransactionExtension; +// The SignedExtension used by Kusama. +pub use bp_polkadot_core::CommonSignedExtension as SignedExtension; /// Name of the parachains pallet in the Kusama runtime. pub const PARAS_PALLET_NAME: &str = "Paras"; diff --git a/primitives/chain-polkadot-bulletin/src/lib.rs b/primitives/chain-polkadot-bulletin/src/lib.rs index 73dd122bd..f2eebf931 100644 --- a/primitives/chain-polkadot-bulletin/src/lib.rs +++ b/primitives/chain-polkadot-bulletin/src/lib.rs @@ -25,7 +25,7 @@ use bp_runtime::{ decl_bridge_finality_runtime_apis, decl_bridge_messages_runtime_apis, extensions::{ CheckEra, CheckGenesis, CheckNonZeroSender, CheckNonce, CheckSpecVersion, CheckTxVersion, - CheckWeight, GenericTransactionExtension, GenericTransactionExtensionSchema, + CheckWeight, GenericSignedExtension, GenericSignedExtensionSchema, }, Chain, ChainId, TransactionEra, }; @@ -37,12 +37,7 @@ use frame_support::{ }; use frame_system::limits; use scale_info::TypeInfo; -use sp_runtime::{ - impl_tx_ext_default, - traits::{Dispatchable, TransactionExtensionBase}, - transaction_validity::TransactionValidityError, - Perbill, -}; +use sp_runtime::{traits::DispatchInfoOf, 
transaction_validity::TransactionValidityError, Perbill}; // This chain reuses most of Polkadot primitives. pub use bp_polkadot_core::{ @@ -76,10 +71,10 @@ pub const MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX: MessageNonce = 1024; pub const MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX: MessageNonce = 4096; /// This signed extension is used to ensure that the chain transactions are signed by proper -pub type ValidateSigned = GenericTransactionExtensionSchema<(), ()>; +pub type ValidateSigned = GenericSignedExtensionSchema<(), ()>; /// Signed extension schema, used by Polkadot Bulletin. -pub type TransactionExtensionSchema = GenericTransactionExtension<( +pub type SignedExtensionSchema = GenericSignedExtension<( ( CheckNonZeroSender, CheckSpecVersion, @@ -92,30 +87,34 @@ pub type TransactionExtensionSchema = GenericTransactionExtension<( ValidateSigned, )>; -/// Transaction extension, used by Polkadot Bulletin. +/// Signed extension, used by Polkadot Bulletin. #[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, TypeInfo)] -pub struct TransactionExtension(TransactionExtensionSchema); +pub struct SignedExtension(SignedExtensionSchema); -impl TransactionExtensionBase for TransactionExtension { +impl sp_runtime::traits::SignedExtension for SignedExtension { const IDENTIFIER: &'static str = "Not needed."; - type Implicit = ::Implicit; + type AccountId = (); + type Call = (); + type AdditionalSigned = + ::AdditionalSigned; + type Pre = (); - fn implicit(&self) -> Result { - ::implicit(&self.0) + fn additional_signed(&self) -> Result { + self.0.additional_signed() } -} -impl sp_runtime::traits::TransactionExtension for TransactionExtension -where - C: Dispatchable, -{ - type Pre = (); - type Val = (); - - impl_tx_ext_default!(C; Context; validate prepare); + fn pre_dispatch( + self, + _who: &Self::AccountId, + _call: &Self::Call, + _info: &DispatchInfoOf, + _len: usize, + ) -> Result { + Ok(()) + } } -impl TransactionExtension { +impl SignedExtension { /// Create signed 
extension from its components. pub fn from_params( spec_version: u32, @@ -124,7 +123,7 @@ impl TransactionExtension { genesis_hash: Hash, nonce: Nonce, ) -> Self { - Self(GenericTransactionExtension::new( + Self(GenericSignedExtension::new( ( ( (), // non-zero sender diff --git a/primitives/chain-polkadot/src/lib.rs b/primitives/chain-polkadot/src/lib.rs index e5e2e7c3a..fc5e10308 100644 --- a/primitives/chain-polkadot/src/lib.rs +++ b/primitives/chain-polkadot/src/lib.rs @@ -61,8 +61,8 @@ impl ChainWithGrandpa for Polkadot { const AVERAGE_HEADER_SIZE: u32 = AVERAGE_HEADER_SIZE; } -/// The TransactionExtension used by Polkadot. -pub type TransactionExtension = SuffixedCommonTransactionExtension; +/// The SignedExtension used by Polkadot. +pub type SignedExtension = SuffixedCommonSignedExtension; /// Name of the parachains pallet in the Polkadot runtime. pub const PARAS_PALLET_NAME: &str = "Paras"; diff --git a/primitives/chain-rococo/src/lib.rs b/primitives/chain-rococo/src/lib.rs index 267c6b2b1..f1b256f0f 100644 --- a/primitives/chain-rococo/src/lib.rs +++ b/primitives/chain-rococo/src/lib.rs @@ -59,8 +59,8 @@ impl ChainWithGrandpa for Rococo { const AVERAGE_HEADER_SIZE: u32 = AVERAGE_HEADER_SIZE; } -// The TransactionExtension used by Rococo. -pub use bp_polkadot_core::CommonTransactionExtension as TransactionExtension; +// The SignedExtension used by Rococo. +pub use bp_polkadot_core::CommonSignedExtension as SignedExtension; /// Name of the parachains pallet in the Rococo runtime. pub const PARAS_PALLET_NAME: &str = "Paras"; diff --git a/primitives/chain-westend/src/lib.rs b/primitives/chain-westend/src/lib.rs index afa02e8ee..f03fd2160 100644 --- a/primitives/chain-westend/src/lib.rs +++ b/primitives/chain-westend/src/lib.rs @@ -59,8 +59,8 @@ impl ChainWithGrandpa for Westend { const AVERAGE_HEADER_SIZE: u32 = AVERAGE_HEADER_SIZE; } -// The TransactionExtension used by Westend. 
-pub use bp_polkadot_core::CommonTransactionExtension as TransactionExtension; +// The SignedExtension used by Westend. +pub use bp_polkadot_core::CommonSignedExtension as SignedExtension; /// Name of the parachains pallet in the Rococo runtime. pub const PARAS_PALLET_NAME: &str = "Paras"; diff --git a/primitives/polkadot-core/Cargo.toml b/primitives/polkadot-core/Cargo.toml index dd3912429..4851ce14c 100644 --- a/primitives/polkadot-core/Cargo.toml +++ b/primitives/polkadot-core/Cargo.toml @@ -13,7 +13,7 @@ workspace = true codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["derive"] } parity-util-mem = { version = "0.12.0", optional = true } scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } -serde = { default-features = false, features = ["derive"], optional = true, workspace = true } +serde = { optional = true, features = ["derive"], workspace = true, default-features = true } # Bridge Dependencies diff --git a/primitives/polkadot-core/src/lib.rs b/primitives/polkadot-core/src/lib.rs index d59b99db4..df2836495 100644 --- a/primitives/polkadot-core/src/lib.rs +++ b/primitives/polkadot-core/src/lib.rs @@ -24,8 +24,8 @@ use bp_runtime::{ self, extensions::{ ChargeTransactionPayment, CheckEra, CheckGenesis, CheckNonZeroSender, CheckNonce, - CheckSpecVersion, CheckTxVersion, CheckWeight, GenericTransactionExtension, - TransactionExtensionSchema, + CheckSpecVersion, CheckTxVersion, CheckWeight, GenericSignedExtension, + SignedExtensionSchema, }, EncodedOrDecodedCall, StorageMapKeyProvider, TransactionEra, }; @@ -229,12 +229,8 @@ pub type SignedBlock = generic::SignedBlock; pub type Balance = u128; /// Unchecked Extrinsic type. 
-pub type UncheckedExtrinsic = generic::UncheckedExtrinsic< - AccountAddress, - EncodedOrDecodedCall, - Signature, - TransactionExt, ->; +pub type UncheckedExtrinsic = + generic::UncheckedExtrinsic, Signature, SignedExt>; /// Account address, used by the Polkadot-like chain. pub type Address = MultiAddress; @@ -279,7 +275,7 @@ impl AccountInfoStorageMapKeyProvider { } /// Extra signed extension data that is used by most chains. -pub type CommonTransactionExtra = ( +pub type CommonSignedExtra = ( CheckNonZeroSender, CheckSpecVersion, CheckTxVersion, @@ -290,12 +286,12 @@ pub type CommonTransactionExtra = ( ChargeTransactionPayment, ); -/// Extra transaction extension data that starts with `CommonTransactionExtra`. -pub type SuffixedCommonTransactionExtension = - GenericTransactionExtension<(CommonTransactionExtra, Suffix)>; +/// Extra signed extension data that starts with `CommonSignedExtra`. +pub type SuffixedCommonSignedExtension = + GenericSignedExtension<(CommonSignedExtra, Suffix)>; -/// Helper trait to define some extra methods on `SuffixedCommonTransactionExtension`. -pub trait SuffixedCommonTransactionExtensionExt { +/// Helper trait to define some extra methods on `SuffixedCommonSignedExtension`. +pub trait SuffixedCommonSignedExtensionExt { /// Create signed extension from its components. fn from_params( spec_version: u32, @@ -304,7 +300,7 @@ pub trait SuffixedCommonTransactionExtensionExt Self; /// Return transaction nonce. 
@@ -314,10 +310,9 @@ pub trait SuffixedCommonTransactionExtensionExt Balance; } -impl SuffixedCommonTransactionExtensionExt - for SuffixedCommonTransactionExtension +impl SuffixedCommonSignedExtensionExt for SuffixedCommonSignedExtension where - Suffix: TransactionExtensionSchema, + Suffix: SignedExtensionSchema, { fn from_params( spec_version: u32, @@ -326,9 +321,9 @@ where genesis_hash: Hash, nonce: Nonce, tip: Balance, - extra: (Suffix::Payload, Suffix::Implicit), + extra: (Suffix::Payload, Suffix::AdditionalSigned), ) -> Self { - GenericTransactionExtension::new( + GenericSignedExtension::new( ( ( (), // non-zero sender @@ -370,7 +365,7 @@ where } /// Signed extension that is used by most chains. -pub type CommonTransactionExtension = SuffixedCommonTransactionExtension<()>; +pub type CommonSignedExtension = SuffixedCommonSignedExtension<()>; #[cfg(test)] mod tests { diff --git a/primitives/runtime/src/extensions.rs b/primitives/runtime/src/extensions.rs index a31e7b5bb..d896bc92e 100644 --- a/primitives/runtime/src/extensions.rs +++ b/primitives/runtime/src/extensions.rs @@ -20,138 +20,135 @@ use codec::{Compact, Decode, Encode}; use impl_trait_for_tuples::impl_for_tuples; use scale_info::{StaticTypeInfo, TypeInfo}; use sp_runtime::{ - impl_tx_ext_default, - traits::{Dispatchable, TransactionExtension, TransactionExtensionBase}, + traits::{DispatchInfoOf, SignedExtension}, transaction_validity::TransactionValidityError, }; use sp_std::{fmt::Debug, marker::PhantomData}; -/// Trait that describes some properties of a `TransactionExtension` that are needed in order to -/// send a transaction to the chain. -pub trait TransactionExtensionSchema: - Encode + Decode + Debug + Eq + Clone + StaticTypeInfo -{ +/// Trait that describes some properties of a `SignedExtension` that are needed in order to send a +/// transaction to the chain. 
+pub trait SignedExtensionSchema: Encode + Decode + Debug + Eq + Clone + StaticTypeInfo { /// A type of the data encoded as part of the transaction. type Payload: Encode + Decode + Debug + Eq + Clone + StaticTypeInfo; /// Parameters which are part of the payload used to produce transaction signature, /// but don't end up in the transaction itself (i.e. inherent part of the runtime). - type Implicit: Encode + Decode + Debug + Eq + Clone + StaticTypeInfo; + type AdditionalSigned: Encode + Debug + Eq + Clone + StaticTypeInfo; } -impl TransactionExtensionSchema for () { +impl SignedExtensionSchema for () { type Payload = (); - type Implicit = (); + type AdditionalSigned = (); } -/// An implementation of `TransactionExtensionSchema` using generic params. +/// An implementation of `SignedExtensionSchema` using generic params. #[derive(Encode, Decode, Clone, Debug, PartialEq, Eq, TypeInfo)] -pub struct GenericTransactionExtensionSchema(PhantomData<(P, S)>); +pub struct GenericSignedExtensionSchema(PhantomData<(P, S)>); -impl TransactionExtensionSchema for GenericTransactionExtensionSchema +impl SignedExtensionSchema for GenericSignedExtensionSchema where P: Encode + Decode + Debug + Eq + Clone + StaticTypeInfo, - S: Encode + Decode + Debug + Eq + Clone + StaticTypeInfo, + S: Encode + Debug + Eq + Clone + StaticTypeInfo, { type Payload = P; - type Implicit = S; + type AdditionalSigned = S; } -/// The `TransactionExtensionSchema` for `frame_system::CheckNonZeroSender`. -pub type CheckNonZeroSender = GenericTransactionExtensionSchema<(), ()>; +/// The `SignedExtensionSchema` for `frame_system::CheckNonZeroSender`. +pub type CheckNonZeroSender = GenericSignedExtensionSchema<(), ()>; -/// The `TransactionExtensionSchema` for `frame_system::CheckSpecVersion`. -pub type CheckSpecVersion = GenericTransactionExtensionSchema<(), u32>; +/// The `SignedExtensionSchema` for `frame_system::CheckSpecVersion`. 
+pub type CheckSpecVersion = GenericSignedExtensionSchema<(), u32>; -/// The `TransactionExtensionSchema` for `frame_system::CheckTxVersion`. -pub type CheckTxVersion = GenericTransactionExtensionSchema<(), u32>; +/// The `SignedExtensionSchema` for `frame_system::CheckTxVersion`. +pub type CheckTxVersion = GenericSignedExtensionSchema<(), u32>; -/// The `TransactionExtensionSchema` for `frame_system::CheckGenesis`. -pub type CheckGenesis = GenericTransactionExtensionSchema<(), Hash>; +/// The `SignedExtensionSchema` for `frame_system::CheckGenesis`. +pub type CheckGenesis = GenericSignedExtensionSchema<(), Hash>; -/// The `TransactionExtensionSchema` for `frame_system::CheckEra`. -pub type CheckEra = GenericTransactionExtensionSchema; +/// The `SignedExtensionSchema` for `frame_system::CheckEra`. +pub type CheckEra = GenericSignedExtensionSchema; -/// The `TransactionExtensionSchema` for `frame_system::CheckNonce`. -pub type CheckNonce = GenericTransactionExtensionSchema, ()>; +/// The `SignedExtensionSchema` for `frame_system::CheckNonce`. +pub type CheckNonce = GenericSignedExtensionSchema, ()>; -/// The `TransactionExtensionSchema` for `frame_system::CheckWeight`. -pub type CheckWeight = GenericTransactionExtensionSchema<(), ()>; +/// The `SignedExtensionSchema` for `frame_system::CheckWeight`. +pub type CheckWeight = GenericSignedExtensionSchema<(), ()>; -/// The `TransactionExtensionSchema` for `pallet_transaction_payment::ChargeTransactionPayment`. -pub type ChargeTransactionPayment = - GenericTransactionExtensionSchema, ()>; +/// The `SignedExtensionSchema` for `pallet_transaction_payment::ChargeTransactionPayment`. +pub type ChargeTransactionPayment = GenericSignedExtensionSchema, ()>; -/// The `TransactionExtensionSchema` for `polkadot-runtime-common::PrevalidateAttests`. -pub type PrevalidateAttests = GenericTransactionExtensionSchema<(), ()>; +/// The `SignedExtensionSchema` for `polkadot-runtime-common::PrevalidateAttests`. 
+pub type PrevalidateAttests = GenericSignedExtensionSchema<(), ()>; -/// The `TransactionExtensionSchema` for `BridgeRejectObsoleteHeadersAndMessages`. -pub type BridgeRejectObsoleteHeadersAndMessages = GenericTransactionExtensionSchema<(), ()>; +/// The `SignedExtensionSchema` for `BridgeRejectObsoleteHeadersAndMessages`. +pub type BridgeRejectObsoleteHeadersAndMessages = GenericSignedExtensionSchema<(), ()>; -/// The `TransactionExtensionSchema` for `RefundBridgedParachainMessages`. +/// The `SignedExtensionSchema` for `RefundBridgedParachainMessages`. /// This schema is dedicated for `RefundBridgedParachainMessages` signed extension as /// wildcard/placeholder, which relies on the scale encoding for `()` or `((), ())`, or `((), (), /// ())` is the same. So runtime can contains any kind of tuple: /// `(BridgeRefundBridgeHubRococoMessages)` /// `(BridgeRefundBridgeHubRococoMessages, BridgeRefundBridgeHubWestendMessages)` /// `(BridgeRefundParachainMessages1, ..., BridgeRefundParachainMessagesN)` -pub type RefundBridgedParachainMessagesSchema = GenericTransactionExtensionSchema<(), ()>; +pub type RefundBridgedParachainMessagesSchema = GenericSignedExtensionSchema<(), ()>; #[impl_for_tuples(1, 12)] -impl TransactionExtensionSchema for Tuple { +impl SignedExtensionSchema for Tuple { for_tuples!( type Payload = ( #( Tuple::Payload ),* ); ); - for_tuples!( type Implicit = ( #( Tuple::Implicit ),* ); ); + for_tuples!( type AdditionalSigned = ( #( Tuple::AdditionalSigned ),* ); ); } /// A simplified version of signed extensions meant for producing signed transactions /// and signed payloads in the client code. #[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, TypeInfo)] -pub struct GenericTransactionExtension { +pub struct GenericSignedExtension { /// A payload that is included in the transaction. pub payload: S::Payload, #[codec(skip)] // It may be set to `None` if extensions are decoded. 
We are never reconstructing transactions - // (and it makes no sense to do that) => decoded version of `TransactionExtensions` is only - // used to read fields of the `payload`. And when resigning transaction, we're reconstructing - // `TransactionExtensions` from scratch. - implicit: Option, + // (and it makes no sense to do that) => decoded version of `SignedExtensions` is only used to + // read fields of the `payload`. And when resigning transaction, we're reconstructing + // `SignedExtensions` from scratch. + additional_signed: Option, } -impl GenericTransactionExtension { - /// Create new `GenericTransactionExtension` object. - pub fn new(payload: S::Payload, implicit: Option) -> Self { - Self { payload, implicit } +impl GenericSignedExtension { + /// Create new `GenericSignedExtension` object. + pub fn new(payload: S::Payload, additional_signed: Option) -> Self { + Self { payload, additional_signed } } } -impl TransactionExtensionBase for GenericTransactionExtension +impl SignedExtension for GenericSignedExtension where - S: TransactionExtensionSchema, + S: SignedExtensionSchema, S::Payload: Send + Sync, - S::Implicit: Send + Sync, + S::AdditionalSigned: Send + Sync, { const IDENTIFIER: &'static str = "Not needed."; - type Implicit = S::Implicit; + type AccountId = (); + type Call = (); + type AdditionalSigned = S::AdditionalSigned; + type Pre = (); - fn implicit(&self) -> Result { + fn additional_signed(&self) -> Result { // we shall not ever see this error in relay, because we are never signing decoded // transactions. Instead we're constructing and signing new transactions. 
So the error code // is kinda random here - self.implicit - .clone() - .ok_or(frame_support::unsigned::TransactionValidityError::Unknown( + self.additional_signed.clone().ok_or( + frame_support::unsigned::TransactionValidityError::Unknown( frame_support::unsigned::UnknownTransaction::Custom(0xFF), - )) + ), + ) } -} -impl TransactionExtension for GenericTransactionExtension -where - C: Dispatchable, - S: TransactionExtensionSchema, - S::Payload: Send + Sync, - S::Implicit: Send + Sync, -{ - type Pre = (); - type Val = (); - impl_tx_ext_default!(C; Context; validate prepare); + fn pre_dispatch( + self, + _who: &Self::AccountId, + _call: &Self::Call, + _info: &DispatchInfoOf, + _len: usize, + ) -> Result { + Ok(()) + } } diff --git a/relays/bin-substrate/Cargo.toml b/relays/bin-substrate/Cargo.toml index d5873752e..2b89ce4f1 100644 --- a/relays/bin-substrate/Cargo.toml +++ b/relays/bin-substrate/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] anyhow = "1.0" async-std = "1.9.0" -async-trait = "0.1" +async-trait = "0.1.74" codec = { package = "parity-scale-codec", version = "3.1.5" } env_logger = "0.11" futures = "0.3.30" diff --git a/relays/client-bridge-hub-kusama/src/lib.rs b/relays/client-bridge-hub-kusama/src/lib.rs index 80f621dee..43dd53d2d 100644 --- a/relays/client-bridge-hub-kusama/src/lib.rs +++ b/relays/client-bridge-hub-kusama/src/lib.rs @@ -18,8 +18,8 @@ pub mod codegen_runtime; -use bp_bridge_hub_kusama::{TransactionExtension, AVERAGE_BLOCK_INTERVAL}; -use bp_polkadot::SuffixedCommonTransactionExtensionExt; +use bp_bridge_hub_kusama::{SignedExtension, AVERAGE_BLOCK_INTERVAL}; +use bp_polkadot::SuffixedCommonSignedExtensionExt; use codec::Encode; use relay_substrate_client::{ calls::UtilityCall as MockUtilityCall, Chain, ChainWithBalances, ChainWithMessages, @@ -37,8 +37,7 @@ pub type RuntimeCall = runtime_types::bridge_hub_kusama_runtime::RuntimeCall; pub type BridgeMessagesCall = runtime_types::pallet_bridge_messages::pallet::Call; pub type 
BridgeGrandpaCall = runtime_types::pallet_bridge_grandpa::pallet::Call; pub type BridgeParachainCall = runtime_types::pallet_bridge_parachains::pallet::Call; -type UncheckedExtrinsic = - bp_bridge_hub_kusama::UncheckedExtrinsic; +type UncheckedExtrinsic = bp_bridge_hub_kusama::UncheckedExtrinsic; type UtilityCall = runtime_types::pallet_utility::pallet::Call; /// Kusama chain definition @@ -88,7 +87,7 @@ impl ChainWithTransactions for BridgeHubKusama { ) -> Result { let raw_payload = SignedPayload::new( unsigned.call, - TransactionExtension::from_params( + SignedExtension::from_params( param.spec_version, param.transaction_version, unsigned.era, diff --git a/relays/client-bridge-hub-polkadot/src/lib.rs b/relays/client-bridge-hub-polkadot/src/lib.rs index ed147e92d..88b69065f 100644 --- a/relays/client-bridge-hub-polkadot/src/lib.rs +++ b/relays/client-bridge-hub-polkadot/src/lib.rs @@ -18,8 +18,8 @@ pub mod codegen_runtime; -use bp_bridge_hub_polkadot::{TransactionExtension, AVERAGE_BLOCK_INTERVAL}; -use bp_polkadot_core::SuffixedCommonTransactionExtensionExt; +use bp_bridge_hub_polkadot::{SignedExtension, AVERAGE_BLOCK_INTERVAL}; +use bp_polkadot_core::SuffixedCommonSignedExtensionExt; use codec::Encode; use relay_substrate_client::{ calls::UtilityCall as MockUtilityCall, Chain, ChainWithBalances, ChainWithMessages, @@ -41,8 +41,7 @@ pub type BridgeKusamaMessagesCall = runtime_types::pallet_bridge_messages::palle pub type BridgePolkadotBulletinGrandpaCall = runtime_types::pallet_bridge_grandpa::pallet::Call; pub type BridgeKusamaGrandpaCall = runtime_types::pallet_bridge_grandpa::pallet::Call; pub type BridgeParachainCall = runtime_types::pallet_bridge_parachains::pallet::Call; -type UncheckedExtrinsic = - bp_bridge_hub_polkadot::UncheckedExtrinsic; +type UncheckedExtrinsic = bp_bridge_hub_polkadot::UncheckedExtrinsic; type UtilityCall = runtime_types::pallet_utility::pallet::Call; /// Polkadot chain definition @@ -92,7 +91,7 @@ impl ChainWithTransactions for 
BridgeHubPolkadot { ) -> Result { let raw_payload = SignedPayload::new( unsigned.call, - TransactionExtension::from_params( + SignedExtension::from_params( param.spec_version, param.transaction_version, unsigned.era, diff --git a/relays/client-bridge-hub-rococo/src/lib.rs b/relays/client-bridge-hub-rococo/src/lib.rs index 169135646..cae9e4f77 100644 --- a/relays/client-bridge-hub-rococo/src/lib.rs +++ b/relays/client-bridge-hub-rococo/src/lib.rs @@ -18,8 +18,8 @@ pub mod codegen_runtime; -use bp_bridge_hub_rococo::{TransactionExtension, AVERAGE_BLOCK_INTERVAL}; -use bp_polkadot_core::SuffixedCommonTransactionExtensionExt; +use bp_bridge_hub_rococo::{SignedExtension, AVERAGE_BLOCK_INTERVAL}; +use bp_polkadot_core::SuffixedCommonSignedExtensionExt; use codec::Encode; use relay_substrate_client::{ calls::UtilityCall as MockUtilityCall, Chain, ChainWithBalances, ChainWithMessages, @@ -39,8 +39,7 @@ pub type BridgeBulletinMessagesCall = runtime_types::pallet_bridge_messages::pal pub type BridgeGrandpaCall = runtime_types::pallet_bridge_grandpa::pallet::Call; pub type BridgeBulletinGrandpaCall = runtime_types::pallet_bridge_grandpa::pallet::Call2; pub type BridgeParachainCall = runtime_types::pallet_bridge_parachains::pallet::Call; -type UncheckedExtrinsic = - bp_bridge_hub_rococo::UncheckedExtrinsic; +type UncheckedExtrinsic = bp_bridge_hub_rococo::UncheckedExtrinsic; type UtilityCall = runtime_types::pallet_utility::pallet::Call; /// Rococo chain definition @@ -90,7 +89,7 @@ impl ChainWithTransactions for BridgeHubRococo { ) -> Result { let raw_payload = SignedPayload::new( unsigned.call, - TransactionExtension::from_params( + SignedExtension::from_params( param.spec_version, param.transaction_version, unsigned.era, diff --git a/relays/client-bridge-hub-westend/src/lib.rs b/relays/client-bridge-hub-westend/src/lib.rs index 5e4462f86..049fa4649 100644 --- a/relays/client-bridge-hub-westend/src/lib.rs +++ b/relays/client-bridge-hub-westend/src/lib.rs @@ -18,8 +18,8 @@ 
pub mod codegen_runtime; -use bp_bridge_hub_westend::{TransactionExtension, AVERAGE_BLOCK_INTERVAL}; -use bp_polkadot_core::SuffixedCommonTransactionExtensionExt; +use bp_bridge_hub_westend::{SignedExtension, AVERAGE_BLOCK_INTERVAL}; +use bp_polkadot_core::SuffixedCommonSignedExtensionExt; use codec::Encode; use relay_substrate_client::{ calls::UtilityCall as MockUtilityCall, Chain, ChainWithBalances, ChainWithMessages, @@ -37,8 +37,7 @@ pub type RuntimeCall = runtime_types::bridge_hub_westend_runtime::RuntimeCall; pub type BridgeMessagesCall = runtime_types::pallet_bridge_messages::pallet::Call; pub type BridgeGrandpaCall = runtime_types::pallet_bridge_grandpa::pallet::Call; pub type BridgeParachainCall = runtime_types::pallet_bridge_parachains::pallet::Call; -type UncheckedExtrinsic = - bp_bridge_hub_westend::UncheckedExtrinsic; +type UncheckedExtrinsic = bp_bridge_hub_westend::UncheckedExtrinsic; type UtilityCall = runtime_types::pallet_utility::pallet::Call; /// Westend chain definition @@ -88,7 +87,7 @@ impl ChainWithTransactions for BridgeHubWestend { ) -> Result { let raw_payload = SignedPayload::new( unsigned.call, - TransactionExtension::from_params( + SignedExtension::from_params( param.spec_version, param.transaction_version, unsigned.era, diff --git a/relays/client-kusama/src/lib.rs b/relays/client-kusama/src/lib.rs index a953a383e..24a6adfe9 100644 --- a/relays/client-kusama/src/lib.rs +++ b/relays/client-kusama/src/lib.rs @@ -19,7 +19,7 @@ pub mod codegen_runtime; use bp_kusama::{AccountInfoStorageMapKeyProvider, KUSAMA_SYNCED_HEADERS_GRANDPA_INFO_METHOD}; -use bp_polkadot_core::SuffixedCommonTransactionExtensionExt; +use bp_polkadot_core::SuffixedCommonSignedExtensionExt; use codec::Encode; use relay_substrate_client::{ Chain, ChainWithBalances, ChainWithGrandpa, ChainWithRuntimeVersion, ChainWithTransactions, @@ -84,7 +84,7 @@ impl RelayChain for Kusama { impl ChainWithTransactions for Kusama { type AccountKeyPair = sp_core::sr25519::Pair; type 
SignedTransaction = - bp_polkadot_core::UncheckedExtrinsic; + bp_polkadot_core::UncheckedExtrinsic; fn sign_transaction( param: SignParam, @@ -92,7 +92,7 @@ impl ChainWithTransactions for Kusama { ) -> Result { let raw_payload = SignedPayload::new( unsigned.call, - bp_kusama::TransactionExtension::from_params( + bp_kusama::SignedExtension::from_params( param.spec_version, param.transaction_version, unsigned.era, diff --git a/relays/client-polkadot-bulletin/src/lib.rs b/relays/client-polkadot-bulletin/src/lib.rs index a6cdd8ee6..1f18b25a9 100644 --- a/relays/client-polkadot-bulletin/src/lib.rs +++ b/relays/client-polkadot-bulletin/src/lib.rs @@ -100,10 +100,8 @@ impl ChainWithBalances for PolkadotBulletin { impl ChainWithTransactions for PolkadotBulletin { type AccountKeyPair = sp_core::sr25519::Pair; - type SignedTransaction = bp_polkadot_bulletin::UncheckedExtrinsic< - Self::Call, - bp_polkadot_bulletin::TransactionExtension, - >; + type SignedTransaction = + bp_polkadot_bulletin::UncheckedExtrinsic; fn sign_transaction( param: SignParam, @@ -111,7 +109,7 @@ impl ChainWithTransactions for PolkadotBulletin { ) -> Result { let raw_payload = SignedPayload::new( unsigned.call, - bp_polkadot_bulletin::TransactionExtension::from_params( + bp_polkadot_bulletin::SignedExtension::from_params( param.spec_version, param.transaction_version, unsigned.era, diff --git a/relays/client-polkadot/src/lib.rs b/relays/client-polkadot/src/lib.rs index af75f0dbb..9b655528b 100644 --- a/relays/client-polkadot/src/lib.rs +++ b/relays/client-polkadot/src/lib.rs @@ -19,7 +19,7 @@ mod codegen_runtime; use bp_polkadot::{AccountInfoStorageMapKeyProvider, POLKADOT_SYNCED_HEADERS_GRANDPA_INFO_METHOD}; -use bp_polkadot_core::SuffixedCommonTransactionExtensionExt; +use bp_polkadot_core::SuffixedCommonSignedExtensionExt; use codec::Encode; use relay_substrate_client::{ Chain, ChainWithBalances, ChainWithGrandpa, ChainWithRuntimeVersion, ChainWithTransactions, @@ -84,7 +84,7 @@ impl RelayChain for 
Polkadot { impl ChainWithTransactions for Polkadot { type AccountKeyPair = sp_core::sr25519::Pair; type SignedTransaction = - bp_polkadot_core::UncheckedExtrinsic; + bp_polkadot_core::UncheckedExtrinsic; fn sign_transaction( param: SignParam, @@ -92,7 +92,7 @@ impl ChainWithTransactions for Polkadot { ) -> Result { let raw_payload = SignedPayload::new( unsigned.call, - bp_polkadot::TransactionExtension::from_params( + bp_polkadot::SignedExtension::from_params( param.spec_version, param.transaction_version, unsigned.era, diff --git a/relays/client-rococo/src/lib.rs b/relays/client-rococo/src/lib.rs index 5fad80d5c..575660504 100644 --- a/relays/client-rococo/src/lib.rs +++ b/relays/client-rococo/src/lib.rs @@ -18,7 +18,7 @@ pub mod codegen_runtime; -use bp_polkadot_core::SuffixedCommonTransactionExtensionExt; +use bp_polkadot_core::SuffixedCommonSignedExtensionExt; use bp_rococo::ROCOCO_SYNCED_HEADERS_GRANDPA_INFO_METHOD; use codec::Encode; use relay_substrate_client::{ @@ -84,7 +84,7 @@ impl RelayChain for Rococo { impl ChainWithTransactions for Rococo { type AccountKeyPair = sp_core::sr25519::Pair; type SignedTransaction = - bp_polkadot_core::UncheckedExtrinsic; + bp_polkadot_core::UncheckedExtrinsic; fn sign_transaction( param: SignParam, @@ -92,7 +92,7 @@ impl ChainWithTransactions for Rococo { ) -> Result { let raw_payload = SignedPayload::new( unsigned.call, - bp_rococo::TransactionExtension::from_params( + bp_rococo::SignedExtension::from_params( param.spec_version, param.transaction_version, unsigned.era, diff --git a/relays/client-substrate/Cargo.toml b/relays/client-substrate/Cargo.toml index ea02a0ad2..48895eff3 100644 --- a/relays/client-substrate/Cargo.toml +++ b/relays/client-substrate/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] async-std = { version = "1.6.5", features = ["attributes"] } -async-trait = "0.1" +async-trait = "0.1.74" codec = { package = "parity-scale-codec", version = "3.1.5" } futures = "0.3.30" jsonrpsee = { version = 
"0.17", features = ["macros", "ws-client"] } diff --git a/relays/client-westend/src/lib.rs b/relays/client-westend/src/lib.rs index 737c6c085..42206baec 100644 --- a/relays/client-westend/src/lib.rs +++ b/relays/client-westend/src/lib.rs @@ -18,7 +18,7 @@ pub mod codegen_runtime; -use bp_polkadot_core::SuffixedCommonTransactionExtensionExt; +use bp_polkadot_core::SuffixedCommonSignedExtensionExt; use bp_westend::WESTEND_SYNCED_HEADERS_GRANDPA_INFO_METHOD; use codec::Encode; use relay_substrate_client::{ @@ -84,7 +84,7 @@ impl ChainWithBalances for Westend { impl ChainWithTransactions for Westend { type AccountKeyPair = sp_core::sr25519::Pair; type SignedTransaction = - bp_polkadot_core::UncheckedExtrinsic; + bp_polkadot_core::UncheckedExtrinsic; fn sign_transaction( param: SignParam, @@ -92,7 +92,7 @@ impl ChainWithTransactions for Westend { ) -> Result { let raw_payload = SignedPayload::new( unsigned.call, - bp_westend::TransactionExtension::from_params( + bp_westend::SignedExtension::from_params( param.spec_version, param.transaction_version, unsigned.era, diff --git a/relays/equivocation/Cargo.toml b/relays/equivocation/Cargo.toml index 0b4a7e983..23dd4a087 100644 --- a/relays/equivocation/Cargo.toml +++ b/relays/equivocation/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] async-std = { version = "1.6.5", features = ["attributes"] } -async-trait = "0.1" +async-trait = "0.1.74" bp-header-chain = { path = "../../primitives/header-chain" } finality-relay = { path = "../finality" } frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } diff --git a/relays/finality/Cargo.toml b/relays/finality/Cargo.toml index 9e8bf56f5..80159b4db 100644 --- a/relays/finality/Cargo.toml +++ b/relays/finality/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] async-std = "1.6.5" -async-trait = "0.1" +async-trait = "0.1.74" backoff = "0.4" bp-header-chain = { path = "../../primitives/header-chain" } futures = "0.3.30" diff --git 
a/relays/lib-substrate-relay/Cargo.toml b/relays/lib-substrate-relay/Cargo.toml index d85e2762a..27c5ee02c 100644 --- a/relays/lib-substrate-relay/Cargo.toml +++ b/relays/lib-substrate-relay/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] anyhow = "1.0" async-std = "1.9.0" -async-trait = "0.1" +async-trait = "0.1.74" codec = { package = "parity-scale-codec", version = "3.1.5" } futures = "0.3.30" hex = "0.4" diff --git a/relays/messages/Cargo.toml b/relays/messages/Cargo.toml index 3367e4bbd..699649dd6 100644 --- a/relays/messages/Cargo.toml +++ b/relays/messages/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] async-std = { version = "1.6.5", features = ["attributes"] } -async-trait = "0.1" +async-trait = "0.1.74" env_logger = "0.11" futures = "0.3.30" hex = "0.4" diff --git a/relays/parachains/Cargo.toml b/relays/parachains/Cargo.toml index 9dc35343b..d2eea9eb6 100644 --- a/relays/parachains/Cargo.toml +++ b/relays/parachains/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] async-std = "1.6.5" -async-trait = "0.1" +async-trait = "0.1.74" futures = "0.3.30" log = { workspace = true } relay-utils = { path = "../utils" } diff --git a/relays/utils/Cargo.toml b/relays/utils/Cargo.toml index ed6093318..81574a685 100644 --- a/relays/utils/Cargo.toml +++ b/relays/utils/Cargo.toml @@ -12,7 +12,7 @@ workspace = true ansi_term = "0.12" anyhow = "1.0" async-std = "1.6.5" -async-trait = "0.1" +async-trait = "0.1.74" backoff = "0.4" isahc = "1.2" env_logger = "0.11.3" diff --git a/zombienet/README.md b/testing/README.md similarity index 94% rename from zombienet/README.md rename to testing/README.md index b601154b6..bd467a410 100644 --- a/zombienet/README.md +++ b/testing/README.md @@ -23,7 +23,7 @@ To start those tests, you need to: - copy fresh `substrate-relay` binary, built in previous point, to the `~/local_bridge_testing/bin/substrate-relay`; -- change the `POLKADOT_SDK_FOLDER` and `ZOMBIENET_BINARY_PATH` (and ensure that the nearby 
variables +- change the `POLKADOT_SDK_PATH` and `ZOMBIENET_BINARY_PATH` (and ensure that the nearby variables have correct values) in the `./run-tests.sh`. After that, you could run tests with the `./run-tests.sh` command. Hopefully, it'll show the diff --git a/testing/environments/rococo-westend/bridge_hub_rococo_local_network.toml b/testing/environments/rococo-westend/bridge_hub_rococo_local_network.toml new file mode 100644 index 000000000..52271f944 --- /dev/null +++ b/testing/environments/rococo-westend/bridge_hub_rococo_local_network.toml @@ -0,0 +1,88 @@ +[settings] +node_spawn_timeout = 240 + +[relaychain] +default_command = "{{POLKADOT_BINARY}}" +default_args = [ "-lparachain=debug,xcm=trace" ] +chain = "rococo-local" + + [[relaychain.nodes]] + name = "alice-rococo-validator" + validator = true + rpc_port = 9932 + ws_port = 9942 + balance = 2000000000000 + + [[relaychain.nodes]] + name = "bob-rococo-validator" + validator = true + rpc_port = 9933 + ws_port = 9943 + balance = 2000000000000 + + [[relaychain.nodes]] + name = "charlie-rococo-validator" + validator = true + rpc_port = 9934 + ws_port = 9944 + balance = 2000000000000 + +[[parachains]] +id = 1013 +chain = "bridge-hub-rococo-local" +cumulus_based = true + + # run alice as parachain collator + [[parachains.collators]] + name = "bridge-hub-rococo-collator1" + validator = true + command = "{{POLKADOT_PARACHAIN_BINARY}}" + rpc_port = 8933 + ws_port = 8943 + args = [ + "-lparachain=debug,runtime::bridge-hub=trace,runtime::bridge=trace,runtime::bridge-dispatch=trace,bridge=trace,runtime::bridge-messages=trace,xcm=trace" + ] + + # run bob as parachain collator + [[parachains.collators]] + name = "bridge-hub-rococo-collator2" + validator = true + command = "{{POLKADOT_PARACHAIN_BINARY}}" + rpc_port = 8934 + ws_port = 8944 + args = [ + "-lparachain=trace,runtime::bridge-hub=trace,runtime::bridge=trace,runtime::bridge-dispatch=trace,bridge=trace,runtime::bridge-messages=trace,xcm=trace" + ] + +[[parachains]] 
+id = 1000 +chain = "asset-hub-rococo-local" +cumulus_based = true + + [[parachains.collators]] + name = "asset-hub-rococo-collator1" + rpc_port = 9911 + ws_port = 9910 + command = "{{POLKADOT_PARACHAIN_BINARY}}" + args = [ + "-lparachain=debug,xcm=trace,runtime::bridge-transfer=trace" + ] + + [[parachains.collators]] + name = "asset-hub-rococo-collator2" + command = "{{POLKADOT_PARACHAIN_BINARY}}" + args = [ + "-lparachain=debug,xcm=trace,runtime::bridge-transfer=trace" + ] + +#[[hrmp_channels]] +#sender = 1000 +#recipient = 1013 +#max_capacity = 4 +#max_message_size = 524288 +# +#[[hrmp_channels]] +#sender = 1013 +#recipient = 1000 +#max_capacity = 4 +#max_message_size = 524288 diff --git a/testing/environments/rococo-westend/bridge_hub_westend_local_network.toml b/testing/environments/rococo-westend/bridge_hub_westend_local_network.toml new file mode 100644 index 000000000..f2550bcc9 --- /dev/null +++ b/testing/environments/rococo-westend/bridge_hub_westend_local_network.toml @@ -0,0 +1,88 @@ +[settings] +node_spawn_timeout = 240 + +[relaychain] +default_command = "{{POLKADOT_BINARY}}" +default_args = [ "-lparachain=debug,xcm=trace" ] +chain = "westend-local" + + [[relaychain.nodes]] + name = "alice-westend-validator" + validator = true + rpc_port = 9935 + ws_port = 9945 + balance = 2000000000000 + + [[relaychain.nodes]] + name = "bob-westend-validator" + validator = true + rpc_port = 9936 + ws_port = 9946 + balance = 2000000000000 + + [[relaychain.nodes]] + name = "charlie-westend-validator" + validator = true + rpc_port = 9937 + ws_port = 9947 + balance = 2000000000000 + +[[parachains]] +id = 1002 +chain = "bridge-hub-westend-local" +cumulus_based = true + + # run alice as parachain collator + [[parachains.collators]] + name = "bridge-hub-westend-collator1" + validator = true + command = "{{POLKADOT_PARACHAIN_BINARY}}" + rpc_port = 8935 + ws_port = 8945 + args = [ + 
"-lparachain=debug,runtime::mmr=info,substrate=info,runtime=info,runtime::bridge-hub=trace,runtime::bridge=trace,runtime::bridge-dispatch=trace,bridge=trace,runtime::bridge-messages=trace,xcm=trace" + ] + + # run bob as parachain collator + [[parachains.collators]] + name = "bridge-hub-westend-collator2" + validator = true + command = "{{POLKADOT_PARACHAIN_BINARY}}" + rpc_port = 8936 + ws_port = 8946 + args = [ + "-lparachain=trace,runtime::mmr=info,substrate=info,runtime=info,runtime::bridge-hub=trace,runtime::bridge=trace,runtime::bridge-dispatch=trace,bridge=trace,runtime::bridge-messages=trace,xcm=trace" + ] + +[[parachains]] +id = 1000 +chain = "asset-hub-westend-local" +cumulus_based = true + + [[parachains.collators]] + name = "asset-hub-westend-collator1" + rpc_port = 9011 + ws_port = 9010 + command = "{{POLKADOT_PARACHAIN_BINARY}}" + args = [ + "-lparachain=debug,xcm=trace,runtime::bridge-transfer=trace" + ] + + [[parachains.collators]] + name = "asset-hub-westend-collator2" + command = "{{POLKADOT_PARACHAIN_BINARY}}" + args = [ + "-lparachain=debug,xcm=trace,runtime::bridge-transfer=trace" + ] + +#[[hrmp_channels]] +#sender = 1000 +#recipient = 1002 +#max_capacity = 4 +#max_message_size = 524288 +# +#[[hrmp_channels]] +#sender = 1002 +#recipient = 1000 +#max_capacity = 4 +#max_message_size = 524288 diff --git a/testing/environments/rococo-westend/bridges_rococo_westend.sh b/testing/environments/rococo-westend/bridges_rococo_westend.sh new file mode 100755 index 000000000..66c9ddc03 --- /dev/null +++ b/testing/environments/rococo-westend/bridges_rococo_westend.sh @@ -0,0 +1,401 @@ +#!/bin/bash + +# import common functions +source "$FRAMEWORK_PATH/utils/bridges.sh" + +# Expected sovereign accounts. +# +# Generated by: +# +# #[test] +# fn generate_sovereign_accounts() { +# use sp_core::crypto::Ss58Codec; +# use polkadot_parachain_primitives::primitives::Sibling; +# +# parameter_types! 
{ +# pub UniversalLocationAHR: InteriorMultiLocation = X2(GlobalConsensus(Rococo), Parachain(1000)); +# pub UniversalLocationAHW: InteriorMultiLocation = X2(GlobalConsensus(Westend), Parachain(1000)); +# } +# +# // SS58=42 +# println!("GLOBAL_CONSENSUS_ROCOCO_SOVEREIGN_ACCOUNT=\"{}\"", +# frame_support::sp_runtime::AccountId32::new( +# GlobalConsensusConvertsFor::::convert_location( +# &MultiLocation { parents: 2, interior: X1(GlobalConsensus(Rococo)) }).unwrap() +# ).to_ss58check_with_version(42_u16.into()) +# ); +# println!("ASSET_HUB_WESTEND_SOVEREIGN_ACCOUNT_AT_BRIDGE_HUB_WESTEND=\"{}\"", +# frame_support::sp_runtime::AccountId32::new( +# SiblingParachainConvertsVia::::convert_location( +# &MultiLocation { parents: 1, interior: X1(Parachain(1000)) }).unwrap() +# ).to_ss58check_with_version(42_u16.into()) +# ); +# +# // SS58=42 +# println!("GLOBAL_CONSENSUS_WESTEND_SOVEREIGN_ACCOUNT=\"{}\"", +# frame_support::sp_runtime::AccountId32::new( +# GlobalConsensusConvertsFor::::convert_location( +# &MultiLocation { parents: 2, interior: X1(GlobalConsensus(Westend)) }).unwrap() +# ).to_ss58check_with_version(42_u16.into()) +# ); +# println!("ASSET_HUB_ROCOCO_SOVEREIGN_ACCOUNT_AT_BRIDGE_HUB_ROCOCO=\"{}\"", +# frame_support::sp_runtime::AccountId32::new( +# SiblingParachainConvertsVia::::convert_location( +# &MultiLocation { parents: 1, interior: X1(Parachain(1000)) }).unwrap() +# ).to_ss58check_with_version(42_u16.into()) +# ); +# } +GLOBAL_CONSENSUS_ROCOCO_SOVEREIGN_ACCOUNT="5GxRGwT8bU1JeBPTUXc7LEjZMxNrK8MyL2NJnkWFQJTQ4sii" +ASSET_HUB_WESTEND_SOVEREIGN_ACCOUNT_AT_BRIDGE_HUB_WESTEND="5Eg2fntNprdN3FgH4sfEaaZhYtddZQSQUqvYJ1f2mLtinVhV" +GLOBAL_CONSENSUS_WESTEND_SOVEREIGN_ACCOUNT="5He2Qdztyxxa4GoagY6q1jaiLMmKy1gXS7PdZkhfj8ZG9hk5" +ASSET_HUB_ROCOCO_SOVEREIGN_ACCOUNT_AT_BRIDGE_HUB_ROCOCO="5Eg2fntNprdN3FgH4sfEaaZhYtddZQSQUqvYJ1f2mLtinVhV" + +# Expected sovereign accounts for rewards on BridgeHubs. 
+# +# Generated by: +# #[test] +# fn generate_sovereign_accounts_for_rewards() { +# use bp_messages::LaneId; +# use bp_relayers::{PayRewardFromAccount, RewardsAccountOwner, RewardsAccountParams}; +# use sp_core::crypto::Ss58Codec; +# +# // SS58=42 +# println!( +# "ON_BRIDGE_HUB_ROCOCO_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhwd_ThisChain=\"{}\"", +# frame_support::sp_runtime::AccountId32::new( +# PayRewardFromAccount::<[u8; 32], [u8; 32]>::rewards_account(RewardsAccountParams::new( +# LaneId([0, 0, 0, 2]), +# *b"bhwd", +# RewardsAccountOwner::ThisChain +# )) +# ) +# .to_ss58check_with_version(42_u16.into()) +# ); +# // SS58=42 +# println!( +# "ON_BRIDGE_HUB_ROCOCO_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhwd_BridgedChain=\"{}\"", +# frame_support::sp_runtime::AccountId32::new( +# PayRewardFromAccount::<[u8; 32], [u8; 32]>::rewards_account(RewardsAccountParams::new( +# LaneId([0, 0, 0, 2]), +# *b"bhwd", +# RewardsAccountOwner::BridgedChain +# )) +# ) +# .to_ss58check_with_version(42_u16.into()) +# ); +# +# // SS58=42 +# println!( +# "ON_BRIDGE_HUB_WESTEND_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhro_ThisChain=\"{}\"", +# frame_support::sp_runtime::AccountId32::new( +# PayRewardFromAccount::<[u8; 32], [u8; 32]>::rewards_account(RewardsAccountParams::new( +# LaneId([0, 0, 0, 2]), +# *b"bhro", +# RewardsAccountOwner::ThisChain +# )) +# ) +# .to_ss58check_with_version(42_u16.into()) +# ); +# // SS58=42 +# println!( +# "ON_BRIDGE_HUB_WESTEND_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhro_BridgedChain=\"{}\"", +# frame_support::sp_runtime::AccountId32::new( +# PayRewardFromAccount::<[u8; 32], [u8; 32]>::rewards_account(RewardsAccountParams::new( +# LaneId([0, 0, 0, 2]), +# *b"bhro", +# RewardsAccountOwner::BridgedChain +# )) +# ) +# .to_ss58check_with_version(42_u16.into()) +# ); +# } +ON_BRIDGE_HUB_ROCOCO_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhwd_ThisChain="5EHnXaT5BhiSGP5hbdsoVGtzi2sQVgpDNToTxLYeQvKoMPEm" 
+ON_BRIDGE_HUB_ROCOCO_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhwd_BridgedChain="5EHnXaT5BhiSGP5hbdt5EJSapXYbxEv678jyWHEUskCXcjqo" +ON_BRIDGE_HUB_WESTEND_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhro_ThisChain="5EHnXaT5BhiSGP5h9Rg8sgUJqoLym3iEaWUiboT8S9AT5xFh" +ON_BRIDGE_HUB_WESTEND_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhro_BridgedChain="5EHnXaT5BhiSGP5h9RgQci1txJ2BDbp7KBRE9k8xty3BMUSi" + +LANE_ID="00000002" +XCM_VERSION=3 + +function init_ro_wnd() { + local relayer_path=$(ensure_relayer) + + RUST_LOG=runtime=trace,rpc=trace,bridge=trace \ + $relayer_path init-bridge rococo-to-bridge-hub-westend \ + --source-host localhost \ + --source-port 9942 \ + --source-version-mode Auto \ + --target-host localhost \ + --target-port 8945 \ + --target-version-mode Auto \ + --target-signer //Bob +} + +function init_wnd_ro() { + local relayer_path=$(ensure_relayer) + + RUST_LOG=runtime=trace,rpc=trace,bridge=trace \ + $relayer_path init-bridge westend-to-bridge-hub-rococo \ + --source-host localhost \ + --source-port 9945 \ + --source-version-mode Auto \ + --target-host localhost \ + --target-port 8943 \ + --target-version-mode Auto \ + --target-signer //Bob +} + +function run_relay() { + local relayer_path=$(ensure_relayer) + + RUST_LOG=runtime=trace,rpc=trace,bridge=trace \ + $relayer_path relay-headers-and-messages bridge-hub-rococo-bridge-hub-westend \ + --rococo-host localhost \ + --rococo-port 9942 \ + --rococo-version-mode Auto \ + --bridge-hub-rococo-host localhost \ + --bridge-hub-rococo-port 8943 \ + --bridge-hub-rococo-version-mode Auto \ + --bridge-hub-rococo-signer //Charlie \ + --bridge-hub-rococo-transactions-mortality 4 \ + --westend-host localhost \ + --westend-port 9945 \ + --westend-version-mode Auto \ + --bridge-hub-westend-host localhost \ + --bridge-hub-westend-port 8945 \ + --bridge-hub-westend-version-mode Auto \ + --bridge-hub-westend-signer //Charlie \ + --bridge-hub-westend-transactions-mortality 4 \ + --lane "${LANE_ID}" +} + +case "$1" in + run-relay) + 
init_wnd_ro + init_ro_wnd + run_relay + ;; + init-asset-hub-rococo-local) + ensure_polkadot_js_api + # create foreign assets for native Westend token (governance call on Rococo) + force_create_foreign_asset \ + "ws://127.0.0.1:9942" \ + "//Alice" \ + 1000 \ + "ws://127.0.0.1:9910" \ + "$(jq --null-input '{ "parents": 2, "interior": { "X1": { "GlobalConsensus": "Westend" } } }')" \ + "$GLOBAL_CONSENSUS_WESTEND_SOVEREIGN_ACCOUNT" \ + 10000000000 \ + true + # HRMP + open_hrmp_channels \ + "ws://127.0.0.1:9942" \ + "//Alice" \ + 1000 1013 4 524288 + open_hrmp_channels \ + "ws://127.0.0.1:9942" \ + "//Alice" \ + 1013 1000 4 524288 + # set XCM version of remote AssetHubWestend + force_xcm_version \ + "ws://127.0.0.1:9942" \ + "//Alice" \ + 1000 \ + "ws://127.0.0.1:9910" \ + "$(jq --null-input '{ "parents": 2, "interior": { "X2": [ { "GlobalConsensus": "Westend" }, { "Parachain": 1000 } ] } }')" \ + $XCM_VERSION + ;; + init-bridge-hub-rococo-local) + ensure_polkadot_js_api + # SA of sibling asset hub pays for the execution + transfer_balance \ + "ws://127.0.0.1:8943" \ + "//Alice" \ + "$ASSET_HUB_ROCOCO_SOVEREIGN_ACCOUNT_AT_BRIDGE_HUB_ROCOCO" \ + $((1000000000000 + 50000000000 * 20)) + # drip SA of lane dedicated to asset hub for paying rewards for delivery + transfer_balance \ + "ws://127.0.0.1:8943" \ + "//Alice" \ + "$ON_BRIDGE_HUB_ROCOCO_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhwd_ThisChain" \ + $((1000000000000 + 2000000000000)) + # drip SA of lane dedicated to asset hub for paying rewards for delivery confirmation + transfer_balance \ + "ws://127.0.0.1:8943" \ + "//Alice" \ + "$ON_BRIDGE_HUB_ROCOCO_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhwd_BridgedChain" \ + $((1000000000000 + 2000000000000)) + # set XCM version of remote BridgeHubWestend + force_xcm_version \ + "ws://127.0.0.1:9942" \ + "//Alice" \ + 1013 \ + "ws://127.0.0.1:8943" \ + "$(jq --null-input '{ "parents": 2, "interior": { "X2": [ { "GlobalConsensus": "Westend" }, { "Parachain": 1002 } ] } }')" \ + 
$XCM_VERSION + ;; + init-asset-hub-westend-local) + ensure_polkadot_js_api + # create foreign assets for native Rococo token (governance call on Westend) + force_create_foreign_asset \ + "ws://127.0.0.1:9945" \ + "//Alice" \ + 1000 \ + "ws://127.0.0.1:9010" \ + "$(jq --null-input '{ "parents": 2, "interior": { "X1": { "GlobalConsensus": "Rococo" } } }')" \ + "$GLOBAL_CONSENSUS_ROCOCO_SOVEREIGN_ACCOUNT" \ + 10000000000 \ + true + # HRMP + open_hrmp_channels \ + "ws://127.0.0.1:9945" \ + "//Alice" \ + 1000 1002 4 524288 + open_hrmp_channels \ + "ws://127.0.0.1:9945" \ + "//Alice" \ + 1002 1000 4 524288 + # set XCM version of remote AssetHubRococo + force_xcm_version \ + "ws://127.0.0.1:9945" \ + "//Alice" \ + 1000 \ + "ws://127.0.0.1:9010" \ + "$(jq --null-input '{ "parents": 2, "interior": { "X2": [ { "GlobalConsensus": "Rococo" }, { "Parachain": 1000 } ] } }')" \ + $XCM_VERSION + ;; + init-bridge-hub-westend-local) + # SA of sibling asset hub pays for the execution + transfer_balance \ + "ws://127.0.0.1:8945" \ + "//Alice" \ + "$ASSET_HUB_WESTEND_SOVEREIGN_ACCOUNT_AT_BRIDGE_HUB_WESTEND" \ + $((1000000000000000 + 50000000000 * 20)) + # drip SA of lane dedicated to asset hub for paying rewards for delivery + transfer_balance \ + "ws://127.0.0.1:8945" \ + "//Alice" \ + "$ON_BRIDGE_HUB_WESTEND_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhro_ThisChain" \ + $((1000000000000000 + 2000000000000)) + # drip SA of lane dedicated to asset hub for paying rewards for delivery confirmation + transfer_balance \ + "ws://127.0.0.1:8945" \ + "//Alice" \ + "$ON_BRIDGE_HUB_WESTEND_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhro_BridgedChain" \ + $((1000000000000000 + 2000000000000)) + # set XCM version of remote BridgeHubRococo + force_xcm_version \ + "ws://127.0.0.1:9945" \ + "//Alice" \ + 1002 \ + "ws://127.0.0.1:8945" \ + "$(jq --null-input '{ "parents": 2, "interior": { "X2": [ { "GlobalConsensus": "Rococo" }, { "Parachain": 1013 } ] } }')" \ + $XCM_VERSION + ;; + 
reserve-transfer-assets-from-asset-hub-rococo-local) + amount=$2 + ensure_polkadot_js_api + # send ROCs to Alice account on AHW + limited_reserve_transfer_assets \ + "ws://127.0.0.1:9910" \ + "//Alice" \ + "$(jq --null-input '{ "V3": { "parents": 2, "interior": { "X2": [ { "GlobalConsensus": "Westend" }, { "Parachain": 1000 } ] } } }')" \ + "$(jq --null-input '{ "V3": { "parents": 0, "interior": { "X1": { "AccountId32": { "id": [212, 53, 147, 199, 21, 253, 211, 28, 97, 20, 26, 189, 4, 169, 159, 214, 130, 44, 133, 88, 133, 76, 205, 227, 154, 86, 132, 231, 165, 109, 162, 125] } } } } }')" \ + "$(jq --null-input '{ "V3": [ { "id": { "Concrete": { "parents": 1, "interior": "Here" } }, "fun": { "Fungible": '$amount' } } ] }')" \ + 0 \ + "Unlimited" + ;; + withdraw-reserve-assets-from-asset-hub-rococo-local) + amount=$2 + ensure_polkadot_js_api + # send back only 100000000000 wrappedWNDs to Alice account on AHW + limited_reserve_transfer_assets \ + "ws://127.0.0.1:9910" \ + "//Alice" \ + "$(jq --null-input '{ "V3": { "parents": 2, "interior": { "X2": [ { "GlobalConsensus": "Westend" }, { "Parachain": 1000 } ] } } }')" \ + "$(jq --null-input '{ "V3": { "parents": 0, "interior": { "X1": { "AccountId32": { "id": [212, 53, 147, 199, 21, 253, 211, 28, 97, 20, 26, 189, 4, 169, 159, 214, 130, 44, 133, 88, 133, 76, 205, 227, 154, 86, 132, 231, 165, 109, 162, 125] } } } } }')" \ + "$(jq --null-input '{ "V3": [ { "id": { "Concrete": { "parents": 2, "interior": { "X1": { "GlobalConsensus": "Westend" } } } }, "fun": { "Fungible": '$amount' } } ] }')" \ + 0 \ + "Unlimited" + ;; + reserve-transfer-assets-from-asset-hub-westend-local) + amount=$2 + ensure_polkadot_js_api + # send WNDs to Alice account on AHR + limited_reserve_transfer_assets \ + "ws://127.0.0.1:9010" \ + "//Alice" \ + "$(jq --null-input '{ "V3": { "parents": 2, "interior": { "X2": [ { "GlobalConsensus": "Rococo" }, { "Parachain": 1000 } ] } } }')" \ + "$(jq --null-input '{ "V3": { "parents": 0, "interior": { "X1": { 
"AccountId32": { "id": [212, 53, 147, 199, 21, 253, 211, 28, 97, 20, 26, 189, 4, 169, 159, 214, 130, 44, 133, 88, 133, 76, 205, 227, 154, 86, 132, 231, 165, 109, 162, 125] } } } } }')" \ + "$(jq --null-input '{ "V3": [ { "id": { "Concrete": { "parents": 1, "interior": "Here" } }, "fun": { "Fungible": '$amount' } } ] }')" \ + 0 \ + "Unlimited" + ;; + withdraw-reserve-assets-from-asset-hub-westend-local) + amount=$2 + ensure_polkadot_js_api + # send back only 100000000000 wrappedROCs to Alice account on AHR + limited_reserve_transfer_assets \ + "ws://127.0.0.1:9010" \ + "//Alice" \ + "$(jq --null-input '{ "V3": { "parents": 2, "interior": { "X2": [ { "GlobalConsensus": "Rococo" }, { "Parachain": 1000 } ] } } }')" \ + "$(jq --null-input '{ "V3": { "parents": 0, "interior": { "X1": { "AccountId32": { "id": [212, 53, 147, 199, 21, 253, 211, 28, 97, 20, 26, 189, 4, 169, 159, 214, 130, 44, 133, 88, 133, 76, 205, 227, 154, 86, 132, 231, 165, 109, 162, 125] } } } } }')" \ + "$(jq --null-input '{ "V3": [ { "id": { "Concrete": { "parents": 2, "interior": { "X1": { "GlobalConsensus": "Rococo" } } } }, "fun": { "Fungible": '$amount' } } ] }')" \ + 0 \ + "Unlimited" + ;; + claim-rewards-bridge-hub-rococo-local) + ensure_polkadot_js_api + # bhwd -> [62, 68, 77, 64] -> 0x62687764 + claim_rewards \ + "ws://127.0.0.1:8943" \ + "//Charlie" \ + "0x${LANE_ID}" \ + "0x62687764" \ + "ThisChain" + claim_rewards \ + "ws://127.0.0.1:8943" \ + "//Charlie" \ + "0x${LANE_ID}" \ + "0x62687764" \ + "BridgedChain" + ;; + claim-rewards-bridge-hub-westend-local) + # bhro -> [62, 68, 72, 6f] -> 0x6268726f + claim_rewards \ + "ws://127.0.0.1:8945" \ + "//Charlie" \ + "0x${LANE_ID}" \ + "0x6268726f" \ + "ThisChain" + claim_rewards \ + "ws://127.0.0.1:8945" \ + "//Charlie" \ + "0x${LANE_ID}" \ + "0x6268726f" \ + "BridgedChain" + ;; + stop) + pkill -f polkadot + pkill -f parachain + ;; + import) + # to avoid trigger anything here + ;; + *) + echo "A command is require. 
Supported commands for: + Local (zombienet) run: + - run-relay + - init-asset-hub-rococo-local + - init-bridge-hub-rococo-local + - init-asset-hub-westend-local + - init-bridge-hub-westend-local + - reserve-transfer-assets-from-asset-hub-rococo-local + - withdraw-reserve-assets-from-asset-hub-rococo-local + - reserve-transfer-assets-from-asset-hub-westend-local + - withdraw-reserve-assets-from-asset-hub-westend-local + - claim-rewards-bridge-hub-rococo-local + - claim-rewards-bridge-hub-westend-local"; + exit 1 + ;; +esac diff --git a/testing/environments/rococo-westend/helper.sh b/testing/environments/rococo-westend/helper.sh new file mode 100755 index 000000000..0a13ded21 --- /dev/null +++ b/testing/environments/rococo-westend/helper.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +$ENV_PATH/bridges_rococo_westend.sh "$@" diff --git a/testing/environments/rococo-westend/rococo-init.zndsl b/testing/environments/rococo-westend/rococo-init.zndsl new file mode 100644 index 000000000..c913e4db3 --- /dev/null +++ b/testing/environments/rococo-westend/rococo-init.zndsl @@ -0,0 +1,8 @@ +Description: Check if the HRMP channel between Rococo BH and Rococo AH was opened successfully +Network: ./bridge_hub_rococo_local_network.toml +Creds: config + +# ensure that initialization has completed +asset-hub-rococo-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/wait-hrmp-channel-opened.js with "1013" within 300 seconds + + diff --git a/testing/environments/rococo-westend/rococo.zndsl b/testing/environments/rococo-westend/rococo.zndsl new file mode 100644 index 000000000..5b49c7c63 --- /dev/null +++ b/testing/environments/rococo-westend/rococo.zndsl @@ -0,0 +1,7 @@ +Description: Check if the with-Westend GRANPDA pallet was initialized at Rococo BH +Network: ./bridge_hub_rococo_local_network.toml +Creds: config + +# relay is already started - let's wait until with-Westend GRANPDA pallet is initialized at Rococo +bridge-hub-rococo-collator1: js-script 
{{FRAMEWORK_PATH}}/js-helpers/best-finalized-header-at-bridged-chain.js with "Westend,0" within 400 seconds + diff --git a/testing/environments/rococo-westend/spawn.sh b/testing/environments/rococo-westend/spawn.sh new file mode 100755 index 000000000..cbd0b1bc6 --- /dev/null +++ b/testing/environments/rococo-westend/spawn.sh @@ -0,0 +1,70 @@ +#!/bin/bash + +set -e + +trap "trap - SIGTERM && kill -9 -$$" SIGINT SIGTERM EXIT + +source "$FRAMEWORK_PATH/utils/zombienet.sh" + +# whether to init the chains (open HRMP channels, set XCM version, create reserve assets, etc) +init=0 +start_relayer=0 +while [ $# -ne 0 ] +do + arg="$1" + case "$arg" in + --init) + init=1 + ;; + --start-relayer) + start_relayer=1 + ;; + esac + shift +done + +logs_dir=$TEST_DIR/logs +helper_script="${BASH_SOURCE%/*}/helper.sh" + +rococo_def=${BASH_SOURCE%/*}/bridge_hub_rococo_local_network.toml +start_zombienet $TEST_DIR $rococo_def rococo_dir rococo_pid +echo + +westend_def=${BASH_SOURCE%/*}/bridge_hub_westend_local_network.toml +start_zombienet $TEST_DIR $westend_def westend_dir westend_pid +echo + +if [[ $init -eq 1 ]]; then + rococo_init_log=$logs_dir/rococo-init.log + echo -e "Setting up the rococo side of the bridge. Logs available at: $rococo_init_log\n" + + westend_init_log=$logs_dir/westend-init.log + echo -e "Setting up the westend side of the bridge. Logs available at: $westend_init_log\n" + + $helper_script init-asset-hub-rococo-local >> $rococo_init_log 2>&1 & + rococo_init_pid=$! + $helper_script init-asset-hub-westend-local >> $westend_init_log 2>&1 & + westend_init_pid=$! + wait -n $rococo_init_pid $westend_init_pid + + + $helper_script init-bridge-hub-rococo-local >> $rococo_init_log 2>&1 & + rococo_init_pid=$! + $helper_script init-bridge-hub-westend-local >> $westend_init_log 2>&1 & + westend_init_pid=$! 
+ wait -n $rococo_init_pid $westend_init_pid + + run_zndsl ${BASH_SOURCE%/*}/rococo-init.zndsl $rococo_dir + run_zndsl ${BASH_SOURCE%/*}/westend-init.zndsl $westend_dir +fi + +if [[ $start_relayer -eq 1 ]]; then + ${BASH_SOURCE%/*}/start_relayer.sh $rococo_dir $westend_dir relayer_pid +fi + +echo $rococo_dir > $TEST_DIR/rococo.env +echo $westend_dir > $TEST_DIR/westend.env +echo + +wait -n $rococo_pid $westend_pid $relayer_pid +kill -9 -$$ diff --git a/testing/environments/rococo-westend/start_relayer.sh b/testing/environments/rococo-westend/start_relayer.sh new file mode 100755 index 000000000..7ddd312d3 --- /dev/null +++ b/testing/environments/rococo-westend/start_relayer.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +set -e + +source "$FRAMEWORK_PATH/utils/common.sh" +source "$FRAMEWORK_PATH/utils/zombienet.sh" + +rococo_dir=$1 +westend_dir=$2 +__relayer_pid=$3 + +logs_dir=$TEST_DIR/logs +helper_script="${BASH_SOURCE%/*}/helper.sh" + +relayer_log=$logs_dir/relayer.log +echo -e "Starting rococo-westend relayer. 
Logs available at: $relayer_log\n" +start_background_process "$helper_script run-relay" $relayer_log relayer_pid + +run_zndsl ${BASH_SOURCE%/*}/rococo.zndsl $rococo_dir +run_zndsl ${BASH_SOURCE%/*}/westend.zndsl $westend_dir + +eval $__relayer_pid="'$relayer_pid'" + diff --git a/testing/environments/rococo-westend/westend-init.zndsl b/testing/environments/rococo-westend/westend-init.zndsl new file mode 100644 index 000000000..0f5428eed --- /dev/null +++ b/testing/environments/rococo-westend/westend-init.zndsl @@ -0,0 +1,7 @@ +Description: Check if the HRMP channel between Westend BH and Westend AH was opened successfully +Network: ./bridge_hub_westend_local_network.toml +Creds: config + +# ensure that initialization has completed +asset-hub-westend-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/wait-hrmp-channel-opened.js with "1002" within 600 seconds + diff --git a/testing/environments/rococo-westend/westend.zndsl b/testing/environments/rococo-westend/westend.zndsl new file mode 100644 index 000000000..079688388 --- /dev/null +++ b/testing/environments/rococo-westend/westend.zndsl @@ -0,0 +1,6 @@ +Description: Check if the with-Rococo GRANPDA pallet was initialized at Westend BH +Network: ./bridge_hub_westend_local_network.toml +Creds: config + +# relay is already started - let's wait until with-Rococo GRANPDA pallet is initialized at Westend +bridge-hub-westend-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/best-finalized-header-at-bridged-chain.js with "Rococo,0" within 400 seconds diff --git a/zombienet/helpers/best-finalized-header-at-bridged-chain.js b/testing/framework/js-helpers/best-finalized-header-at-bridged-chain.js similarity index 94% rename from zombienet/helpers/best-finalized-header-at-bridged-chain.js rename to testing/framework/js-helpers/best-finalized-header-at-bridged-chain.js index f7e1eefc8..af4f18aee 100644 --- a/zombienet/helpers/best-finalized-header-at-bridged-chain.js +++ 
b/testing/framework/js-helpers/best-finalized-header-at-bridged-chain.js @@ -18,7 +18,7 @@ async function run(nodeName, networkInfo, args) { } // else sleep and retry - await new Promise((resolve) => setTimeout(resolve, 12000)); + await new Promise((resolve) => setTimeout(resolve, 6000)); } } diff --git a/zombienet/helpers/chains/rococo-at-westend.js b/testing/framework/js-helpers/chains/rococo-at-westend.js similarity index 100% rename from zombienet/helpers/chains/rococo-at-westend.js rename to testing/framework/js-helpers/chains/rococo-at-westend.js diff --git a/zombienet/helpers/chains/westend-at-rococo.js b/testing/framework/js-helpers/chains/westend-at-rococo.js similarity index 100% rename from zombienet/helpers/chains/westend-at-rococo.js rename to testing/framework/js-helpers/chains/westend-at-rococo.js diff --git a/zombienet/helpers/native-assets-balance-increased.js b/testing/framework/js-helpers/native-assets-balance-increased.js similarity index 74% rename from zombienet/helpers/native-assets-balance-increased.js rename to testing/framework/js-helpers/native-assets-balance-increased.js index 9ee1a769e..749c3e2fe 100644 --- a/zombienet/helpers/native-assets-balance-increased.js +++ b/testing/framework/js-helpers/native-assets-balance-increased.js @@ -3,18 +3,19 @@ async function run(nodeName, networkInfo, args) { const api = await zombie.connect(wsUri, userDefinedTypes); const accountAddress = args[0]; + const expectedIncrease = BigInt(args[1]); const initialAccountData = await api.query.system.account(accountAddress); const initialAccountBalance = initialAccountData.data['free']; while (true) { const accountData = await api.query.system.account(accountAddress); const accountBalance = accountData.data['free']; - if (accountBalance > initialAccountBalance) { + if (accountBalance > initialAccountBalance + expectedIncrease) { return accountBalance; } // else sleep and retry - await new Promise((resolve) => setTimeout(resolve, 12000)); + await new 
Promise((resolve) => setTimeout(resolve, 6000)); } } -module.exports = { run } +module.exports = {run} diff --git a/zombienet/helpers/only-mandatory-headers-synced-when-idle.js b/testing/framework/js-helpers/only-mandatory-headers-synced-when-idle.js similarity index 88% rename from zombienet/helpers/only-mandatory-headers-synced-when-idle.js rename to testing/framework/js-helpers/only-mandatory-headers-synced-when-idle.js index 3a3432cfa..979179245 100644 --- a/zombienet/helpers/only-mandatory-headers-synced-when-idle.js +++ b/testing/framework/js-helpers/only-mandatory-headers-synced-when-idle.js @@ -10,7 +10,7 @@ async function run(nodeName, networkInfo, args) { // start listening to new blocks let totalGrandpaHeaders = 0; - let totalParachainHeaders = 0; + let initialParachainHeaderImported = false; api.rpc.chain.subscribeNewHeads(async function (header) { const apiAtParent = await api.at(header.parentHash); const apiAtCurrent = await api.at(header.hash); @@ -22,7 +22,7 @@ async function run(nodeName, networkInfo, args) { apiAtCurrent, currentEvents, ); - totalParachainHeaders += await utils.ensureOnlyInitialParachainHeaderImported( + initialParachainHeaderImported = await utils.ensureOnlyInitialParachainHeaderImported( bridgedChain, apiAtParent, apiAtCurrent, @@ -36,7 +36,7 @@ async function run(nodeName, networkInfo, args) { if (totalGrandpaHeaders == 0) { throw new Error("No bridged relay chain headers imported"); } - if (totalParachainHeaders == 0) { + if (!initialParachainHeaderImported) { throw new Error("No bridged parachain headers imported"); } } diff --git a/zombienet/helpers/only-required-headers-synced-when-idle.js b/testing/framework/js-helpers/only-required-headers-synced-when-idle.js similarity index 100% rename from zombienet/helpers/only-required-headers-synced-when-idle.js rename to testing/framework/js-helpers/only-required-headers-synced-when-idle.js diff --git a/zombienet/helpers/relayer-rewards.js 
b/testing/framework/js-helpers/relayer-rewards.js similarity index 93% rename from zombienet/helpers/relayer-rewards.js rename to testing/framework/js-helpers/relayer-rewards.js index a5f567db7..5347c6496 100644 --- a/zombienet/helpers/relayer-rewards.js +++ b/testing/framework/js-helpers/relayer-rewards.js @@ -21,7 +21,7 @@ async function run(nodeName, networkInfo, args) { } // else sleep and retry - await new Promise((resolve) => setTimeout(resolve, 12000)); + await new Promise((resolve) => setTimeout(resolve, 6000)); } } diff --git a/zombienet/helpers/utils.js b/testing/framework/js-helpers/utils.js similarity index 98% rename from zombienet/helpers/utils.js rename to testing/framework/js-helpers/utils.js index 5a5542b56..f6e9f5623 100644 --- a/zombienet/helpers/utils.js +++ b/testing/framework/js-helpers/utils.js @@ -98,6 +98,6 @@ module.exports = { throw new Error("Unexpected parachain header import: " + newParachainHeaders + " / " + maxNewParachainHeaders); } - return newParachainHeaders; + return hasBestBridgedParachainHeader; }, } diff --git a/zombienet/helpers/wait-hrmp-channel-opened.js b/testing/framework/js-helpers/wait-hrmp-channel-opened.js similarity index 91% rename from zombienet/helpers/wait-hrmp-channel-opened.js rename to testing/framework/js-helpers/wait-hrmp-channel-opened.js index e700cab1d..765d48cc4 100644 --- a/zombienet/helpers/wait-hrmp-channel-opened.js +++ b/testing/framework/js-helpers/wait-hrmp-channel-opened.js @@ -15,7 +15,7 @@ async function run(nodeName, networkInfo, args) { } // else sleep and retry - await new Promise((resolve) => setTimeout(resolve, 12000)); + await new Promise((resolve) => setTimeout(resolve, 6000)); } } diff --git a/zombienet/helpers/wrapped-assets-balance.js b/testing/framework/js-helpers/wrapped-assets-balance.js similarity index 93% rename from zombienet/helpers/wrapped-assets-balance.js rename to testing/framework/js-helpers/wrapped-assets-balance.js index bb3cea885..272871185 100644 --- 
a/zombienet/helpers/wrapped-assets-balance.js +++ b/testing/framework/js-helpers/wrapped-assets-balance.js @@ -19,7 +19,7 @@ async function run(nodeName, networkInfo, args) { } // else sleep and retry - await new Promise((resolve) => setTimeout(resolve, 12000)); + await new Promise((resolve) => setTimeout(resolve, 6000)); } } diff --git a/testing/framework/utils/bridges.sh b/testing/framework/utils/bridges.sh new file mode 100755 index 000000000..7c8399461 --- /dev/null +++ b/testing/framework/utils/bridges.sh @@ -0,0 +1,309 @@ +#!/bin/bash + +function relayer_path() { + local default_path=~/local_bridge_testing/bin/substrate-relay + local path="${SUBSTRATE_RELAY_BINARY:-$default_path}" + echo "$path" +} + +function ensure_relayer() { + local path=$(relayer_path) + if [[ ! -f "$path" ]]; then + echo " Required substrate-relay binary '$path' does not exist!" + echo " You need to build it and copy to this location!" + echo " Please, check ./parachains/runtimes/bridge-hubs/README.md (Prepare/Build/Deploy)" + exit 1 + fi + + echo $path +} + +function ensure_polkadot_js_api() { + if ! which polkadot-js-api &> /dev/null; then + echo '' + echo 'Required command `polkadot-js-api` not in PATH, please, install, e.g.:' + echo "npm install -g @polkadot/api-cli@beta" + echo " or" + echo "yarn global add @polkadot/api-cli" + echo '' + exit 1 + fi + if ! which jq &> /dev/null; then + echo '' + echo 'Required command `jq` not in PATH, please, install, e.g.:' + echo "apt install -y jq" + echo '' + exit 1 + fi + generate_hex_encoded_call_data "check" "--" + local retVal=$? + if [ $retVal -ne 0 ]; then + echo "" + echo "" + echo "-------------------" + echo "Installing (nodejs) sub module: ${BASH_SOURCE%/*}/generate_hex_encoded_call" + pushd ${BASH_SOURCE%/*}/generate_hex_encoded_call + npm install + popd + fi +} + +function call_polkadot_js_api() { + # --noWait: without that argument `polkadot-js-api` waits until transaction is included into the block. 
+ # With it, it just submits it to the tx pool and exits. + # --nonce -1: means to compute transaction nonce using `system_accountNextIndex` RPC, which includes all + # transaction that are in the tx pool. + polkadot-js-api --noWait --nonce -1 "$@" +} + +function generate_hex_encoded_call_data() { + local type=$1 + local endpoint=$2 + local output=$3 + shift + shift + shift + echo "Input params: $@" + + node ${BASH_SOURCE%/*}/../utils/generate_hex_encoded_call "$type" "$endpoint" "$output" "$@" + local retVal=$? + + if [ $type != "check" ]; then + local hex_encoded_data=$(cat $output) + echo "Generated hex-encoded bytes to file '$output': $hex_encoded_data" + fi + + return $retVal +} + +function transfer_balance() { + local runtime_para_endpoint=$1 + local seed=$2 + local target_account=$3 + local amount=$4 + echo " calling transfer_balance:" + echo " runtime_para_endpoint: ${runtime_para_endpoint}" + echo " seed: ${seed}" + echo " target_account: ${target_account}" + echo " amount: ${amount}" + echo "--------------------------------------------------" + + call_polkadot_js_api \ + --ws "${runtime_para_endpoint}" \ + --seed "${seed?}" \ + tx.balances.transferAllowDeath \ + "${target_account}" \ + "${amount}" +} + +function send_governance_transact() { + local relay_url=$1 + local relay_chain_seed=$2 + local para_id=$3 + local hex_encoded_data=$4 + local require_weight_at_most_ref_time=$5 + local require_weight_at_most_proof_size=$6 + echo " calling send_governance_transact:" + echo " relay_url: ${relay_url}" + echo " relay_chain_seed: ${relay_chain_seed}" + echo " para_id: ${para_id}" + echo " hex_encoded_data: ${hex_encoded_data}" + echo " require_weight_at_most_ref_time: ${require_weight_at_most_ref_time}" + echo " require_weight_at_most_proof_size: ${require_weight_at_most_proof_size}" + echo " params:" + + local dest=$(jq --null-input \ + --arg para_id "$para_id" \ + '{ "V3": { "parents": 0, "interior": { "X1": { "Parachain": $para_id } } } }') + + local 
message=$(jq --null-input \ + --argjson hex_encoded_data $hex_encoded_data \ + --arg require_weight_at_most_ref_time "$require_weight_at_most_ref_time" \ + --arg require_weight_at_most_proof_size "$require_weight_at_most_proof_size" \ + ' + { + "V3": [ + { + "UnpaidExecution": { + "weight_limit": "Unlimited" + } + }, + { + "Transact": { + "origin_kind": "Superuser", + "require_weight_at_most": { + "ref_time": $require_weight_at_most_ref_time, + "proof_size": $require_weight_at_most_proof_size, + }, + "call": { + "encoded": $hex_encoded_data + } + } + } + ] + } + ') + + echo "" + echo " dest:" + echo "${dest}" + echo "" + echo " message:" + echo "${message}" + echo "" + echo "--------------------------------------------------" + + call_polkadot_js_api \ + --ws "${relay_url?}" \ + --seed "${relay_chain_seed?}" \ + --sudo \ + tx.xcmPallet.send \ + "${dest}" \ + "${message}" +} + +function open_hrmp_channels() { + local relay_url=$1 + local relay_chain_seed=$2 + local sender_para_id=$3 + local recipient_para_id=$4 + local max_capacity=$5 + local max_message_size=$6 + echo " calling open_hrmp_channels:" + echo " relay_url: ${relay_url}" + echo " relay_chain_seed: ${relay_chain_seed}" + echo " sender_para_id: ${sender_para_id}" + echo " recipient_para_id: ${recipient_para_id}" + echo " max_capacity: ${max_capacity}" + echo " max_message_size: ${max_message_size}" + echo " params:" + echo "--------------------------------------------------" + call_polkadot_js_api \ + --ws "${relay_url?}" \ + --seed "${relay_chain_seed?}" \ + --sudo \ + tx.hrmp.forceOpenHrmpChannel \ + ${sender_para_id} \ + ${recipient_para_id} \ + ${max_capacity} \ + ${max_message_size} +} + +function force_xcm_version() { + local relay_url=$1 + local relay_chain_seed=$2 + local runtime_para_id=$3 + local runtime_para_endpoint=$4 + local dest=$5 + local xcm_version=$6 + echo " calling force_xcm_version:" + echo " relay_url: ${relay_url}" + echo " relay_chain_seed: ${relay_chain_seed}" + echo " 
runtime_para_id: ${runtime_para_id}" + echo " runtime_para_endpoint: ${runtime_para_endpoint}" + echo " dest: ${dest}" + echo " xcm_version: ${xcm_version}" + echo " params:" + + # 1. generate data for Transact (PolkadotXcm::force_xcm_version) + local tmp_output_file=$(mktemp) + generate_hex_encoded_call_data "force-xcm-version" "${runtime_para_endpoint}" "${tmp_output_file}" "$dest" "$xcm_version" + local hex_encoded_data=$(cat $tmp_output_file) + + # 2. trigger governance call + send_governance_transact "${relay_url}" "${relay_chain_seed}" "${runtime_para_id}" "${hex_encoded_data}" 200000000 12000 +} + +function force_create_foreign_asset() { + local relay_url=$1 + local relay_chain_seed=$2 + local runtime_para_id=$3 + local runtime_para_endpoint=$4 + local asset_multilocation=$5 + local asset_owner_account_id=$6 + local min_balance=$7 + local is_sufficient=$8 + echo " calling force_create_foreign_asset:" + echo " relay_url: ${relay_url}" + echo " relay_chain_seed: ${relay_chain_seed}" + echo " runtime_para_id: ${runtime_para_id}" + echo " runtime_para_endpoint: ${runtime_para_endpoint}" + echo " asset_multilocation: ${asset_multilocation}" + echo " asset_owner_account_id: ${asset_owner_account_id}" + echo " min_balance: ${min_balance}" + echo " is_sufficient: ${is_sufficient}" + echo " params:" + + # 1. generate data for Transact (ForeignAssets::force_create) + local tmp_output_file=$(mktemp) + generate_hex_encoded_call_data "force-create-asset" "${runtime_para_endpoint}" "${tmp_output_file}" "$asset_multilocation" "$asset_owner_account_id" $is_sufficient $min_balance + local hex_encoded_data=$(cat $tmp_output_file) + + # 2. 
trigger governance call + send_governance_transact "${relay_url}" "${relay_chain_seed}" "${runtime_para_id}" "${hex_encoded_data}" 200000000 12000 +} + +function limited_reserve_transfer_assets() { + local url=$1 + local seed=$2 + local destination=$3 + local beneficiary=$4 + local assets=$5 + local fee_asset_item=$6 + local weight_limit=$7 + echo " calling limited_reserve_transfer_assets:" + echo " url: ${url}" + echo " seed: ${seed}" + echo " destination: ${destination}" + echo " beneficiary: ${beneficiary}" + echo " assets: ${assets}" + echo " fee_asset_item: ${fee_asset_item}" + echo " weight_limit: ${weight_limit}" + echo "" + echo "--------------------------------------------------" + + call_polkadot_js_api \ + --ws "${url?}" \ + --seed "${seed?}" \ + tx.polkadotXcm.limitedReserveTransferAssets \ + "${destination}" \ + "${beneficiary}" \ + "${assets}" \ + "${fee_asset_item}" \ + "${weight_limit}" +} + +function claim_rewards() { + local runtime_para_endpoint=$1 + local seed=$2 + local lane_id=$3 + local bridged_chain_id=$4 + local owner=$5 + echo " calling claim_rewards:" + echo " runtime_para_endpoint: ${runtime_para_endpoint}" + echo " seed: ${seed}" + echo " lane_id: ${lane_id}" + echo " bridged_chain_id: ${bridged_chain_id}" + echo " owner: ${owner}" + echo "" + + local rewards_account_params=$(jq --null-input \ + --arg lane_id "$lane_id" \ + --arg bridged_chain_id "$bridged_chain_id" \ + --arg owner "$owner" \ + '{ + "laneId": $lane_id, + "bridgedChainId": $bridged_chain_id, + "owner": $owner + }') + + echo " rewards_account_params:" + echo "${rewards_account_params}" + echo "--------------------------------------------------" + + call_polkadot_js_api \ + --ws "${runtime_para_endpoint}" \ + --seed "${seed?}" \ + tx.bridgeRelayers.claimRewards \ + "${rewards_account_params}" +} \ No newline at end of file diff --git a/testing/framework/utils/common.sh b/testing/framework/utils/common.sh new file mode 100644 index 000000000..06f41320b --- /dev/null +++ 
b/testing/framework/utils/common.sh @@ -0,0 +1,45 @@ +#!/bin/bash + +function start_background_process() { + local command=$1 + local log_file=$2 + local __pid=$3 + + $command > $log_file 2>&1 & + eval $__pid="'$!'" +} + +function wait_for_process_file() { + local pid=$1 + local file=$2 + local timeout=$3 + local __found=$4 + + local time=0 + until [ -e $file ]; do + if ! kill -0 $pid; then + echo "Process finished unsuccessfully" + return + fi + if (( time++ >= timeout )); then + echo "Timeout waiting for file $file: $timeout seconds" + eval $__found=0 + return + fi + sleep 1 + done + + echo "File $file found after $time seconds" + eval $__found=1 +} + +function ensure_process_file() { + local pid=$1 + local file=$2 + local timeout=$3 + + wait_for_process_file $pid $file $timeout file_found + if [ "$file_found" != "1" ]; then + exit 1 + fi +} diff --git a/testing/framework/utils/generate_hex_encoded_call/index.js b/testing/framework/utils/generate_hex_encoded_call/index.js new file mode 100644 index 000000000..30f89d754 --- /dev/null +++ b/testing/framework/utils/generate_hex_encoded_call/index.js @@ -0,0 +1,165 @@ +const fs = require("fs"); +const { exit } = require("process"); +const { WsProvider, ApiPromise } = require("@polkadot/api"); +const util = require("@polkadot/util"); + +// connect to a substrate chain and return the api object +async function connect(endpoint, types = {}) { + const provider = new WsProvider(endpoint); + const api = await ApiPromise.create({ + provider, + types, + throwOnConnect: false, + }); + return api; +} + +function writeHexEncodedBytesToOutput(method, outputFile) { + console.log("Payload (hex): ", method.toHex()); + console.log("Payload (bytes): ", Array.from(method.toU8a())); + console.log("Payload (plain): ", JSON.stringify(method)); + fs.writeFileSync(outputFile, JSON.stringify(Array.from(method.toU8a()))); +} + +function remarkWithEvent(endpoint, outputFile) { + console.log(`Generating remarkWithEvent from RPC endpoint: 
${endpoint} to outputFile: ${outputFile}`); + connect(endpoint) + .then((api) => { + const call = api.tx.system.remarkWithEvent("Hello"); + writeHexEncodedBytesToOutput(call.method, outputFile); + exit(0); + }) + .catch((e) => { + console.error(e); + exit(1); + }); +} + +function addExporterConfig(endpoint, outputFile, bridgedNetwork, bridgeConfig) { + console.log(`Generating addExporterConfig from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on bridgedNetwork: ${bridgedNetwork}, bridgeConfig: ${bridgeConfig}`); + connect(endpoint) + .then((api) => { + const call = api.tx.bridgeTransfer.addExporterConfig(bridgedNetwork, JSON.parse(bridgeConfig)); + writeHexEncodedBytesToOutput(call.method, outputFile); + exit(0); + }) + .catch((e) => { + console.error(e); + exit(1); + }); +} + +function addUniversalAlias(endpoint, outputFile, location, junction) { + console.log(`Generating addUniversalAlias from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on location: ${location}, junction: ${junction}`); + connect(endpoint) + .then((api) => { + const call = api.tx.bridgeTransfer.addUniversalAlias(JSON.parse(location), JSON.parse(junction)); + writeHexEncodedBytesToOutput(call.method, outputFile); + exit(0); + }) + .catch((e) => { + console.error(e); + exit(1); + }); +} + +function addReserveLocation(endpoint, outputFile, reserve_location) { + console.log(`Generating addReserveLocation from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on reserve_location: ${reserve_location}`); + connect(endpoint) + .then((api) => { + const call = api.tx.bridgeTransfer.addReserveLocation(JSON.parse(reserve_location)); + writeHexEncodedBytesToOutput(call.method, outputFile); + exit(0); + }) + .catch((e) => { + console.error(e); + exit(1); + }); +} + +function removeExporterConfig(endpoint, outputFile, bridgedNetwork) { + console.log(`Generating removeExporterConfig from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on bridgedNetwork: 
${bridgedNetwork}`); + connect(endpoint) + .then((api) => { + const call = api.tx.bridgeTransfer.removeExporterConfig(bridgedNetwork); + writeHexEncodedBytesToOutput(call.method, outputFile); + exit(0); + }) + .catch((e) => { + console.error(e); + exit(1); + }); +} + +function forceCreateAsset(endpoint, outputFile, assetId, assetOwnerAccountId, isSufficient, minBalance) { + var isSufficient = isSufficient == "true" ? true : false; + console.log(`Generating forceCreateAsset from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on assetId: ${assetId}, assetOwnerAccountId: ${assetOwnerAccountId}, isSufficient: ${isSufficient}, minBalance: ${minBalance}`); + connect(endpoint) + .then((api) => { + const call = api.tx.foreignAssets.forceCreate(JSON.parse(assetId), assetOwnerAccountId, isSufficient, minBalance); + writeHexEncodedBytesToOutput(call.method, outputFile); + exit(0); + }) + .catch((e) => { + console.error(e); + exit(1); + }); +} + +function forceXcmVersion(endpoint, outputFile, dest, xcm_version) { + console.log(`Generating forceXcmVersion from RPC endpoint: ${endpoint} to outputFile: ${outputFile}, dest: ${dest}, xcm_version: ${xcm_version}`); + connect(endpoint) + .then((api) => { + const call = api.tx.polkadotXcm.forceXcmVersion(JSON.parse(dest), xcm_version); + writeHexEncodedBytesToOutput(call.method, outputFile); + exit(0); + }) + .catch((e) => { + console.error(e); + exit(1); + }); +} + +if (!process.argv[2] || !process.argv[3]) { + console.log("usage: node ./script/generate_hex_encoded_call "); + exit(1); +} + +const type = process.argv[2]; +const rpcEnpoint = process.argv[3]; +const output = process.argv[4]; +const inputArgs = process.argv.slice(5, process.argv.length); +console.log(`Generating hex-encoded call data for:`); +console.log(` type: ${type}`); +console.log(` rpcEnpoint: ${rpcEnpoint}`); +console.log(` output: ${output}`); +console.log(` inputArgs: ${inputArgs}`); + +switch (type) { + case 'remark-with-event': + 
remarkWithEvent(rpcEnpoint, output); + break; + case 'add-exporter-config': + addExporterConfig(rpcEnpoint, output, inputArgs[0], inputArgs[1]); + break; + case 'remove-exporter-config': + removeExporterConfig(rpcEnpoint, output, inputArgs[0], inputArgs[1]); + break; + case 'add-universal-alias': + addUniversalAlias(rpcEnpoint, output, inputArgs[0], inputArgs[1]); + break; + case 'add-reserve-location': + addReserveLocation(rpcEnpoint, output, inputArgs[0]); + break; + case 'force-create-asset': + forceCreateAsset(rpcEnpoint, output, inputArgs[0], inputArgs[1], inputArgs[2], inputArgs[3]); + break; + case 'force-xcm-version': + forceXcmVersion(rpcEnpoint, output, inputArgs[0], inputArgs[1]); + break; + case 'check': + console.log(`Checking nodejs installation, if you see this everything is ready!`); + break; + default: + console.log(`Sorry, we are out of ${type} - not yet supported!`); +} diff --git a/testing/framework/utils/generate_hex_encoded_call/package-lock.json b/testing/framework/utils/generate_hex_encoded_call/package-lock.json new file mode 100644 index 000000000..b2dddaa19 --- /dev/null +++ b/testing/framework/utils/generate_hex_encoded_call/package-lock.json @@ -0,0 +1,759 @@ +{ + "name": "y", + "version": "y", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "y", + "version": "y", + "license": "MIT", + "dependencies": { + "@polkadot/api": "^10.11", + "@polkadot/util": "^12.6" + } + }, + "node_modules/@noble/curves": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.3.0.tgz", + "integrity": "sha512-t01iSXPuN+Eqzb4eBX0S5oubSqXbK/xXa1Ne18Hj8f9pStxztHCE2gfboSp/dZRLSqfuLpRK2nDXDK+W9puocA==", + "dependencies": { + "@noble/hashes": "1.3.3" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/@noble/hashes": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.3.3.tgz", + "integrity": 
"sha512-V7/fPHgl+jsVPXqqeOzT8egNj2iBIVt+ECeMMG8TdcnTikP3oaBtUVqpT/gYCR68aEBJSF+XbYUxStjbFMqIIA==", + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/@polkadot/api": { + "version": "10.11.2", + "resolved": "https://registry.npmjs.org/@polkadot/api/-/api-10.11.2.tgz", + "integrity": "sha512-AorCZxCWCoTtdbl4DPUZh+ACe/pbLIS1BkdQY0AFJuZllm0x/yWzjgampcPd5jQAA/O3iKShRBkZqj6Mk9yG/A==", + "dependencies": { + "@polkadot/api-augment": "10.11.2", + "@polkadot/api-base": "10.11.2", + "@polkadot/api-derive": "10.11.2", + "@polkadot/keyring": "^12.6.2", + "@polkadot/rpc-augment": "10.11.2", + "@polkadot/rpc-core": "10.11.2", + "@polkadot/rpc-provider": "10.11.2", + "@polkadot/types": "10.11.2", + "@polkadot/types-augment": "10.11.2", + "@polkadot/types-codec": "10.11.2", + "@polkadot/types-create": "10.11.2", + "@polkadot/types-known": "10.11.2", + "@polkadot/util": "^12.6.2", + "@polkadot/util-crypto": "^12.6.2", + "eventemitter3": "^5.0.1", + "rxjs": "^7.8.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/api-augment": { + "version": "10.11.2", + "resolved": "https://registry.npmjs.org/@polkadot/api-augment/-/api-augment-10.11.2.tgz", + "integrity": "sha512-PTpnqpezc75qBqUtgrc0GYB8h9UHjfbHSRZamAbecIVAJ2/zc6CqtnldeaBlIu1IKTgBzi3FFtTyYu+ZGbNT2Q==", + "dependencies": { + "@polkadot/api-base": "10.11.2", + "@polkadot/rpc-augment": "10.11.2", + "@polkadot/types": "10.11.2", + "@polkadot/types-augment": "10.11.2", + "@polkadot/types-codec": "10.11.2", + "@polkadot/util": "^12.6.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/api-base": { + "version": "10.11.2", + "resolved": "https://registry.npmjs.org/@polkadot/api-base/-/api-base-10.11.2.tgz", + "integrity": "sha512-4LIjaUfO9nOzilxo7XqzYKCNMtmUypdk8oHPdrRnSjKEsnK7vDsNi+979z2KXNXd2KFSCFHENmI523fYnMnReg==", + "dependencies": { + "@polkadot/rpc-core": "10.11.2", + 
"@polkadot/types": "10.11.2", + "@polkadot/util": "^12.6.2", + "rxjs": "^7.8.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/api-derive": { + "version": "10.11.2", + "resolved": "https://registry.npmjs.org/@polkadot/api-derive/-/api-derive-10.11.2.tgz", + "integrity": "sha512-m3BQbPionkd1iSlknddxnL2hDtolPIsT+aRyrtn4zgMRPoLjHFmTmovvg8RaUyYofJtZeYrnjMw0mdxiSXx7eA==", + "dependencies": { + "@polkadot/api": "10.11.2", + "@polkadot/api-augment": "10.11.2", + "@polkadot/api-base": "10.11.2", + "@polkadot/rpc-core": "10.11.2", + "@polkadot/types": "10.11.2", + "@polkadot/types-codec": "10.11.2", + "@polkadot/util": "^12.6.2", + "@polkadot/util-crypto": "^12.6.2", + "rxjs": "^7.8.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/keyring": { + "version": "12.6.2", + "resolved": "https://registry.npmjs.org/@polkadot/keyring/-/keyring-12.6.2.tgz", + "integrity": "sha512-O3Q7GVmRYm8q7HuB3S0+Yf/q/EB2egKRRU3fv9b3B7V+A52tKzA+vIwEmNVaD1g5FKW9oB97rmpggs0zaKFqHw==", + "dependencies": { + "@polkadot/util": "12.6.2", + "@polkadot/util-crypto": "12.6.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@polkadot/util": "12.6.2", + "@polkadot/util-crypto": "12.6.2" + } + }, + "node_modules/@polkadot/networks": { + "version": "12.6.2", + "resolved": "https://registry.npmjs.org/@polkadot/networks/-/networks-12.6.2.tgz", + "integrity": "sha512-1oWtZm1IvPWqvMrldVH6NI2gBoCndl5GEwx7lAuQWGr7eNL+6Bdc5K3Z9T0MzFvDGoi2/CBqjX9dRKo39pDC/w==", + "dependencies": { + "@polkadot/util": "12.6.2", + "@substrate/ss58-registry": "^1.44.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/rpc-augment": { + "version": "10.11.2", + "resolved": "https://registry.npmjs.org/@polkadot/rpc-augment/-/rpc-augment-10.11.2.tgz", + "integrity": "sha512-9AhT0WW81/8jYbRcAC6PRmuxXqNhJje8OYiulBQHbG1DTCcjAfz+6VQBke9BwTStzPq7d526+yyBKD17O3zlAA==", + 
"dependencies": { + "@polkadot/rpc-core": "10.11.2", + "@polkadot/types": "10.11.2", + "@polkadot/types-codec": "10.11.2", + "@polkadot/util": "^12.6.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/rpc-core": { + "version": "10.11.2", + "resolved": "https://registry.npmjs.org/@polkadot/rpc-core/-/rpc-core-10.11.2.tgz", + "integrity": "sha512-Ot0CFLWx8sZhLZog20WDuniPA01Bk2StNDsdAQgcFKPwZw6ShPaZQCHuKLQK6I6DodOrem9FXX7c1hvoKJP5Ww==", + "dependencies": { + "@polkadot/rpc-augment": "10.11.2", + "@polkadot/rpc-provider": "10.11.2", + "@polkadot/types": "10.11.2", + "@polkadot/util": "^12.6.2", + "rxjs": "^7.8.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/rpc-provider": { + "version": "10.11.2", + "resolved": "https://registry.npmjs.org/@polkadot/rpc-provider/-/rpc-provider-10.11.2.tgz", + "integrity": "sha512-he5jWMpDJp7e+vUzTZDzpkB7ps3H8psRally+/ZvZZScPvFEjfczT7I1WWY9h58s8+ImeVP/lkXjL9h/gUOt3Q==", + "dependencies": { + "@polkadot/keyring": "^12.6.2", + "@polkadot/types": "10.11.2", + "@polkadot/types-support": "10.11.2", + "@polkadot/util": "^12.6.2", + "@polkadot/util-crypto": "^12.6.2", + "@polkadot/x-fetch": "^12.6.2", + "@polkadot/x-global": "^12.6.2", + "@polkadot/x-ws": "^12.6.2", + "eventemitter3": "^5.0.1", + "mock-socket": "^9.3.1", + "nock": "^13.4.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@substrate/connect": "0.7.35" + } + }, + "node_modules/@polkadot/types": { + "version": "10.11.2", + "resolved": "https://registry.npmjs.org/@polkadot/types/-/types-10.11.2.tgz", + "integrity": "sha512-d52j3xXni+C8GdYZVTSfu8ROAnzXFMlyRvXtor0PudUc8UQHOaC4+mYAkTBGA2gKdmL8MHSfRSbhcxHhsikY6Q==", + "dependencies": { + "@polkadot/keyring": "^12.6.2", + "@polkadot/types-augment": "10.11.2", + "@polkadot/types-codec": "10.11.2", + "@polkadot/types-create": "10.11.2", + "@polkadot/util": "^12.6.2", + "@polkadot/util-crypto": 
"^12.6.2", + "rxjs": "^7.8.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/types-augment": { + "version": "10.11.2", + "resolved": "https://registry.npmjs.org/@polkadot/types-augment/-/types-augment-10.11.2.tgz", + "integrity": "sha512-8eB8ew04wZiE5GnmFvEFW1euJWmF62SGxb1O+8wL3zoUtB9Xgo1vB6w6xbTrd+HLV6jNSeXXnbbF1BEUvi9cNg==", + "dependencies": { + "@polkadot/types": "10.11.2", + "@polkadot/types-codec": "10.11.2", + "@polkadot/util": "^12.6.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/types-codec": { + "version": "10.11.2", + "resolved": "https://registry.npmjs.org/@polkadot/types-codec/-/types-codec-10.11.2.tgz", + "integrity": "sha512-3xjOQL+LOOMzYqlgP9ROL0FQnzU8lGflgYewzau7AsDlFziSEtb49a9BpYo6zil4koC+QB8zQ9OHGFumG08T8w==", + "dependencies": { + "@polkadot/util": "^12.6.2", + "@polkadot/x-bigint": "^12.6.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/types-create": { + "version": "10.11.2", + "resolved": "https://registry.npmjs.org/@polkadot/types-create/-/types-create-10.11.2.tgz", + "integrity": "sha512-SJt23NxYvefRxVZZm6mT9ed1pR6FDoIGQ3xUpbjhTLfU2wuhpKjekMVorYQ6z/gK2JLMu2kV92Ardsz+6GX5XQ==", + "dependencies": { + "@polkadot/types-codec": "10.11.2", + "@polkadot/util": "^12.6.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/types-known": { + "version": "10.11.2", + "resolved": "https://registry.npmjs.org/@polkadot/types-known/-/types-known-10.11.2.tgz", + "integrity": "sha512-kbEIX7NUQFxpDB0FFGNyXX/odY7jbp56RGD+Z4A731fW2xh/DgAQrI994xTzuh0c0EqPE26oQm3kATSpseqo9w==", + "dependencies": { + "@polkadot/networks": "^12.6.2", + "@polkadot/types": "10.11.2", + "@polkadot/types-codec": "10.11.2", + "@polkadot/types-create": "10.11.2", + "@polkadot/util": "^12.6.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/types-support": { + 
"version": "10.11.2", + "resolved": "https://registry.npmjs.org/@polkadot/types-support/-/types-support-10.11.2.tgz", + "integrity": "sha512-X11hoykFYv/3efg4coZy2hUOUc97JhjQMJLzDhHniFwGLlYU8MeLnPdCVGkXx0xDDjTo4/ptS1XpZ5HYcg+gRw==", + "dependencies": { + "@polkadot/util": "^12.6.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/util": { + "version": "12.6.2", + "resolved": "https://registry.npmjs.org/@polkadot/util/-/util-12.6.2.tgz", + "integrity": "sha512-l8TubR7CLEY47240uki0TQzFvtnxFIO7uI/0GoWzpYD/O62EIAMRsuY01N4DuwgKq2ZWD59WhzsLYmA5K6ksdw==", + "dependencies": { + "@polkadot/x-bigint": "12.6.2", + "@polkadot/x-global": "12.6.2", + "@polkadot/x-textdecoder": "12.6.2", + "@polkadot/x-textencoder": "12.6.2", + "@types/bn.js": "^5.1.5", + "bn.js": "^5.2.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/util-crypto": { + "version": "12.6.2", + "resolved": "https://registry.npmjs.org/@polkadot/util-crypto/-/util-crypto-12.6.2.tgz", + "integrity": "sha512-FEWI/dJ7wDMNN1WOzZAjQoIcCP/3vz3wvAp5QQm+lOrzOLj0iDmaIGIcBkz8HVm3ErfSe/uKP0KS4jgV/ib+Mg==", + "dependencies": { + "@noble/curves": "^1.3.0", + "@noble/hashes": "^1.3.3", + "@polkadot/networks": "12.6.2", + "@polkadot/util": "12.6.2", + "@polkadot/wasm-crypto": "^7.3.2", + "@polkadot/wasm-util": "^7.3.2", + "@polkadot/x-bigint": "12.6.2", + "@polkadot/x-randomvalues": "12.6.2", + "@scure/base": "^1.1.5", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@polkadot/util": "12.6.2" + } + }, + "node_modules/@polkadot/wasm-bridge": { + "version": "7.3.2", + "resolved": "https://registry.npmjs.org/@polkadot/wasm-bridge/-/wasm-bridge-7.3.2.tgz", + "integrity": "sha512-AJEXChcf/nKXd5Q/YLEV5dXQMle3UNT7jcXYmIffZAo/KI394a+/24PaISyQjoNC0fkzS1Q8T5pnGGHmXiVz2g==", + "dependencies": { + "@polkadot/wasm-util": "7.3.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + 
"@polkadot/util": "*", + "@polkadot/x-randomvalues": "*" + } + }, + "node_modules/@polkadot/wasm-crypto": { + "version": "7.3.2", + "resolved": "https://registry.npmjs.org/@polkadot/wasm-crypto/-/wasm-crypto-7.3.2.tgz", + "integrity": "sha512-+neIDLSJ6jjVXsjyZ5oLSv16oIpwp+PxFqTUaZdZDoA2EyFRQB8pP7+qLsMNk+WJuhuJ4qXil/7XiOnZYZ+wxw==", + "dependencies": { + "@polkadot/wasm-bridge": "7.3.2", + "@polkadot/wasm-crypto-asmjs": "7.3.2", + "@polkadot/wasm-crypto-init": "7.3.2", + "@polkadot/wasm-crypto-wasm": "7.3.2", + "@polkadot/wasm-util": "7.3.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@polkadot/util": "*", + "@polkadot/x-randomvalues": "*" + } + }, + "node_modules/@polkadot/wasm-crypto-asmjs": { + "version": "7.3.2", + "resolved": "https://registry.npmjs.org/@polkadot/wasm-crypto-asmjs/-/wasm-crypto-asmjs-7.3.2.tgz", + "integrity": "sha512-QP5eiUqUFur/2UoF2KKKYJcesc71fXhQFLT3D4ZjG28Mfk2ZPI0QNRUfpcxVQmIUpV5USHg4geCBNuCYsMm20Q==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@polkadot/util": "*" + } + }, + "node_modules/@polkadot/wasm-crypto-init": { + "version": "7.3.2", + "resolved": "https://registry.npmjs.org/@polkadot/wasm-crypto-init/-/wasm-crypto-init-7.3.2.tgz", + "integrity": "sha512-FPq73zGmvZtnuJaFV44brze3Lkrki3b4PebxCy9Fplw8nTmisKo9Xxtfew08r0njyYh+uiJRAxPCXadkC9sc8g==", + "dependencies": { + "@polkadot/wasm-bridge": "7.3.2", + "@polkadot/wasm-crypto-asmjs": "7.3.2", + "@polkadot/wasm-crypto-wasm": "7.3.2", + "@polkadot/wasm-util": "7.3.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@polkadot/util": "*", + "@polkadot/x-randomvalues": "*" + } + }, + "node_modules/@polkadot/wasm-crypto-wasm": { + "version": "7.3.2", + "resolved": "https://registry.npmjs.org/@polkadot/wasm-crypto-wasm/-/wasm-crypto-wasm-7.3.2.tgz", + "integrity": 
"sha512-15wd0EMv9IXs5Abp1ZKpKKAVyZPhATIAHfKsyoWCEFDLSOA0/K0QGOxzrAlsrdUkiKZOq7uzSIgIDgW8okx2Mw==", + "dependencies": { + "@polkadot/wasm-util": "7.3.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@polkadot/util": "*" + } + }, + "node_modules/@polkadot/wasm-util": { + "version": "7.3.2", + "resolved": "https://registry.npmjs.org/@polkadot/wasm-util/-/wasm-util-7.3.2.tgz", + "integrity": "sha512-bmD+Dxo1lTZyZNxbyPE380wd82QsX+43mgCm40boyKrRppXEyQmWT98v/Poc7chLuskYb6X8IQ6lvvK2bGR4Tg==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@polkadot/util": "*" + } + }, + "node_modules/@polkadot/x-bigint": { + "version": "12.6.2", + "resolved": "https://registry.npmjs.org/@polkadot/x-bigint/-/x-bigint-12.6.2.tgz", + "integrity": "sha512-HSIk60uFPX4GOFZSnIF7VYJz7WZA7tpFJsne7SzxOooRwMTWEtw3fUpFy5cYYOeLh17/kHH1Y7SVcuxzVLc74Q==", + "dependencies": { + "@polkadot/x-global": "12.6.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/x-fetch": { + "version": "12.6.2", + "resolved": "https://registry.npmjs.org/@polkadot/x-fetch/-/x-fetch-12.6.2.tgz", + "integrity": "sha512-8wM/Z9JJPWN1pzSpU7XxTI1ldj/AfC8hKioBlUahZ8gUiJaOF7K9XEFCrCDLis/A1BoOu7Ne6WMx/vsJJIbDWw==", + "dependencies": { + "@polkadot/x-global": "12.6.2", + "node-fetch": "^3.3.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/x-global": { + "version": "12.6.2", + "resolved": "https://registry.npmjs.org/@polkadot/x-global/-/x-global-12.6.2.tgz", + "integrity": "sha512-a8d6m+PW98jmsYDtAWp88qS4dl8DyqUBsd0S+WgyfSMtpEXu6v9nXDgPZgwF5xdDvXhm+P0ZfVkVTnIGrScb5g==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/x-randomvalues": { + "version": "12.6.2", + "resolved": "https://registry.npmjs.org/@polkadot/x-randomvalues/-/x-randomvalues-12.6.2.tgz", + "integrity": 
"sha512-Vr8uG7rH2IcNJwtyf5ebdODMcr0XjoCpUbI91Zv6AlKVYOGKZlKLYJHIwpTaKKB+7KPWyQrk4Mlym/rS7v9feg==", + "dependencies": { + "@polkadot/x-global": "12.6.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@polkadot/util": "12.6.2", + "@polkadot/wasm-util": "*" + } + }, + "node_modules/@polkadot/x-textdecoder": { + "version": "12.6.2", + "resolved": "https://registry.npmjs.org/@polkadot/x-textdecoder/-/x-textdecoder-12.6.2.tgz", + "integrity": "sha512-M1Bir7tYvNappfpFWXOJcnxUhBUFWkUFIdJSyH0zs5LmFtFdbKAeiDXxSp2Swp5ddOZdZgPac294/o2TnQKN1w==", + "dependencies": { + "@polkadot/x-global": "12.6.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/x-textencoder": { + "version": "12.6.2", + "resolved": "https://registry.npmjs.org/@polkadot/x-textencoder/-/x-textencoder-12.6.2.tgz", + "integrity": "sha512-4N+3UVCpI489tUJ6cv3uf0PjOHvgGp9Dl+SZRLgFGt9mvxnvpW/7+XBADRMtlG4xi5gaRK7bgl5bmY6OMDsNdw==", + "dependencies": { + "@polkadot/x-global": "12.6.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@polkadot/x-ws": { + "version": "12.6.2", + "resolved": "https://registry.npmjs.org/@polkadot/x-ws/-/x-ws-12.6.2.tgz", + "integrity": "sha512-cGZWo7K5eRRQCRl2LrcyCYsrc3lRbTlixZh3AzgU8uX4wASVGRlNWi/Hf4TtHNe1ExCDmxabJzdIsABIfrr7xw==", + "dependencies": { + "@polkadot/x-global": "12.6.2", + "tslib": "^2.6.2", + "ws": "^8.15.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@scure/base": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@scure/base/-/base-1.1.5.tgz", + "integrity": "sha512-Brj9FiG2W1MRQSTB212YVPRrcbjkv48FoZi/u4l/zds/ieRrqsh7aUf6CLwkAq61oKXr/ZlTzlY66gLIj3TFTQ==", + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/@substrate/connect": { + "version": "0.7.35", + "resolved": "https://registry.npmjs.org/@substrate/connect/-/connect-0.7.35.tgz", + "integrity": 
"sha512-Io8vkalbwaye+7yXfG1Nj52tOOoJln2bMlc7Q9Yy3vEWqZEVkgKmcPVzbwV0CWL3QD+KMPDA2Dnw/X7EdwgoLw==", + "hasInstallScript": true, + "optional": true, + "dependencies": { + "@substrate/connect-extension-protocol": "^1.0.1", + "smoldot": "2.0.7" + } + }, + "node_modules/@substrate/connect-extension-protocol": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@substrate/connect-extension-protocol/-/connect-extension-protocol-1.0.1.tgz", + "integrity": "sha512-161JhCC1csjH3GE5mPLEd7HbWtwNSPJBg3p1Ksz9SFlTzj/bgEwudiRN2y5i0MoLGCIJRYKyKGMxVnd29PzNjg==", + "optional": true + }, + "node_modules/@substrate/ss58-registry": { + "version": "1.44.0", + "resolved": "https://registry.npmjs.org/@substrate/ss58-registry/-/ss58-registry-1.44.0.tgz", + "integrity": "sha512-7lQ/7mMCzVNSEfDS4BCqnRnKCFKpcOaPrxMeGTXHX1YQzM/m2BBHjbK2C3dJvjv7GYxMiaTq/HdWQj1xS6ss+A==" + }, + "node_modules/@types/bn.js": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/@types/bn.js/-/bn.js-5.1.5.tgz", + "integrity": "sha512-V46N0zwKRF5Q00AZ6hWtN0T8gGmDUaUzLWQvHFo5yThtVwK/VCenFY3wXVbOvNfajEpsTfQM4IN9k/d6gUVX3A==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/node": { + "version": "20.10.5", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.10.5.tgz", + "integrity": "sha512-nNPsNE65wjMxEKI93yOP+NPGGBJz/PoN3kZsVLee0XMiJolxSekEVD8wRwBUBqkwc7UWop0edW50yrCQW4CyRw==", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/bn.js": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.1.tgz", + "integrity": "sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==" + }, + "node_modules/data-uri-to-buffer": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", + "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", + "engines": { + "node": ">= 
12" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/eventemitter3": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", + "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==" + }, + "node_modules/fetch-blob": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", + "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "paypal", + "url": "https://paypal.me/jimmywarting" + } + ], + "dependencies": { + "node-domexception": "^1.0.0", + "web-streams-polyfill": "^3.0.3" + }, + "engines": { + "node": "^12.20 || >= 14.13" + } + }, + "node_modules/formdata-polyfill": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", + "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", + "dependencies": { + "fetch-blob": "^3.1.2" + }, + "engines": { + "node": ">=12.20.0" + } + }, + "node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==" + }, + "node_modules/mock-socket": { + "version": "9.3.1", + "resolved": "https://registry.npmjs.org/mock-socket/-/mock-socket-9.3.1.tgz", 
+ "integrity": "sha512-qxBgB7Qa2sEQgHFjj0dSigq7fX4k6Saisd5Nelwp2q8mlbAFh5dHV9JTTlF8viYJLSSWgMCZFUom8PJcMNBoJw==", + "engines": { + "node": ">= 8" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/nock": { + "version": "13.4.0", + "resolved": "https://registry.npmjs.org/nock/-/nock-13.4.0.tgz", + "integrity": "sha512-W8NVHjO/LCTNA64yxAPHV/K47LpGYcVzgKd3Q0n6owhwvD0Dgoterc25R4rnZbckJEb6Loxz1f5QMuJpJnbSyQ==", + "dependencies": { + "debug": "^4.1.0", + "json-stringify-safe": "^5.0.1", + "propagate": "^2.0.0" + }, + "engines": { + "node": ">= 10.13" + } + }, + "node_modules/node-domexception": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "engines": { + "node": ">=10.5.0" + } + }, + "node_modules/node-fetch": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", + "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", + "dependencies": { + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-fetch" + } + }, + "node_modules/propagate": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz", + "integrity": 
"sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==", + "engines": { + "node": ">= 8" + } + }, + "node_modules/rxjs": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", + "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/smoldot": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/smoldot/-/smoldot-2.0.7.tgz", + "integrity": "sha512-VAOBqEen6vises36/zgrmAT1GWk2qE3X8AGnO7lmQFdskbKx8EovnwS22rtPAG+Y1Rk23/S22kDJUdPANyPkBA==", + "optional": true, + "dependencies": { + "ws": "^8.8.1" + } + }, + "node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" + }, + "node_modules/web-streams-polyfill": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz", + "integrity": "sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==", + "engines": { + "node": ">= 8" + } + }, + "node_modules/ws": { + "version": "8.16.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.16.0.tgz", + "integrity": "sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ==", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + } + 
} +} diff --git a/testing/framework/utils/generate_hex_encoded_call/package.json b/testing/framework/utils/generate_hex_encoded_call/package.json new file mode 100644 index 000000000..ecf0a2483 --- /dev/null +++ b/testing/framework/utils/generate_hex_encoded_call/package.json @@ -0,0 +1,11 @@ +{ + "name": "y", + "version": "y", + "description": "create a scale hex-encoded call values from given message", + "main": "index.js", + "license": "MIT", + "dependencies": { + "@polkadot/api": "^10.11", + "@polkadot/util": "^12.6" + } +} diff --git a/testing/framework/utils/zombienet.sh b/testing/framework/utils/zombienet.sh new file mode 100644 index 000000000..bbcd1a306 --- /dev/null +++ b/testing/framework/utils/zombienet.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +source "${BASH_SOURCE%/*}/common.sh" + +function start_zombienet() { + local test_dir=$1 + local definition_path=$2 + local __zombienet_dir=$3 + local __zombienet_pid=$4 + + local zombienet_name=`basename $definition_path .toml` + local zombienet_dir=$test_dir/$zombienet_name + eval $__zombienet_dir="'$zombienet_dir'" + mkdir -p $zombienet_dir + rm -rf $zombienet_dir + + local logs_dir=$test_dir/logs + mkdir -p $logs_dir + local zombienet_log=$logs_dir/$zombienet_name.log + + echo "Starting $zombienet_name zombienet. Logs available at: $zombienet_log" + start_background_process \ + "$ZOMBIENET_BINARY spawn --dir $zombienet_dir --provider native $definition_path" \ + "$zombienet_log" zombienet_pid + + ensure_process_file $zombienet_pid "$zombienet_dir/zombie.json" 180 + echo "$zombienet_name zombienet started successfully" + + eval $__zombienet_pid="'$zombienet_pid'" +} + +function run_zndsl() { + local zndsl_file=$1 + local zombienet_dir=$2 + + echo "Running $zndsl_file." 
+ $ZOMBIENET_BINARY test --dir $zombienet_dir --provider native $zndsl_file $zombienet_dir/zombie.json + echo +} diff --git a/testing/run-new-test.sh b/testing/run-new-test.sh new file mode 100755 index 000000000..7c84a69aa --- /dev/null +++ b/testing/run-new-test.sh @@ -0,0 +1,48 @@ +#!/bin/bash + +set -e + +trap 'kill -9 -$$ || echo "Environment already teared down"' SIGINT SIGTERM EXIT + +test=$1 +shift + +# whether to use paths for zombienet+bridges tests container or for local testing +ZOMBIENET_DOCKER_PATHS=0 +while [ $# -ne 0 ] +do + arg="$1" + case "$arg" in + --docker) + ZOMBIENET_DOCKER_PATHS=1 + ;; + esac + shift +done + +export POLKADOT_SDK_PATH=`realpath ${BASH_SOURCE%/*}/../..` +export FRAMEWORK_PATH=`realpath ${BASH_SOURCE%/*}/framework` + +# set path to binaries +if [ "$ZOMBIENET_DOCKER_PATHS" -eq 1 ]; then + # otherwise zombienet uses some hardcoded paths + unset RUN_IN_CONTAINER + unset ZOMBIENET_IMAGE + + export POLKADOT_BINARY=/usr/local/bin/polkadot + export POLKADOT_PARACHAIN_BINARY=/usr/local/bin/polkadot-parachain + + export ZOMBIENET_BINARY=/usr/local/bin/zombie + export SUBSTRATE_RELAY_BINARY=/usr/local/bin/substrate-relay +else + export POLKADOT_BINARY=$POLKADOT_SDK_PATH/target/release/polkadot + export POLKADOT_PARACHAIN_BINARY=$POLKADOT_SDK_PATH/target/release/polkadot-parachain + + export ZOMBIENET_BINARY=~/local_bridge_testing/bin/zombienet-linux-x64 + export SUBSTRATE_RELAY_BINARY=~/local_bridge_testing/bin/substrate-relay +fi + +export TEST_DIR=`mktemp -d /tmp/bridges-tests-run-XXXXX` +echo -e "Test folder: $TEST_DIR\n" + +${BASH_SOURCE%/*}/tests/$test/run.sh diff --git a/zombienet/run-tests.sh b/testing/run-tests.sh similarity index 77% rename from zombienet/run-tests.sh rename to testing/run-tests.sh index cf3b529e6..6149d9912 100755 --- a/zombienet/run-tests.sh +++ b/testing/run-tests.sh @@ -27,34 +27,27 @@ done # assuming that we'll be using native provide && all processes will be executing locally # (we need absolute paths 
here, because they're used when scripts are called by zombienet from tmp folders) -export POLKADOT_SDK_FOLDER=`realpath $(dirname "$0")/../..` -export BRIDGE_TESTS_FOLDER=$POLKADOT_SDK_FOLDER/bridges/zombienet/tests +export POLKADOT_SDK_PATH=`realpath $(dirname "$0")/../..` +export BRIDGE_TESTS_FOLDER=$POLKADOT_SDK_PATH/bridges/testing/tests # set pathc to binaries if [ "$ZOMBIENET_DOCKER_PATHS" -eq 1 ]; then - export POLKADOT_BINARY_PATH=/usr/local/bin/polkadot - export POLKADOT_PARACHAIN_BINARY_PATH=/usr/local/bin/polkadot-parachain - export POLKADOT_PARACHAIN_BINARY_PATH_FOR_ASSET_HUB_ROCOCO=/usr/local/bin/polkadot-parachain - export POLKADOT_PARACHAIN_BINARY_PATH_FOR_ASSET_HUB_WESTEND=/usr/local/bin/polkadot-parachain + export POLKADOT_BINARY=/usr/local/bin/polkadot + export POLKADOT_PARACHAIN_BINARY=/usr/local/bin/polkadot-parachain - export SUBSTRATE_RELAY_PATH=/usr/local/bin/substrate-relay + export SUBSTRATE_RELAY_BINARY=/usr/local/bin/substrate-relay export ZOMBIENET_BINARY_PATH=/usr/local/bin/zombie else - export POLKADOT_BINARY_PATH=$POLKADOT_SDK_FOLDER/target/release/polkadot - export POLKADOT_PARACHAIN_BINARY_PATH=$POLKADOT_SDK_FOLDER/target/release/polkadot-parachain - export POLKADOT_PARACHAIN_BINARY_PATH_FOR_ASSET_HUB_ROCOCO=$POLKADOT_PARACHAIN_BINARY_PATH - export POLKADOT_PARACHAIN_BINARY_PATH_FOR_ASSET_HUB_WESTEND=$POLKADOT_PARACHAIN_BINARY_PATH + export POLKADOT_BINARY=$POLKADOT_SDK_PATH/target/release/polkadot + export POLKADOT_PARACHAIN_BINARY=$POLKADOT_SDK_PATH/target/release/polkadot-parachain - export SUBSTRATE_RELAY_PATH=~/local_bridge_testing/bin/substrate-relay + export SUBSTRATE_RELAY_BINARY=~/local_bridge_testing/bin/substrate-relay export ZOMBIENET_BINARY_PATH=~/local_bridge_testing/bin/zombienet-linux fi # check if `wait` supports -p flag if [ `printf "$BASH_VERSION\n5.1" | sort -V | head -n 1` = "5.1" ]; then IS_BASH_5_1=1; else IS_BASH_5_1=0; fi -# check if `wait` supports -p flag -if [ `printf "$BASH_VERSION\n5.1" | sort -V | head 
-n 1` = "5.1" ]; then IS_BASH_5_1=1; else IS_BASH_5_1=0; fi - # bridge configuration export LANE_ID="00000002" diff --git a/zombienet/scripts/invoke-script.sh b/testing/scripts/invoke-script.sh similarity index 62% rename from zombienet/scripts/invoke-script.sh rename to testing/scripts/invoke-script.sh index 835b4fe50..cd0557b07 100755 --- a/zombienet/scripts/invoke-script.sh +++ b/testing/scripts/invoke-script.sh @@ -2,6 +2,6 @@ INVOKE_LOG=`mktemp -p $TEST_FOLDER invoke.XXXXX` -pushd $POLKADOT_SDK_FOLDER/cumulus/scripts +pushd $POLKADOT_SDK_PATH/bridges/testing/environments/rococo-westend ./bridges_rococo_westend.sh $1 >$INVOKE_LOG 2>&1 popd diff --git a/zombienet/scripts/start-relayer.sh b/testing/scripts/start-relayer.sh similarity index 63% rename from zombienet/scripts/start-relayer.sh rename to testing/scripts/start-relayer.sh index 2f72b5ee5..38ea62fad 100755 --- a/zombienet/scripts/start-relayer.sh +++ b/testing/scripts/start-relayer.sh @@ -2,6 +2,6 @@ RELAY_LOG=`mktemp -p $TEST_FOLDER relay.XXXXX` -pushd $POLKADOT_SDK_FOLDER/cumulus/scripts +pushd $POLKADOT_SDK_PATH/bridges/testing/environments/rococo-westend ./bridges_rococo_westend.sh run-relay >$RELAY_LOG 2>&1& popd diff --git a/zombienet/scripts/sync-exit.sh b/testing/scripts/sync-exit.sh similarity index 100% rename from zombienet/scripts/sync-exit.sh rename to testing/scripts/sync-exit.sh diff --git a/testing/tests/0001-asset-transfer/roc-reaches-westend.zndsl b/testing/tests/0001-asset-transfer/roc-reaches-westend.zndsl new file mode 100644 index 000000000..cdb7d28e9 --- /dev/null +++ b/testing/tests/0001-asset-transfer/roc-reaches-westend.zndsl @@ -0,0 +1,12 @@ +Description: User is able to transfer ROC from Rococo Asset Hub to Westend Asset Hub and back +Network: {{ENV_PATH}}/bridge_hub_westend_local_network.toml +Creds: config + +# send 5 ROC to //Alice from Rococo AH to Westend AH +asset-hub-westend-collator1: run {{ENV_PATH}}/helper.sh with "reserve-transfer-assets-from-asset-hub-rococo-local 
5000000000000" within 120 seconds + +# check that //Alice received at least 4.8 ROC on Westend AH +asset-hub-westend-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/wrapped-assets-balance.js with "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY,4800000000000,Rococo" within 600 seconds + +# check that the relayer //Charlie is rewarded by Westend AH +bridge-hub-westend-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/relayer-rewards.js with "5FLSigC9HGRKVhB9FiEo4Y3koPsNmBmLJbpXg2mp1hXcS59Y,0x00000002,0x6268726F,ThisChain,0" within 30 seconds diff --git a/testing/tests/0001-asset-transfer/run.sh b/testing/tests/0001-asset-transfer/run.sh new file mode 100755 index 000000000..a7bb12291 --- /dev/null +++ b/testing/tests/0001-asset-transfer/run.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +set -e + +source "${BASH_SOURCE%/*}/../../framework/utils/common.sh" +source "${BASH_SOURCE%/*}/../../framework/utils/zombienet.sh" + +export ENV_PATH=`realpath ${BASH_SOURCE%/*}/../../environments/rococo-westend` + +$ENV_PATH/spawn.sh --init --start-relayer & +env_pid=$! 
+ +ensure_process_file $env_pid $TEST_DIR/rococo.env 600 +rococo_dir=`cat $TEST_DIR/rococo.env` +echo + +ensure_process_file $env_pid $TEST_DIR/westend.env 300 +westend_dir=`cat $TEST_DIR/westend.env` +echo + +run_zndsl ${BASH_SOURCE%/*}/roc-reaches-westend.zndsl $westend_dir +run_zndsl ${BASH_SOURCE%/*}/wnd-reaches-rococo.zndsl $rococo_dir + +run_zndsl ${BASH_SOURCE%/*}/wroc-reaches-rococo.zndsl $rococo_dir +run_zndsl ${BASH_SOURCE%/*}/wwnd-reaches-westend.zndsl $westend_dir diff --git a/testing/tests/0001-asset-transfer/wnd-reaches-rococo.zndsl b/testing/tests/0001-asset-transfer/wnd-reaches-rococo.zndsl new file mode 100644 index 000000000..dbc03864e --- /dev/null +++ b/testing/tests/0001-asset-transfer/wnd-reaches-rococo.zndsl @@ -0,0 +1,12 @@ +Description: User is able to transfer WND from Westend Asset Hub to Rococo Asset Hub and back +Network: {{ENV_PATH}}/bridge_hub_rococo_local_network.toml +Creds: config + +# send 5 WND to //Alice from Westend AH to Rococo AH +asset-hub-rococo-collator1: run {{ENV_PATH}}/helper.sh with "reserve-transfer-assets-from-asset-hub-westend-local 5000000000000" within 120 seconds + +# check that //Alice received at least 4.8 WND on Rococo AH +asset-hub-rococo-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/wrapped-assets-balance.js with "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY,4800000000000,Westend" within 600 seconds + +# check that the relayer //Charlie is rewarded by Rococo AH +bridge-hub-rococo-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/relayer-rewards.js with "5FLSigC9HGRKVhB9FiEo4Y3koPsNmBmLJbpXg2mp1hXcS59Y,0x00000002,0x62687764,ThisChain,0" within 30 seconds diff --git a/testing/tests/0001-asset-transfer/wroc-reaches-rococo.zndsl b/testing/tests/0001-asset-transfer/wroc-reaches-rococo.zndsl new file mode 100644 index 000000000..9967732ca --- /dev/null +++ b/testing/tests/0001-asset-transfer/wroc-reaches-rococo.zndsl @@ -0,0 +1,10 @@ +Description: User is able to transfer ROC from Rococo Asset Hub to 
Westend Asset Hub and back +Network: {{ENV_PATH}}/bridge_hub_westend_local_network.toml +Creds: config + +# send 3 wROC back to Alice from Westend AH to Rococo AH +asset-hub-rococo-collator1: run {{ENV_PATH}}/helper.sh with "withdraw-reserve-assets-from-asset-hub-westend-local 3000000000000" within 120 seconds + +# check that //Alice received at least 2.8 wROC on Rococo AH +# (we wait until //Alice account increases here - there are no other transactions that may increase it) +asset-hub-rococo-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/native-assets-balance-increased.js with "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY,2800000000000" within 600 seconds diff --git a/testing/tests/0001-asset-transfer/wwnd-reaches-westend.zndsl b/testing/tests/0001-asset-transfer/wwnd-reaches-westend.zndsl new file mode 100644 index 000000000..2037b0baf --- /dev/null +++ b/testing/tests/0001-asset-transfer/wwnd-reaches-westend.zndsl @@ -0,0 +1,10 @@ +Description: User is able to transfer ROC from Rococo Asset Hub to Westend Asset Hub and back +Network: {{ENV_PATH}}/bridge_hub_westend_local_network.toml +Creds: config + +# send 3 wWND back to Alice from Rococo AH to Westend AH +asset-hub-westend-collator1: run {{ENV_PATH}}/helper.sh with "withdraw-reserve-assets-from-asset-hub-rococo-local 3000000000000" within 120 seconds + +# check that //Alice received at least 2.8 wWND on Westend AH +# (we wait until //Alice account increases here - there are no other transactions that may increase it) +asset-hub-westend-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/native-assets-balance-increased.js with "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY,2800000000000" within 600 seconds diff --git a/testing/tests/0002-mandatory-headers-synced-while-idle/rococo-to-westend.zndsl b/testing/tests/0002-mandatory-headers-synced-while-idle/rococo-to-westend.zndsl new file mode 100644 index 000000000..6e381f537 --- /dev/null +++ 
b/testing/tests/0002-mandatory-headers-synced-while-idle/rococo-to-westend.zndsl @@ -0,0 +1,8 @@ +Description: While relayer is idle, we only sync mandatory Rococo (and a single Rococo BH) headers to Westend BH. +Network: {{ENV_PATH}}/bridge_hub_westend_local_network.toml +Creds: config + +# ensure that relayer is only syncing mandatory headers while idle. This includes both headers that were +# generated while relay was offline and those in the next 100 seconds while script is active. +bridge-hub-westend-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/only-mandatory-headers-synced-when-idle.js with "300,rococo-at-westend" within 600 seconds + diff --git a/testing/tests/0002-mandatory-headers-synced-while-idle/run.sh b/testing/tests/0002-mandatory-headers-synced-while-idle/run.sh new file mode 100755 index 000000000..7d5b8d927 --- /dev/null +++ b/testing/tests/0002-mandatory-headers-synced-while-idle/run.sh @@ -0,0 +1,35 @@ +#!/bin/bash + +set -e + +source "${BASH_SOURCE%/*}/../../framework/utils/common.sh" +source "${BASH_SOURCE%/*}/../../framework/utils/zombienet.sh" + +export ENV_PATH=`realpath ${BASH_SOURCE%/*}/../../environments/rococo-westend` + +$ENV_PATH/spawn.sh & +env_pid=$! + +ensure_process_file $env_pid $TEST_DIR/rococo.env 600 +rococo_dir=`cat $TEST_DIR/rococo.env` +echo + +ensure_process_file $env_pid $TEST_DIR/westend.env 300 +westend_dir=`cat $TEST_DIR/westend.env` +echo + +# Sleep for some time before starting the relayer. We want to sleep for at least 1 session, +# which is expected to be 60 seconds for the test environment. +echo -e "Sleeping 90s before starting relayer ...\n" +sleep 90 +${BASH_SOURCE%/*}/../../environments/rococo-westend/start_relayer.sh $rococo_dir $westend_dir relayer_pid + +# Sometimes the relayer syncs multiple parachain heads in the begining leading to test failures. +# See issue: https://github.com/paritytech/parity-bridges-common/issues/2838. +# TODO: Remove this sleep after the issue is fixed. 
+echo -e "Sleeping 180s before runing the tests ...\n" +sleep 180 + +run_zndsl ${BASH_SOURCE%/*}/rococo-to-westend.zndsl $westend_dir +run_zndsl ${BASH_SOURCE%/*}/westend-to-rococo.zndsl $rococo_dir + diff --git a/testing/tests/0002-mandatory-headers-synced-while-idle/westend-to-rococo.zndsl b/testing/tests/0002-mandatory-headers-synced-while-idle/westend-to-rococo.zndsl new file mode 100644 index 000000000..b4b3e4367 --- /dev/null +++ b/testing/tests/0002-mandatory-headers-synced-while-idle/westend-to-rococo.zndsl @@ -0,0 +1,7 @@ +Description: While relayer is idle, we only sync mandatory Westend (and a single Westend BH) headers to Rococo BH. +Network: {{ENV_PATH}}/bridge_hub_rococo_local_network.toml +Creds: config + +# ensure that relayer is only syncing mandatory headers while idle. This includes both headers that were +# generated while relay was offline and those in the next 100 seconds while script is active. +bridge-hub-rococo-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/only-mandatory-headers-synced-when-idle.js with "300,westend-at-rococo" within 600 seconds diff --git a/zombienet/tests/0003-required-headers-synced-while-active-rococo-to-westend.zndsl b/testing/tests/0003-required-headers-synced-while-active-rococo-to-westend.zndsl similarity index 77% rename from zombienet/tests/0003-required-headers-synced-while-active-rococo-to-westend.zndsl rename to testing/tests/0003-required-headers-synced-while-active-rococo-to-westend.zndsl index a4960344f..07b91481d 100644 --- a/zombienet/tests/0003-required-headers-synced-while-active-rococo-to-westend.zndsl +++ b/testing/tests/0003-required-headers-synced-while-active-rococo-to-westend.zndsl @@ -1,5 +1,5 @@ Description: While relayer is active, we only sync mandatory and required Rococo (and Rococo BH) headers to Westend BH. 
-Network: ../../../cumulus/zombienet/bridge-hubs/bridge_hub_westend_local_network.toml +Network: ../environments/rococo-westend/bridge_hub_westend_local_network.toml Creds: config # step 1: initialize Westend AH @@ -9,7 +9,7 @@ asset-hub-westend-collator1: run ../scripts/invoke-script.sh with "init-asset-hu bridge-hub-westend-collator1: run ../scripts/invoke-script.sh with "init-bridge-hub-westend-local" within 60 seconds # step 3: ensure that initialization has completed -asset-hub-westend-collator1: js-script ../helpers/wait-hrmp-channel-opened.js with "1002" within 600 seconds +asset-hub-westend-collator1: js-script ../js-helpers/wait-hrmp-channel-opened.js with "1002" within 600 seconds # step 4: send message from Westend to Rococo asset-hub-westend-collator1: run ../scripts/invoke-script.sh with "reserve-transfer-assets-from-asset-hub-westend-local" within 60 seconds @@ -20,7 +20,7 @@ asset-hub-westend-collator1: run ../scripts/invoke-script.sh with "reserve-trans # (it is started by sibling 0003-required-headers-synced-while-active-westend-to-rococo.zndsl) # step 6: ensure that relayer won't sync any extra headers while delivering messages and confirmations -bridge-hub-westend-collator1: js-script ../helpers/only-required-headers-synced-when-active.js with "500,rococo-at-westend" within 600 seconds +bridge-hub-westend-collator1: js-script ../js-helpers/only-required-headers-synced-when-active.js with "500,rococo-at-westend" within 600 seconds # wait until other network test has completed OR exit with an error too asset-hub-westend-collator1: run ../scripts/sync-exit.sh within 600 seconds diff --git a/zombienet/tests/0003-required-headers-synced-while-active-westend-to-rococo.zndsl b/testing/tests/0003-required-headers-synced-while-active-westend-to-rococo.zndsl similarity index 77% rename from zombienet/tests/0003-required-headers-synced-while-active-westend-to-rococo.zndsl rename to 
testing/tests/0003-required-headers-synced-while-active-westend-to-rococo.zndsl index 33c3ceebc..a6b11fc24 100644 --- a/zombienet/tests/0003-required-headers-synced-while-active-westend-to-rococo.zndsl +++ b/testing/tests/0003-required-headers-synced-while-active-westend-to-rococo.zndsl @@ -1,5 +1,5 @@ Description: While relayer is active, we only sync mandatory and required Westend (and Westend BH) headers to Rococo BH. -Network: ../../../cumulus/zombienet/bridge-hubs/bridge_hub_rococo_local_network.toml +Network: ../environments/rococo-westend/bridge_hub_rococo_local_network.toml Creds: config # step 1: initialize Rococo AH @@ -9,7 +9,7 @@ asset-hub-rococo-collator1: run ../scripts/invoke-script.sh with "init-asset-hub bridge-hub-rococo-collator1: run ../scripts/invoke-script.sh with "init-bridge-hub-rococo-local" within 60 seconds # step 3: ensure that initialization has completed -asset-hub-rococo-collator1: js-script ../helpers/wait-hrmp-channel-opened.js with "1013" within 600 seconds +asset-hub-rococo-collator1: js-script ../js-helpers/wait-hrmp-channel-opened.js with "1013" within 600 seconds # step 4: send message from Rococo to Westend asset-hub-rococo-collator1: run ../scripts/invoke-script.sh with "reserve-transfer-assets-from-asset-hub-rococo-local" within 60 seconds @@ -20,7 +20,7 @@ asset-hub-rococo-collator1: run ../scripts/invoke-script.sh with "reserve-transf bridge-hub-rococo-collator1: run ../scripts/start-relayer.sh within 60 seconds # step 6: ensure that relayer won't sync any extra headers while delivering messages and confirmations -bridge-hub-rococo-collator1: js-script ../helpers/only-required-headers-synced-when-active.js with "500,westend-at-rococo" within 600 seconds +bridge-hub-rococo-collator1: js-script ../js-helpers/only-required-headers-synced-when-active.js with "500,westend-at-rococo" within 600 seconds # wait until other network test has completed OR exit with an error too asset-hub-rococo-collator1: run ../scripts/sync-exit.sh 
within 600 seconds diff --git a/zombienet/tests/0001-asset-transfer-works-rococo-to-westend.zndsl b/zombienet/tests/0001-asset-transfer-works-rococo-to-westend.zndsl deleted file mode 100644 index 82d1eee2f..000000000 --- a/zombienet/tests/0001-asset-transfer-works-rococo-to-westend.zndsl +++ /dev/null @@ -1,39 +0,0 @@ -Description: User is able to transfer ROC from Rococo Asset Hub to Westend Asset Hub and back -Network: ../../../cumulus/zombienet/bridge-hubs/bridge_hub_westend_local_network.toml -Creds: config - -# step 0: start relayer -# (started by sibling 0001-asset-transfer-works-westend-to-rococo.zndsl test) - -# step 1: initialize Westend AH -asset-hub-westend-collator1: run ../scripts/invoke-script.sh with "init-asset-hub-westend-local" within 60 seconds - -# step 2: initialize Westend bridge hub -bridge-hub-westend-collator1: run ../scripts/invoke-script.sh with "init-bridge-hub-westend-local" within 60 seconds - -# step 3: ensure that initialization has completed -asset-hub-westend-collator1: js-script ../helpers/wait-hrmp-channel-opened.js with "1002" within 600 seconds - -# step 4: relay is already started - let's wait until with-Rococo GRANPDA pallet is initialized at Westend -bridge-hub-westend-collator1: js-script ../helpers/best-finalized-header-at-bridged-chain.js with "Rococo,0" within 400 seconds - -# step 5: send WND to //Alice on Rococo AH -# (that's a required part of a sibling 0001-asset-transfer-works-westend-to-rococo.zndsl test) -asset-hub-westend-collator1: run ../scripts/invoke-script.sh with "reserve-transfer-assets-from-asset-hub-westend-local" within 60 seconds - -# step 6: elsewhere Rococo has sent ROC to //Alice - let's wait for it -asset-hub-westend-collator1: js-script ../helpers/wrapped-assets-balance.js with "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY,0,Rococo" within 600 seconds - -# step 7: check that the relayer //Charlie is rewarded by both our AH and target AH -bridge-hub-westend-collator1: js-script 
../helpers/relayer-rewards.js with "5FLSigC9HGRKVhB9FiEo4Y3koPsNmBmLJbpXg2mp1hXcS59Y,0x00000002,0x6268726f,BridgedChain,0" within 300 seconds -bridge-hub-westend-collator1: js-script ../helpers/relayer-rewards.js with "5FLSigC9HGRKVhB9FiEo4Y3koPsNmBmLJbpXg2mp1hXcS59Y,0x00000002,0x6268726F,ThisChain,0" within 300 seconds - -# step 8: send wROC back to Alice at Rococo AH -asset-hub-westend-collator1: run ../scripts/invoke-script.sh with "withdraw-reserve-assets-from-asset-hub-westend-local" within 60 seconds - -# step 9: elsewhere Rococo has sent wWND to //Alice - let's wait for it -# (we wait until //Alice account increases here - there are no other transactionc that may increase it) -asset-hub-westend-collator1: js-script ../helpers/native-assets-balance-increased.js with "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY" within 600 seconds - -# wait until other network test has completed OR exit with an error too -asset-hub-westend-collator1: run ../scripts/sync-exit.sh within 600 seconds diff --git a/zombienet/tests/0001-asset-transfer-works-westend-to-rococo.zndsl b/zombienet/tests/0001-asset-transfer-works-westend-to-rococo.zndsl deleted file mode 100644 index acfe0df03..000000000 --- a/zombienet/tests/0001-asset-transfer-works-westend-to-rococo.zndsl +++ /dev/null @@ -1,39 +0,0 @@ -Description: User is able to transfer WND from Westend Asset Hub to Rococo Asset Hub and back -Network: ../../../cumulus/zombienet/bridge-hubs/bridge_hub_rococo_local_network.toml -Creds: config - -# step 0: start relayer -bridge-hub-rococo-collator1: run ../scripts/start-relayer.sh within 60 seconds - -# step 1: initialize Rococo AH -asset-hub-rococo-collator1: run ../scripts/invoke-script.sh with "init-asset-hub-rococo-local" within 60 seconds - -# step 2: initialize Rococo bridge hub -bridge-hub-rococo-collator1: run ../scripts/invoke-script.sh with "init-bridge-hub-rococo-local" within 60 seconds - -# step 3: ensure that initialization has completed -asset-hub-rococo-collator1: 
js-script ../helpers/wait-hrmp-channel-opened.js with "1013" within 600 seconds - -# step 4: relay is already started - let's wait until with-Westend GRANPDA pallet is initialized at Rococo -bridge-hub-rococo-collator1: js-script ../helpers/best-finalized-header-at-bridged-chain.js with "Westend,0" within 400 seconds - -# step 5: send ROC to //Alice on Westend AH -# (that's a required part of a sibling 0001-asset-transfer-works-rococo-to-westend.zndsl test) -asset-hub-rococo-collator1: run ../scripts/invoke-script.sh with "reserve-transfer-assets-from-asset-hub-rococo-local" within 60 seconds - -# step 6: elsewhere Westend has sent WND to //Alice - let's wait for it -asset-hub-rococo-collator1: js-script ../helpers/wrapped-assets-balance.js with "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY,0,Westend" within 600 seconds - -# step 7: check that the relayer //Charlie is rewarded by both our AH and target AH -bridge-hub-rococo-collator1: js-script ../helpers/relayer-rewards.js with "5FLSigC9HGRKVhB9FiEo4Y3koPsNmBmLJbpXg2mp1hXcS59Y,0x00000002,0x62687764,BridgedChain,0" within 300 seconds -bridge-hub-rococo-collator1: js-script ../helpers/relayer-rewards.js with "5FLSigC9HGRKVhB9FiEo4Y3koPsNmBmLJbpXg2mp1hXcS59Y,0x00000002,0x62687764,ThisChain,0" within 300 seconds - -# step 8: send wWND back to Alice at Westend AH -asset-hub-rococo-collator1: run ../scripts/invoke-script.sh with "withdraw-reserve-assets-from-asset-hub-rococo-local" within 60 seconds - -# step 9: elsewhere Westend has sent wROC to //Alice - let's wait for it -# (we wait until //Alice account increases here - there are no other transactionc that may increase it) -asset-hub-rococo-collator1: js-script ../helpers/native-assets-balance-increased.js with "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY" within 600 seconds - -# wait until other network test has completed OR exit with an error too -asset-hub-rococo-collator1: run ../scripts/sync-exit.sh within 600 seconds diff --git 
a/zombienet/tests/0002-mandatory-headers-synced-while-idle-rococo-to-westend.zndsl b/zombienet/tests/0002-mandatory-headers-synced-while-idle-rococo-to-westend.zndsl deleted file mode 100644 index eb6a75c37..000000000 --- a/zombienet/tests/0002-mandatory-headers-synced-while-idle-rococo-to-westend.zndsl +++ /dev/null @@ -1,26 +0,0 @@ -Description: While relayer is idle, we only sync mandatory Rococo (and a single Rococo BH) headers to Westend BH. -Network: ../../../cumulus/zombienet/bridge-hubs/bridge_hub_westend_local_network.toml -Creds: config - -# step 1: initialize Westend bridge hub -bridge-hub-westend-collator1: run ../scripts/invoke-script.sh with "init-bridge-hub-westend-local" within 60 seconds - -# step 2: sleep some time before starting relayer. We want to sleep for at least 1 session, which is expected to -# be 60 seconds for test environment. -sleep 120 seconds - -# step 3: start relayer -# (it is started by the sibling 0002-mandatory-headers-synced-while-idle-westend-to-rococo.zndsl test file) - -# it also takes some time for relayer to initialize bridge, so let's sleep for 5 minutes to be sure that parachain -# header has been synced - -# step 4: ensure that relayer is only syncing mandatory headers while idle. This includes both headers that were -# born while relay was offline and those in the next 100 seconds while script is active. 
-bridge-hub-westend-collator1: js-script ../helpers/only-mandatory-headers-synced-when-idle.js with "300,rococo-at-westend" within 600 seconds - -# wait until other network test has completed OR exit with an error too -asset-hub-westend-collator1: run ../scripts/sync-exit.sh within 600 seconds - -# wait until other network test has completed OR exit with an error too -asset-hub-westend-collator1: run ../scripts/sync-exit.sh within 600 seconds diff --git a/zombienet/tests/0002-mandatory-headers-synced-while-idle-westend-to-rococo.zndsl b/zombienet/tests/0002-mandatory-headers-synced-while-idle-westend-to-rococo.zndsl deleted file mode 100644 index 728d54d58..000000000 --- a/zombienet/tests/0002-mandatory-headers-synced-while-idle-westend-to-rococo.zndsl +++ /dev/null @@ -1,26 +0,0 @@ -Description: While relayer is idle, we only sync mandatory Westend (and a single Westend BH) headers to Rococo BH. -Network: ../../../cumulus/zombienet/bridge-hubs/bridge_hub_rococo_local_network.toml -Creds: config - -# step 1: initialize Rococo bridge hub -bridge-hub-rococo-collator1: run ../scripts/invoke-script.sh with "init-bridge-hub-rococo-local" within 60 seconds - -# step 2: sleep some time before starting relayer. We want to sleep for at least 1 session, which is expected to -# be 60 seconds for test environment. -sleep 120 seconds - -# step 3: start relayer -bridge-hub-rococo-collator1: run ../scripts/start-relayer.sh within 60 seconds - -# it also takes some time for relayer to initialize bridge, so let's sleep for 5 minutes to be sure that parachain -# header has been synced - -# step 4: ensure that relayer is only syncing mandatory headers while idle. This includes both headers that were -# born while relay was offline and those in the next 100 seconds while script is active. 
-bridge-hub-rococo-collator1: js-script ../helpers/only-mandatory-headers-synced-when-idle.js with "300,westend-at-rococo" within 600 seconds - -# wait until other network test has completed OR exit with an error too -asset-hub-rococo-collator1: run ../scripts/sync-exit.sh within 600 seconds - -# wait until other network test has completed OR exit with an error too -asset-hub-rococo-collator1: run ../scripts/sync-exit.sh within 600 seconds -- GitLab From 8e58eb92e95a662c0b6f40fc2bd6ad8332d5969c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Mar 2024 09:09:18 +0000 Subject: [PATCH 08/39] Bump async-trait from 0.1.78 to 0.1.79 Bumps [async-trait](https://github.com/dtolnay/async-trait) from 0.1.78 to 0.1.79. - [Release notes](https://github.com/dtolnay/async-trait/releases) - [Commits](https://github.com/dtolnay/async-trait/compare/0.1.78...0.1.79) --- updated-dependencies: - dependency-name: async-trait dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- Cargo.lock | 4 ++-- relays/bin-substrate/Cargo.toml | 2 +- relays/client-substrate/Cargo.toml | 2 +- relays/equivocation/Cargo.toml | 2 +- relays/finality/Cargo.toml | 2 +- relays/lib-substrate-relay/Cargo.toml | 2 +- relays/messages/Cargo.toml | 2 +- relays/parachains/Cargo.toml | 2 +- relays/utils/Cargo.toml | 2 +- 9 files changed, 10 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fc835cbcb..2c7e89ab1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -761,9 +761,9 @@ checksum = "ecc7ab41815b3c653ccd2978ec3255c81349336702dfdf62ee6f7069b12a3aae" [[package]] name = "async-trait" -version = "0.1.78" +version = "0.1.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "461abc97219de0eaaf81fe3ef974a540158f3d079c2ab200f891f1a2ef201e85" +checksum = "a507401cad91ec6a857ed5513a2073c82a9b9048762b885bb98655b306964681" dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", diff --git a/relays/bin-substrate/Cargo.toml b/relays/bin-substrate/Cargo.toml index 2b89ce4f1..032fc8ce7 100644 --- a/relays/bin-substrate/Cargo.toml +++ b/relays/bin-substrate/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] anyhow = "1.0" async-std = "1.9.0" -async-trait = "0.1.74" +async-trait = "0.1.79" codec = { package = "parity-scale-codec", version = "3.1.5" } env_logger = "0.11" futures = "0.3.30" diff --git a/relays/client-substrate/Cargo.toml b/relays/client-substrate/Cargo.toml index 48895eff3..c1dea9b50 100644 --- a/relays/client-substrate/Cargo.toml +++ b/relays/client-substrate/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] async-std = { version = "1.6.5", features = ["attributes"] } -async-trait = "0.1.74" +async-trait = "0.1.79" codec = { package = "parity-scale-codec", version = "3.1.5" } futures = "0.3.30" jsonrpsee = { version = "0.17", features = ["macros", "ws-client"] } diff --git a/relays/equivocation/Cargo.toml b/relays/equivocation/Cargo.toml index 23dd4a087..6f65d40fc 
100644 --- a/relays/equivocation/Cargo.toml +++ b/relays/equivocation/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] async-std = { version = "1.6.5", features = ["attributes"] } -async-trait = "0.1.74" +async-trait = "0.1.79" bp-header-chain = { path = "../../primitives/header-chain" } finality-relay = { path = "../finality" } frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } diff --git a/relays/finality/Cargo.toml b/relays/finality/Cargo.toml index 80159b4db..8cf3d409c 100644 --- a/relays/finality/Cargo.toml +++ b/relays/finality/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] async-std = "1.6.5" -async-trait = "0.1.74" +async-trait = "0.1.79" backoff = "0.4" bp-header-chain = { path = "../../primitives/header-chain" } futures = "0.3.30" diff --git a/relays/lib-substrate-relay/Cargo.toml b/relays/lib-substrate-relay/Cargo.toml index 27c5ee02c..db4f37417 100644 --- a/relays/lib-substrate-relay/Cargo.toml +++ b/relays/lib-substrate-relay/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] anyhow = "1.0" async-std = "1.9.0" -async-trait = "0.1.74" +async-trait = "0.1.79" codec = { package = "parity-scale-codec", version = "3.1.5" } futures = "0.3.30" hex = "0.4" diff --git a/relays/messages/Cargo.toml b/relays/messages/Cargo.toml index 699649dd6..1233c8af7 100644 --- a/relays/messages/Cargo.toml +++ b/relays/messages/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] async-std = { version = "1.6.5", features = ["attributes"] } -async-trait = "0.1.74" +async-trait = "0.1.79" env_logger = "0.11" futures = "0.3.30" hex = "0.4" diff --git a/relays/parachains/Cargo.toml b/relays/parachains/Cargo.toml index d2eea9eb6..05a995b85 100644 --- a/relays/parachains/Cargo.toml +++ b/relays/parachains/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] async-std = "1.6.5" -async-trait = "0.1.74" +async-trait = "0.1.79" futures = "0.3.30" log = { workspace = true } relay-utils = { path = 
"../utils" } diff --git a/relays/utils/Cargo.toml b/relays/utils/Cargo.toml index 81574a685..d1eeba2b8 100644 --- a/relays/utils/Cargo.toml +++ b/relays/utils/Cargo.toml @@ -12,7 +12,7 @@ workspace = true ansi_term = "0.12" anyhow = "1.0" async-std = "1.6.5" -async-trait = "0.1.74" +async-trait = "0.1.79" backoff = "0.4" isahc = "1.2" env_logger = "0.11.3" -- GitLab From 4bc73d80526d1bab3ddbe1c94af1b72bb965d02a Mon Sep 17 00:00:00 2001 From: Serban Iorga Date: Mon, 25 Mar 2024 14:07:25 +0100 Subject: [PATCH 09/39] [Backport from `polkadot-sdk`] Move chain definitions to separate folder (#2892) * [Bridges] Move chain definitions to separate folder (#3822) Related to https://github.com/paritytech/parity-bridges-common/issues/2538 This PR doesn't contain any functional changes. The PR moves specific bridged chain definitions from `bridges/primitives` to `bridges/chains` folder in order to facilitate the migration of the `parity-bridges-repo` into `polkadot-sdk` as discussed in https://hackmd.io/LprWjZ0bQXKpFeveYHIRXw?view Apart from this it also includes some cosmetic changes to some `Cargo.toml` files as a result of running `diener workspacify`. 
(cherry picked from commit 0711729d251efebf3486db602119ecfa67d98366) * diener workspacify --- Cargo.toml | 26 +++++++++---------- .../chain-asset-hub-rococo/Cargo.toml | 2 +- .../chain-asset-hub-rococo/src/lib.rs | 0 .../chain-asset-hub-westend/Cargo.toml | 2 +- .../chain-asset-hub-westend/src/lib.rs | 0 .../chain-bridge-hub-cumulus/Cargo.toml | 0 .../chain-bridge-hub-cumulus/src/lib.rs | 0 .../chain-bridge-hub-kusama/Cargo.toml | 0 .../chain-bridge-hub-kusama/src/lib.rs | 0 .../chain-bridge-hub-polkadot/Cargo.toml | 0 .../chain-bridge-hub-polkadot/src/lib.rs | 0 .../chain-bridge-hub-rococo/Cargo.toml | 0 .../chain-bridge-hub-rococo/src/lib.rs | 0 .../chain-bridge-hub-westend/Cargo.toml | 0 .../chain-bridge-hub-westend/src/lib.rs | 0 .../chain-kusama/Cargo.toml | 6 ++--- .../chain-kusama/src/lib.rs | 0 .../chain-polkadot-bulletin/Cargo.toml | 8 +++--- .../chain-polkadot-bulletin/src/lib.rs | 0 .../chain-polkadot/Cargo.toml | 6 ++--- .../chain-polkadot/src/lib.rs | 0 .../chain-rococo/Cargo.toml | 6 ++--- .../chain-rococo/src/lib.rs | 0 .../chain-westend/Cargo.toml | 6 ++--- .../chain-westend/src/lib.rs | 0 relays/bin-substrate/Cargo.toml | 10 +++---- relays/client-bridge-hub-kusama/Cargo.toml | 4 +-- relays/client-bridge-hub-polkadot/Cargo.toml | 6 ++--- relays/client-bridge-hub-rococo/Cargo.toml | 2 +- relays/client-bridge-hub-westend/Cargo.toml | 4 +-- relays/client-kusama/Cargo.toml | 2 +- relays/client-polkadot-bulletin/Cargo.toml | 2 +- relays/client-polkadot/Cargo.toml | 2 +- relays/client-rococo/Cargo.toml | 2 +- relays/client-westend/Cargo.toml | 2 +- relays/lib-substrate-relay/Cargo.toml | 2 +- 36 files changed, 50 insertions(+), 50 deletions(-) rename {primitives => chains}/chain-asset-hub-rococo/Cargo.toml (87%) rename {primitives => chains}/chain-asset-hub-rococo/src/lib.rs (100%) rename {primitives => chains}/chain-asset-hub-westend/Cargo.toml (87%) rename {primitives => chains}/chain-asset-hub-westend/src/lib.rs (100%) rename {primitives => 
chains}/chain-bridge-hub-cumulus/Cargo.toml (100%) rename {primitives => chains}/chain-bridge-hub-cumulus/src/lib.rs (100%) rename {primitives => chains}/chain-bridge-hub-kusama/Cargo.toml (100%) rename {primitives => chains}/chain-bridge-hub-kusama/src/lib.rs (100%) rename {primitives => chains}/chain-bridge-hub-polkadot/Cargo.toml (100%) rename {primitives => chains}/chain-bridge-hub-polkadot/src/lib.rs (100%) rename {primitives => chains}/chain-bridge-hub-rococo/Cargo.toml (100%) rename {primitives => chains}/chain-bridge-hub-rococo/src/lib.rs (100%) rename {primitives => chains}/chain-bridge-hub-westend/Cargo.toml (100%) rename {primitives => chains}/chain-bridge-hub-westend/src/lib.rs (100%) rename {primitives => chains}/chain-kusama/Cargo.toml (75%) rename {primitives => chains}/chain-kusama/src/lib.rs (100%) rename {primitives => chains}/chain-polkadot-bulletin/Cargo.toml (80%) rename {primitives => chains}/chain-polkadot-bulletin/src/lib.rs (100%) rename {primitives => chains}/chain-polkadot/Cargo.toml (75%) rename {primitives => chains}/chain-polkadot/src/lib.rs (100%) rename {primitives => chains}/chain-rococo/Cargo.toml (75%) rename {primitives => chains}/chain-rococo/src/lib.rs (100%) rename {primitives => chains}/chain-westend/Cargo.toml (75%) rename {primitives => chains}/chain-westend/src/lib.rs (100%) diff --git a/Cargo.toml b/Cargo.toml index b71199959..a6edce3ea 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,9 +6,20 @@ license = "GPL-3.0-only" [workspace] resolver = "2" - members = [ "bin/runtime-common", + "chains/chain-asset-hub-rococo", + "chains/chain-asset-hub-westend", + "chains/chain-bridge-hub-cumulus", + "chains/chain-bridge-hub-kusama", + "chains/chain-bridge-hub-polkadot", + "chains/chain-bridge-hub-rococo", + "chains/chain-bridge-hub-westend", + "chains/chain-kusama", + "chains/chain-polkadot", + "chains/chain-polkadot-bulletin", + "chains/chain-rococo", + "chains/chain-westend", "modules/beefy", "modules/grandpa", "modules/messages", 
@@ -17,18 +28,6 @@ members = [ "modules/xcm-bridge-hub", "modules/xcm-bridge-hub-router", "primitives/beefy", - "primitives/chain-asset-hub-rococo", - "primitives/chain-asset-hub-westend", - "primitives/chain-bridge-hub-cumulus", - "primitives/chain-bridge-hub-kusama", - "primitives/chain-bridge-hub-polkadot", - "primitives/chain-bridge-hub-rococo", - "primitives/chain-bridge-hub-westend", - "primitives/chain-kusama", - "primitives/chain-polkadot", - "primitives/chain-polkadot-bulletin", - "primitives/chain-rococo", - "primitives/chain-westend", "primitives/header-chain", "primitives/messages", "primitives/parachains", @@ -36,6 +35,7 @@ members = [ "primitives/relayers", "primitives/runtime", "primitives/test-utils", + "primitives/xcm-bridge-hub", "primitives/xcm-bridge-hub-router", "relays/bin-substrate", "relays/client-bridge-hub-kusama", diff --git a/primitives/chain-asset-hub-rococo/Cargo.toml b/chains/chain-asset-hub-rococo/Cargo.toml similarity index 87% rename from primitives/chain-asset-hub-rococo/Cargo.toml rename to chains/chain-asset-hub-rococo/Cargo.toml index 08be719bf..336e2b4d4 100644 --- a/primitives/chain-asset-hub-rococo/Cargo.toml +++ b/chains/chain-asset-hub-rococo/Cargo.toml @@ -17,7 +17,7 @@ scale-info = { version = "2.11.0", default-features = false, features = ["derive frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } # Bridge Dependencies -bp-xcm-bridge-hub-router = { path = "../xcm-bridge-hub-router", default-features = false } +bp-xcm-bridge-hub-router = { path = "../../primitives/xcm-bridge-hub-router", default-features = false } [features] default = ["std"] diff --git a/primitives/chain-asset-hub-rococo/src/lib.rs b/chains/chain-asset-hub-rococo/src/lib.rs similarity index 100% rename from primitives/chain-asset-hub-rococo/src/lib.rs rename to chains/chain-asset-hub-rococo/src/lib.rs diff --git a/primitives/chain-asset-hub-westend/Cargo.toml 
b/chains/chain-asset-hub-westend/Cargo.toml similarity index 87% rename from primitives/chain-asset-hub-westend/Cargo.toml rename to chains/chain-asset-hub-westend/Cargo.toml index d8245c7a8..99b6c26e2 100644 --- a/primitives/chain-asset-hub-westend/Cargo.toml +++ b/chains/chain-asset-hub-westend/Cargo.toml @@ -17,7 +17,7 @@ scale-info = { version = "2.11.0", default-features = false, features = ["derive frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } # Bridge Dependencies -bp-xcm-bridge-hub-router = { path = "../xcm-bridge-hub-router", default-features = false } +bp-xcm-bridge-hub-router = { path = "../../primitives/xcm-bridge-hub-router", default-features = false } [features] default = ["std"] diff --git a/primitives/chain-asset-hub-westend/src/lib.rs b/chains/chain-asset-hub-westend/src/lib.rs similarity index 100% rename from primitives/chain-asset-hub-westend/src/lib.rs rename to chains/chain-asset-hub-westend/src/lib.rs diff --git a/primitives/chain-bridge-hub-cumulus/Cargo.toml b/chains/chain-bridge-hub-cumulus/Cargo.toml similarity index 100% rename from primitives/chain-bridge-hub-cumulus/Cargo.toml rename to chains/chain-bridge-hub-cumulus/Cargo.toml diff --git a/primitives/chain-bridge-hub-cumulus/src/lib.rs b/chains/chain-bridge-hub-cumulus/src/lib.rs similarity index 100% rename from primitives/chain-bridge-hub-cumulus/src/lib.rs rename to chains/chain-bridge-hub-cumulus/src/lib.rs diff --git a/primitives/chain-bridge-hub-kusama/Cargo.toml b/chains/chain-bridge-hub-kusama/Cargo.toml similarity index 100% rename from primitives/chain-bridge-hub-kusama/Cargo.toml rename to chains/chain-bridge-hub-kusama/Cargo.toml diff --git a/primitives/chain-bridge-hub-kusama/src/lib.rs b/chains/chain-bridge-hub-kusama/src/lib.rs similarity index 100% rename from primitives/chain-bridge-hub-kusama/src/lib.rs rename to chains/chain-bridge-hub-kusama/src/lib.rs diff --git 
a/primitives/chain-bridge-hub-polkadot/Cargo.toml b/chains/chain-bridge-hub-polkadot/Cargo.toml similarity index 100% rename from primitives/chain-bridge-hub-polkadot/Cargo.toml rename to chains/chain-bridge-hub-polkadot/Cargo.toml diff --git a/primitives/chain-bridge-hub-polkadot/src/lib.rs b/chains/chain-bridge-hub-polkadot/src/lib.rs similarity index 100% rename from primitives/chain-bridge-hub-polkadot/src/lib.rs rename to chains/chain-bridge-hub-polkadot/src/lib.rs diff --git a/primitives/chain-bridge-hub-rococo/Cargo.toml b/chains/chain-bridge-hub-rococo/Cargo.toml similarity index 100% rename from primitives/chain-bridge-hub-rococo/Cargo.toml rename to chains/chain-bridge-hub-rococo/Cargo.toml diff --git a/primitives/chain-bridge-hub-rococo/src/lib.rs b/chains/chain-bridge-hub-rococo/src/lib.rs similarity index 100% rename from primitives/chain-bridge-hub-rococo/src/lib.rs rename to chains/chain-bridge-hub-rococo/src/lib.rs diff --git a/primitives/chain-bridge-hub-westend/Cargo.toml b/chains/chain-bridge-hub-westend/Cargo.toml similarity index 100% rename from primitives/chain-bridge-hub-westend/Cargo.toml rename to chains/chain-bridge-hub-westend/Cargo.toml diff --git a/primitives/chain-bridge-hub-westend/src/lib.rs b/chains/chain-bridge-hub-westend/src/lib.rs similarity index 100% rename from primitives/chain-bridge-hub-westend/src/lib.rs rename to chains/chain-bridge-hub-westend/src/lib.rs diff --git a/primitives/chain-kusama/Cargo.toml b/chains/chain-kusama/Cargo.toml similarity index 75% rename from primitives/chain-kusama/Cargo.toml rename to chains/chain-kusama/Cargo.toml index 465acf121..56a4386af 100644 --- a/primitives/chain-kusama/Cargo.toml +++ b/chains/chain-kusama/Cargo.toml @@ -13,9 +13,9 @@ workspace = true # Bridge Dependencies -bp-header-chain = { path = "../header-chain", default-features = false } -bp-polkadot-core = { path = "../polkadot-core", default-features = false } -bp-runtime = { path = "../runtime", default-features = false } 
+bp-header-chain = { path = "../../primitives/header-chain", default-features = false } +bp-polkadot-core = { path = "../../primitives/polkadot-core", default-features = false } +bp-runtime = { path = "../../primitives/runtime", default-features = false } # Substrate Based Dependencies diff --git a/primitives/chain-kusama/src/lib.rs b/chains/chain-kusama/src/lib.rs similarity index 100% rename from primitives/chain-kusama/src/lib.rs rename to chains/chain-kusama/src/lib.rs diff --git a/primitives/chain-polkadot-bulletin/Cargo.toml b/chains/chain-polkadot-bulletin/Cargo.toml similarity index 80% rename from primitives/chain-polkadot-bulletin/Cargo.toml rename to chains/chain-polkadot-bulletin/Cargo.toml index 797a893b2..1e2bbaf6b 100644 --- a/primitives/chain-polkadot-bulletin/Cargo.toml +++ b/chains/chain-polkadot-bulletin/Cargo.toml @@ -15,10 +15,10 @@ scale-info = { version = "2.11.0", default-features = false, features = ["derive # Bridge Dependencies -bp-header-chain = { path = "../header-chain", default-features = false } -bp-messages = { path = "../messages", default-features = false } -bp-polkadot-core = { path = "../polkadot-core", default-features = false } -bp-runtime = { path = "../runtime", default-features = false } +bp-header-chain = { path = "../../primitives/header-chain", default-features = false } +bp-messages = { path = "../../primitives/messages", default-features = false } +bp-polkadot-core = { path = "../../primitives/polkadot-core", default-features = false } +bp-runtime = { path = "../../primitives/runtime", default-features = false } # Substrate Based Dependencies diff --git a/primitives/chain-polkadot-bulletin/src/lib.rs b/chains/chain-polkadot-bulletin/src/lib.rs similarity index 100% rename from primitives/chain-polkadot-bulletin/src/lib.rs rename to chains/chain-polkadot-bulletin/src/lib.rs diff --git a/primitives/chain-polkadot/Cargo.toml b/chains/chain-polkadot/Cargo.toml similarity index 75% rename from 
primitives/chain-polkadot/Cargo.toml rename to chains/chain-polkadot/Cargo.toml index 20080a790..5faee3bd3 100644 --- a/primitives/chain-polkadot/Cargo.toml +++ b/chains/chain-polkadot/Cargo.toml @@ -13,9 +13,9 @@ workspace = true # Bridge Dependencies -bp-header-chain = { path = "../header-chain", default-features = false } -bp-polkadot-core = { path = "../polkadot-core", default-features = false } -bp-runtime = { path = "../runtime", default-features = false } +bp-header-chain = { path = "../../primitives/header-chain", default-features = false } +bp-polkadot-core = { path = "../../primitives/polkadot-core", default-features = false } +bp-runtime = { path = "../../primitives/runtime", default-features = false } # Substrate Based Dependencies diff --git a/primitives/chain-polkadot/src/lib.rs b/chains/chain-polkadot/src/lib.rs similarity index 100% rename from primitives/chain-polkadot/src/lib.rs rename to chains/chain-polkadot/src/lib.rs diff --git a/primitives/chain-rococo/Cargo.toml b/chains/chain-rococo/Cargo.toml similarity index 75% rename from primitives/chain-rococo/Cargo.toml rename to chains/chain-rococo/Cargo.toml index 459ee6f9f..401611beb 100644 --- a/primitives/chain-rococo/Cargo.toml +++ b/chains/chain-rococo/Cargo.toml @@ -13,9 +13,9 @@ workspace = true # Bridge Dependencies -bp-header-chain = { path = "../header-chain", default-features = false } -bp-polkadot-core = { path = "../polkadot-core", default-features = false } -bp-runtime = { path = "../runtime", default-features = false } +bp-header-chain = { path = "../../primitives/header-chain", default-features = false } +bp-polkadot-core = { path = "../../primitives/polkadot-core", default-features = false } +bp-runtime = { path = "../../primitives/runtime", default-features = false } # Substrate Based Dependencies diff --git a/primitives/chain-rococo/src/lib.rs b/chains/chain-rococo/src/lib.rs similarity index 100% rename from primitives/chain-rococo/src/lib.rs rename to 
chains/chain-rococo/src/lib.rs diff --git a/primitives/chain-westend/Cargo.toml b/chains/chain-westend/Cargo.toml similarity index 75% rename from primitives/chain-westend/Cargo.toml rename to chains/chain-westend/Cargo.toml index 4710da7bb..d697e2b36 100644 --- a/primitives/chain-westend/Cargo.toml +++ b/chains/chain-westend/Cargo.toml @@ -13,9 +13,9 @@ workspace = true # Bridge Dependencies -bp-header-chain = { path = "../header-chain", default-features = false } -bp-polkadot-core = { path = "../polkadot-core", default-features = false } -bp-runtime = { path = "../runtime", default-features = false } +bp-header-chain = { path = "../../primitives/header-chain", default-features = false } +bp-polkadot-core = { path = "../../primitives/polkadot-core", default-features = false } +bp-runtime = { path = "../../primitives/runtime", default-features = false } # Substrate Based Dependencies diff --git a/primitives/chain-westend/src/lib.rs b/chains/chain-westend/src/lib.rs similarity index 100% rename from primitives/chain-westend/src/lib.rs rename to chains/chain-westend/src/lib.rs diff --git a/relays/bin-substrate/Cargo.toml b/relays/bin-substrate/Cargo.toml index 032fc8ce7..6c0ac1da6 100644 --- a/relays/bin-substrate/Cargo.toml +++ b/relays/bin-substrate/Cargo.toml @@ -26,15 +26,15 @@ signal-hook-async-std = "0.2.2" strum = { version = "0.26.2", features = ["derive"] } # Bridge dependencies -bp-bridge-hub-polkadot = { path = "../../primitives/chain-bridge-hub-polkadot" } -bp-bridge-hub-rococo = { path = "../../primitives/chain-bridge-hub-rococo" } +bp-bridge-hub-polkadot = { path = "../../chains/chain-bridge-hub-polkadot" } +bp-bridge-hub-rococo = { path = "../../chains/chain-bridge-hub-rococo" } bp-header-chain = { path = "../../primitives/header-chain" } bp-messages = { path = "../../primitives/messages" } bp-parachains = { path = "../../primitives/parachains" } -bp-polkadot-bulletin = { path = "../../primitives/chain-polkadot-bulletin" } -bp-polkadot = { path = 
"../../primitives/chain-polkadot" } +bp-polkadot-bulletin = { path = "../../chains/chain-polkadot-bulletin" } +bp-polkadot = { path = "../../chains/chain-polkadot" } bp-polkadot-core = { path = "../../primitives/polkadot-core" } -bp-rococo = { path = "../../primitives/chain-rococo" } +bp-rococo = { path = "../../chains/chain-rococo" } bp-runtime = { path = "../../primitives/runtime" } bridge-runtime-common = { path = "../../bin/runtime-common" } pallet-bridge-parachains = { path = "../../modules/parachains" } diff --git a/relays/client-bridge-hub-kusama/Cargo.toml b/relays/client-bridge-hub-kusama/Cargo.toml index 6e41bb3fc..a02dc26a0 100644 --- a/relays/client-bridge-hub-kusama/Cargo.toml +++ b/relays/client-bridge-hub-kusama/Cargo.toml @@ -16,11 +16,11 @@ subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies -bp-bridge-hub-kusama = { path = "../../primitives/chain-bridge-hub-kusama" } +bp-bridge-hub-kusama = { path = "../../chains/chain-bridge-hub-kusama" } bp-header-chain = { path = "../../primitives/header-chain" } bp-messages = { path = "../../primitives/messages" } bp-parachains = { path = "../../primitives/parachains" } -bp-polkadot = { path = "../../primitives/chain-polkadot" } +bp-polkadot = { path = "../../chains/chain-polkadot" } bp-polkadot-core = { path = "../../primitives/polkadot-core" } bridge-runtime-common = { path = "../../bin/runtime-common" } relay-substrate-client = { path = "../client-substrate" } diff --git a/relays/client-bridge-hub-polkadot/Cargo.toml b/relays/client-bridge-hub-polkadot/Cargo.toml index f5fc69d02..abfd5bae4 100644 --- a/relays/client-bridge-hub-polkadot/Cargo.toml +++ b/relays/client-bridge-hub-polkadot/Cargo.toml @@ -15,13 +15,13 @@ subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies -bp-bridge-hub-polkadot = { path = "../../primitives/chain-bridge-hub-polkadot" } +bp-bridge-hub-polkadot = { path = 
"../../chains/chain-bridge-hub-polkadot" } bp-header-chain = { path = "../../primitives/header-chain" } bp-messages = { path = "../../primitives/messages" } bp-parachains = { path = "../../primitives/parachains" } -bp-polkadot = { path = "../../primitives/chain-polkadot" } +bp-polkadot = { path = "../../chains/chain-polkadot" } bp-polkadot-core = { path = "../../primitives/polkadot-core" } -bp-kusama = { path = "../../primitives/chain-kusama" } +bp-kusama = { path = "../../chains/chain-kusama" } bp-runtime = { path = "../../primitives/runtime" } bridge-runtime-common = { path = "../../bin/runtime-common" } relay-substrate-client = { path = "../client-substrate" } diff --git a/relays/client-bridge-hub-rococo/Cargo.toml b/relays/client-bridge-hub-rococo/Cargo.toml index efccfa5fb..70d65ac19 100644 --- a/relays/client-bridge-hub-rococo/Cargo.toml +++ b/relays/client-bridge-hub-rococo/Cargo.toml @@ -15,7 +15,7 @@ subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies -bp-bridge-hub-rococo = { path = "../../primitives/chain-bridge-hub-rococo" } +bp-bridge-hub-rococo = { path = "../../chains/chain-bridge-hub-rococo" } bp-header-chain = { path = "../../primitives/header-chain" } bp-messages = { path = "../../primitives/messages" } bp-parachains = { path = "../../primitives/parachains" } diff --git a/relays/client-bridge-hub-westend/Cargo.toml b/relays/client-bridge-hub-westend/Cargo.toml index 188839e2d..c65c7ec00 100644 --- a/relays/client-bridge-hub-westend/Cargo.toml +++ b/relays/client-bridge-hub-westend/Cargo.toml @@ -15,12 +15,12 @@ subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies -bp-bridge-hub-westend = { path = "../../primitives/chain-bridge-hub-westend" } +bp-bridge-hub-westend = { path = "../../chains/chain-bridge-hub-westend" } bp-header-chain = { path = "../../primitives/header-chain" } bp-messages = { path = "../../primitives/messages" } bp-parachains = { path 
= "../../primitives/parachains" } bp-polkadot-core = { path = "../../primitives/polkadot-core" } -bp-rococo = { path = "../../primitives/chain-rococo" } +bp-rococo = { path = "../../chains/chain-rococo" } bridge-runtime-common = { path = "../../bin/runtime-common" } relay-substrate-client = { path = "../client-substrate" } diff --git a/relays/client-kusama/Cargo.toml b/relays/client-kusama/Cargo.toml index 6d3a71b38..c459f347a 100644 --- a/relays/client-kusama/Cargo.toml +++ b/relays/client-kusama/Cargo.toml @@ -15,7 +15,7 @@ subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies -bp-kusama = { path = "../../primitives/chain-kusama" } +bp-kusama = { path = "../../chains/chain-kusama" } bp-polkadot-core = { path = "../../primitives/polkadot-core" } bp-runtime = { path = "../../primitives/runtime" } diff --git a/relays/client-polkadot-bulletin/Cargo.toml b/relays/client-polkadot-bulletin/Cargo.toml index 2160a35fb..842b53c48 100644 --- a/relays/client-polkadot-bulletin/Cargo.toml +++ b/relays/client-polkadot-bulletin/Cargo.toml @@ -18,7 +18,7 @@ subxt = { version = "0.32.1", default-features = false, features = ["native"] } bp-header-chain = { path = "../../primitives/header-chain" } bp-messages = { path = "../../primitives/messages" } bp-polkadot-core = { path = "../../primitives/polkadot-core" } -bp-polkadot-bulletin = { path = "../../primitives/chain-polkadot-bulletin" } +bp-polkadot-bulletin = { path = "../../chains/chain-polkadot-bulletin" } bp-runtime = { path = "../../primitives/runtime" } bridge-runtime-common = { path = "../../bin/runtime-common" } relay-substrate-client = { path = "../client-substrate" } diff --git a/relays/client-polkadot/Cargo.toml b/relays/client-polkadot/Cargo.toml index daa3423bb..19cf9f933 100644 --- a/relays/client-polkadot/Cargo.toml +++ b/relays/client-polkadot/Cargo.toml @@ -15,7 +15,7 @@ subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge 
dependencies -bp-polkadot = { path = "../../primitives/chain-polkadot" } +bp-polkadot = { path = "../../chains/chain-polkadot" } bp-polkadot-core = { path = "../../primitives/polkadot-core" } bp-runtime = { path = "../../primitives/runtime" } diff --git a/relays/client-rococo/Cargo.toml b/relays/client-rococo/Cargo.toml index 7a6b7d3ba..5e847600f 100644 --- a/relays/client-rococo/Cargo.toml +++ b/relays/client-rococo/Cargo.toml @@ -16,7 +16,7 @@ subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies bp-polkadot-core = { path = "../../primitives/polkadot-core" } -bp-rococo = { path = "../../primitives/chain-rococo" } +bp-rococo = { path = "../../chains/chain-rococo" } relay-substrate-client = { path = "../client-substrate" } relay-utils = { path = "../utils" } diff --git a/relays/client-westend/Cargo.toml b/relays/client-westend/Cargo.toml index 2c9aacc16..8b56e51d0 100644 --- a/relays/client-westend/Cargo.toml +++ b/relays/client-westend/Cargo.toml @@ -17,7 +17,7 @@ subxt = { version = "0.32.1", default-features = false, features = ["native"] } bp-polkadot-core = { path = "../../primitives/polkadot-core" } bp-runtime = { path = "../../primitives/runtime" } -bp-westend = { path = "../../primitives/chain-westend" } +bp-westend = { path = "../../chains/chain-westend" } relay-substrate-client = { path = "../client-substrate" } relay-utils = { path = "../utils" } diff --git a/relays/lib-substrate-relay/Cargo.toml b/relays/lib-substrate-relay/Cargo.toml index db4f37417..db4c3a7f8 100644 --- a/relays/lib-substrate-relay/Cargo.toml +++ b/relays/lib-substrate-relay/Cargo.toml @@ -56,7 +56,7 @@ sp-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk", bra sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } [dev-dependencies] -bp-rococo = { path = "../../primitives/chain-rococo" } +bp-rococo = { path = "../../chains/chain-rococo" } pallet-transaction-payment = { git = 
"https://github.com/paritytech/polkadot-sdk", branch = "master" } relay-bridge-hub-rococo-client = { path = "../client-bridge-hub-rococo" } relay-bridge-hub-westend-client = { path = "../client-bridge-hub-westend" } -- GitLab From a6bac6bcfb8b527b369f5101c23539e3e83fde65 Mon Sep 17 00:00:00 2001 From: Serban Iorga Date: Mon, 25 Mar 2024 15:14:25 +0100 Subject: [PATCH 10/39] Move relay clients to separate folder (#2893) --- Cargo.toml | 22 +++---- .../client-bridge-hub-kusama/Cargo.toml | 2 +- .../src/codegen_runtime.rs | 0 .../client-bridge-hub-kusama/src/lib.rs | 0 .../src/runtime_wrapper.rs | 0 .../client-bridge-hub-polkadot/Cargo.toml | 2 +- .../src/codegen_runtime.rs | 0 .../client-bridge-hub-polkadot/src/lib.rs | 0 .../src/runtime_wrapper.rs | 0 .../client-bridge-hub-rococo/Cargo.toml | 2 +- .../src/codegen_runtime.rs | 0 .../client-bridge-hub-rococo/src/lib.rs | 0 .../client-bridge-hub-westend/Cargo.toml | 2 +- .../src/codegen_runtime.rs | 0 .../client-bridge-hub-westend/src/lib.rs | 0 .../client-kusama/Cargo.toml | 4 +- .../client-kusama/src/codegen_runtime.rs | 0 .../client-kusama/src/lib.rs | 0 .../client-polkadot-bulletin/Cargo.toml | 4 +- .../src/codegen_runtime.rs | 0 .../client-polkadot-bulletin/src/lib.rs | 0 .../client-polkadot/Cargo.toml | 4 +- .../client-polkadot/src/codegen_runtime.rs | 0 .../client-polkadot/src/lib.rs | 0 .../client-rococo/Cargo.toml | 4 +- .../client-rococo/src/codegen_runtime.rs | 0 .../client-rococo/src/lib.rs | 0 .../client-westend/Cargo.toml | 4 +- .../client-westend/src/codegen_runtime.rs | 0 .../client-westend/src/lib.rs | 0 relays/bin-substrate/Cargo.toml | 66 ------------------- relays/lib-substrate-relay/Cargo.toml | 6 +- substrate-relay/Cargo.toml | 66 +++++++++++++++++++ ..._kusama_messages_to_bridge_hub_polkadot.rs | 0 ..._polkadot_messages_to_bridge_hub_kusama.rs | 0 .../kusama_headers_to_bridge_hub_polkadot.rs | 0 ...usama_parachains_to_bridge_hub_polkadot.rs | 0 .../src/bridges/kusama_polkadot/mod.rs | 0 
.../polkadot_headers_to_bridge_hub_kusama.rs | 0 ...olkadot_parachains_to_bridge_hub_kusama.rs | 0 .../src/bridges/mod.rs | 0 ..._polkadot_messages_to_polkadot_bulletin.rs | 0 .../src/bridges/polkadot_bulletin/mod.rs | 0 ...bulletin_headers_to_bridge_hub_polkadot.rs | 0 ...ulletin_messages_to_bridge_hub_polkadot.rs | 0 .../polkadot_headers_to_polkadot_bulletin.rs | 0 ...olkadot_parachains_to_polkadot_bulletin.rs | 0 ..._hub_rococo_messages_to_rococo_bulletin.rs | 0 .../src/bridges/rococo_bulletin/mod.rs | 0 ...o_bulletin_headers_to_bridge_hub_rococo.rs | 0 ..._bulletin_messages_to_bridge_hub_rococo.rs | 0 .../rococo_headers_to_rococo_bulletin.rs | 0 .../rococo_parachains_to_rococo_bulletin.rs | 0 ...b_rococo_messages_to_bridge_hub_westend.rs | 0 ...b_westend_messages_to_bridge_hub_rococo.rs | 0 .../src/bridges/rococo_westend/mod.rs | 0 .../rococo_headers_to_bridge_hub_westend.rs | 0 ...rococo_parachains_to_bridge_hub_westend.rs | 0 .../westend_headers_to_bridge_hub_rococo.rs | 0 ...westend_parachains_to_bridge_hub_rococo.rs | 0 .../src/cli/chain_schema.rs | 0 .../src/cli/detect_equivocations.rs | 0 .../src/cli/init_bridge.rs | 0 .../src/cli/mod.rs | 0 .../src/cli/relay_headers.rs | 0 .../src/cli/relay_headers_and_messages.rs | 0 .../src/cli/relay_messages.rs | 0 .../src/cli/relay_parachains.rs | 0 .../src/main.rs | 0 69 files changed, 94 insertions(+), 94 deletions(-) rename {relays => relay-clients}/client-bridge-hub-kusama/Cargo.toml (94%) rename {relays => relay-clients}/client-bridge-hub-kusama/src/codegen_runtime.rs (100%) rename {relays => relay-clients}/client-bridge-hub-kusama/src/lib.rs (100%) rename {relays => relay-clients}/client-bridge-hub-kusama/src/runtime_wrapper.rs (100%) rename {relays => relay-clients}/client-bridge-hub-polkadot/Cargo.toml (95%) rename {relays => relay-clients}/client-bridge-hub-polkadot/src/codegen_runtime.rs (100%) rename {relays => relay-clients}/client-bridge-hub-polkadot/src/lib.rs (100%) rename {relays => 
relay-clients}/client-bridge-hub-polkadot/src/runtime_wrapper.rs (100%) rename {relays => relay-clients}/client-bridge-hub-rococo/Cargo.toml (94%) rename {relays => relay-clients}/client-bridge-hub-rococo/src/codegen_runtime.rs (100%) rename {relays => relay-clients}/client-bridge-hub-rococo/src/lib.rs (100%) rename {relays => relay-clients}/client-bridge-hub-westend/Cargo.toml (95%) rename {relays => relay-clients}/client-bridge-hub-westend/src/codegen_runtime.rs (100%) rename {relays => relay-clients}/client-bridge-hub-westend/src/lib.rs (100%) rename {relays => relay-clients}/client-kusama/Cargo.toml (90%) rename {relays => relay-clients}/client-kusama/src/codegen_runtime.rs (100%) rename {relays => relay-clients}/client-kusama/src/lib.rs (100%) rename {relays => relay-clients}/client-polkadot-bulletin/Cargo.toml (92%) rename {relays => relay-clients}/client-polkadot-bulletin/src/codegen_runtime.rs (100%) rename {relays => relay-clients}/client-polkadot-bulletin/src/lib.rs (100%) rename {relays => relay-clients}/client-polkadot/Cargo.toml (90%) rename {relays => relay-clients}/client-polkadot/src/codegen_runtime.rs (100%) rename {relays => relay-clients}/client-polkadot/src/lib.rs (100%) rename {relays => relay-clients}/client-rococo/Cargo.toml (90%) rename {relays => relay-clients}/client-rococo/src/codegen_runtime.rs (100%) rename {relays => relay-clients}/client-rococo/src/lib.rs (100%) rename {relays => relay-clients}/client-westend/Cargo.toml (90%) rename {relays => relay-clients}/client-westend/src/codegen_runtime.rs (100%) rename {relays => relay-clients}/client-westend/src/lib.rs (100%) delete mode 100644 relays/bin-substrate/Cargo.toml create mode 100644 substrate-relay/Cargo.toml rename {relays/bin-substrate => substrate-relay}/src/bridges/kusama_polkadot/bridge_hub_kusama_messages_to_bridge_hub_polkadot.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/kusama_polkadot/bridge_hub_polkadot_messages_to_bridge_hub_kusama.rs (100%) 
rename {relays/bin-substrate => substrate-relay}/src/bridges/kusama_polkadot/kusama_headers_to_bridge_hub_polkadot.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/kusama_polkadot/kusama_parachains_to_bridge_hub_polkadot.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/kusama_polkadot/mod.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/kusama_polkadot/polkadot_headers_to_bridge_hub_kusama.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/kusama_polkadot/polkadot_parachains_to_bridge_hub_kusama.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/mod.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/polkadot_bulletin/bridge_hub_polkadot_messages_to_polkadot_bulletin.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/polkadot_bulletin/mod.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/polkadot_bulletin/polkadot_bulletin_headers_to_bridge_hub_polkadot.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/polkadot_bulletin/polkadot_bulletin_messages_to_bridge_hub_polkadot.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/polkadot_bulletin/polkadot_headers_to_polkadot_bulletin.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/polkadot_bulletin/polkadot_parachains_to_polkadot_bulletin.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/rococo_bulletin/bridge_hub_rococo_messages_to_rococo_bulletin.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/rococo_bulletin/mod.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/rococo_bulletin/rococo_bulletin_headers_to_bridge_hub_rococo.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/rococo_bulletin/rococo_bulletin_messages_to_bridge_hub_rococo.rs (100%) rename {relays/bin-substrate => 
substrate-relay}/src/bridges/rococo_bulletin/rococo_headers_to_rococo_bulletin.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/rococo_bulletin/rococo_parachains_to_rococo_bulletin.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/rococo_westend/bridge_hub_rococo_messages_to_bridge_hub_westend.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/rococo_westend/bridge_hub_westend_messages_to_bridge_hub_rococo.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/rococo_westend/mod.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/rococo_westend/rococo_headers_to_bridge_hub_westend.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/rococo_westend/rococo_parachains_to_bridge_hub_westend.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/rococo_westend/westend_headers_to_bridge_hub_rococo.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/bridges/rococo_westend/westend_parachains_to_bridge_hub_rococo.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/cli/chain_schema.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/cli/detect_equivocations.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/cli/init_bridge.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/cli/mod.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/cli/relay_headers.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/cli/relay_headers_and_messages.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/cli/relay_messages.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/cli/relay_parachains.rs (100%) rename {relays/bin-substrate => substrate-relay}/src/main.rs (100%) diff --git a/Cargo.toml b/Cargo.toml index a6edce3ea..0bd3a2898 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -37,23 +37,23 @@ members = [ "primitives/test-utils", "primitives/xcm-bridge-hub", 
"primitives/xcm-bridge-hub-router", - "relays/bin-substrate", - "relays/client-bridge-hub-kusama", - "relays/client-bridge-hub-polkadot", - "relays/client-bridge-hub-rococo", - "relays/client-bridge-hub-westend", - "relays/client-kusama", - "relays/client-polkadot", - "relays/client-polkadot-bulletin", - "relays/client-rococo", + "relay-clients/client-bridge-hub-kusama", + "relay-clients/client-bridge-hub-polkadot", + "relay-clients/client-bridge-hub-rococo", + "relay-clients/client-bridge-hub-westend", + "relay-clients/client-kusama", + "relay-clients/client-polkadot", + "relay-clients/client-polkadot-bulletin", + "relay-clients/client-rococo", + "relay-clients/client-westend", "relays/client-substrate", - "relays/client-westend", "relays/equivocation", "relays/finality", "relays/lib-substrate-relay", "relays/messages", "relays/parachains", - "relays/utils" + "relays/utils", + "substrate-relay", ] # Setup clippy lints as `polkadot-sdk`, diff --git a/relays/client-bridge-hub-kusama/Cargo.toml b/relay-clients/client-bridge-hub-kusama/Cargo.toml similarity index 94% rename from relays/client-bridge-hub-kusama/Cargo.toml rename to relay-clients/client-bridge-hub-kusama/Cargo.toml index a02dc26a0..6ce688e9f 100644 --- a/relays/client-bridge-hub-kusama/Cargo.toml +++ b/relay-clients/client-bridge-hub-kusama/Cargo.toml @@ -23,7 +23,7 @@ bp-parachains = { path = "../../primitives/parachains" } bp-polkadot = { path = "../../chains/chain-polkadot" } bp-polkadot-core = { path = "../../primitives/polkadot-core" } bridge-runtime-common = { path = "../../bin/runtime-common" } -relay-substrate-client = { path = "../client-substrate" } +relay-substrate-client = { path = "../../relays/client-substrate" } # Substrate Dependencies diff --git a/relays/client-bridge-hub-kusama/src/codegen_runtime.rs b/relay-clients/client-bridge-hub-kusama/src/codegen_runtime.rs similarity index 100% rename from relays/client-bridge-hub-kusama/src/codegen_runtime.rs rename to 
relay-clients/client-bridge-hub-kusama/src/codegen_runtime.rs diff --git a/relays/client-bridge-hub-kusama/src/lib.rs b/relay-clients/client-bridge-hub-kusama/src/lib.rs similarity index 100% rename from relays/client-bridge-hub-kusama/src/lib.rs rename to relay-clients/client-bridge-hub-kusama/src/lib.rs diff --git a/relays/client-bridge-hub-kusama/src/runtime_wrapper.rs b/relay-clients/client-bridge-hub-kusama/src/runtime_wrapper.rs similarity index 100% rename from relays/client-bridge-hub-kusama/src/runtime_wrapper.rs rename to relay-clients/client-bridge-hub-kusama/src/runtime_wrapper.rs diff --git a/relays/client-bridge-hub-polkadot/Cargo.toml b/relay-clients/client-bridge-hub-polkadot/Cargo.toml similarity index 95% rename from relays/client-bridge-hub-polkadot/Cargo.toml rename to relay-clients/client-bridge-hub-polkadot/Cargo.toml index abfd5bae4..1c49636ad 100644 --- a/relays/client-bridge-hub-polkadot/Cargo.toml +++ b/relay-clients/client-bridge-hub-polkadot/Cargo.toml @@ -24,7 +24,7 @@ bp-polkadot-core = { path = "../../primitives/polkadot-core" } bp-kusama = { path = "../../chains/chain-kusama" } bp-runtime = { path = "../../primitives/runtime" } bridge-runtime-common = { path = "../../bin/runtime-common" } -relay-substrate-client = { path = "../client-substrate" } +relay-substrate-client = { path = "../../relays/client-substrate" } # Substrate Dependencies diff --git a/relays/client-bridge-hub-polkadot/src/codegen_runtime.rs b/relay-clients/client-bridge-hub-polkadot/src/codegen_runtime.rs similarity index 100% rename from relays/client-bridge-hub-polkadot/src/codegen_runtime.rs rename to relay-clients/client-bridge-hub-polkadot/src/codegen_runtime.rs diff --git a/relays/client-bridge-hub-polkadot/src/lib.rs b/relay-clients/client-bridge-hub-polkadot/src/lib.rs similarity index 100% rename from relays/client-bridge-hub-polkadot/src/lib.rs rename to relay-clients/client-bridge-hub-polkadot/src/lib.rs diff --git 
a/relays/client-bridge-hub-polkadot/src/runtime_wrapper.rs b/relay-clients/client-bridge-hub-polkadot/src/runtime_wrapper.rs similarity index 100% rename from relays/client-bridge-hub-polkadot/src/runtime_wrapper.rs rename to relay-clients/client-bridge-hub-polkadot/src/runtime_wrapper.rs diff --git a/relays/client-bridge-hub-rococo/Cargo.toml b/relay-clients/client-bridge-hub-rococo/Cargo.toml similarity index 94% rename from relays/client-bridge-hub-rococo/Cargo.toml rename to relay-clients/client-bridge-hub-rococo/Cargo.toml index 70d65ac19..246c7ed1d 100644 --- a/relays/client-bridge-hub-rococo/Cargo.toml +++ b/relay-clients/client-bridge-hub-rococo/Cargo.toml @@ -23,7 +23,7 @@ bp-polkadot-core = { path = "../../primitives/polkadot-core" } bp-runtime = { path = "../../primitives/runtime" } bridge-runtime-common = { path = "../../bin/runtime-common" } -relay-substrate-client = { path = "../client-substrate" } +relay-substrate-client = { path = "../../relays/client-substrate" } # Substrate Dependencies diff --git a/relays/client-bridge-hub-rococo/src/codegen_runtime.rs b/relay-clients/client-bridge-hub-rococo/src/codegen_runtime.rs similarity index 100% rename from relays/client-bridge-hub-rococo/src/codegen_runtime.rs rename to relay-clients/client-bridge-hub-rococo/src/codegen_runtime.rs diff --git a/relays/client-bridge-hub-rococo/src/lib.rs b/relay-clients/client-bridge-hub-rococo/src/lib.rs similarity index 100% rename from relays/client-bridge-hub-rococo/src/lib.rs rename to relay-clients/client-bridge-hub-rococo/src/lib.rs diff --git a/relays/client-bridge-hub-westend/Cargo.toml b/relay-clients/client-bridge-hub-westend/Cargo.toml similarity index 95% rename from relays/client-bridge-hub-westend/Cargo.toml rename to relay-clients/client-bridge-hub-westend/Cargo.toml index c65c7ec00..7f5f01910 100644 --- a/relays/client-bridge-hub-westend/Cargo.toml +++ b/relay-clients/client-bridge-hub-westend/Cargo.toml @@ -23,7 +23,7 @@ bp-polkadot-core = { path = 
"../../primitives/polkadot-core" } bp-rococo = { path = "../../chains/chain-rococo" } bridge-runtime-common = { path = "../../bin/runtime-common" } -relay-substrate-client = { path = "../client-substrate" } +relay-substrate-client = { path = "../../relays/client-substrate" } # Substrate Dependencies diff --git a/relays/client-bridge-hub-westend/src/codegen_runtime.rs b/relay-clients/client-bridge-hub-westend/src/codegen_runtime.rs similarity index 100% rename from relays/client-bridge-hub-westend/src/codegen_runtime.rs rename to relay-clients/client-bridge-hub-westend/src/codegen_runtime.rs diff --git a/relays/client-bridge-hub-westend/src/lib.rs b/relay-clients/client-bridge-hub-westend/src/lib.rs similarity index 100% rename from relays/client-bridge-hub-westend/src/lib.rs rename to relay-clients/client-bridge-hub-westend/src/lib.rs diff --git a/relays/client-kusama/Cargo.toml b/relay-clients/client-kusama/Cargo.toml similarity index 90% rename from relays/client-kusama/Cargo.toml rename to relay-clients/client-kusama/Cargo.toml index c459f347a..95b3318f5 100644 --- a/relays/client-kusama/Cargo.toml +++ b/relay-clients/client-kusama/Cargo.toml @@ -19,8 +19,8 @@ bp-kusama = { path = "../../chains/chain-kusama" } bp-polkadot-core = { path = "../../primitives/polkadot-core" } bp-runtime = { path = "../../primitives/runtime" } -relay-substrate-client = { path = "../client-substrate" } -relay-utils = { path = "../utils" } +relay-substrate-client = { path = "../../relays/client-substrate" } +relay-utils = { path = "../../relays/utils" } # Substrate Dependencies diff --git a/relays/client-kusama/src/codegen_runtime.rs b/relay-clients/client-kusama/src/codegen_runtime.rs similarity index 100% rename from relays/client-kusama/src/codegen_runtime.rs rename to relay-clients/client-kusama/src/codegen_runtime.rs diff --git a/relays/client-kusama/src/lib.rs b/relay-clients/client-kusama/src/lib.rs similarity index 100% rename from relays/client-kusama/src/lib.rs rename to 
relay-clients/client-kusama/src/lib.rs diff --git a/relays/client-polkadot-bulletin/Cargo.toml b/relay-clients/client-polkadot-bulletin/Cargo.toml similarity index 92% rename from relays/client-polkadot-bulletin/Cargo.toml rename to relay-clients/client-polkadot-bulletin/Cargo.toml index 842b53c48..c563c145a 100644 --- a/relays/client-polkadot-bulletin/Cargo.toml +++ b/relay-clients/client-polkadot-bulletin/Cargo.toml @@ -21,8 +21,8 @@ bp-polkadot-core = { path = "../../primitives/polkadot-core" } bp-polkadot-bulletin = { path = "../../chains/chain-polkadot-bulletin" } bp-runtime = { path = "../../primitives/runtime" } bridge-runtime-common = { path = "../../bin/runtime-common" } -relay-substrate-client = { path = "../client-substrate" } -relay-utils = { path = "../utils" } +relay-substrate-client = { path = "../../relays/client-substrate" } +relay-utils = { path = "../../relays/utils" } # Substrate Dependencies diff --git a/relays/client-polkadot-bulletin/src/codegen_runtime.rs b/relay-clients/client-polkadot-bulletin/src/codegen_runtime.rs similarity index 100% rename from relays/client-polkadot-bulletin/src/codegen_runtime.rs rename to relay-clients/client-polkadot-bulletin/src/codegen_runtime.rs diff --git a/relays/client-polkadot-bulletin/src/lib.rs b/relay-clients/client-polkadot-bulletin/src/lib.rs similarity index 100% rename from relays/client-polkadot-bulletin/src/lib.rs rename to relay-clients/client-polkadot-bulletin/src/lib.rs diff --git a/relays/client-polkadot/Cargo.toml b/relay-clients/client-polkadot/Cargo.toml similarity index 90% rename from relays/client-polkadot/Cargo.toml rename to relay-clients/client-polkadot/Cargo.toml index 19cf9f933..b66df4c84 100644 --- a/relays/client-polkadot/Cargo.toml +++ b/relay-clients/client-polkadot/Cargo.toml @@ -19,8 +19,8 @@ bp-polkadot = { path = "../../chains/chain-polkadot" } bp-polkadot-core = { path = "../../primitives/polkadot-core" } bp-runtime = { path = "../../primitives/runtime" } 
-relay-substrate-client = { path = "../client-substrate" } -relay-utils = { path = "../utils" } +relay-substrate-client = { path = "../../relays/client-substrate" } +relay-utils = { path = "../../relays/utils" } # Substrate Dependencies diff --git a/relays/client-polkadot/src/codegen_runtime.rs b/relay-clients/client-polkadot/src/codegen_runtime.rs similarity index 100% rename from relays/client-polkadot/src/codegen_runtime.rs rename to relay-clients/client-polkadot/src/codegen_runtime.rs diff --git a/relays/client-polkadot/src/lib.rs b/relay-clients/client-polkadot/src/lib.rs similarity index 100% rename from relays/client-polkadot/src/lib.rs rename to relay-clients/client-polkadot/src/lib.rs diff --git a/relays/client-rococo/Cargo.toml b/relay-clients/client-rococo/Cargo.toml similarity index 90% rename from relays/client-rococo/Cargo.toml rename to relay-clients/client-rococo/Cargo.toml index 5e847600f..7e6cbe358 100644 --- a/relays/client-rococo/Cargo.toml +++ b/relay-clients/client-rococo/Cargo.toml @@ -18,8 +18,8 @@ subxt = { version = "0.32.1", default-features = false, features = ["native"] } bp-polkadot-core = { path = "../../primitives/polkadot-core" } bp-rococo = { path = "../../chains/chain-rococo" } -relay-substrate-client = { path = "../client-substrate" } -relay-utils = { path = "../utils" } +relay-substrate-client = { path = "../../relays/client-substrate" } +relay-utils = { path = "../../relays/utils" } # Substrate Dependencies diff --git a/relays/client-rococo/src/codegen_runtime.rs b/relay-clients/client-rococo/src/codegen_runtime.rs similarity index 100% rename from relays/client-rococo/src/codegen_runtime.rs rename to relay-clients/client-rococo/src/codegen_runtime.rs diff --git a/relays/client-rococo/src/lib.rs b/relay-clients/client-rococo/src/lib.rs similarity index 100% rename from relays/client-rococo/src/lib.rs rename to relay-clients/client-rococo/src/lib.rs diff --git a/relays/client-westend/Cargo.toml 
b/relay-clients/client-westend/Cargo.toml similarity index 90% rename from relays/client-westend/Cargo.toml rename to relay-clients/client-westend/Cargo.toml index 8b56e51d0..1f148a1c4 100644 --- a/relays/client-westend/Cargo.toml +++ b/relay-clients/client-westend/Cargo.toml @@ -19,8 +19,8 @@ bp-polkadot-core = { path = "../../primitives/polkadot-core" } bp-runtime = { path = "../../primitives/runtime" } bp-westend = { path = "../../chains/chain-westend" } -relay-substrate-client = { path = "../client-substrate" } -relay-utils = { path = "../utils" } +relay-substrate-client = { path = "../../relays/client-substrate" } +relay-utils = { path = "../../relays/utils" } # Substrate Dependencies diff --git a/relays/client-westend/src/codegen_runtime.rs b/relay-clients/client-westend/src/codegen_runtime.rs similarity index 100% rename from relays/client-westend/src/codegen_runtime.rs rename to relay-clients/client-westend/src/codegen_runtime.rs diff --git a/relays/client-westend/src/lib.rs b/relay-clients/client-westend/src/lib.rs similarity index 100% rename from relays/client-westend/src/lib.rs rename to relay-clients/client-westend/src/lib.rs diff --git a/relays/bin-substrate/Cargo.toml b/relays/bin-substrate/Cargo.toml deleted file mode 100644 index 6c0ac1da6..000000000 --- a/relays/bin-substrate/Cargo.toml +++ /dev/null @@ -1,66 +0,0 @@ -[package] -name = "substrate-relay" -version = "1.2.0" -authors = ["Parity Technologies "] -edition = "2021" -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" - -[lints] -workspace = true - -[dependencies] -anyhow = "1.0" -async-std = "1.9.0" -async-trait = "0.1.79" -codec = { package = "parity-scale-codec", version = "3.1.5" } -env_logger = "0.11" -futures = "0.3.30" -hex = "0.4" -log = { workspace = true } -num-format = "0.4" -num-traits = "0.2" -rbtag = "0.3" -structopt = "0.3" -signal-hook = "0.3.15" -signal-hook-async-std = "0.2.2" -strum = { version = "0.26.2", features = ["derive"] } - -# Bridge dependencies 
-bp-bridge-hub-polkadot = { path = "../../chains/chain-bridge-hub-polkadot" } -bp-bridge-hub-rococo = { path = "../../chains/chain-bridge-hub-rococo" } -bp-header-chain = { path = "../../primitives/header-chain" } -bp-messages = { path = "../../primitives/messages" } -bp-parachains = { path = "../../primitives/parachains" } -bp-polkadot-bulletin = { path = "../../chains/chain-polkadot-bulletin" } -bp-polkadot = { path = "../../chains/chain-polkadot" } -bp-polkadot-core = { path = "../../primitives/polkadot-core" } -bp-rococo = { path = "../../chains/chain-rococo" } -bp-runtime = { path = "../../primitives/runtime" } -bridge-runtime-common = { path = "../../bin/runtime-common" } -pallet-bridge-parachains = { path = "../../modules/parachains" } -parachains-relay = { path = "../parachains" } -relay-bridge-hub-kusama-client = { path = "../client-bridge-hub-kusama" } -relay-bridge-hub-polkadot-client = { path = "../client-bridge-hub-polkadot" } -relay-bridge-hub-rococo-client = { path = "../client-bridge-hub-rococo" } -relay-bridge-hub-westend-client = { path = "../client-bridge-hub-westend" } -relay-kusama-client = { path = "../client-kusama" } -relay-polkadot-client = { path = "../client-polkadot" } -relay-polkadot-bulletin-client = { path = "../client-polkadot-bulletin" } -relay-rococo-client = { path = "../client-rococo" } -relay-substrate-client = { path = "../client-substrate" } -relay-utils = { path = "../utils" } -relay-westend-client = { path = "../client-westend" } -substrate-relay-helper = { path = "../lib-substrate-relay" } - -# Substrate Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } - -[dev-dependencies] -bp-test-utils = { path = "../../primitives/test-utils" } -hex-literal = "0.4" -sp-keyring = { git = 
"https://github.com/paritytech/polkadot-sdk", branch = "master" } -tempfile = "3.10" -finality-grandpa = { version = "0.16.2" } diff --git a/relays/lib-substrate-relay/Cargo.toml b/relays/lib-substrate-relay/Cargo.toml index db4c3a7f8..5bd14bb83 100644 --- a/relays/lib-substrate-relay/Cargo.toml +++ b/relays/lib-substrate-relay/Cargo.toml @@ -58,6 +58,6 @@ sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "mas [dev-dependencies] bp-rococo = { path = "../../chains/chain-rococo" } pallet-transaction-payment = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -relay-bridge-hub-rococo-client = { path = "../client-bridge-hub-rococo" } -relay-bridge-hub-westend-client = { path = "../client-bridge-hub-westend" } -relay-rococo-client = { path = "../client-rococo" } +relay-bridge-hub-rococo-client = { path = "../../relay-clients/client-bridge-hub-rococo" } +relay-bridge-hub-westend-client = { path = "../../relay-clients/client-bridge-hub-westend" } +relay-rococo-client = { path = "../../relay-clients/client-rococo" } diff --git a/substrate-relay/Cargo.toml b/substrate-relay/Cargo.toml new file mode 100644 index 000000000..8a87186ec --- /dev/null +++ b/substrate-relay/Cargo.toml @@ -0,0 +1,66 @@ +[package] +name = "substrate-relay" +version = "1.2.0" +authors = ["Parity Technologies "] +edition = "2021" +license = "GPL-3.0-or-later WITH Classpath-exception-2.0" + +[lints] +workspace = true + +[dependencies] +anyhow = "1.0" +async-std = "1.9.0" +async-trait = "0.1.79" +codec = { package = "parity-scale-codec", version = "3.1.5" } +env_logger = "0.11" +futures = "0.3.30" +hex = "0.4" +log = { workspace = true } +num-format = "0.4" +num-traits = "0.2" +rbtag = "0.3" +structopt = "0.3" +signal-hook = "0.3.15" +signal-hook-async-std = "0.2.2" +strum = { version = "0.26.2", features = ["derive"] } + +# Bridge dependencies +bp-bridge-hub-polkadot = { path = "../chains/chain-bridge-hub-polkadot" } +bp-bridge-hub-rococo = { path = 
"../chains/chain-bridge-hub-rococo" } +bp-header-chain = { path = "../primitives/header-chain" } +bp-messages = { path = "../primitives/messages" } +bp-parachains = { path = "../primitives/parachains" } +bp-polkadot-bulletin = { path = "../chains/chain-polkadot-bulletin" } +bp-polkadot = { path = "../chains/chain-polkadot" } +bp-polkadot-core = { path = "../primitives/polkadot-core" } +bp-rococo = { path = "../chains/chain-rococo" } +bp-runtime = { path = "../primitives/runtime" } +bridge-runtime-common = { path = "../bin/runtime-common" } +pallet-bridge-parachains = { path = "../modules/parachains" } +parachains-relay = { path = "../relays/parachains" } +relay-bridge-hub-kusama-client = { path = "../relay-clients/client-bridge-hub-kusama" } +relay-bridge-hub-polkadot-client = { path = "../relay-clients/client-bridge-hub-polkadot" } +relay-bridge-hub-rococo-client = { path = "../relay-clients/client-bridge-hub-rococo" } +relay-bridge-hub-westend-client = { path = "../relay-clients/client-bridge-hub-westend" } +relay-kusama-client = { path = "../relay-clients/client-kusama" } +relay-polkadot-client = { path = "../relay-clients/client-polkadot" } +relay-polkadot-bulletin-client = { path = "../relay-clients/client-polkadot-bulletin" } +relay-rococo-client = { path = "../relay-clients/client-rococo" } +relay-substrate-client = { path = "../relays/client-substrate" } +relay-utils = { path = "../relays/utils" } +relay-westend-client = { path = "../relay-clients/client-westend" } +substrate-relay-helper = { path = "../relays/lib-substrate-relay" } + +# Substrate Dependencies + +frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } + +[dev-dependencies] +bp-test-utils = { path = "../primitives/test-utils" } +hex-literal = "0.4" +sp-keyring = { git = 
"https://github.com/paritytech/polkadot-sdk", branch = "master" } +tempfile = "3.10" +finality-grandpa = { version = "0.16.2" } diff --git a/relays/bin-substrate/src/bridges/kusama_polkadot/bridge_hub_kusama_messages_to_bridge_hub_polkadot.rs b/substrate-relay/src/bridges/kusama_polkadot/bridge_hub_kusama_messages_to_bridge_hub_polkadot.rs similarity index 100% rename from relays/bin-substrate/src/bridges/kusama_polkadot/bridge_hub_kusama_messages_to_bridge_hub_polkadot.rs rename to substrate-relay/src/bridges/kusama_polkadot/bridge_hub_kusama_messages_to_bridge_hub_polkadot.rs diff --git a/relays/bin-substrate/src/bridges/kusama_polkadot/bridge_hub_polkadot_messages_to_bridge_hub_kusama.rs b/substrate-relay/src/bridges/kusama_polkadot/bridge_hub_polkadot_messages_to_bridge_hub_kusama.rs similarity index 100% rename from relays/bin-substrate/src/bridges/kusama_polkadot/bridge_hub_polkadot_messages_to_bridge_hub_kusama.rs rename to substrate-relay/src/bridges/kusama_polkadot/bridge_hub_polkadot_messages_to_bridge_hub_kusama.rs diff --git a/relays/bin-substrate/src/bridges/kusama_polkadot/kusama_headers_to_bridge_hub_polkadot.rs b/substrate-relay/src/bridges/kusama_polkadot/kusama_headers_to_bridge_hub_polkadot.rs similarity index 100% rename from relays/bin-substrate/src/bridges/kusama_polkadot/kusama_headers_to_bridge_hub_polkadot.rs rename to substrate-relay/src/bridges/kusama_polkadot/kusama_headers_to_bridge_hub_polkadot.rs diff --git a/relays/bin-substrate/src/bridges/kusama_polkadot/kusama_parachains_to_bridge_hub_polkadot.rs b/substrate-relay/src/bridges/kusama_polkadot/kusama_parachains_to_bridge_hub_polkadot.rs similarity index 100% rename from relays/bin-substrate/src/bridges/kusama_polkadot/kusama_parachains_to_bridge_hub_polkadot.rs rename to substrate-relay/src/bridges/kusama_polkadot/kusama_parachains_to_bridge_hub_polkadot.rs diff --git a/relays/bin-substrate/src/bridges/kusama_polkadot/mod.rs b/substrate-relay/src/bridges/kusama_polkadot/mod.rs 
similarity index 100% rename from relays/bin-substrate/src/bridges/kusama_polkadot/mod.rs rename to substrate-relay/src/bridges/kusama_polkadot/mod.rs diff --git a/relays/bin-substrate/src/bridges/kusama_polkadot/polkadot_headers_to_bridge_hub_kusama.rs b/substrate-relay/src/bridges/kusama_polkadot/polkadot_headers_to_bridge_hub_kusama.rs similarity index 100% rename from relays/bin-substrate/src/bridges/kusama_polkadot/polkadot_headers_to_bridge_hub_kusama.rs rename to substrate-relay/src/bridges/kusama_polkadot/polkadot_headers_to_bridge_hub_kusama.rs diff --git a/relays/bin-substrate/src/bridges/kusama_polkadot/polkadot_parachains_to_bridge_hub_kusama.rs b/substrate-relay/src/bridges/kusama_polkadot/polkadot_parachains_to_bridge_hub_kusama.rs similarity index 100% rename from relays/bin-substrate/src/bridges/kusama_polkadot/polkadot_parachains_to_bridge_hub_kusama.rs rename to substrate-relay/src/bridges/kusama_polkadot/polkadot_parachains_to_bridge_hub_kusama.rs diff --git a/relays/bin-substrate/src/bridges/mod.rs b/substrate-relay/src/bridges/mod.rs similarity index 100% rename from relays/bin-substrate/src/bridges/mod.rs rename to substrate-relay/src/bridges/mod.rs diff --git a/relays/bin-substrate/src/bridges/polkadot_bulletin/bridge_hub_polkadot_messages_to_polkadot_bulletin.rs b/substrate-relay/src/bridges/polkadot_bulletin/bridge_hub_polkadot_messages_to_polkadot_bulletin.rs similarity index 100% rename from relays/bin-substrate/src/bridges/polkadot_bulletin/bridge_hub_polkadot_messages_to_polkadot_bulletin.rs rename to substrate-relay/src/bridges/polkadot_bulletin/bridge_hub_polkadot_messages_to_polkadot_bulletin.rs diff --git a/relays/bin-substrate/src/bridges/polkadot_bulletin/mod.rs b/substrate-relay/src/bridges/polkadot_bulletin/mod.rs similarity index 100% rename from relays/bin-substrate/src/bridges/polkadot_bulletin/mod.rs rename to substrate-relay/src/bridges/polkadot_bulletin/mod.rs diff --git 
a/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_bulletin_headers_to_bridge_hub_polkadot.rs b/substrate-relay/src/bridges/polkadot_bulletin/polkadot_bulletin_headers_to_bridge_hub_polkadot.rs similarity index 100% rename from relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_bulletin_headers_to_bridge_hub_polkadot.rs rename to substrate-relay/src/bridges/polkadot_bulletin/polkadot_bulletin_headers_to_bridge_hub_polkadot.rs diff --git a/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_bulletin_messages_to_bridge_hub_polkadot.rs b/substrate-relay/src/bridges/polkadot_bulletin/polkadot_bulletin_messages_to_bridge_hub_polkadot.rs similarity index 100% rename from relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_bulletin_messages_to_bridge_hub_polkadot.rs rename to substrate-relay/src/bridges/polkadot_bulletin/polkadot_bulletin_messages_to_bridge_hub_polkadot.rs diff --git a/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_headers_to_polkadot_bulletin.rs b/substrate-relay/src/bridges/polkadot_bulletin/polkadot_headers_to_polkadot_bulletin.rs similarity index 100% rename from relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_headers_to_polkadot_bulletin.rs rename to substrate-relay/src/bridges/polkadot_bulletin/polkadot_headers_to_polkadot_bulletin.rs diff --git a/relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_parachains_to_polkadot_bulletin.rs b/substrate-relay/src/bridges/polkadot_bulletin/polkadot_parachains_to_polkadot_bulletin.rs similarity index 100% rename from relays/bin-substrate/src/bridges/polkadot_bulletin/polkadot_parachains_to_polkadot_bulletin.rs rename to substrate-relay/src/bridges/polkadot_bulletin/polkadot_parachains_to_polkadot_bulletin.rs diff --git a/relays/bin-substrate/src/bridges/rococo_bulletin/bridge_hub_rococo_messages_to_rococo_bulletin.rs b/substrate-relay/src/bridges/rococo_bulletin/bridge_hub_rococo_messages_to_rococo_bulletin.rs similarity index 100% rename 
from relays/bin-substrate/src/bridges/rococo_bulletin/bridge_hub_rococo_messages_to_rococo_bulletin.rs rename to substrate-relay/src/bridges/rococo_bulletin/bridge_hub_rococo_messages_to_rococo_bulletin.rs diff --git a/relays/bin-substrate/src/bridges/rococo_bulletin/mod.rs b/substrate-relay/src/bridges/rococo_bulletin/mod.rs similarity index 100% rename from relays/bin-substrate/src/bridges/rococo_bulletin/mod.rs rename to substrate-relay/src/bridges/rococo_bulletin/mod.rs diff --git a/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_bulletin_headers_to_bridge_hub_rococo.rs b/substrate-relay/src/bridges/rococo_bulletin/rococo_bulletin_headers_to_bridge_hub_rococo.rs similarity index 100% rename from relays/bin-substrate/src/bridges/rococo_bulletin/rococo_bulletin_headers_to_bridge_hub_rococo.rs rename to substrate-relay/src/bridges/rococo_bulletin/rococo_bulletin_headers_to_bridge_hub_rococo.rs diff --git a/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_bulletin_messages_to_bridge_hub_rococo.rs b/substrate-relay/src/bridges/rococo_bulletin/rococo_bulletin_messages_to_bridge_hub_rococo.rs similarity index 100% rename from relays/bin-substrate/src/bridges/rococo_bulletin/rococo_bulletin_messages_to_bridge_hub_rococo.rs rename to substrate-relay/src/bridges/rococo_bulletin/rococo_bulletin_messages_to_bridge_hub_rococo.rs diff --git a/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_headers_to_rococo_bulletin.rs b/substrate-relay/src/bridges/rococo_bulletin/rococo_headers_to_rococo_bulletin.rs similarity index 100% rename from relays/bin-substrate/src/bridges/rococo_bulletin/rococo_headers_to_rococo_bulletin.rs rename to substrate-relay/src/bridges/rococo_bulletin/rococo_headers_to_rococo_bulletin.rs diff --git a/relays/bin-substrate/src/bridges/rococo_bulletin/rococo_parachains_to_rococo_bulletin.rs b/substrate-relay/src/bridges/rococo_bulletin/rococo_parachains_to_rococo_bulletin.rs similarity index 100% rename from 
relays/bin-substrate/src/bridges/rococo_bulletin/rococo_parachains_to_rococo_bulletin.rs rename to substrate-relay/src/bridges/rococo_bulletin/rococo_parachains_to_rococo_bulletin.rs diff --git a/relays/bin-substrate/src/bridges/rococo_westend/bridge_hub_rococo_messages_to_bridge_hub_westend.rs b/substrate-relay/src/bridges/rococo_westend/bridge_hub_rococo_messages_to_bridge_hub_westend.rs similarity index 100% rename from relays/bin-substrate/src/bridges/rococo_westend/bridge_hub_rococo_messages_to_bridge_hub_westend.rs rename to substrate-relay/src/bridges/rococo_westend/bridge_hub_rococo_messages_to_bridge_hub_westend.rs diff --git a/relays/bin-substrate/src/bridges/rococo_westend/bridge_hub_westend_messages_to_bridge_hub_rococo.rs b/substrate-relay/src/bridges/rococo_westend/bridge_hub_westend_messages_to_bridge_hub_rococo.rs similarity index 100% rename from relays/bin-substrate/src/bridges/rococo_westend/bridge_hub_westend_messages_to_bridge_hub_rococo.rs rename to substrate-relay/src/bridges/rococo_westend/bridge_hub_westend_messages_to_bridge_hub_rococo.rs diff --git a/relays/bin-substrate/src/bridges/rococo_westend/mod.rs b/substrate-relay/src/bridges/rococo_westend/mod.rs similarity index 100% rename from relays/bin-substrate/src/bridges/rococo_westend/mod.rs rename to substrate-relay/src/bridges/rococo_westend/mod.rs diff --git a/relays/bin-substrate/src/bridges/rococo_westend/rococo_headers_to_bridge_hub_westend.rs b/substrate-relay/src/bridges/rococo_westend/rococo_headers_to_bridge_hub_westend.rs similarity index 100% rename from relays/bin-substrate/src/bridges/rococo_westend/rococo_headers_to_bridge_hub_westend.rs rename to substrate-relay/src/bridges/rococo_westend/rococo_headers_to_bridge_hub_westend.rs diff --git a/relays/bin-substrate/src/bridges/rococo_westend/rococo_parachains_to_bridge_hub_westend.rs b/substrate-relay/src/bridges/rococo_westend/rococo_parachains_to_bridge_hub_westend.rs similarity index 100% rename from 
relays/bin-substrate/src/bridges/rococo_westend/rococo_parachains_to_bridge_hub_westend.rs rename to substrate-relay/src/bridges/rococo_westend/rococo_parachains_to_bridge_hub_westend.rs diff --git a/relays/bin-substrate/src/bridges/rococo_westend/westend_headers_to_bridge_hub_rococo.rs b/substrate-relay/src/bridges/rococo_westend/westend_headers_to_bridge_hub_rococo.rs similarity index 100% rename from relays/bin-substrate/src/bridges/rococo_westend/westend_headers_to_bridge_hub_rococo.rs rename to substrate-relay/src/bridges/rococo_westend/westend_headers_to_bridge_hub_rococo.rs diff --git a/relays/bin-substrate/src/bridges/rococo_westend/westend_parachains_to_bridge_hub_rococo.rs b/substrate-relay/src/bridges/rococo_westend/westend_parachains_to_bridge_hub_rococo.rs similarity index 100% rename from relays/bin-substrate/src/bridges/rococo_westend/westend_parachains_to_bridge_hub_rococo.rs rename to substrate-relay/src/bridges/rococo_westend/westend_parachains_to_bridge_hub_rococo.rs diff --git a/relays/bin-substrate/src/cli/chain_schema.rs b/substrate-relay/src/cli/chain_schema.rs similarity index 100% rename from relays/bin-substrate/src/cli/chain_schema.rs rename to substrate-relay/src/cli/chain_schema.rs diff --git a/relays/bin-substrate/src/cli/detect_equivocations.rs b/substrate-relay/src/cli/detect_equivocations.rs similarity index 100% rename from relays/bin-substrate/src/cli/detect_equivocations.rs rename to substrate-relay/src/cli/detect_equivocations.rs diff --git a/relays/bin-substrate/src/cli/init_bridge.rs b/substrate-relay/src/cli/init_bridge.rs similarity index 100% rename from relays/bin-substrate/src/cli/init_bridge.rs rename to substrate-relay/src/cli/init_bridge.rs diff --git a/relays/bin-substrate/src/cli/mod.rs b/substrate-relay/src/cli/mod.rs similarity index 100% rename from relays/bin-substrate/src/cli/mod.rs rename to substrate-relay/src/cli/mod.rs diff --git a/relays/bin-substrate/src/cli/relay_headers.rs 
b/substrate-relay/src/cli/relay_headers.rs similarity index 100% rename from relays/bin-substrate/src/cli/relay_headers.rs rename to substrate-relay/src/cli/relay_headers.rs diff --git a/relays/bin-substrate/src/cli/relay_headers_and_messages.rs b/substrate-relay/src/cli/relay_headers_and_messages.rs similarity index 100% rename from relays/bin-substrate/src/cli/relay_headers_and_messages.rs rename to substrate-relay/src/cli/relay_headers_and_messages.rs diff --git a/relays/bin-substrate/src/cli/relay_messages.rs b/substrate-relay/src/cli/relay_messages.rs similarity index 100% rename from relays/bin-substrate/src/cli/relay_messages.rs rename to substrate-relay/src/cli/relay_messages.rs diff --git a/relays/bin-substrate/src/cli/relay_parachains.rs b/substrate-relay/src/cli/relay_parachains.rs similarity index 100% rename from relays/bin-substrate/src/cli/relay_parachains.rs rename to substrate-relay/src/cli/relay_parachains.rs diff --git a/relays/bin-substrate/src/main.rs b/substrate-relay/src/main.rs similarity index 100% rename from relays/bin-substrate/src/main.rs rename to substrate-relay/src/main.rs -- GitLab From 47b4c48cdc903ddba6fd482e0e44319a89cee502 Mon Sep 17 00:00:00 2001 From: Svyatoslav Nikolsky Date: Tue, 26 Mar 2024 11:34:22 +0300 Subject: [PATCH 11/39] relayer waits until chain spec version matches the configured in Client constructor/reconnect (#2894) --- relays/client-substrate/Cargo.toml | 2 +- relays/client-substrate/src/client.rs | 141 ++++++++++++++++++++++++-- relays/client-substrate/src/error.rs | 12 +++ relays/client-substrate/src/guard.rs | 10 +- 4 files changed, 153 insertions(+), 12 deletions(-) diff --git a/relays/client-substrate/Cargo.toml b/relays/client-substrate/Cargo.toml index c1dea9b50..9240af7b5 100644 --- a/relays/client-substrate/Cargo.toml +++ b/relays/client-substrate/Cargo.toml @@ -52,7 +52,7 @@ sp-version = { git = "https://github.com/paritytech/polkadot-sdk", branch = "mas # Polkadot Dependencies -xcm = { package = 
"staging-xcm", git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } +xcm = { package = "staging-xcm", git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } [features] default = [] diff --git a/relays/client-substrate/src/client.rs b/relays/client-substrate/src/client.rs index 8328e1ce8..676fea487 100644 --- a/relays/client-substrate/src/client.rs +++ b/relays/client-substrate/src/client.rs @@ -18,6 +18,7 @@ use crate::{ chain::{Chain, ChainWithTransactions}, + guard::Environment, rpc::{ SubstrateAuthorClient, SubstrateChainClient, SubstrateFinalityClient, SubstrateFrameSystemClient, SubstrateStateClient, SubstrateSystemClient, @@ -49,7 +50,7 @@ use sp_runtime::{ }; use sp_trie::StorageProof; use sp_version::RuntimeVersion; -use std::future::Future; +use std::{cmp::Ordering, future::Future}; const SUB_API_GRANDPA_AUTHORITIES: &str = "GrandpaApi_grandpa_authorities"; const SUB_API_GRANDPA_GENERATE_KEY_OWNERSHIP_PROOF: &str = @@ -101,7 +102,7 @@ impl SimpleRuntimeVersion { } /// Chain runtime version in client -#[derive(Clone, Debug)] +#[derive(Copy, Clone, Debug)] pub enum ChainRuntimeVersion { /// Auto query from chain. 
Auto, @@ -164,7 +165,7 @@ impl Clone for Client { fn clone(&self) -> Self { Client { params: self.params.clone(), - chain_runtime_version: self.chain_runtime_version.clone(), + chain_runtime_version: self.chain_runtime_version, submit_signed_extrinsic_lock: self.submit_signed_extrinsic_lock.clone(), genesis_hash: self.genesis_hash, data: self.data.clone(), @@ -214,14 +215,48 @@ impl Client { }) .await??; - let chain_runtime_version = params.chain_runtime_version.clone(); - Ok(Self { + let chain_runtime_version = params.chain_runtime_version; + let mut client = Self { params, chain_runtime_version, submit_signed_extrinsic_lock: Arc::new(Mutex::new(())), genesis_hash, data: Arc::new(RwLock::new(ClientData { tokio, client })), - }) + }; + Self::ensure_correct_runtime_version(&mut client, chain_runtime_version).await?; + Ok(client) + } + + // Check runtime version to understand if we need are connected to expected version, or we + // need to wait for upgrade, we need to abort immediately. + async fn ensure_correct_runtime_version>( + env: &mut E, + expected: ChainRuntimeVersion, + ) -> Result<()> { + // we are only interested if version mode is bundled or passed using CLI + let expected = match expected { + ChainRuntimeVersion::Auto => return Ok(()), + ChainRuntimeVersion::Custom(expected) => expected, + }; + + // we need to wait if actual version is < than expected, we are OK of versions are the + // same and we need to abort if actual version is > than expected + let actual = SimpleRuntimeVersion::from_runtime_version(&env.runtime_version().await?); + match actual.spec_version.cmp(&expected.spec_version) { + Ordering::Less => + Err(Error::WaitingForRuntimeUpgrade { chain: C::NAME.into(), expected, actual }), + Ordering::Equal => Ok(()), + Ordering::Greater => { + log::error!( + target: "bridge", + "The {} client is configured to use runtime version {expected:?} and actual \ + version is {actual:?}. 
Aborting", + C::NAME, + ); + env.abort().await; + Err(Error::Custom("Aborted".into())) + }, + } } /// Build client to use in connection. @@ -849,3 +884,97 @@ impl Subscription { } } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::{guard::tests::TestEnvironment, test_chain::TestChain}; + use futures::{channel::mpsc::unbounded, FutureExt}; + + async fn run_ensure_correct_runtime_version( + expected: ChainRuntimeVersion, + actual: RuntimeVersion, + ) -> Result<()> { + let ( + (mut runtime_version_tx, runtime_version_rx), + (slept_tx, _slept_rx), + (aborted_tx, mut aborted_rx), + ) = (unbounded(), unbounded(), unbounded()); + runtime_version_tx.send(actual).await.unwrap(); + let mut env = TestEnvironment { runtime_version_rx, slept_tx, aborted_tx }; + + let ensure_correct_runtime_version = + Client::::ensure_correct_runtime_version(&mut env, expected).boxed(); + let aborted = aborted_rx.next().map(|_| Err(Error::Custom("".into()))).boxed(); + futures::pin_mut!(ensure_correct_runtime_version, aborted); + futures::future::select(ensure_correct_runtime_version, aborted) + .await + .into_inner() + .0 + } + + #[async_std::test] + async fn ensure_correct_runtime_version_works() { + // when we are configured to use auto version + assert!(matches!( + run_ensure_correct_runtime_version( + ChainRuntimeVersion::Auto, + RuntimeVersion { + spec_version: 100, + transaction_version: 100, + ..Default::default() + }, + ) + .await, + Ok(()), + )); + // when actual == expected + assert!(matches!( + run_ensure_correct_runtime_version( + ChainRuntimeVersion::Custom(SimpleRuntimeVersion { + spec_version: 100, + transaction_version: 100 + }), + RuntimeVersion { + spec_version: 100, + transaction_version: 100, + ..Default::default() + }, + ) + .await, + Ok(()), + )); + // when actual spec version < expected spec version + assert!(matches!( + run_ensure_correct_runtime_version( + ChainRuntimeVersion::Custom(SimpleRuntimeVersion { + spec_version: 100, + transaction_version: 100 + 
}), + RuntimeVersion { spec_version: 99, transaction_version: 100, ..Default::default() }, + ) + .await, + Err(Error::WaitingForRuntimeUpgrade { + expected: SimpleRuntimeVersion { spec_version: 100, transaction_version: 100 }, + actual: SimpleRuntimeVersion { spec_version: 99, transaction_version: 100 }, + .. + }), + )); + // when actual spec version > expected spec version + assert!(matches!( + run_ensure_correct_runtime_version( + ChainRuntimeVersion::Custom(SimpleRuntimeVersion { + spec_version: 100, + transaction_version: 100 + }), + RuntimeVersion { + spec_version: 101, + transaction_version: 100, + ..Default::default() + }, + ) + .await, + Err(Error::Custom(_)), + )); + } +} diff --git a/relays/client-substrate/src/error.rs b/relays/client-substrate/src/error.rs index 40015c122..257771b70 100644 --- a/relays/client-substrate/src/error.rs +++ b/relays/client-substrate/src/error.rs @@ -16,6 +16,7 @@ //! Substrate node RPC errors. +use crate::SimpleRuntimeVersion; use bp_polkadot_core::parachains::ParaId; use jsonrpsee::core::Error as RpcError; use relay_utils::MaybeConnectionError; @@ -117,6 +118,17 @@ pub enum Error { /// The Substrate transaction is invalid. #[error("Substrate transaction is invalid: {0:?}")] TransactionInvalid(#[from] TransactionValidityError), + /// The client is configured to use newer runtime version than the connected chain uses. + /// The client will keep waiting until chain is upgraded to given version. + #[error("Waiting for {chain} runtime upgrade: expected {expected:?} actual {actual:?}")] + WaitingForRuntimeUpgrade { + /// Name of the chain where the error has happened. + chain: String, + /// Expected runtime version. + expected: SimpleRuntimeVersion, + /// Actual runtime version. + actual: SimpleRuntimeVersion, + }, /// Custom logic error. 
#[error("{0}")] Custom(String), diff --git a/relays/client-substrate/src/guard.rs b/relays/client-substrate/src/guard.rs index 545396b30..47454892c 100644 --- a/relays/client-substrate/src/guard.rs +++ b/relays/client-substrate/src/guard.rs @@ -107,7 +107,7 @@ impl Environment for Client { } #[cfg(test)] -mod tests { +pub(crate) mod tests { use super::*; use crate::test_chain::TestChain; use futures::{ @@ -117,10 +117,10 @@ mod tests { SinkExt, }; - struct TestEnvironment { - runtime_version_rx: UnboundedReceiver, - slept_tx: UnboundedSender<()>, - aborted_tx: UnboundedSender<()>, + pub struct TestEnvironment { + pub runtime_version_rx: UnboundedReceiver, + pub slept_tx: UnboundedSender<()>, + pub aborted_tx: UnboundedSender<()>, } #[async_trait] -- GitLab From 1022b6d4abcc5147f86f30baf617f7444d1d2bfd Mon Sep 17 00:00:00 2001 From: Svyatoslav Nikolsky Date: Tue, 26 Mar 2024 12:03:13 +0300 Subject: [PATCH 12/39] Relayer v1.2.1 (#2895) * bump relayer version * bump supported chain versions * updated lock file --- Cargo.lock | 2 +- relay-clients/client-bridge-hub-rococo/src/lib.rs | 2 +- relay-clients/client-bridge-hub-westend/src/lib.rs | 2 +- relay-clients/client-rococo/src/lib.rs | 2 +- relay-clients/client-westend/src/lib.rs | 2 +- substrate-relay/Cargo.toml | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2c7e89ab1..d7c7a63df 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9029,7 +9029,7 @@ dependencies = [ [[package]] name = "substrate-relay" -version = "1.2.0" +version = "1.2.1" dependencies = [ "anyhow", "async-std", diff --git a/relay-clients/client-bridge-hub-rococo/src/lib.rs b/relay-clients/client-bridge-hub-rococo/src/lib.rs index cae9e4f77..35e675817 100644 --- a/relay-clients/client-bridge-hub-rococo/src/lib.rs +++ b/relay-clients/client-bridge-hub-rococo/src/lib.rs @@ -125,5 +125,5 @@ impl ChainWithMessages for BridgeHubRococo { impl ChainWithRuntimeVersion for BridgeHubRococo { const RUNTIME_VERSION: 
Option = - Some(SimpleRuntimeVersion { spec_version: 1_008_000, transaction_version: 4 }); + Some(SimpleRuntimeVersion { spec_version: 1_009_000, transaction_version: 4 }); } diff --git a/relay-clients/client-bridge-hub-westend/src/lib.rs b/relay-clients/client-bridge-hub-westend/src/lib.rs index 049fa4649..d3668a49f 100644 --- a/relay-clients/client-bridge-hub-westend/src/lib.rs +++ b/relay-clients/client-bridge-hub-westend/src/lib.rs @@ -123,5 +123,5 @@ impl ChainWithMessages for BridgeHubWestend { impl ChainWithRuntimeVersion for BridgeHubWestend { const RUNTIME_VERSION: Option = - Some(SimpleRuntimeVersion { spec_version: 1_008_000, transaction_version: 4 }); + Some(SimpleRuntimeVersion { spec_version: 1_009_000, transaction_version: 4 }); } diff --git a/relay-clients/client-rococo/src/lib.rs b/relay-clients/client-rococo/src/lib.rs index 575660504..ae2891857 100644 --- a/relay-clients/client-rococo/src/lib.rs +++ b/relay-clients/client-rococo/src/lib.rs @@ -118,5 +118,5 @@ impl ChainWithTransactions for Rococo { impl ChainWithRuntimeVersion for Rococo { const RUNTIME_VERSION: Option = - Some(SimpleRuntimeVersion { spec_version: 1_008_000, transaction_version: 24 }); + Some(SimpleRuntimeVersion { spec_version: 1_009_000, transaction_version: 24 }); } diff --git a/relay-clients/client-westend/src/lib.rs b/relay-clients/client-westend/src/lib.rs index 42206baec..c33914ddd 100644 --- a/relay-clients/client-westend/src/lib.rs +++ b/relay-clients/client-westend/src/lib.rs @@ -118,5 +118,5 @@ impl ChainWithTransactions for Westend { impl ChainWithRuntimeVersion for Westend { const RUNTIME_VERSION: Option = - Some(SimpleRuntimeVersion { spec_version: 1_008_000, transaction_version: 24 }); + Some(SimpleRuntimeVersion { spec_version: 1_009_000, transaction_version: 24 }); } diff --git a/substrate-relay/Cargo.toml b/substrate-relay/Cargo.toml index 8a87186ec..8f9489ab2 100644 --- a/substrate-relay/Cargo.toml +++ b/substrate-relay/Cargo.toml @@ -1,6 +1,6 @@ [package] name 
= "substrate-relay" -version = "1.2.0" +version = "1.2.1" authors = ["Parity Technologies "] edition = "2021" license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -- GitLab From b9acdabb039c79ac489f0ab5a1411dbfbf778f3f Mon Sep 17 00:00:00 2001 From: Serban Iorga Date: Tue, 26 Mar 2024 12:57:23 +0100 Subject: [PATCH 13/39] polkadot-sdk backport leftovers (#2896) --- bin/.keep | 0 primitives/runtime/src/chain.rs | 11 ----------- scripts/verify-pallets-build.sh | 3 +++ 3 files changed, 3 insertions(+), 11 deletions(-) delete mode 100644 bin/.keep diff --git a/bin/.keep b/bin/.keep deleted file mode 100644 index e69de29bb..000000000 diff --git a/primitives/runtime/src/chain.rs b/primitives/runtime/src/chain.rs index 00bea2b3c..9ba21a1cd 100644 --- a/primitives/runtime/src/chain.rs +++ b/primitives/runtime/src/chain.rs @@ -98,17 +98,6 @@ impl Encode for EncodedOrDecodedCall { } } -// dummy implementation to satisfy `SignedPayload` requirements -impl sp_runtime::traits::Dispatchable for EncodedOrDecodedCall { - type RuntimeOrigin = (); - type Config = (); - type Info = (); - type PostInfo = (); - fn dispatch(self, _origin: ()) -> sp_runtime::DispatchResultWithInfo<()> { - unreachable!("never used by relayer; qed") - } -} - /// Minimal Substrate-based chain representation that may be used from no_std environment. pub trait Chain: Send + Sync + 'static { /// Chain id. 
diff --git a/scripts/verify-pallets-build.sh b/scripts/verify-pallets-build.sh index b96bbf183..4eefaa8ef 100755 --- a/scripts/verify-pallets-build.sh +++ b/scripts/verify-pallets-build.sh @@ -68,6 +68,7 @@ rm -rf $BRIDGES_FOLDER/modules/beefy rm -rf $BRIDGES_FOLDER/modules/shift-session-manager rm -rf $BRIDGES_FOLDER/primitives/beefy rm -rf $BRIDGES_FOLDER/relays +rm -rf $BRIDGES_FOLDER/relay-clients rm -rf $BRIDGES_FOLDER/scripts/add_license.sh rm -rf $BRIDGES_FOLDER/scripts/build-containers.sh rm -rf $BRIDGES_FOLDER/scripts/ci-cache.sh @@ -77,6 +78,7 @@ rm -rf $BRIDGES_FOLDER/scripts/regenerate_runtimes.sh rm -rf $BRIDGES_FOLDER/scripts/update-weights.sh rm -rf $BRIDGES_FOLDER/scripts/update-weights-setup.sh rm -rf $BRIDGES_FOLDER/scripts/update_substrate.sh +rm -rf $BRIDGES_FOLDER/substrate-relay rm -rf $BRIDGES_FOLDER/tools rm -f $BRIDGES_FOLDER/.dockerignore rm -f $BRIDGES_FOLDER/local.Dockerfile.dockerignore @@ -89,6 +91,7 @@ rm -f $BRIDGES_FOLDER/local.Dockerfile rm -f $BRIDGES_FOLDER/CODEOWNERS rm -f $BRIDGES_FOLDER/Dockerfile rm -f $BRIDGES_FOLDER/rustfmt.toml +rm -f $BRIDGES_FOLDER/RELEASE.md # let's fix Cargo.toml a bit (it'll be helpful if we are in the bridges repo) if [[ ! 
-f "Cargo.toml" ]]; then -- GitLab From e4e1ea60cfb70f9340bf97b2db8f5ec6beb34268 Mon Sep 17 00:00:00 2001 From: Serban Iorga Date: Wed, 27 Mar 2024 12:16:30 +0100 Subject: [PATCH 14/39] Remove test dependecies on specific relay clients (#2898) --- Cargo.lock | 3 -- relays/client-substrate/src/test_chain.rs | 17 ++++++++-- relays/lib-substrate-relay/Cargo.toml | 4 +-- .../src/messages_source.rs | 34 +++++++------------ .../src/on_demand/headers.rs | 3 +- 5 files changed, 29 insertions(+), 32 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d7c7a63df..25acccb2b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9115,9 +9115,6 @@ dependencies = [ "parachains-relay", "parity-scale-codec", "rbtag", - "relay-bridge-hub-rococo-client", - "relay-bridge-hub-westend-client", - "relay-rococo-client", "relay-substrate-client", "relay-utils", "sp-consensus-grandpa", diff --git a/relays/client-substrate/src/test_chain.rs b/relays/client-substrate/src/test_chain.rs index 214f8d87a..77240d158 100644 --- a/relays/client-substrate/src/test_chain.rs +++ b/relays/client-substrate/src/test_chain.rs @@ -21,7 +21,8 @@ #![cfg(any(feature = "test-helpers", test))] -use crate::{Chain, ChainWithBalances}; +use crate::{Chain, ChainWithBalances, ChainWithMessages}; +use bp_messages::{ChainWithMessages as ChainWithMessagesBase, MessageNonce}; use bp_runtime::ChainId; use frame_support::weights::Weight; use std::time::Duration; @@ -44,7 +45,7 @@ impl bp_runtime::Chain for TestChain { type Signature = sp_runtime::testing::TestSignature; fn max_extrinsic_size() -> u32 { - unreachable!() + 100000 } fn max_extrinsic_weight() -> Weight { @@ -69,6 +70,18 @@ impl ChainWithBalances for TestChain { } } +impl ChainWithMessagesBase for TestChain { + const WITH_CHAIN_MESSAGES_PALLET_NAME: &'static str = "Test"; + const MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX: MessageNonce = 0; + const MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX: MessageNonce = 0; +} + +impl ChainWithMessages for TestChain { + const 
WITH_CHAIN_RELAYERS_PALLET_NAME: Option<&'static str> = None; + const TO_CHAIN_MESSAGE_DETAILS_METHOD: &'static str = "TestMessagesDetailsMethod"; + const FROM_CHAIN_MESSAGE_DETAILS_METHOD: &'static str = "TestFromMessagesDetailsMethod"; +} + /// Primitives-level parachain that may be used in tests. #[derive(Clone, Debug, PartialEq, Eq)] pub struct TestParachainBase; diff --git a/relays/lib-substrate-relay/Cargo.toml b/relays/lib-substrate-relay/Cargo.toml index 5bd14bb83..99ff91a75 100644 --- a/relays/lib-substrate-relay/Cargo.toml +++ b/relays/lib-substrate-relay/Cargo.toml @@ -58,6 +58,4 @@ sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "mas [dev-dependencies] bp-rococo = { path = "../../chains/chain-rococo" } pallet-transaction-payment = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -relay-bridge-hub-rococo-client = { path = "../../relay-clients/client-bridge-hub-rococo" } -relay-bridge-hub-westend-client = { path = "../../relay-clients/client-bridge-hub-westend" } -relay-rococo-client = { path = "../../relay-clients/client-rococo" } +relay-substrate-client = { path = "../client-substrate", features = ["test-helpers"] } diff --git a/relays/lib-substrate-relay/src/messages_source.rs b/relays/lib-substrate-relay/src/messages_source.rs index 26e10f886..49deff046 100644 --- a/relays/lib-substrate-relay/src/messages_source.rs +++ b/relays/lib-substrate-relay/src/messages_source.rs @@ -554,9 +554,7 @@ fn split_msgs_to_refine( #[cfg(test)] mod tests { use super::*; - use bp_runtime::Chain as ChainBase; - use relay_bridge_hub_rococo_client::BridgeHubRococo; - use relay_bridge_hub_westend_client::BridgeHubWestend; + use relay_substrate_client::test_chain::TestChain; fn message_details_from_rpc( nonces: RangeInclusive, @@ -573,20 +571,14 @@ mod tests { #[test] fn validate_out_msgs_details_succeeds_if_no_messages_are_missing() { - assert!(validate_out_msgs_details::( - &message_details_from_rpc(1..=3), - 1..=3, - ) 
- .is_ok()); + assert!(validate_out_msgs_details::(&message_details_from_rpc(1..=3), 1..=3,) + .is_ok()); } #[test] fn validate_out_msgs_details_succeeds_if_head_messages_are_missing() { - assert!(validate_out_msgs_details::( - &message_details_from_rpc(2..=3), - 1..=3, - ) - .is_ok()) + assert!(validate_out_msgs_details::(&message_details_from_rpc(2..=3), 1..=3,) + .is_ok()) } #[test] @@ -594,7 +586,7 @@ mod tests { let mut message_details_from_rpc = message_details_from_rpc(1..=3); message_details_from_rpc.remove(1); assert!(matches!( - validate_out_msgs_details::(&message_details_from_rpc, 1..=3,), + validate_out_msgs_details::(&message_details_from_rpc, 1..=3,), Err(SubstrateError::Custom(_)) )); } @@ -602,7 +594,7 @@ mod tests { #[test] fn validate_out_msgs_details_map_fails_if_tail_messages_are_missing() { assert!(matches!( - validate_out_msgs_details::(&message_details_from_rpc(1..=2), 1..=3,), + validate_out_msgs_details::(&message_details_from_rpc(1..=2), 1..=3,), Err(SubstrateError::Custom(_)) )); } @@ -610,7 +602,7 @@ mod tests { #[test] fn validate_out_msgs_details_fails_if_all_messages_are_missing() { assert!(matches!( - validate_out_msgs_details::(&[], 1..=3), + validate_out_msgs_details::(&[], 1..=3), Err(SubstrateError::Custom(_)) )); } @@ -618,7 +610,7 @@ mod tests { #[test] fn validate_out_msgs_details_fails_if_more_messages_than_nonces() { assert!(matches!( - validate_out_msgs_details::(&message_details_from_rpc(1..=5), 2..=5,), + validate_out_msgs_details::(&message_details_from_rpc(1..=5), 2..=5,), Err(SubstrateError::Custom(_)) )); } @@ -644,10 +636,8 @@ mod tests { msgs_to_refine.push((payload, out_msg_details)); } - let maybe_batches = split_msgs_to_refine::( - Default::default(), - msgs_to_refine, - ); + let maybe_batches = + split_msgs_to_refine::(Default::default(), msgs_to_refine); match expected_batches { Ok(expected_batches) => { let batches = maybe_batches.unwrap(); @@ -669,7 +659,7 @@ mod tests { #[test] fn 
test_split_msgs_to_refine() { - let max_extrinsic_size = BridgeHubRococo::max_extrinsic_size() as usize; + let max_extrinsic_size = 100000; // Check that an error is returned when one of the messages is too big. check_split_msgs_to_refine(vec![max_extrinsic_size], Err(())); diff --git a/relays/lib-substrate-relay/src/on_demand/headers.rs b/relays/lib-substrate-relay/src/on_demand/headers.rs index 8b58552d2..99ca1d4d5 100644 --- a/relays/lib-substrate-relay/src/on_demand/headers.rs +++ b/relays/lib-substrate-relay/src/on_demand/headers.rs @@ -526,8 +526,7 @@ fn on_demand_headers_relay_name() -> Str #[cfg(test)] mod tests { use super::*; - - type TestChain = relay_rococo_client::Rococo; + use relay_substrate_client::test_chain::TestChain; const AT_SOURCE: Option = Some(10); const AT_TARGET: Option = Some(1); -- GitLab From a7a47eaebe6260224ecdadc744b2db34d3042f2b Mon Sep 17 00:00:00 2001 From: Serban Iorga Date: Wed, 27 Mar 2024 12:44:50 +0100 Subject: [PATCH 15/39] Backport changes from polkadot-sdk (#2899) * Fix spelling mistakes across the whole repository (#3808) **Update:** Pushed additional changes based on the review comments. 
**This pull request fixes various spelling mistakes in this repository.** Most of the changes are contained in the first **3** commits: - `Fix spelling mistakes in comments and docs` - `Fix spelling mistakes in test names` - `Fix spelling mistakes in error messages, panic messages, logs and tracing` Other source code spelling mistakes are separated into individual commits for easier reviewing: - `Fix the spelling of 'authority'` - `Fix the spelling of 'REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY'` - `Fix the spelling of 'prev_enqueud_messages'` - `Fix the spelling of 'endpoint'` - `Fix the spelling of 'children'` - `Fix the spelling of 'PenpalSiblingSovereignAccount'` - `Fix the spelling of 'PenpalSudoAccount'` - `Fix the spelling of 'insufficient'` - `Fix the spelling of 'PalletXcmExtrinsicsBenchmark'` - `Fix the spelling of 'subtracted'` - `Fix the spelling of 'CandidatePendingAvailability'` - `Fix the spelling of 'exclusive'` - `Fix the spelling of 'until'` - `Fix the spelling of 'discriminator'` - `Fix the spelling of 'nonexistent'` - `Fix the spelling of 'subsystem'` - `Fix the spelling of 'indices'` - `Fix the spelling of 'committed'` - `Fix the spelling of 'topology'` - `Fix the spelling of 'response'` - `Fix the spelling of 'beneficiary'` - `Fix the spelling of 'formatted'` - `Fix the spelling of 'UNKNOWN_PROOF_REQUEST'` - `Fix the spelling of 'succeeded'` - `Fix the spelling of 'reopened'` - `Fix the spelling of 'proposer'` - `Fix the spelling of 'InstantiationNonce'` - `Fix the spelling of 'depositor'` - `Fix the spelling of 'expiration'` - `Fix the spelling of 'phantom'` - `Fix the spelling of 'AggregatedKeyValue'` - `Fix the spelling of 'randomness'` - `Fix the spelling of 'defendant'` - `Fix the spelling of 'AquaticMammal'` - `Fix the spelling of 'transactions'` - `Fix the spelling of 'PassingTracingSubscriber'` - `Fix the spelling of 'TxSignaturePayload'` - `Fix the spelling of 'versioning'` - `Fix the spelling of 'descendant'` - `Fix the spelling of 
'overridden'` - `Fix the spelling of 'network'` Let me know if this structure is adequate. **Note:** The usage of the words `Merkle`, `Merkelize`, `Merklization`, `Merkelization`, `Merkleization`, is somewhat inconsistent but I left it as it is. ~~**Note:** In some places the term `Receival` is used to refer to message reception, IMO `Reception` is the correct word here, but I left it as it is.~~ ~~**Note:** In some places the term `Overlayed` is used instead of the more acceptable version `Overlaid` but I also left it as it is.~~ ~~**Note:** In some places the term `Applyable` is used instead of the correct version `Applicable` but I also left it as it is.~~ **Note:** Some usage of British vs American english e.g. `judgement` vs `judgment`, `initialise` vs `initialize`, `optimise` vs `optimize` etc. are both present in different places, but I suppose that's understandable given the number of contributors. ~~**Note:** There is a spelling mistake in `.github/CODEOWNERS` but it triggers errors in CI when I make changes to it, so I left it as it is.~~ (cherry picked from commit 002d9260f9a0f844f87eefd0abce8bd95aae351b) * Fix --------- Co-authored-by: Dcompoze --- bin/runtime-common/src/messages_api.rs | 2 +- .../src/messages_xcm_extension.rs | 2 +- bin/runtime-common/src/mock.rs | 2 +- bin/runtime-common/src/priority_calculator.rs | 2 +- .../src/refund_relayer_extension.rs | 4 +- chains/chain-kusama/src/lib.rs | 4 +- chains/chain-polkadot-bulletin/src/lib.rs | 8 ++-- chains/chain-polkadot/src/lib.rs | 4 +- chains/chain-rococo/src/lib.rs | 4 +- chains/chain-westend/src/lib.rs | 4 +- modules/grandpa/README.md | 2 +- modules/grandpa/src/call_ext.rs | 8 ++-- modules/grandpa/src/lib.rs | 4 +- modules/grandpa/src/mock.rs | 2 +- modules/messages/src/inbound_lane.rs | 40 +++++++++---------- modules/messages/src/lib.rs | 30 +++++++------- modules/messages/src/outbound_lane.rs | 30 +++++++------- modules/parachains/src/mock.rs | 4 +- modules/xcm-bridge-hub-router/src/lib.rs | 2 
+- modules/xcm-bridge-hub/Cargo.toml | 2 +- .../header-chain/src/justification/mod.rs | 2 +- .../src/justification/verification/mod.rs | 2 +- primitives/header-chain/src/lib.rs | 8 ++-- primitives/messages/src/lib.rs | 8 ++-- primitives/polkadot-core/src/lib.rs | 2 +- primitives/runtime/src/chain.rs | 6 +-- primitives/runtime/src/lib.rs | 4 +- primitives/test-utils/src/lib.rs | 2 +- scripts/verify-pallets-build.sh | 2 +- .../src/bridges/rococo_bulletin/mod.rs | 4 +- .../environments/rococo-westend/rococo.zndsl | 4 +- .../environments/rococo-westend/westend.zndsl | 4 +- .../utils/generate_hex_encoded_call/index.js | 18 ++++----- testing/run-tests.sh | 2 +- .../run.sh | 2 +- 35 files changed, 115 insertions(+), 115 deletions(-) diff --git a/bin/runtime-common/src/messages_api.rs b/bin/runtime-common/src/messages_api.rs index ccf1c7540..7fbdeb366 100644 --- a/bin/runtime-common/src/messages_api.rs +++ b/bin/runtime-common/src/messages_api.rs @@ -14,7 +14,7 @@ // You should have received a copy of the GNU General Public License // along with Parity Bridges Common. If not, see . -//! Helpers for implementing various message-related runtime API mthods. +//! Helpers for implementing various message-related runtime API methods. 
use bp_messages::{ InboundMessageDetails, LaneId, MessageNonce, MessagePayload, OutboundMessageDetails, diff --git a/bin/runtime-common/src/messages_xcm_extension.rs b/bin/runtime-common/src/messages_xcm_extension.rs index e3da6155f..46ed4da0d 100644 --- a/bin/runtime-common/src/messages_xcm_extension.rs +++ b/bin/runtime-common/src/messages_xcm_extension.rs @@ -248,7 +248,7 @@ impl LocalXcmQueueManager { sender_and_lane: &SenderAndLane, enqueued_messages: MessageNonce, ) { - // skip if we dont want to handle congestion + // skip if we don't want to handle congestion if !H::supports_congestion_detection() { return } diff --git a/bin/runtime-common/src/mock.rs b/bin/runtime-common/src/mock.rs index deee4524e..8c4cb2233 100644 --- a/bin/runtime-common/src/mock.rs +++ b/bin/runtime-common/src/mock.rs @@ -379,7 +379,7 @@ impl Chain for BridgedUnderlyingChain { impl ChainWithGrandpa for BridgedUnderlyingChain { const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = ""; const MAX_AUTHORITIES_COUNT: u32 = 16; - const REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY: u32 = 8; + const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = 8; const MAX_MANDATORY_HEADER_SIZE: u32 = 256; const AVERAGE_HEADER_SIZE: u32 = 64; } diff --git a/bin/runtime-common/src/priority_calculator.rs b/bin/runtime-common/src/priority_calculator.rs index c2737128e..5035553f5 100644 --- a/bin/runtime-common/src/priority_calculator.rs +++ b/bin/runtime-common/src/priority_calculator.rs @@ -163,7 +163,7 @@ mod integrity_tests { { // just an estimation of extra transaction bytes that are added to every transaction // (including signature, signed extensions extra and etc + in our case it includes - // all call arguments extept the proof itself) + // all call arguments except the proof itself) let base_tx_size = 512; // let's say we are relaying similar small messages and for every message we add more trie // nodes to the proof (x0.5 because we expect some nodes to be reused) diff --git 
a/bin/runtime-common/src/refund_relayer_extension.rs b/bin/runtime-common/src/refund_relayer_extension.rs index 8e901d728..455392a0a 100644 --- a/bin/runtime-common/src/refund_relayer_extension.rs +++ b/bin/runtime-common/src/refund_relayer_extension.rs @@ -1538,7 +1538,7 @@ mod tests { } #[test] - fn validate_boosts_priority_of_message_delivery_transactons() { + fn validate_boosts_priority_of_message_delivery_transactions() { run_test(|| { initialize_environment(100, 100, 100); @@ -1568,7 +1568,7 @@ mod tests { } #[test] - fn validate_does_not_boost_priority_of_message_delivery_transactons_with_too_many_messages() { + fn validate_does_not_boost_priority_of_message_delivery_transactions_with_too_many_messages() { run_test(|| { initialize_environment(100, 100, 100); diff --git a/chains/chain-kusama/src/lib.rs b/chains/chain-kusama/src/lib.rs index e3b4d0520..a81004afe 100644 --- a/chains/chain-kusama/src/lib.rs +++ b/chains/chain-kusama/src/lib.rs @@ -53,8 +53,8 @@ impl Chain for Kusama { impl ChainWithGrandpa for Kusama { const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = WITH_KUSAMA_GRANDPA_PALLET_NAME; const MAX_AUTHORITIES_COUNT: u32 = MAX_AUTHORITIES_COUNT; - const REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY: u32 = - REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY; + const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = + REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY; const MAX_MANDATORY_HEADER_SIZE: u32 = MAX_MANDATORY_HEADER_SIZE; const AVERAGE_HEADER_SIZE: u32 = AVERAGE_HEADER_SIZE; } diff --git a/chains/chain-polkadot-bulletin/src/lib.rs b/chains/chain-polkadot-bulletin/src/lib.rs index f2eebf931..f3d300567 100644 --- a/chains/chain-polkadot-bulletin/src/lib.rs +++ b/chains/chain-polkadot-bulletin/src/lib.rs @@ -43,7 +43,7 @@ use sp_runtime::{traits::DispatchInfoOf, transaction_validity::TransactionValidi pub use bp_polkadot_core::{ AccountAddress, AccountId, Balance, Block, BlockNumber, Hash, Hasher, Header, Nonce, Signature, SignedBlock, 
UncheckedExtrinsic, AVERAGE_HEADER_SIZE, EXTRA_STORAGE_PROOF_SIZE, - MAX_MANDATORY_HEADER_SIZE, REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY, + MAX_MANDATORY_HEADER_SIZE, REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY, }; /// Maximal number of GRANDPA authorities at Polkadot Bulletin chain. @@ -62,7 +62,7 @@ const NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(90); // Re following constants - we are using the same values at Cumulus parachains. They are limited // by the maximal transaction weight/size. Since block limits at Bulletin Chain are larger than -// at the Cumulus Bridgeg Hubs, we could reuse the same values. +// at the Cumulus Bridge Hubs, we could reuse the same values. /// Maximal number of unrewarded relayer entries at inbound lane for Cumulus-based parachains. pub const MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX: MessageNonce = 1024; @@ -207,8 +207,8 @@ impl Chain for PolkadotBulletin { impl ChainWithGrandpa for PolkadotBulletin { const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = WITH_POLKADOT_BULLETIN_GRANDPA_PALLET_NAME; const MAX_AUTHORITIES_COUNT: u32 = MAX_AUTHORITIES_COUNT; - const REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY: u32 = - REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY; + const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = + REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY; const MAX_MANDATORY_HEADER_SIZE: u32 = MAX_MANDATORY_HEADER_SIZE; const AVERAGE_HEADER_SIZE: u32 = AVERAGE_HEADER_SIZE; } diff --git a/chains/chain-polkadot/src/lib.rs b/chains/chain-polkadot/src/lib.rs index fc5e10308..00d35783a 100644 --- a/chains/chain-polkadot/src/lib.rs +++ b/chains/chain-polkadot/src/lib.rs @@ -55,8 +55,8 @@ impl Chain for Polkadot { impl ChainWithGrandpa for Polkadot { const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = WITH_POLKADOT_GRANDPA_PALLET_NAME; const MAX_AUTHORITIES_COUNT: u32 = MAX_AUTHORITIES_COUNT; - const REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY: u32 = - REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY; + const 
REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = + REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY; const MAX_MANDATORY_HEADER_SIZE: u32 = MAX_MANDATORY_HEADER_SIZE; const AVERAGE_HEADER_SIZE: u32 = AVERAGE_HEADER_SIZE; } diff --git a/chains/chain-rococo/src/lib.rs b/chains/chain-rococo/src/lib.rs index f1b256f0f..2385dd2cb 100644 --- a/chains/chain-rococo/src/lib.rs +++ b/chains/chain-rococo/src/lib.rs @@ -53,8 +53,8 @@ impl Chain for Rococo { impl ChainWithGrandpa for Rococo { const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = WITH_ROCOCO_GRANDPA_PALLET_NAME; const MAX_AUTHORITIES_COUNT: u32 = MAX_AUTHORITIES_COUNT; - const REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY: u32 = - REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY; + const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = + REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY; const MAX_MANDATORY_HEADER_SIZE: u32 = MAX_MANDATORY_HEADER_SIZE; const AVERAGE_HEADER_SIZE: u32 = AVERAGE_HEADER_SIZE; } diff --git a/chains/chain-westend/src/lib.rs b/chains/chain-westend/src/lib.rs index f03fd2160..b344b7f4b 100644 --- a/chains/chain-westend/src/lib.rs +++ b/chains/chain-westend/src/lib.rs @@ -53,8 +53,8 @@ impl Chain for Westend { impl ChainWithGrandpa for Westend { const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = WITH_WESTEND_GRANDPA_PALLET_NAME; const MAX_AUTHORITIES_COUNT: u32 = MAX_AUTHORITIES_COUNT; - const REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY: u32 = - REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY; + const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = + REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY; const MAX_MANDATORY_HEADER_SIZE: u32 = MAX_MANDATORY_HEADER_SIZE; const AVERAGE_HEADER_SIZE: u32 = AVERAGE_HEADER_SIZE; } diff --git a/modules/grandpa/README.md b/modules/grandpa/README.md index 992bd2cc4..4a3099b8a 100644 --- a/modules/grandpa/README.md +++ b/modules/grandpa/README.md @@ -10,7 +10,7 @@ It is used by the parachains light client (bridge parachains pallet) and by mess ## A Brief 
Introduction into GRANDPA Finality You can find detailed information on GRANDPA, by exploring its [repository](https://github.com/paritytech/finality-grandpa). -Here is the minimal reqiuired GRANDPA information to understand how pallet works. +Here is the minimal required GRANDPA information to understand how pallet works. Any Substrate chain may use different block authorship algorithms (like BABE or Aura) to determine block producers and generate blocks. This has nothing common with finality, though - the task of block authorship is to coordinate diff --git a/modules/grandpa/src/call_ext.rs b/modules/grandpa/src/call_ext.rs index e3c778b48..4a7ebb3cc 100644 --- a/modules/grandpa/src/call_ext.rs +++ b/modules/grandpa/src/call_ext.rs @@ -205,7 +205,7 @@ pub(crate) fn submit_finality_proof_info_from_args, I: 'static>( // as an extra weight. let votes_ancestries_len = justification.votes_ancestries.len().saturated_into(); let extra_weight = - if votes_ancestries_len > T::BridgedChain::REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY { + if votes_ancestries_len > T::BridgedChain::REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY { T::WeightInfo::submit_finality_proof(precommits_len, votes_ancestries_len) } else { Weight::zero() @@ -396,11 +396,11 @@ mod tests { let finality_target = test_header(1); let mut justification_params = JustificationGeneratorParams { header: finality_target.clone(), - ancestors: TestBridgedChain::REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY, + ancestors: TestBridgedChain::REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY, ..Default::default() }; - // when there are `REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY` headers => no refund + // when there are `REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY` headers => no refund let justification = make_justification_for_header(justification_params.clone()); let call = RuntimeCall::Grandpa(crate::Call::submit_finality_proof_ex { finality_target: Box::new(finality_target.clone()), @@ -409,7 +409,7 @@ mod tests { }); 
assert_eq!(call.submit_finality_proof_info().unwrap().extra_weight, Weight::zero()); - // when there are `REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY + 1` headers => full refund + // when there are `REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY + 1` headers => full refund justification_params.ancestors += 1; let justification = make_justification_for_header(justification_params); let call_weight = ::WeightInfo::submit_finality_proof( diff --git a/modules/grandpa/src/lib.rs b/modules/grandpa/src/lib.rs index ce2c47da9..9e095651e 100644 --- a/modules/grandpa/src/lib.rs +++ b/modules/grandpa/src/lib.rs @@ -935,7 +935,7 @@ mod tests { } #[test] - fn succesfully_imports_header_with_valid_finality() { + fn successfully_imports_header_with_valid_finality() { run_test(|| { initialize_substrate_bridge(); @@ -1192,7 +1192,7 @@ mod tests { header.digest = change_log(0); let justification = make_justification_for_header(JustificationGeneratorParams { header: header.clone(), - ancestors: TestBridgedChain::REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY + 1, + ancestors: TestBridgedChain::REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY + 1, ..Default::default() }); diff --git a/modules/grandpa/src/mock.rs b/modules/grandpa/src/mock.rs index 4318d663a..e689e520c 100644 --- a/modules/grandpa/src/mock.rs +++ b/modules/grandpa/src/mock.rs @@ -87,7 +87,7 @@ impl Chain for TestBridgedChain { impl ChainWithGrandpa for TestBridgedChain { const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = ""; const MAX_AUTHORITIES_COUNT: u32 = MAX_BRIDGED_AUTHORITIES; - const REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY: u32 = 8; + const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = 8; const MAX_MANDATORY_HEADER_SIZE: u32 = 256; const AVERAGE_HEADER_SIZE: u32 = 64; } diff --git a/modules/messages/src/inbound_lane.rs b/modules/messages/src/inbound_lane.rs index 966ec939e..da1698e6e 100644 --- a/modules/messages/src/inbound_lane.rs +++ b/modules/messages/src/inbound_lane.rs @@ -21,7 +21,7 @@ use 
crate::Config; use bp_messages::{ target_chain::{DispatchMessage, DispatchMessageData, MessageDispatch}, DeliveredMessages, InboundLaneData, LaneId, MessageKey, MessageNonce, OutboundLaneData, - ReceivalResult, UnrewardedRelayer, + ReceptionResult, UnrewardedRelayer, }; use codec::{Decode, Encode, EncodeLike, MaxEncodedLen}; use frame_support::traits::Get; @@ -170,21 +170,21 @@ impl InboundLane { relayer_at_bridged_chain: &S::Relayer, nonce: MessageNonce, message_data: DispatchMessageData, - ) -> ReceivalResult { + ) -> ReceptionResult { let mut data = self.storage.get_or_init_data(); if Some(nonce) != data.last_delivered_nonce().checked_add(1) { - return ReceivalResult::InvalidNonce + return ReceptionResult::InvalidNonce } // if there are more unrewarded relayer entries than we may accept, reject this message if data.relayers.len() as MessageNonce >= self.storage.max_unrewarded_relayer_entries() { - return ReceivalResult::TooManyUnrewardedRelayers + return ReceptionResult::TooManyUnrewardedRelayers } // if there are more unconfirmed messages than we may accept, reject this message let unconfirmed_messages_count = nonce.saturating_sub(data.last_confirmed_nonce); if unconfirmed_messages_count > self.storage.max_unconfirmed_messages() { - return ReceivalResult::TooManyUnconfirmedMessages + return ReceptionResult::TooManyUnconfirmedMessages } // then, dispatch message @@ -207,7 +207,7 @@ impl InboundLane { }; self.storage.set_data(data); - ReceivalResult::Dispatched(dispatch_result) + ReceptionResult::Dispatched(dispatch_result) } } @@ -235,7 +235,7 @@ mod tests { nonce, inbound_message_data(REGULAR_PAYLOAD) ), - ReceivalResult::Dispatched(dispatch_result(0)) + ReceptionResult::Dispatched(dispatch_result(0)) ); } @@ -362,7 +362,7 @@ mod tests { 10, inbound_message_data(REGULAR_PAYLOAD) ), - ReceivalResult::InvalidNonce + ReceptionResult::InvalidNonce ); assert_eq!(lane.storage.get_or_init_data().last_delivered_nonce(), 0); }); @@ -381,7 +381,7 @@ mod tests { 
current_nonce, inbound_message_data(REGULAR_PAYLOAD) ), - ReceivalResult::Dispatched(dispatch_result(0)) + ReceptionResult::Dispatched(dispatch_result(0)) ); } // Fails to dispatch new message from different than latest relayer. @@ -391,7 +391,7 @@ mod tests { max_nonce + 1, inbound_message_data(REGULAR_PAYLOAD) ), - ReceivalResult::TooManyUnrewardedRelayers, + ReceptionResult::TooManyUnrewardedRelayers, ); // Fails to dispatch new messages from latest relayer. Prevents griefing attacks. assert_eq!( @@ -400,7 +400,7 @@ mod tests { max_nonce + 1, inbound_message_data(REGULAR_PAYLOAD) ), - ReceivalResult::TooManyUnrewardedRelayers, + ReceptionResult::TooManyUnrewardedRelayers, ); }); } @@ -417,7 +417,7 @@ mod tests { current_nonce, inbound_message_data(REGULAR_PAYLOAD) ), - ReceivalResult::Dispatched(dispatch_result(0)) + ReceptionResult::Dispatched(dispatch_result(0)) ); } // Fails to dispatch new message from different than latest relayer. @@ -427,7 +427,7 @@ mod tests { max_nonce + 1, inbound_message_data(REGULAR_PAYLOAD) ), - ReceivalResult::TooManyUnconfirmedMessages, + ReceptionResult::TooManyUnconfirmedMessages, ); // Fails to dispatch new messages from latest relayer. 
assert_eq!( @@ -436,7 +436,7 @@ mod tests { max_nonce + 1, inbound_message_data(REGULAR_PAYLOAD) ), - ReceivalResult::TooManyUnconfirmedMessages, + ReceptionResult::TooManyUnconfirmedMessages, ); }); } @@ -451,7 +451,7 @@ mod tests { 1, inbound_message_data(REGULAR_PAYLOAD) ), - ReceivalResult::Dispatched(dispatch_result(0)) + ReceptionResult::Dispatched(dispatch_result(0)) ); assert_eq!( lane.receive_message::( @@ -459,7 +459,7 @@ mod tests { 2, inbound_message_data(REGULAR_PAYLOAD) ), - ReceivalResult::Dispatched(dispatch_result(0)) + ReceptionResult::Dispatched(dispatch_result(0)) ); assert_eq!( lane.receive_message::( @@ -467,7 +467,7 @@ mod tests { 3, inbound_message_data(REGULAR_PAYLOAD) ), - ReceivalResult::Dispatched(dispatch_result(0)) + ReceptionResult::Dispatched(dispatch_result(0)) ); assert_eq!( lane.storage.get_or_init_data().relayers, @@ -490,7 +490,7 @@ mod tests { 1, inbound_message_data(REGULAR_PAYLOAD) ), - ReceivalResult::Dispatched(dispatch_result(0)) + ReceptionResult::Dispatched(dispatch_result(0)) ); assert_eq!( lane.receive_message::( @@ -498,7 +498,7 @@ mod tests { 1, inbound_message_data(REGULAR_PAYLOAD) ), - ReceivalResult::InvalidNonce, + ReceptionResult::InvalidNonce, ); }); } @@ -524,7 +524,7 @@ mod tests { 1, inbound_message_data(payload) ), - ReceivalResult::Dispatched(dispatch_result(1)) + ReceptionResult::Dispatched(dispatch_result(1)) ); }); } diff --git a/modules/messages/src/lib.rs b/modules/messages/src/lib.rs index a86cb326c..bc00db9eb 100644 --- a/modules/messages/src/lib.rs +++ b/modules/messages/src/lib.rs @@ -47,7 +47,7 @@ pub use weights_ext::{ use crate::{ inbound_lane::{InboundLane, InboundLaneStorage}, - outbound_lane::{OutboundLane, OutboundLaneStorage, ReceivalConfirmationError}, + outbound_lane::{OutboundLane, OutboundLaneStorage, ReceptionConfirmationError}, }; use bp_messages::{ @@ -90,7 +90,7 @@ pub const LOG_TARGET: &str = "runtime::bridge-messages"; #[frame_support::pallet] pub mod pallet { use super::*; - use 
bp_messages::{ReceivalResult, ReceivedMessages}; + use bp_messages::{ReceivedMessages, ReceptionResult}; use bp_runtime::RangeInclusiveExt; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; @@ -376,13 +376,13 @@ pub mod pallet { // delivery transaction cost anyway. And base cost covers everything except // dispatch, so we have a balance here. let unspent_weight = match &receival_result { - ReceivalResult::Dispatched(dispatch_result) => { + ReceptionResult::Dispatched(dispatch_result) => { valid_messages += 1; dispatch_result.unspent_weight }, - ReceivalResult::InvalidNonce | - ReceivalResult::TooManyUnrewardedRelayers | - ReceivalResult::TooManyUnconfirmedMessages => message_dispatch_weight, + ReceptionResult::InvalidNonce | + ReceptionResult::TooManyUnrewardedRelayers | + ReceptionResult::TooManyUnconfirmedMessages => message_dispatch_weight, }; lane_messages_received_status.push(message.key.nonce, receival_result); @@ -455,7 +455,7 @@ pub mod pallet { last_delivered_nonce, &lane_data.relayers, ) - .map_err(Error::::ReceivalConfirmation)?; + .map_err(Error::::ReceptionConfirmation)?; if let Some(confirmed_messages) = confirmed_messages { // emit 'delivered' event @@ -563,7 +563,7 @@ pub mod pallet { /// The message someone is trying to work with (i.e. increase fee) is not yet sent. MessageIsNotYetSent, /// Error confirming messages receival. - ReceivalConfirmation(ReceivalConfirmationError), + ReceptionConfirmation(ReceptionConfirmationError), /// Error generated by the `OwnedBridgeModule` trait. 
BridgeModule(bp_runtime::OwnedBridgeModuleError), } @@ -923,7 +923,7 @@ mod tests { PAYLOAD_REJECTED_BY_TARGET_CHAIN, REGULAR_PAYLOAD, TEST_LANE_ID, TEST_LANE_ID_2, TEST_LANE_ID_3, TEST_RELAYER_A, TEST_RELAYER_B, }, - outbound_lane::ReceivalConfirmationError, + outbound_lane::ReceptionConfirmationError, }; use bp_messages::{ source_chain::MessagesBridge, BridgeMessagesCall, UnrewardedRelayer, @@ -950,11 +950,11 @@ mod tests { let outbound_lane = outbound_lane::(lane_id); let message_nonce = outbound_lane.data().latest_generated_nonce + 1; - let prev_enqueud_messages = outbound_lane.data().queued_messages().saturating_len(); + let prev_enqueued_messages = outbound_lane.data().queued_messages().saturating_len(); let valid_message = Pallet::::validate_message(lane_id, ®ULAR_PAYLOAD) .expect("validate_message has failed"); let artifacts = Pallet::::send_message(valid_message); - assert_eq!(artifacts.enqueued_messages, prev_enqueud_messages + 1); + assert_eq!(artifacts.enqueued_messages, prev_enqueued_messages + 1); // check event with assigned nonce assert_eq!( @@ -1541,7 +1541,7 @@ mod tests { } #[test] - fn actual_dispatch_weight_does_not_overlow() { + fn actual_dispatch_weight_does_not_overflow() { run_test(|| { let message1 = message(1, message_payload(0, u64::MAX / 2)); let message2 = message(2, message_payload(0, u64::MAX / 2)); @@ -1775,7 +1775,7 @@ mod tests { // returns `last_confirmed_nonce`; // 3) it means that we're going to confirm delivery of messages 1..=1; // 4) so the number of declared messages (see `UnrewardedRelayersState`) is `0` and - // numer of actually confirmed messages is `1`. + // number of actually confirmed messages is `1`. 
assert_noop!( Pallet::::receive_messages_delivery_proof( RuntimeOrigin::signed(1), @@ -1785,8 +1785,8 @@ mod tests { ))), UnrewardedRelayersState { last_delivered_nonce: 1, ..Default::default() }, ), - Error::::ReceivalConfirmation( - ReceivalConfirmationError::TryingToConfirmMoreMessagesThanExpected + Error::::ReceptionConfirmation( + ReceptionConfirmationError::TryingToConfirmMoreMessagesThanExpected ), ); }); diff --git a/modules/messages/src/outbound_lane.rs b/modules/messages/src/outbound_lane.rs index 431c2cfb7..acef5546d 100644 --- a/modules/messages/src/outbound_lane.rs +++ b/modules/messages/src/outbound_lane.rs @@ -53,7 +53,7 @@ pub type StoredMessagePayload = BoundedVec>::MaximalOu /// Result of messages receival confirmation. #[derive(Encode, Decode, RuntimeDebug, PartialEq, Eq, PalletError, TypeInfo)] -pub enum ReceivalConfirmationError { +pub enum ReceptionConfirmationError { /// Bridged chain is trying to confirm more messages than we have generated. May be a result /// of invalid bridged chain storage. 
FailedToConfirmFutureMessages, @@ -103,7 +103,7 @@ impl OutboundLane { max_allowed_messages: MessageNonce, latest_delivered_nonce: MessageNonce, relayers: &VecDeque>, - ) -> Result, ReceivalConfirmationError> { + ) -> Result, ReceptionConfirmationError> { let mut data = self.storage.data(); let confirmed_messages = DeliveredMessages { begin: data.latest_received_nonce.saturating_add(1), @@ -113,7 +113,7 @@ impl OutboundLane { return Ok(None) } if confirmed_messages.end > data.latest_generated_nonce { - return Err(ReceivalConfirmationError::FailedToConfirmFutureMessages) + return Err(ReceptionConfirmationError::FailedToConfirmFutureMessages) } if confirmed_messages.total_messages() > max_allowed_messages { // that the relayer has declared correct number of messages that the proof contains (it @@ -127,7 +127,7 @@ impl OutboundLane { confirmed_messages.total_messages(), max_allowed_messages, ); - return Err(ReceivalConfirmationError::TryingToConfirmMoreMessagesThanExpected) + return Err(ReceptionConfirmationError::TryingToConfirmMoreMessagesThanExpected) } ensure_unrewarded_relayers_are_correct(confirmed_messages.end, relayers)?; @@ -176,24 +176,24 @@ impl OutboundLane { fn ensure_unrewarded_relayers_are_correct( latest_received_nonce: MessageNonce, relayers: &VecDeque>, -) -> Result<(), ReceivalConfirmationError> { +) -> Result<(), ReceptionConfirmationError> { let mut expected_entry_begin = relayers.front().map(|entry| entry.messages.begin); for entry in relayers { // unrewarded relayer entry must have at least 1 unconfirmed message // (guaranteed by the `InboundLane::receive_message()`) if entry.messages.end < entry.messages.begin { - return Err(ReceivalConfirmationError::EmptyUnrewardedRelayerEntry) + return Err(ReceptionConfirmationError::EmptyUnrewardedRelayerEntry) } // every entry must confirm range of messages that follows previous entry range // (guaranteed by the `InboundLane::receive_message()`) if expected_entry_begin != Some(entry.messages.begin) { - 
return Err(ReceivalConfirmationError::NonConsecutiveUnrewardedRelayerEntries) + return Err(ReceptionConfirmationError::NonConsecutiveUnrewardedRelayerEntries) } expected_entry_begin = entry.messages.end.checked_add(1); // entry can't confirm messages larger than `inbound_lane_data.latest_received_nonce()` // (guaranteed by the `InboundLane::receive_message()`) if entry.messages.end > latest_received_nonce { - return Err(ReceivalConfirmationError::FailedToConfirmFutureMessages) + return Err(ReceptionConfirmationError::FailedToConfirmFutureMessages) } } @@ -228,7 +228,7 @@ mod tests { fn assert_3_messages_confirmation_fails( latest_received_nonce: MessageNonce, relayers: &VecDeque>, - ) -> Result, ReceivalConfirmationError> { + ) -> Result, ReceptionConfirmationError> { run_test(|| { let mut lane = outbound_lane::(TEST_LANE_ID); lane.send_message(outbound_message_data(REGULAR_PAYLOAD)); @@ -299,7 +299,7 @@ mod tests { fn confirm_delivery_rejects_nonce_larger_than_last_generated() { assert_eq!( assert_3_messages_confirmation_fails(10, &unrewarded_relayers(1..=10),), - Err(ReceivalConfirmationError::FailedToConfirmFutureMessages), + Err(ReceptionConfirmationError::FailedToConfirmFutureMessages), ); } @@ -314,7 +314,7 @@ mod tests { .chain(unrewarded_relayers(3..=3).into_iter()) .collect(), ), - Err(ReceivalConfirmationError::FailedToConfirmFutureMessages), + Err(ReceptionConfirmationError::FailedToConfirmFutureMessages), ); } @@ -330,7 +330,7 @@ mod tests { .chain(unrewarded_relayers(2..=3).into_iter()) .collect(), ), - Err(ReceivalConfirmationError::EmptyUnrewardedRelayerEntry), + Err(ReceptionConfirmationError::EmptyUnrewardedRelayerEntry), ); } @@ -345,7 +345,7 @@ mod tests { .chain(unrewarded_relayers(2..=2).into_iter()) .collect(), ), - Err(ReceivalConfirmationError::NonConsecutiveUnrewardedRelayerEntries), + Err(ReceptionConfirmationError::NonConsecutiveUnrewardedRelayerEntries), ); } @@ -409,11 +409,11 @@ mod tests { 
lane.send_message(outbound_message_data(REGULAR_PAYLOAD)); assert_eq!( lane.confirm_delivery(0, 3, &unrewarded_relayers(1..=3)), - Err(ReceivalConfirmationError::TryingToConfirmMoreMessagesThanExpected), + Err(ReceptionConfirmationError::TryingToConfirmMoreMessagesThanExpected), ); assert_eq!( lane.confirm_delivery(2, 3, &unrewarded_relayers(1..=3)), - Err(ReceivalConfirmationError::TryingToConfirmMoreMessagesThanExpected), + Err(ReceptionConfirmationError::TryingToConfirmMoreMessagesThanExpected), ); assert_eq!( lane.confirm_delivery(3, 3, &unrewarded_relayers(1..=3)), diff --git a/modules/parachains/src/mock.rs b/modules/parachains/src/mock.rs index 3af3fd3e7..d9cbabf85 100644 --- a/modules/parachains/src/mock.rs +++ b/modules/parachains/src/mock.rs @@ -261,7 +261,7 @@ impl Chain for TestBridgedChain { impl ChainWithGrandpa for TestBridgedChain { const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = ""; const MAX_AUTHORITIES_COUNT: u32 = 16; - const REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY: u32 = 8; + const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = 8; const MAX_MANDATORY_HEADER_SIZE: u32 = 256; const AVERAGE_HEADER_SIZE: u32 = 64; } @@ -294,7 +294,7 @@ impl Chain for OtherBridgedChain { impl ChainWithGrandpa for OtherBridgedChain { const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = ""; const MAX_AUTHORITIES_COUNT: u32 = 16; - const REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY: u32 = 8; + const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = 8; const MAX_MANDATORY_HEADER_SIZE: u32 = 256; const AVERAGE_HEADER_SIZE: u32 = 64; } diff --git a/modules/xcm-bridge-hub-router/src/lib.rs b/modules/xcm-bridge-hub-router/src/lib.rs index f219be78f..5d0be41b1 100644 --- a/modules/xcm-bridge-hub-router/src/lib.rs +++ b/modules/xcm-bridge-hub-router/src/lib.rs @@ -427,7 +427,7 @@ mod tests { run_test(|| { Bridge::::put(uncongested_bridge(FixedU128::from_rational(125, 100))); - // it shold eventually decreased to one + // it should eventually decreased to one 
while XcmBridgeHubRouter::bridge().delivery_fee_factor > MINIMAL_DELIVERY_FEE_FACTOR { XcmBridgeHubRouter::on_initialize(One::one()); } diff --git a/modules/xcm-bridge-hub/Cargo.toml b/modules/xcm-bridge-hub/Cargo.toml index 4d5d01234..1200af483 100644 --- a/modules/xcm-bridge-hub/Cargo.toml +++ b/modules/xcm-bridge-hub/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pallet-xcm-bridge-hub" -description = "Module that adds dynamic bridges/lanes support to XCM infrastucture at the bridge hub." +description = "Module that adds dynamic bridges/lanes support to XCM infrastructure at the bridge hub." version = "0.2.0" authors.workspace = true edition.workspace = true diff --git a/primitives/header-chain/src/justification/mod.rs b/primitives/header-chain/src/justification/mod.rs index b32d8bdb5..d7c2cbf42 100644 --- a/primitives/header-chain/src/justification/mod.rs +++ b/primitives/header-chain/src/justification/mod.rs @@ -83,7 +83,7 @@ impl GrandpaJustification { .saturating_add(HashOf::::max_encoded_len().saturated_into()); let max_expected_votes_ancestries_size = - C::REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY.saturating_mul(C::AVERAGE_HEADER_SIZE); + C::REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY.saturating_mul(C::AVERAGE_HEADER_SIZE); // justification is round number (u64=8b), a signed GRANDPA commit and the // `votes_ancestries` vector diff --git a/primitives/header-chain/src/justification/verification/mod.rs b/primitives/header-chain/src/justification/verification/mod.rs index c71149bf9..9df3511e1 100644 --- a/primitives/header-chain/src/justification/verification/mod.rs +++ b/primitives/header-chain/src/justification/verification/mod.rs @@ -318,7 +318,7 @@ trait JustificationVerifier { } // check that the cumulative weight of validators that voted for the justification target - // (or one of its descendents) is larger than the required threshold. + // (or one of its descendants) is larger than the required threshold. 
if cumulative_weight < threshold { return Err(Error::TooLowCumulativeWeight) } diff --git a/primitives/header-chain/src/lib.rs b/primitives/header-chain/src/lib.rs index 84a6a881a..98fb9ff83 100644 --- a/primitives/header-chain/src/lib.rs +++ b/primitives/header-chain/src/lib.rs @@ -283,7 +283,7 @@ pub trait ChainWithGrandpa: Chain { /// ancestry and the pallet will accept such justification. The limit is only used to compute /// maximal refund amount and submitting justifications which exceed the limit, may be costly /// to submitter. - const REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY: u32; + const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32; /// Maximal size of the mandatory chain header. Mandatory header is the header that enacts new /// GRANDPA authorities set (so it has large digest inside). @@ -317,8 +317,8 @@ where const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = ::WITH_CHAIN_GRANDPA_PALLET_NAME; const MAX_AUTHORITIES_COUNT: u32 = ::MAX_AUTHORITIES_COUNT; - const REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY: u32 = - ::REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY; + const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = + ::REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY; const MAX_MANDATORY_HEADER_SIZE: u32 = ::MAX_MANDATORY_HEADER_SIZE; const AVERAGE_HEADER_SIZE: u32 = ::AVERAGE_HEADER_SIZE; @@ -373,7 +373,7 @@ mod tests { impl ChainWithGrandpa for TestChain { const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = "Test"; const MAX_AUTHORITIES_COUNT: u32 = 128; - const REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY: u32 = 2; + const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = 2; const MAX_MANDATORY_HEADER_SIZE: u32 = 100_000; const AVERAGE_HEADER_SIZE: u32 = 1_024; } diff --git a/primitives/messages/src/lib.rs b/primitives/messages/src/lib.rs index 51b3f25f7..c3f79b3ee 100644 --- a/primitives/messages/src/lib.rs +++ b/primitives/messages/src/lib.rs @@ -289,27 +289,27 @@ pub struct ReceivedMessages { /// Id of the lane which is receiving 
messages. pub lane: LaneId, /// Result of messages which we tried to dispatch - pub receive_results: Vec<(MessageNonce, ReceivalResult)>, + pub receive_results: Vec<(MessageNonce, ReceptionResult)>, } impl ReceivedMessages { /// Creates new `ReceivedMessages` structure from given results. pub fn new( lane: LaneId, - receive_results: Vec<(MessageNonce, ReceivalResult)>, + receive_results: Vec<(MessageNonce, ReceptionResult)>, ) -> Self { ReceivedMessages { lane, receive_results } } /// Push `result` of the `message` delivery onto `receive_results` vector. - pub fn push(&mut self, message: MessageNonce, result: ReceivalResult) { + pub fn push(&mut self, message: MessageNonce, result: ReceptionResult) { self.receive_results.push((message, result)); } } /// Result of single message receival. #[derive(RuntimeDebug, Encode, Decode, PartialEq, Eq, Clone, TypeInfo)] -pub enum ReceivalResult { +pub enum ReceptionResult { /// Message has been received and dispatched. Note that we don't care whether dispatch has /// been successful or not - in both case message falls into this category. /// diff --git a/primitives/polkadot-core/src/lib.rs b/primitives/polkadot-core/src/lib.rs index df2836495..e83be59b2 100644 --- a/primitives/polkadot-core/src/lib.rs +++ b/primitives/polkadot-core/src/lib.rs @@ -71,7 +71,7 @@ pub const MAX_AUTHORITIES_COUNT: u32 = 1_256; /// justifications with any additional headers in votes ancestry, so reasonable headers may /// be set to zero. But we assume that there may be small GRANDPA lags, so we're leaving some /// reserve here. -pub const REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY: u32 = 2; +pub const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = 2; /// Average header size in `votes_ancestries` field of justification on Polkadot-like /// chains. 
diff --git a/primitives/runtime/src/chain.rs b/primitives/runtime/src/chain.rs index 9ba21a1cd..4ec5a001a 100644 --- a/primitives/runtime/src/chain.rs +++ b/primitives/runtime/src/chain.rs @@ -104,7 +104,7 @@ pub trait Chain: Send + Sync + 'static { const ID: ChainId; /// A type that fulfills the abstract idea of what a Substrate block number is. - // Constraits come from the associated Number type of `sp_runtime::traits::Header` + // Constraints come from the associated Number type of `sp_runtime::traits::Header` // See here for more info: // https://crates.parity.io/sp_runtime/traits/trait.Header.html#associatedtype.Number // @@ -125,7 +125,7 @@ pub trait Chain: Send + Sync + 'static { + MaxEncodedLen; /// A type that fulfills the abstract idea of what a Substrate hash is. - // Constraits come from the associated Hash type of `sp_runtime::traits::Header` + // Constraints come from the associated Hash type of `sp_runtime::traits::Header` // See here for more info: // https://crates.parity.io/sp_runtime/traits/trait.Header.html#associatedtype.Hash type Hash: Parameter @@ -143,7 +143,7 @@ pub trait Chain: Send + Sync + 'static { /// A type that fulfills the abstract idea of what a Substrate hasher (a type /// that produces hashes) is. 
- // Constraits come from the associated Hashing type of `sp_runtime::traits::Header` + // Constraints come from the associated Hashing type of `sp_runtime::traits::Header` // See here for more info: // https://crates.parity.io/sp_runtime/traits/trait.Header.html#associatedtype.Hashing type Hasher: HashT; diff --git a/primitives/runtime/src/lib.rs b/primitives/runtime/src/lib.rs index 850318923..c9c5c9412 100644 --- a/primitives/runtime/src/lib.rs +++ b/primitives/runtime/src/lib.rs @@ -56,7 +56,7 @@ mod chain; mod storage_proof; mod storage_types; -// Re-export macro to aviod include paste dependency everywhere +// Re-export macro to avoid include paste dependency everywhere pub use sp_runtime::paste; /// Use this when something must be shared among all instances. @@ -461,7 +461,7 @@ macro_rules! generate_static_str_provider { }; } -/// Error message that is only dispayable in `std` environment. +/// Error message that is only displayable in `std` environment. #[derive(Encode, Decode, Clone, Eq, PartialEq, PalletError, TypeInfo)] #[scale_info(skip_type_params(T))] pub struct StrippableError { diff --git a/primitives/test-utils/src/lib.rs b/primitives/test-utils/src/lib.rs index 1d8089077..f4fe4a242 100644 --- a/primitives/test-utils/src/lib.rs +++ b/primitives/test-utils/src/lib.rs @@ -88,7 +88,7 @@ pub fn make_default_justification(header: &H) -> GrandpaJustificatio /// Generate justifications in a way where we are able to tune the number of pre-commits /// and vote ancestries which are included in the justification. /// -/// This is useful for benchmarkings where we want to generate valid justifications with +/// This is useful for benchmarks where we want to generate valid justifications with /// a specific number of pre-commits (tuned with the number of "authorities") and/or a specific /// number of vote ancestries (tuned with the "votes" parameter). 
/// diff --git a/scripts/verify-pallets-build.sh b/scripts/verify-pallets-build.sh index 4eefaa8ef..9c57a2a3c 100755 --- a/scripts/verify-pallets-build.sh +++ b/scripts/verify-pallets-build.sh @@ -134,7 +134,7 @@ cargo check -p bridge-runtime-common cargo check -p bridge-runtime-common --features runtime-benchmarks cargo check -p bridge-runtime-common --features integrity-test -# we're removing lock file after all chechs are done. Otherwise we may use different +# we're removing lock file after all checks are done. Otherwise we may use different # Substrate/Polkadot/Cumulus commits and our checks will fail rm -f $BRIDGES_FOLDER/Cargo.lock diff --git a/substrate-relay/src/bridges/rococo_bulletin/mod.rs b/substrate-relay/src/bridges/rococo_bulletin/mod.rs index 738fea8c5..2812651ea 100644 --- a/substrate-relay/src/bridges/rococo_bulletin/mod.rs +++ b/substrate-relay/src/bridges/rococo_bulletin/mod.rs @@ -64,8 +64,8 @@ impl bp_header_chain::ChainWithGrandpa for RococoBaseAsPolkadot { const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = bp_polkadot::Polkadot::WITH_CHAIN_GRANDPA_PALLET_NAME; const MAX_AUTHORITIES_COUNT: u32 = bp_rococo::Rococo::MAX_AUTHORITIES_COUNT; - const REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY: u32 = - bp_rococo::Rococo::REASONABLE_HEADERS_IN_JUSTIFICATON_ANCESTRY; + const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = + bp_rococo::Rococo::REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY; const MAX_MANDATORY_HEADER_SIZE: u32 = bp_rococo::Rococo::MAX_MANDATORY_HEADER_SIZE; const AVERAGE_HEADER_SIZE: u32 = bp_rococo::Rococo::AVERAGE_HEADER_SIZE; } diff --git a/testing/environments/rococo-westend/rococo.zndsl b/testing/environments/rococo-westend/rococo.zndsl index 5b49c7c63..a75286445 100644 --- a/testing/environments/rococo-westend/rococo.zndsl +++ b/testing/environments/rococo-westend/rococo.zndsl @@ -1,7 +1,7 @@ -Description: Check if the with-Westend GRANPDA pallet was initialized at Rococo BH +Description: Check if the with-Westend GRANDPA 
pallet was initialized at Rococo BH Network: ./bridge_hub_rococo_local_network.toml Creds: config -# relay is already started - let's wait until with-Westend GRANPDA pallet is initialized at Rococo +# relay is already started - let's wait until with-Westend GRANDPA pallet is initialized at Rococo bridge-hub-rococo-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/best-finalized-header-at-bridged-chain.js with "Westend,0" within 400 seconds diff --git a/testing/environments/rococo-westend/westend.zndsl b/testing/environments/rococo-westend/westend.zndsl index 079688388..21d4ebf3b 100644 --- a/testing/environments/rococo-westend/westend.zndsl +++ b/testing/environments/rococo-westend/westend.zndsl @@ -1,6 +1,6 @@ -Description: Check if the with-Rococo GRANPDA pallet was initialized at Westend BH +Description: Check if the with-Rococo GRANDPA pallet was initialized at Westend BH Network: ./bridge_hub_westend_local_network.toml Creds: config -# relay is already started - let's wait until with-Rococo GRANPDA pallet is initialized at Westend +# relay is already started - let's wait until with-Rococo GRANDPA pallet is initialized at Westend bridge-hub-westend-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/best-finalized-header-at-bridged-chain.js with "Rococo,0" within 400 seconds diff --git a/testing/framework/utils/generate_hex_encoded_call/index.js b/testing/framework/utils/generate_hex_encoded_call/index.js index 30f89d754..c8e361b25 100644 --- a/testing/framework/utils/generate_hex_encoded_call/index.js +++ b/testing/framework/utils/generate_hex_encoded_call/index.js @@ -126,36 +126,36 @@ if (!process.argv[2] || !process.argv[3]) { } const type = process.argv[2]; -const rpcEnpoint = process.argv[3]; +const rpcEndpoint = process.argv[3]; const output = process.argv[4]; const inputArgs = process.argv.slice(5, process.argv.length); console.log(`Generating hex-encoded call data for:`); console.log(` type: ${type}`); -console.log(` rpcEnpoint: ${rpcEnpoint}`); 
+console.log(` rpcEndpoint: ${rpcEndpoint}`); console.log(` output: ${output}`); console.log(` inputArgs: ${inputArgs}`); switch (type) { case 'remark-with-event': - remarkWithEvent(rpcEnpoint, output); + remarkWithEvent(rpcEndpoint, output); break; case 'add-exporter-config': - addExporterConfig(rpcEnpoint, output, inputArgs[0], inputArgs[1]); + addExporterConfig(rpcEndpoint, output, inputArgs[0], inputArgs[1]); break; case 'remove-exporter-config': - removeExporterConfig(rpcEnpoint, output, inputArgs[0], inputArgs[1]); + removeExporterConfig(rpcEndpoint, output, inputArgs[0], inputArgs[1]); break; case 'add-universal-alias': - addUniversalAlias(rpcEnpoint, output, inputArgs[0], inputArgs[1]); + addUniversalAlias(rpcEndpoint, output, inputArgs[0], inputArgs[1]); break; case 'add-reserve-location': - addReserveLocation(rpcEnpoint, output, inputArgs[0]); + addReserveLocation(rpcEndpoint, output, inputArgs[0]); break; case 'force-create-asset': - forceCreateAsset(rpcEnpoint, output, inputArgs[0], inputArgs[1], inputArgs[2], inputArgs[3]); + forceCreateAsset(rpcEndpoint, output, inputArgs[0], inputArgs[1], inputArgs[2], inputArgs[3]); break; case 'force-xcm-version': - forceXcmVersion(rpcEnpoint, output, inputArgs[0], inputArgs[1]); + forceXcmVersion(rpcEndpoint, output, inputArgs[0], inputArgs[1]); break; case 'check': console.log(`Checking nodejs installation, if you see this everything is ready!`); diff --git a/testing/run-tests.sh b/testing/run-tests.sh index 6149d9912..fd12b57f5 100755 --- a/testing/run-tests.sh +++ b/testing/run-tests.sh @@ -30,7 +30,7 @@ done export POLKADOT_SDK_PATH=`realpath $(dirname "$0")/../..` export BRIDGE_TESTS_FOLDER=$POLKADOT_SDK_PATH/bridges/testing/tests -# set pathc to binaries +# set path to binaries if [ "$ZOMBIENET_DOCKER_PATHS" -eq 1 ]; then export POLKADOT_BINARY=/usr/local/bin/polkadot export POLKADOT_PARACHAIN_BINARY=/usr/local/bin/polkadot-parachain diff --git a/testing/tests/0002-mandatory-headers-synced-while-idle/run.sh 
b/testing/tests/0002-mandatory-headers-synced-while-idle/run.sh index 7d5b8d927..3a604b387 100755 --- a/testing/tests/0002-mandatory-headers-synced-while-idle/run.sh +++ b/testing/tests/0002-mandatory-headers-synced-while-idle/run.sh @@ -24,7 +24,7 @@ echo -e "Sleeping 90s before starting relayer ...\n" sleep 90 ${BASH_SOURCE%/*}/../../environments/rococo-westend/start_relayer.sh $rococo_dir $westend_dir relayer_pid -# Sometimes the relayer syncs multiple parachain heads in the begining leading to test failures. +# Sometimes the relayer syncs multiple parachain heads in the beginning leading to test failures. # See issue: https://github.com/paritytech/parity-bridges-common/issues/2838. # TODO: Remove this sleep after the issue is fixed. echo -e "Sleeping 180s before runing the tests ...\n" -- GitLab From 1e4fd28e99c99ed6e6686a056a43f193fef6264f Mon Sep 17 00:00:00 2001 From: Serban Iorga Date: Wed, 27 Mar 2024 13:25:37 +0100 Subject: [PATCH 16/39] Leftover (#2900) --- Cargo.lock | 1 - relays/lib-substrate-relay/Cargo.toml | 1 - relays/lib-substrate-relay/src/on_demand/headers.rs | 4 ++-- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 25acccb2b..f9a76ae70 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9093,7 +9093,6 @@ dependencies = [ "bp-parachains", "bp-polkadot-core", "bp-relayers", - "bp-rococo", "bp-runtime", "bridge-runtime-common", "equivocation-detector", diff --git a/relays/lib-substrate-relay/Cargo.toml b/relays/lib-substrate-relay/Cargo.toml index 99ff91a75..c064f11b2 100644 --- a/relays/lib-substrate-relay/Cargo.toml +++ b/relays/lib-substrate-relay/Cargo.toml @@ -56,6 +56,5 @@ sp-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk", bra sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } [dev-dependencies] -bp-rococo = { path = "../../chains/chain-rococo" } pallet-transaction-payment = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" 
} relay-substrate-client = { path = "../client-substrate", features = ["test-helpers"] } diff --git a/relays/lib-substrate-relay/src/on_demand/headers.rs b/relays/lib-substrate-relay/src/on_demand/headers.rs index 99ca1d4d5..e8a2a3c6c 100644 --- a/relays/lib-substrate-relay/src/on_demand/headers.rs +++ b/relays/lib-substrate-relay/src/on_demand/headers.rs @@ -528,8 +528,8 @@ mod tests { use super::*; use relay_substrate_client::test_chain::TestChain; - const AT_SOURCE: Option = Some(10); - const AT_TARGET: Option = Some(1); + const AT_SOURCE: Option> = Some(10); + const AT_TARGET: Option> = Some(1); #[async_std::test] async fn mandatory_headers_scan_range_selects_range_if_some_headers_are_missing() { -- GitLab From 956601369a5157d961dfa420f8219d838f105218 Mon Sep 17 00:00:00 2001 From: Serban Iorga Date: Wed, 27 Mar 2024 17:05:13 +0100 Subject: [PATCH 17/39] Fix polkadot-sdk CI failures (#2901) * taplo * markdown * publish = false * feature propagation --- modules/beefy/Cargo.toml | 13 ++++++++++--- primitives/beefy/Cargo.toml | 6 ++++-- relays/client-substrate/Cargo.toml | 1 + relays/equivocation/Cargo.toml | 1 + relays/finality/Cargo.toml | 1 + relays/finality/README.md | 22 ++++++++++++---------- relays/lib-substrate-relay/Cargo.toml | 1 + relays/messages/Cargo.toml | 1 + relays/parachains/Cargo.toml | 1 + relays/parachains/README.md | 13 +++++++------ relays/utils/Cargo.toml | 1 + 11 files changed, 40 insertions(+), 21 deletions(-) diff --git a/modules/beefy/Cargo.toml b/modules/beefy/Cargo.toml index 4ead33c44..53c8006f8 100644 --- a/modules/beefy/Cargo.toml +++ b/modules/beefy/Cargo.toml @@ -5,6 +5,7 @@ description = "Module implementing BEEFY on-chain light client used for bridging authors = ["Parity Technologies "] edition = "2021" license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +publish = false [lints] workspace = true @@ -38,7 +39,7 @@ sp-io = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" bp-test-utils = { path = 
"../../primitives/test-utils" } [features] -default = [ "std" ] +default = ["std"] std = [ "bp-beefy/std", "bp-runtime/std", @@ -47,9 +48,15 @@ std = [ "frame-system/std", "log/std", "scale-info/std", - "serde", + "serde/std", "sp-core/std", "sp-runtime/std", "sp-std/std", ] -try-runtime = [ "frame-support/try-runtime", "frame-system/try-runtime" ] +try-runtime = [ + "frame-support/try-runtime", + "frame-system/try-runtime", + "pallet-beefy-mmr/try-runtime", + "pallet-mmr/try-runtime", + "sp-runtime/try-runtime", +] diff --git a/primitives/beefy/Cargo.toml b/primitives/beefy/Cargo.toml index b6bb26986..0a282090f 100644 --- a/primitives/beefy/Cargo.toml +++ b/primitives/beefy/Cargo.toml @@ -5,12 +5,13 @@ version = "0.1.0" authors = ["Parity Technologies "] edition = "2021" license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +publish = false [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["derive", "bit-vec"] } +codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["bit-vec", "derive"] } scale-info = { version = "2.11.0", default-features = false, features = ["bit-vec", "derive"] } serde = { default-features = false, features = ["alloc", "derive"], workspace = true } @@ -29,8 +30,9 @@ sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "mas sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } [features] -default = [ "std" ] +default = ["std"] std = [ + "binary-merkle-tree/std", "bp-runtime/std", "codec/std", "frame-support/std", diff --git a/relays/client-substrate/Cargo.toml b/relays/client-substrate/Cargo.toml index 9240af7b5..3049d8350 100644 --- a/relays/client-substrate/Cargo.toml +++ b/relays/client-substrate/Cargo.toml @@ -4,6 +4,7 @@ version = "0.1.0" authors = ["Parity Technologies "] edition = "2021" license = "GPL-3.0-or-later WITH 
Classpath-exception-2.0" +publish = false [lints] workspace = true diff --git a/relays/equivocation/Cargo.toml b/relays/equivocation/Cargo.toml index 6f65d40fc..3b99e4349 100644 --- a/relays/equivocation/Cargo.toml +++ b/relays/equivocation/Cargo.toml @@ -5,6 +5,7 @@ authors = ["Parity Technologies "] edition = "2021" license = "GPL-3.0-or-later WITH Classpath-exception-2.0" description = "Equivocation detector" +publish = false [lints] workspace = true diff --git a/relays/finality/Cargo.toml b/relays/finality/Cargo.toml index 8cf3d409c..53d2ce579 100644 --- a/relays/finality/Cargo.toml +++ b/relays/finality/Cargo.toml @@ -5,6 +5,7 @@ authors = ["Parity Technologies "] edition = "2021" license = "GPL-3.0-or-later WITH Classpath-exception-2.0" description = "Finality proofs relay" +publish = false [lints] workspace = true diff --git a/relays/finality/README.md b/relays/finality/README.md index 0a8d6a4c8..92e765cea 100644 --- a/relays/finality/README.md +++ b/relays/finality/README.md @@ -3,7 +3,7 @@ The finality relay is able to work with different finality engines. In the modern Substrate world they are GRANDPA and BEEFY. Let's talk about GRANDPA here, because BEEFY relay and bridge BEEFY pallet are in development. -In general, the relay works as follows: it connects to the source and target chain. The source chain must have the +In general, the relay works as follows: it connects to the source and target chain. The source chain must have the [GRANDPA gadget](https://github.com/paritytech/finality-grandpa) running (so it can't be a parachain). The target chain must have the [bridge GRANDPA pallet](../../modules/grandpa/) deployed at its runtime. The relay subscribes to the GRANDPA finality notifications at the source chain and when the new justification is received, it is submitted @@ -41,18 +41,20 @@ Finality relay provides several metrics. 
Metrics names depend on names of source shows metrics names for Rococo (source chain) to BridgeHubWestend (target chain) finality relay. For other chains, simply change chain names. So the metrics are: -- `Rococo_to_BridgeHubWestend_Sync_best_source_block_number` - returns best finalized source chain (Rococo) block number, known - to the relay. If relay is running in [on-demand mode](../bin-substrate/src/cli/relay_headers_and_messages/), the +- `Rococo_to_BridgeHubWestend_Sync_best_source_block_number` - returns best finalized source chain (Rococo) block + number, known to the relay. + If relay is running in [on-demand mode](../bin-substrate/src/cli/relay_headers_and_messages/), the number may not match (it may be far behind) the actual best finalized number; -- `Rococo_to_BridgeHubWestend_Sync_best_source_at_target_block_number` - returns best finalized source chain (Rococo) block - number that is known to the bridge GRANDPA pallet at the target chain. +- `Rococo_to_BridgeHubWestend_Sync_best_source_at_target_block_number` - returns best finalized source chain (Rococo) + block number that is known to the bridge GRANDPA pallet at the target chain. -- `Rococo_to_BridgeHubWestend_Sync_is_source_and_source_at_target_using_different_forks` - if this metrics is set to `1`, then - the best source chain header, known to the target chain doesn't match the same-number-header at the source chain. - It means that the GRANDPA validators set has crafted the duplicate justification and it has been submitted to the - target chain. Normally (if majority of validators are honest and if you're running finality relay without large - breaks) this shall not happen and the metric will have `0` value. +- `Rococo_to_BridgeHubWestend_Sync_is_source_and_source_at_target_using_different_forks` - if this metrics is set + to `1`, then the best source chain header known to the target chain doesn't match the same-number-header + at the source chain. 
It means that the GRANDPA validators set has crafted the duplicate justification + and it has been submitted to the target chain. + Normally (if majority of validators are honest and if you're running finality relay without large breaks) + this shall not happen and the metric will have `0` value. If relay operates properly, you should see that the `Rococo_to_BridgeHubWestend_Sync_best_source_at_target_block_number` tries to reach the `Rococo_to_BridgeHubWestend_Sync_best_source_block_number`. And the latter one always increases. diff --git a/relays/lib-substrate-relay/Cargo.toml b/relays/lib-substrate-relay/Cargo.toml index c064f11b2..76be5a92a 100644 --- a/relays/lib-substrate-relay/Cargo.toml +++ b/relays/lib-substrate-relay/Cargo.toml @@ -4,6 +4,7 @@ version = "0.1.0" authors = ["Parity Technologies "] edition = "2021" license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +publish = false [lints] workspace = true diff --git a/relays/messages/Cargo.toml b/relays/messages/Cargo.toml index 1233c8af7..b3f0a9fd1 100644 --- a/relays/messages/Cargo.toml +++ b/relays/messages/Cargo.toml @@ -4,6 +4,7 @@ version = "0.1.0" authors = ["Parity Technologies "] edition = "2021" license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +publish = false [lints] workspace = true diff --git a/relays/parachains/Cargo.toml b/relays/parachains/Cargo.toml index 05a995b85..f37e636a9 100644 --- a/relays/parachains/Cargo.toml +++ b/relays/parachains/Cargo.toml @@ -4,6 +4,7 @@ version = "0.1.0" authors = ["Parity Technologies "] edition = "2018" license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +publish = false [lints] workspace = true diff --git a/relays/parachains/README.md b/relays/parachains/README.md index bacd28594..9043b0b0a 100644 --- a/relays/parachains/README.md +++ b/relays/parachains/README.md @@ -1,7 +1,7 @@ # Parachains Finality Relay The parachains finality relay works with two chains - source relay chain and target chain (which may be standalone -chain, relay 
chain or a parachain). The source chain must have the +chain, relay chain or a parachain). The source chain must have the [`paras` pallet](https://github.com/paritytech/polkadot/tree/master/runtime/parachains/src/paras) deployed at its runtime. The target chain must have the [bridge parachains pallet](../../modules/parachains/) deployed at its runtime. @@ -39,11 +39,12 @@ Rococo (source relay chain) to BridgeHubWestend (target chain) parachains finali change chain names. So the metrics are: - `Rococo_to_BridgeHubWestend_Parachains_best_parachain_block_number_at_source` - returns best known parachain block - number, registered in the `paras` pallet at the source relay chain (Rococo in our example); + number, registered in the `paras` pallet at the source relay chain (Rococo in our example); - `Rococo_to_BridgeHubWestend_Parachains_best_parachain_block_number_at_target` - returns best known parachain block - number, registered in the bridge parachains pallet at the target chain (BridgeHubWestend in our example). + number, registered in the bridge parachains pallet at the target chain (BridgeHubWestend in our example). -If relay operates properly, you should see that the `Rococo_to_BridgeHubWestend_Parachains_best_parachain_block_number_at_target` -tries to reach the `Rococo_to_BridgeHubWestend_Parachains_best_parachain_block_number_at_source`. And the latter one -always increases. +If relay operates properly, you should see that +the `Rococo_to_BridgeHubWestend_Parachains_best_parachain_block_number_at_target` tries to reach +the `Rococo_to_BridgeHubWestend_Parachains_best_parachain_block_number_at_source`. +And the latter one always increases. 
diff --git a/relays/utils/Cargo.toml b/relays/utils/Cargo.toml index d1eeba2b8..bc894b34e 100644 --- a/relays/utils/Cargo.toml +++ b/relays/utils/Cargo.toml @@ -4,6 +4,7 @@ version = "0.1.0" authors = ["Parity Technologies "] edition = "2021" license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +publish = false [lints] workspace = true -- GitLab From 35474455e9a437a51d729e7bcb77faa90f25d49c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 01:32:23 +0000 Subject: [PATCH 18/39] Bump serde_json from 1.0.114 to 1.0.115 Bumps [serde_json](https://github.com/serde-rs/json) from 1.0.114 to 1.0.115. - [Release notes](https://github.com/serde-rs/json/releases) - [Commits](https://github.com/serde-rs/json/compare/v1.0.114...v1.0.115) --- updated-dependencies: - dependency-name: serde_json dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f9a76ae70..2f68b0f4e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7690,9 +7690,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.114" +version = "1.0.115" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" +checksum = "12dc5c46daa8e9fdf4f5e71b6cf9a53f2487da0e86e55808e2d35539666497dd" dependencies = [ "indexmap 2.2.2", "itoa", diff --git a/Cargo.toml b/Cargo.toml index 0bd3a2898..d79967875 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -91,5 +91,5 @@ complexity = { level = "deny", priority = 1 } log = { version = "0.4.20", default-features = false } quote = { version = "1.0.33" } serde = { version = "1.0.197", default-features = false } -serde_json = { version = "1.0.114", default-features = false } +serde_json = { version = "1.0.115", default-features = false } 
thiserror = { version = "1.0.58" } -- GitLab From 051d6ed644651cba3b5031c2419fedd47c3a5091 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 01:30:42 +0000 Subject: [PATCH 19/39] Bump scale-info from 2.11.0 to 2.11.1 Bumps [scale-info](https://github.com/paritytech/scale-info) from 2.11.0 to 2.11.1. - [Release notes](https://github.com/paritytech/scale-info/releases) - [Changelog](https://github.com/paritytech/scale-info/blob/master/CHANGELOG.md) - [Commits](https://github.com/paritytech/scale-info/compare/v2.11.0...v2.11.1) --- updated-dependencies: - dependency-name: scale-info dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- Cargo.lock | 8 ++++---- bin/runtime-common/Cargo.toml | 2 +- chains/chain-asset-hub-rococo/Cargo.toml | 2 +- chains/chain-asset-hub-westend/Cargo.toml | 2 +- chains/chain-polkadot-bulletin/Cargo.toml | 2 +- modules/beefy/Cargo.toml | 2 +- modules/grandpa/Cargo.toml | 2 +- modules/messages/Cargo.toml | 2 +- modules/parachains/Cargo.toml | 2 +- modules/relayers/Cargo.toml | 2 +- modules/xcm-bridge-hub-router/Cargo.toml | 2 +- modules/xcm-bridge-hub/Cargo.toml | 2 +- primitives/beefy/Cargo.toml | 2 +- primitives/header-chain/Cargo.toml | 2 +- primitives/messages/Cargo.toml | 2 +- primitives/parachains/Cargo.toml | 2 +- primitives/polkadot-core/Cargo.toml | 2 +- primitives/relayers/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- primitives/xcm-bridge-hub-router/Cargo.toml | 2 +- relay-clients/client-bridge-hub-kusama/Cargo.toml | 2 +- relay-clients/client-bridge-hub-polkadot/Cargo.toml | 2 +- relay-clients/client-bridge-hub-rococo/Cargo.toml | 2 +- relay-clients/client-bridge-hub-westend/Cargo.toml | 2 +- relay-clients/client-kusama/Cargo.toml | 2 +- relay-clients/client-polkadot-bulletin/Cargo.toml | 2 +- relay-clients/client-polkadot/Cargo.toml | 2 +- relay-clients/client-rococo/Cargo.toml | 2 +- 
relay-clients/client-westend/Cargo.toml | 2 +- relays/client-substrate/Cargo.toml | 2 +- 30 files changed, 33 insertions(+), 33 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2f68b0f4e..8ac427ba0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7473,9 +7473,9 @@ dependencies = [ [[package]] name = "scale-info" -version = "2.11.0" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ef2175c2907e7c8bc0a9c3f86aeb5ec1f3b275300ad58a44d0c3ae379a5e52e" +checksum = "788745a868b0e751750388f4e6546eb921ef714a4317fa6954f7cde114eb2eb7" dependencies = [ "bitvec", "cfg-if 1.0.0", @@ -7487,9 +7487,9 @@ dependencies = [ [[package]] name = "scale-info-derive" -version = "2.10.0" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abf2c68b89cafb3b8d918dd07b42be0da66ff202cf1155c5739a4e0c1ea0dc19" +checksum = "7dc2f4e8bc344b9fc3d5f74f72c2e55bfc38d28dc2ebc69c194a3df424e4d9ac" dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2 1.0.76", diff --git a/bin/runtime-common/Cargo.toml b/bin/runtime-common/Cargo.toml index e3c05d1be..d71e9f6ba 100644 --- a/bin/runtime-common/Cargo.toml +++ b/bin/runtime-common/Cargo.toml @@ -14,7 +14,7 @@ workspace = true codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["derive"] } hash-db = { version = "0.16.0", default-features = false } log = { workspace = true } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } static_assertions = { version = "1.1", optional = true } # Bridge dependencies diff --git a/chains/chain-asset-hub-rococo/Cargo.toml b/chains/chain-asset-hub-rococo/Cargo.toml index 336e2b4d4..ae1f05e0e 100644 --- a/chains/chain-asset-hub-rococo/Cargo.toml +++ b/chains/chain-asset-hub-rococo/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] codec = { package = 
"parity-scale-codec", version = "3.1.5", default-features = false } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } # Substrate Dependencies frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } diff --git a/chains/chain-asset-hub-westend/Cargo.toml b/chains/chain-asset-hub-westend/Cargo.toml index 99b6c26e2..14c049f13 100644 --- a/chains/chain-asset-hub-westend/Cargo.toml +++ b/chains/chain-asset-hub-westend/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } # Substrate Dependencies frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } diff --git a/chains/chain-polkadot-bulletin/Cargo.toml b/chains/chain-polkadot-bulletin/Cargo.toml index 1e2bbaf6b..121f0c57c 100644 --- a/chains/chain-polkadot-bulletin/Cargo.toml +++ b/chains/chain-polkadot-bulletin/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["derive"] } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } # Bridge Dependencies diff --git a/modules/beefy/Cargo.toml b/modules/beefy/Cargo.toml index 53c8006f8..ab3447f65 100644 --- a/modules/beefy/Cargo.toml +++ b/modules/beefy/Cargo.toml @@ -13,7 +13,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } log = { workspace = true } -scale-info = { version = 
"2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } serde = { optional = true, workspace = true } # Bridge Dependencies diff --git a/modules/grandpa/Cargo.toml b/modules/grandpa/Cargo.toml index eb7ad6a27..2388767f9 100644 --- a/modules/grandpa/Cargo.toml +++ b/modules/grandpa/Cargo.toml @@ -15,7 +15,7 @@ workspace = true codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } finality-grandpa = { version = "0.16.2", default-features = false } log = { workspace = true } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } # Bridge Dependencies diff --git a/modules/messages/Cargo.toml b/modules/messages/Cargo.toml index 55b48cc03..ebe7db408 100644 --- a/modules/messages/Cargo.toml +++ b/modules/messages/Cargo.toml @@ -13,7 +13,7 @@ workspace = true codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } log = { workspace = true } num-traits = { version = "0.2", default-features = false } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } # Bridge dependencies diff --git a/modules/parachains/Cargo.toml b/modules/parachains/Cargo.toml index 2b8445b79..068e9a907 100644 --- a/modules/parachains/Cargo.toml +++ b/modules/parachains/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } log = { workspace = true } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } # Bridge Dependencies diff --git a/modules/relayers/Cargo.toml b/modules/relayers/Cargo.toml index 
972b4c33a..9073b9bbe 100644 --- a/modules/relayers/Cargo.toml +++ b/modules/relayers/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } log = { workspace = true } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } # Bridge dependencies diff --git a/modules/xcm-bridge-hub-router/Cargo.toml b/modules/xcm-bridge-hub-router/Cargo.toml index 926b704bd..89c02f706 100644 --- a/modules/xcm-bridge-hub-router/Cargo.toml +++ b/modules/xcm-bridge-hub-router/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } log = { workspace = true } -scale-info = { version = "2.11.0", default-features = false, features = ["bit-vec", "derive", "serde"] } +scale-info = { version = "2.11.1", default-features = false, features = ["bit-vec", "derive", "serde"] } # Bridge dependencies diff --git a/modules/xcm-bridge-hub/Cargo.toml b/modules/xcm-bridge-hub/Cargo.toml index 1200af483..8654c65f6 100644 --- a/modules/xcm-bridge-hub/Cargo.toml +++ b/modules/xcm-bridge-hub/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } log = { workspace = true } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } # Bridge Dependencies bp-messages = { path = "../../primitives/messages", default-features = false } diff --git a/primitives/beefy/Cargo.toml b/primitives/beefy/Cargo.toml index 0a282090f..eef3dcf96 100644 --- a/primitives/beefy/Cargo.toml +++ b/primitives/beefy/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", 
default-features = false, features = ["bit-vec", "derive"] } -scale-info = { version = "2.11.0", default-features = false, features = ["bit-vec", "derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["bit-vec", "derive"] } serde = { default-features = false, features = ["alloc", "derive"], workspace = true } # Bridge Dependencies diff --git a/primitives/header-chain/Cargo.toml b/primitives/header-chain/Cargo.toml index 6e9178f3c..8dc9a8ae7 100644 --- a/primitives/header-chain/Cargo.toml +++ b/primitives/header-chain/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } finality-grandpa = { version = "0.16.2", default-features = false } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } serde = { features = ["alloc", "derive"], workspace = true } # Bridge dependencies diff --git a/primitives/messages/Cargo.toml b/primitives/messages/Cargo.toml index 282ef93d3..93b0efa27 100644 --- a/primitives/messages/Cargo.toml +++ b/primitives/messages/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["bit-vec", "derive"] } -scale-info = { version = "2.11.0", default-features = false, features = ["bit-vec", "derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["bit-vec", "derive"] } serde = { features = ["alloc", "derive"], workspace = true } # Bridge dependencies diff --git a/primitives/parachains/Cargo.toml b/primitives/parachains/Cargo.toml index 3e148d528..610b7b336 100644 --- a/primitives/parachains/Cargo.toml +++ b/primitives/parachains/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features 
= ["derive"] } impl-trait-for-tuples = "0.2" -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } # Bridge dependencies diff --git a/primitives/polkadot-core/Cargo.toml b/primitives/polkadot-core/Cargo.toml index 4851ce14c..6c20d64da 100644 --- a/primitives/polkadot-core/Cargo.toml +++ b/primitives/polkadot-core/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["derive"] } parity-util-mem = { version = "0.12.0", optional = true } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } serde = { optional = true, features = ["derive"], workspace = true, default-features = true } # Bridge Dependencies diff --git a/primitives/relayers/Cargo.toml b/primitives/relayers/Cargo.toml index 3ee433974..441d24060 100644 --- a/primitives/relayers/Cargo.toml +++ b/primitives/relayers/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["bit-vec", "derive"] } -scale-info = { version = "2.11.0", default-features = false, features = ["bit-vec", "derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["bit-vec", "derive"] } # Bridge Dependencies diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index b6ed0bc06..c2b2a794b 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -15,7 +15,7 @@ hash-db = { version = "0.16.0", default-features = false } impl-trait-for-tuples = "0.2.2" log = { workspace = true } num-traits = { version = "0.2", default-features = false } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = 
{ version = "2.11.1", default-features = false, features = ["derive"] } serde = { features = ["alloc", "derive"], workspace = true } # Substrate Dependencies diff --git a/primitives/xcm-bridge-hub-router/Cargo.toml b/primitives/xcm-bridge-hub-router/Cargo.toml index c7bae8443..bc0ff5c60 100644 --- a/primitives/xcm-bridge-hub-router/Cargo.toml +++ b/primitives/xcm-bridge-hub-router/Cargo.toml @@ -11,7 +11,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["bit-vec", "derive"] } -scale-info = { version = "2.11.0", default-features = false, features = ["bit-vec", "derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["bit-vec", "derive"] } # Substrate Dependencies sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } diff --git a/relay-clients/client-bridge-hub-kusama/Cargo.toml b/relay-clients/client-bridge-hub-kusama/Cargo.toml index 6ce688e9f..2fc45b5fb 100644 --- a/relay-clients/client-bridge-hub-kusama/Cargo.toml +++ b/relay-clients/client-bridge-hub-kusama/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } #relay-substrate-client = { path = "../client-substrate" } subxt = { version = "0.32.1", default-features = false, features = ["native"] } diff --git a/relay-clients/client-bridge-hub-polkadot/Cargo.toml b/relay-clients/client-bridge-hub-polkadot/Cargo.toml index 1c49636ad..ea546cdd4 100644 --- a/relay-clients/client-bridge-hub-polkadot/Cargo.toml +++ b/relay-clients/client-bridge-hub-polkadot/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = 
["derive"] } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-bridge-hub-rococo/Cargo.toml b/relay-clients/client-bridge-hub-rococo/Cargo.toml index 246c7ed1d..dba2aecf0 100644 --- a/relay-clients/client-bridge-hub-rococo/Cargo.toml +++ b/relay-clients/client-bridge-hub-rococo/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-bridge-hub-westend/Cargo.toml b/relay-clients/client-bridge-hub-westend/Cargo.toml index 7f5f01910..410e096be 100644 --- a/relay-clients/client-bridge-hub-westend/Cargo.toml +++ b/relay-clients/client-bridge-hub-westend/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-kusama/Cargo.toml b/relay-clients/client-kusama/Cargo.toml index 95b3318f5..a1dbb36e3 100644 --- a/relay-clients/client-kusama/Cargo.toml +++ b/relay-clients/client-kusama/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { 
version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-polkadot-bulletin/Cargo.toml b/relay-clients/client-polkadot-bulletin/Cargo.toml index c563c145a..8b3d42050 100644 --- a/relay-clients/client-polkadot-bulletin/Cargo.toml +++ b/relay-clients/client-polkadot-bulletin/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-polkadot/Cargo.toml b/relay-clients/client-polkadot/Cargo.toml index b66df4c84..440db33c9 100644 --- a/relay-clients/client-polkadot/Cargo.toml +++ b/relay-clients/client-polkadot/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-rococo/Cargo.toml b/relay-clients/client-rococo/Cargo.toml index 7e6cbe358..ecdf33fdd 100644 --- a/relay-clients/client-rococo/Cargo.toml +++ b/relay-clients/client-rococo/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } 
+scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-westend/Cargo.toml b/relay-clients/client-westend/Cargo.toml index 1f148a1c4..0f7188cc3 100644 --- a/relay-clients/client-westend/Cargo.toml +++ b/relay-clients/client-westend/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } -scale-info = { version = "2.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relays/client-substrate/Cargo.toml b/relays/client-substrate/Cargo.toml index 3049d8350..8c02c82b5 100644 --- a/relays/client-substrate/Cargo.toml +++ b/relays/client-substrate/Cargo.toml @@ -18,7 +18,7 @@ jsonrpsee = { version = "0.17", features = ["macros", "ws-client"] } log = { workspace = true } num-traits = "0.2" rand = "0.8" -scale-info = { version = "2.11.0", features = ["derive"] } +scale-info = { version = "2.11.1", features = ["derive"] } tokio = { version = "1.36", features = ["rt-multi-thread"] } thiserror = { workspace = true } -- GitLab From f7f983c4f8f5b2035b13c7b08d7919c54c636d41 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 07:31:36 +0000 Subject: [PATCH 20/39] Bump tokio from 1.36.0 to 1.37.0 Bumps [tokio](https://github.com/tokio-rs/tokio) from 1.36.0 to 1.37.0. - [Release notes](https://github.com/tokio-rs/tokio/releases) - [Commits](https://github.com/tokio-rs/tokio/compare/tokio-1.36.0...tokio-1.37.0) --- updated-dependencies: - dependency-name: tokio dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- Cargo.lock | 4 ++-- relays/client-substrate/Cargo.toml | 2 +- relays/utils/Cargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8ac427ba0..e04dc2111 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9475,9 +9475,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.36.0" +version = "1.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931" +checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787" dependencies = [ "backtrace", "bytes", diff --git a/relays/client-substrate/Cargo.toml b/relays/client-substrate/Cargo.toml index 8c02c82b5..51e3edefa 100644 --- a/relays/client-substrate/Cargo.toml +++ b/relays/client-substrate/Cargo.toml @@ -19,7 +19,7 @@ log = { workspace = true } num-traits = "0.2" rand = "0.8" scale-info = { version = "2.11.1", features = ["derive"] } -tokio = { version = "1.36", features = ["rt-multi-thread"] } +tokio = { version = "1.37", features = ["rt-multi-thread"] } thiserror = { workspace = true } # Bridge dependencies diff --git a/relays/utils/Cargo.toml b/relays/utils/Cargo.toml index bc894b34e..13fda4773 100644 --- a/relays/utils/Cargo.toml +++ b/relays/utils/Cargo.toml @@ -24,7 +24,7 @@ num-traits = "0.2" serde_json = { workspace = true, default-features = true } sysinfo = "0.30" time = { version = "0.3", features = ["formatting", "local-offset", "std"] } -tokio = { version = "1.36", features = ["rt"] } +tokio = { version = "1.37", features = ["rt"] } thiserror = { workspace = true } # Bridge dependencies -- GitLab From 34817d81593ef5369e45a2e66abf11373ed5f72d Mon Sep 17 00:00:00 2001 From: Svyatoslav Nikolsky Date: Mon, 1 Apr 2024 12:30:39 +0300 Subject: [PATCH 21/39] Some relayer improvments (#2902) * added CLI arguments: full WS URI + separate for WS path URI component 
+ additional log * URI -> URL? * added TODO * fmt --- relays/client-substrate/src/client.rs | 22 ++++++++++++++----- relays/client-substrate/src/lib.rs | 7 ++++++ .../src/cli/chain_schema.rs | 15 +++++++++++-- .../src/cli/relay_headers_and_messages/mod.rs | 8 +++++++ .../src/parachains/source.rs | 7 ++++++ .../src/cli/relay_headers_and_messages.rs | 8 +++++++ 6 files changed, 59 insertions(+), 8 deletions(-) diff --git a/relays/client-substrate/src/client.rs b/relays/client-substrate/src/client.rs index 676fea487..afbda8599 100644 --- a/relays/client-substrate/src/client.rs +++ b/relays/client-substrate/src/client.rs @@ -264,12 +264,22 @@ impl Client { params: &ConnectionParams, ) -> Result<(Arc, Arc)> { let tokio = tokio::runtime::Runtime::new()?; - let uri = format!( - "{}://{}:{}", - if params.secure { "wss" } else { "ws" }, - params.host, - params.port, - ); + + let uri = match params.uri { + Some(ref uri) => uri.clone(), + None => { + format!( + "{}://{}:{}{}", + if params.secure { "wss" } else { "ws" }, + params.host, + params.port, + match params.path { + Some(ref path) => format!("/{}", path), + None => String::new(), + }, + ) + }, + }; log::info!(target: "bridge", "Connecting to {} node at {}", C::NAME, uri); let client = tokio diff --git a/relays/client-substrate/src/lib.rs b/relays/client-substrate/src/lib.rs index 6c62b8e1c..d5b8d4dcc 100644 --- a/relays/client-substrate/src/lib.rs +++ b/relays/client-substrate/src/lib.rs @@ -57,10 +57,15 @@ pub use bp_runtime::{ /// Substrate-over-websocket connection params. #[derive(Debug, Clone)] pub struct ConnectionParams { + /// Websocket endpoint URL. Overrides all other URL components (`host`, `port`, `path` and + /// `secure`). + pub uri: Option, /// Websocket server host name. pub host: String, /// Websocket server TCP port. pub port: u16, + /// Websocket endpoint path at server. + pub path: Option, /// Use secure websocket connection. 
pub secure: bool, /// Defined chain runtime version @@ -70,8 +75,10 @@ pub struct ConnectionParams { impl Default for ConnectionParams { fn default() -> Self { ConnectionParams { + uri: None, host: "localhost".into(), port: 9944, + path: None, secure: false, chain_runtime_version: ChainRuntimeVersion::Auto, } diff --git a/relays/lib-substrate-relay/src/cli/chain_schema.rs b/relays/lib-substrate-relay/src/cli/chain_schema.rs index c5b802173..6246bdbf0 100644 --- a/relays/lib-substrate-relay/src/cli/chain_schema.rs +++ b/relays/lib-substrate-relay/src/cli/chain_schema.rs @@ -92,15 +92,24 @@ macro_rules! declare_chain_runtime_version_params_cli_schema { macro_rules! declare_chain_connection_params_cli_schema { ($chain:ident, $chain_prefix:ident) => { bp_runtime::paste::item! { + // TODO: https://github.com/paritytech/parity-bridges-common/issues/2909 + // remove all obsolete arguments (separate URI components) + #[doc = $chain " connection params."] #[derive(StructOpt, Debug, PartialEq, Eq, Clone)] pub struct [<$chain ConnectionParams>] { - #[doc = "Connect to " $chain " node at given host."] + #[doc = "WS endpoint of " $chain ": full URI. Overrides all other connection string components (host, port, path, secure)."] + #[structopt(long)] + pub [<$chain_prefix _uri>]: Option, + #[doc = "WS endpoint of " $chain ": host component."] #[structopt(long, default_value = "127.0.0.1")] pub [<$chain_prefix _host>]: String, - #[doc = "Connect to " $chain " node websocket server at given port."] + #[doc = "WS endpoint of " $chain ": port component."] #[structopt(long, default_value = "9944")] pub [<$chain_prefix _port>]: u16, + #[doc = "WS endpoint of " $chain ": path component."] + #[structopt(long)] + pub [<$chain_prefix _path>]: Option, #[doc = "Use secure websocket connection."] #[structopt(long)] pub [<$chain_prefix _secure>]: bool, @@ -119,8 +128,10 @@ macro_rules! 
declare_chain_connection_params_cli_schema { .[<$chain_prefix _runtime_version>] .into_runtime_version(Chain::RUNTIME_VERSION)?; Ok(relay_substrate_client::Client::new(relay_substrate_client::ConnectionParams { + uri: self.[<$chain_prefix _uri>], host: self.[<$chain_prefix _host>], port: self.[<$chain_prefix _port>], + path: self.[<$chain_prefix _path>], secure: self.[<$chain_prefix _secure>], chain_runtime_version, }) diff --git a/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/mod.rs b/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/mod.rs index d404f714b..27e9f1c21 100644 --- a/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/mod.rs +++ b/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/mod.rs @@ -425,8 +425,10 @@ mod tests { }, }, left: BridgeHubKusamaConnectionParams { + bridge_hub_kusama_uri: None, bridge_hub_kusama_host: "bridge-hub-kusama-node-collator1".into(), bridge_hub_kusama_port: 9944, + bridge_hub_kusama_path: None, bridge_hub_kusama_secure: false, bridge_hub_kusama_runtime_version: BridgeHubKusamaRuntimeVersionParams { bridge_hub_kusama_version_mode: RuntimeVersionType::Bundle, @@ -442,8 +444,10 @@ mod tests { bridge_hub_kusama_transactions_mortality: Some(64), }, left_relay: KusamaConnectionParams { + kusama_uri: None, kusama_host: "kusama-alice".into(), kusama_port: 9944, + kusama_path: None, kusama_secure: false, kusama_runtime_version: KusamaRuntimeVersionParams { kusama_version_mode: RuntimeVersionType::Bundle, @@ -452,8 +456,10 @@ mod tests { }, }, right: BridgeHubPolkadotConnectionParams { + bridge_hub_polkadot_uri: None, bridge_hub_polkadot_host: "bridge-hub-polkadot-collator1".into(), bridge_hub_polkadot_port: 9944, + bridge_hub_polkadot_path: None, bridge_hub_polkadot_secure: false, bridge_hub_polkadot_runtime_version: BridgeHubPolkadotRuntimeVersionParams { bridge_hub_polkadot_version_mode: RuntimeVersionType::Bundle, @@ -469,8 +475,10 @@ mod tests { 
bridge_hub_polkadot_transactions_mortality: Some(64), }, right_relay: PolkadotConnectionParams { + polkadot_uri: None, polkadot_host: "polkadot-alice".into(), polkadot_port: 9944, + polkadot_path: None, polkadot_secure: false, polkadot_runtime_version: PolkadotRuntimeVersionParams { polkadot_version_mode: RuntimeVersionType::Bundle, diff --git a/relays/lib-substrate-relay/src/parachains/source.rs b/relays/lib-substrate-relay/src/parachains/source.rs index 32d70cf42..4cc512b9d 100644 --- a/relays/lib-substrate-relay/src/parachains/source.rs +++ b/relays/lib-substrate-relay/src/parachains/source.rs @@ -106,6 +106,13 @@ where // parachain head - we simply return `Unavailable` let best_block_number = self.client.best_finalized_header_number().await?; if is_ancient_block(at_block.number(), best_block_number) { + log::trace!( + target: "bridge", + "{} block {:?} is ancient. Cannot prove the {} header there", + P::SourceRelayChain::NAME, + at_block, + P::SourceParachain::NAME, + ); return Ok(AvailableHeader::Unavailable) } diff --git a/substrate-relay/src/cli/relay_headers_and_messages.rs b/substrate-relay/src/cli/relay_headers_and_messages.rs index 229661748..dfd5e8caf 100644 --- a/substrate-relay/src/cli/relay_headers_and_messages.rs +++ b/substrate-relay/src/cli/relay_headers_and_messages.rs @@ -316,8 +316,10 @@ mod tests { }, }, left_relay: KusamaConnectionParams { + kusama_uri: None, kusama_host: "kusama-alice".into(), kusama_port: 9944, + kusama_path: None, kusama_secure: false, kusama_runtime_version: KusamaRuntimeVersionParams { kusama_version_mode: RuntimeVersionType::Bundle, @@ -326,8 +328,10 @@ mod tests { }, }, left: BridgeHubKusamaConnectionParams { + bridge_hub_kusama_uri: None, bridge_hub_kusama_host: "bridge-hub-kusama-node-collator1".into(), bridge_hub_kusama_port: 9944, + bridge_hub_kusama_path: None, bridge_hub_kusama_secure: false, bridge_hub_kusama_runtime_version: BridgeHubKusamaRuntimeVersionParams { bridge_hub_kusama_version_mode: 
RuntimeVersionType::Bundle, @@ -343,8 +347,10 @@ mod tests { bridge_hub_kusama_transactions_mortality: Some(64), }, right: BridgeHubPolkadotConnectionParams { + bridge_hub_polkadot_uri: None, bridge_hub_polkadot_host: "bridge-hub-polkadot-collator1".into(), bridge_hub_polkadot_port: 9944, + bridge_hub_polkadot_path: None, bridge_hub_polkadot_secure: false, bridge_hub_polkadot_runtime_version: BridgeHubPolkadotRuntimeVersionParams { @@ -361,8 +367,10 @@ mod tests { bridge_hub_polkadot_transactions_mortality: Some(64), }, right_relay: PolkadotConnectionParams { + polkadot_uri: None, polkadot_host: "polkadot-alice".into(), polkadot_port: 9944, + polkadot_path: None, polkadot_secure: false, polkadot_runtime_version: PolkadotRuntimeVersionParams { polkadot_version_mode: RuntimeVersionType::Bundle, -- GitLab From 8c4c99d11b3d4d24ccf6a1237d164577e53a3076 Mon Sep 17 00:00:00 2001 From: Serban Iorga Date: Tue, 2 Apr 2024 00:00:56 +0300 Subject: [PATCH 22/39] Address migration comments (#2910) * Use workspace.[authors|edition] * Add repository.workspace = true * Upgrade dependencies to the polkadot-sdk versions * Upgrade async-std version * Update jsonrpsee version * cargo update * use ci-unified image --- .gitlab-ci.yml | 2 +- Cargo.lock | 2458 +++++++++-------- Dockerfile | 2 +- README.md | 6 +- bin/runtime-common/Cargo.toml | 2 +- chains/chain-asset-hub-rococo/Cargo.toml | 3 +- chains/chain-asset-hub-westend/Cargo.toml | 3 +- chains/chain-bridge-hub-cumulus/Cargo.toml | 1 + chains/chain-bridge-hub-kusama/Cargo.toml | 1 + chains/chain-bridge-hub-polkadot/Cargo.toml | 1 + chains/chain-bridge-hub-rococo/Cargo.toml | 1 + chains/chain-bridge-hub-westend/Cargo.toml | 1 + chains/chain-kusama/Cargo.toml | 1 + chains/chain-polkadot-bulletin/Cargo.toml | 3 +- chains/chain-polkadot/Cargo.toml | 1 + chains/chain-rococo/Cargo.toml | 1 + chains/chain-westend/Cargo.toml | 1 + modules/beefy/Cargo.toml | 9 +- modules/grandpa/Cargo.toml | 3 +- modules/messages/Cargo.toml | 3 +- 
modules/parachains/Cargo.toml | 3 +- modules/relayers/Cargo.toml | 3 +- modules/xcm-bridge-hub-router/Cargo.toml | 3 +- modules/xcm-bridge-hub/Cargo.toml | 3 +- primitives/beefy/Cargo.toml | 7 +- primitives/header-chain/Cargo.toml | 3 +- primitives/messages/Cargo.toml | 3 +- primitives/parachains/Cargo.toml | 3 +- primitives/polkadot-core/Cargo.toml | 3 +- primitives/relayers/Cargo.toml | 3 +- primitives/runtime/Cargo.toml | 3 +- primitives/test-utils/Cargo.toml | 3 +- primitives/xcm-bridge-hub-router/Cargo.toml | 3 +- primitives/xcm-bridge-hub/Cargo.toml | 1 + .../client-bridge-hub-kusama/Cargo.toml | 7 +- .../client-bridge-hub-polkadot/Cargo.toml | 7 +- .../client-bridge-hub-rococo/Cargo.toml | 7 +- .../client-bridge-hub-westend/Cargo.toml | 7 +- relay-clients/client-kusama/Cargo.toml | 7 +- .../client-polkadot-bulletin/Cargo.toml | 7 +- relay-clients/client-polkadot/Cargo.toml | 7 +- relay-clients/client-rococo/Cargo.toml | 7 +- relay-clients/client-westend/Cargo.toml | 7 +- relays/client-substrate/Cargo.toml | 13 +- relays/client-substrate/src/error.rs | 2 +- relays/client-substrate/src/rpc.rs | 14 +- relays/equivocation/Cargo.toml | 7 +- relays/finality/Cargo.toml | 7 +- relays/lib-substrate-relay/Cargo.toml | 7 +- relays/messages/Cargo.toml | 7 +- relays/messages/src/message_race_loop.rs | 2 +- relays/parachains/Cargo.toml | 9 +- relays/utils/Cargo.toml | 7 +- substrate-relay/Cargo.toml | 7 +- tools/runtime-codegen/Cargo.lock | 34 +- tools/runtime-codegen/Cargo.toml | 5 +- 56 files changed, 1444 insertions(+), 1287 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 892de6704..d6a2644b0 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -10,7 +10,7 @@ variables: GIT_DEPTH: 100 CARGO_INCREMENTAL: 0 ARCH: "x86_64" - CI_IMAGE: "paritytech/bridges-ci:production" + CI_IMAGE: "paritytech/ci-unified:latest" RUST_BACKTRACE: full BUILDAH_IMAGE: "quay.io/buildah/stable:v1.29" BUILDAH_COMMAND: "buildah --storage-driver overlay2" diff --git a/Cargo.lock 
b/Cargo.lock index e04dc2111..5050cc63f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -18,16 +18,16 @@ version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" dependencies = [ - "gimli", + "gimli 0.27.3", ] [[package]] name = "addr2line" -version = "0.20.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" dependencies = [ - "gimli", + "gimli 0.28.1", ] [[package]] @@ -36,15 +36,6 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" -[[package]] -name = "aead" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b613b8e1e3cf911a086f53f03bf286f52fd7a7258e4fa606f0ef220d39d8877" -dependencies = [ - "generic-array 0.14.7", -] - [[package]] name = "aead" version = "0.5.2" @@ -57,21 +48,9 @@ dependencies = [ [[package]] name = "aes" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e8b47f52ea9bae42228d07ec09eb676433d7c4ed1ebdf0f1d1c29ed446f1ab8" -dependencies = [ - "cfg-if 1.0.0", - "cipher 0.3.0", - "cpufeatures", - "opaque-debug 0.3.0", -] - -[[package]] -name = "aes" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac1f845298e95f983ff1944b728ae08b8cebab80d684f0a832ed0fc74dfa27e2" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" dependencies = [ "cfg-if 1.0.0", "cipher 0.4.4", @@ -80,51 +59,37 @@ dependencies = [ [[package]] name = "aes-gcm" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"df5f85a83a7d8b0442b6aa7b504b8212c1733da07b98aae43d4bc21b2cb3cdf6" -dependencies = [ - "aead 0.4.3", - "aes 0.7.5", - "cipher 0.3.0", - "ctr 0.8.0", - "ghash 0.4.4", - "subtle 2.4.1", -] - -[[package]] -name = "aes-gcm" -version = "0.10.2" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "209b47e8954a928e1d72e86eca7000ebb6655fe1436d33eefc2201cad027e237" +checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1" dependencies = [ - "aead 0.5.2", - "aes 0.8.3", + "aead", + "aes", "cipher 0.4.4", - "ctr 0.9.2", - "ghash 0.5.0", - "subtle 2.4.1", + "ctr", + "ghash", + "subtle 2.5.0", ] [[package]] name = "ahash" -version = "0.7.6" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.12", "once_cell", "version_check", ] [[package]] name = "ahash" -version = "0.8.7" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if 1.0.0", - "getrandom 0.2.10", + "getrandom 0.2.12", "once_cell", "version_check", "zerocopy", @@ -132,9 +97,9 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "1.0.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] @@ -171,9 +136,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.11" +version = "0.6.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6e2e1ebcb11de5c03c67de28a7df593d32191b44939c482e97702baaaa6ab6a5" +checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb" dependencies = [ "anstyle", "anstyle-parse", @@ -241,9 +206,9 @@ dependencies = [ "include_dir", "itertools", "proc-macro-error", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -426,7 +391,7 @@ checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" dependencies = [ "num-bigint", "num-traits", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -504,7 +469,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae3281bc6d0fd7e549af32b52511e1302185bd688fd3359fa36423346ff682ea" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -541,9 +506,9 @@ checksum = "f52f63c5c1316a16a4b35eaac8b76a98248961a533f061684cb2a7cb0eafb6c6" [[package]] name = "array-bytes" -version = "6.1.0" +version = "6.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b1c5a481ec30a5abd8dfbd94ab5cf1bb4e9a66be7f1b3b322f2f1170c200fd" +checksum = "6f840fb7195bcfc5e17ea40c26e5ce6d5b9ce5d584466e17703209657e459ae0" [[package]] name = "arrayref" @@ -588,7 +553,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "726535892e8eae7e70657b4c8ea93d26b8553afb1ce617caee529ef96d7dee6c" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", "synstructure", @@ -600,7 +565,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2777730b2039ac0f95f093556e61b6d26cebed5393ca6f152717777cec3a42ed" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -622,21 +587,34 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" dependencies = [ "concurrent-queue", - "event-listener", + "event-listener 2.5.3", + "futures-core", +] + +[[package]] +name = "async-channel" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28243a43d821d11341ab73c80bed182dc015c514b951616cf79bd4af39af0c3" +dependencies = [ + "concurrent-queue", + "event-listener 5.2.0", + "event-listener-strategy 0.5.1", "futures-core", + "pin-project-lite 0.2.14", ] [[package]] name = "async-executor" -version = "1.5.1" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fa3dc5f2a8564f07759c008b9109dc0d39de92a88d5588b8a5036d286383afb" +checksum = "10b3e585719c2358d2660232671ca8ca4ddb4be4ce8a1842d6c2dc8685303316" dependencies = [ - "async-lock", + "async-lock 3.3.0", "async-task", "concurrent-queue", - "fastrand 1.9.0", - "futures-lite", + "fastrand 2.0.2", + "futures-lite 2.3.0", "slab", ] @@ -646,24 +624,24 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "279cf904654eeebfa37ac9bb1598880884924aab82e290aa65c9e77a0e142e06" dependencies = [ - "async-lock", + "async-lock 2.8.0", "autocfg", "blocking", - "futures-lite", + "futures-lite 1.13.0", ] [[package]] name = "async-global-executor" -version = "2.3.1" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1b6f5d7df27bd294849f8eec66ecfc63d11814df7a4f5d74168a2394467b776" +checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" dependencies = [ - "async-channel", + "async-channel 2.2.0", "async-executor", - "async-io", - "async-lock", + "async-io 2.3.2", + "async-lock 3.3.0", "blocking", - "futures-lite", + "futures-lite 2.3.0", "once_cell", ] @@ -673,56 +651,102 @@ version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" dependencies = [ - "async-lock", + "async-lock 2.8.0", "autocfg", "cfg-if 1.0.0", "concurrent-queue", - "futures-lite", + "futures-lite 1.13.0", "log", "parking", - "polling", + "polling 2.8.0", "rustix 0.37.27", "slab", - "socket2 0.4.9", + "socket2 0.4.10", "waker-fn", ] +[[package]] +name = "async-io" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcccb0f599cfa2f8ace422d3555572f47424da5648a4382a9dd0310ff8210884" +dependencies = [ + "async-lock 3.3.0", + "cfg-if 1.0.0", + "concurrent-queue", + "futures-io", + "futures-lite 2.3.0", + "parking", + "polling 3.6.0", + "rustix 0.38.32", + "slab", + "tracing", + "windows-sys 0.52.0", +] + +[[package]] +name = "async-lock" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" +dependencies = [ + "event-listener 2.5.3", +] + [[package]] name = "async-lock" -version = "2.7.0" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa24f727524730b077666307f2734b4a1a1c57acb79193127dcc8914d5242dd7" +checksum = "d034b430882f8381900d3fe6f0aaa3ad94f2cb4ac519b429692a1bc2dda4ae7b" dependencies = [ - "event-listener", + "event-listener 4.0.3", + "event-listener-strategy 0.4.0", + "pin-project-lite 0.2.14", ] [[package]] name = "async-net" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4051e67316bc7eff608fe723df5d32ed639946adcd69e07df41fd42a7b411f1f" +checksum = "0434b1ed18ce1cf5769b8ac540e33f01fa9471058b5e89da9e06f3c882a8c12f" dependencies = [ - "async-io", - "autocfg", + "async-io 1.13.0", "blocking", - "futures-lite", + "futures-lite 1.13.0", ] [[package]] name = "async-process" -version = "1.7.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7a9d28b1d97e08915212e2e45310d47854eafa69600756fc735fb788f75199c9" +checksum = "ea6438ba0a08d81529c69b36700fa2f95837bfe3e776ab39cde9c14d9149da88" dependencies = [ - "async-io", - "async-lock", - "autocfg", + "async-io 1.13.0", + "async-lock 2.8.0", + "async-signal", "blocking", "cfg-if 1.0.0", - "event-listener", - "futures-lite", - "rustix 0.37.27", - "signal-hook", + "event-listener 3.1.0", + "futures-lite 1.13.0", + "rustix 0.38.32", + "windows-sys 0.48.0", +] + +[[package]] +name = "async-signal" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e47d90f65a225c4527103a8d747001fc56e375203592b25ad103e1ca13124c5" +dependencies = [ + "async-io 2.3.2", + "async-lock 2.8.0", + "atomic-waker", + "cfg-if 1.0.0", + "futures-core", + "futures-io", + "rustix 0.38.32", + "signal-hook-registry", + "slab", "windows-sys 0.48.0", ] @@ -733,21 +757,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" dependencies = [ "async-attributes", - "async-channel", + "async-channel 1.9.0", "async-global-executor", - "async-io", - "async-lock", + "async-io 1.13.0", + "async-lock 2.8.0", "crossbeam-utils", "futures-channel", "futures-core", "futures-io", - "futures-lite", + "futures-lite 1.13.0", "gloo-timers", "kv-log-macro", "log", "memchr", "once_cell", - "pin-project-lite 0.2.12", + "pin-project-lite 0.2.14", "pin-utils", "slab", "wasm-bindgen-futures", @@ -755,9 +779,9 @@ dependencies = [ [[package]] name = "async-task" -version = "4.4.0" +version = "4.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc7ab41815b3c653ccd2978ec3255c81349336702dfdf62ee6f7069b12a3aae" +checksum = "fbb36e985947064623dbd357f727af08ffd077f93d696782f3c56365fa2e2799" [[package]] name = "async-trait" @@ -765,22 +789,22 @@ version = "0.1.79" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a507401cad91ec6a857ed5513a2073c82a9b9048762b885bb98655b306964681" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] name = "asynchronous-codec" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06a0daa378f5fd10634e44b0a29b2a87b890657658e072a30d6f26e57ddee182" +checksum = "4057f2c32adbb2fc158e22fb38433c8e9bbf76b75a4732c7c0cbaf695fb65568" dependencies = [ "bytes", "futures-sink", "futures-util", "memchr", - "pin-project-lite 0.2.12", + "pin-project-lite 0.2.14", ] [[package]] @@ -791,9 +815,9 @@ checksum = "c59bdb34bc650a32731b31bd8f0829cc15d24a708ee31559e0bb34f2bc320cba" [[package]] name = "atomic-waker" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1181e1e0d1fce796a03db1ae795d67167da795f9cf4a39c37589e85ef57f26d3" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "atty" @@ -808,9 +832,9 @@ dependencies = [ [[package]] name = "autocfg" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80" [[package]] name = "backoff" @@ -818,23 +842,23 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b62ddb9cb1ec0a098ad4bbf9344d0713fa193ae1a80af55febcff2627b6a00c1" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.12", "instant", "rand", ] [[package]] name = "backtrace" -version = "0.3.68" +version = "0.3.71" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" +checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" dependencies = [ - "addr2line 0.20.0", + 
"addr2line 0.21.0", "cc", "cfg-if 1.0.0", "libc", "miniz_oxide", - "object 0.31.1", + "object 0.32.2", "rustc-demangle", ] @@ -855,7 +879,7 @@ dependencies = [ "rand_chacha", "rand_core 0.6.4", "ring 0.1.0", - "sha2 0.10.7", + "sha2 0.10.8", "sp-ark-bls12-381", "sp-ark-ed-on-bls12-381-bandersnatch", "zeroize", @@ -887,9 +911,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "base64" -version = "0.21.2" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "base64ct" @@ -909,7 +933,7 @@ dependencies = [ [[package]] name = "binary-merkle-tree" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "hash-db", "log", @@ -963,9 +987,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" [[package]] name = "bitvec" @@ -1013,13 +1037,13 @@ dependencies = [ [[package]] name = "blake2b_simd" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c2f0dc9a68c6317d884f97cc36cf5a3d20ba14ce404227df55e1af708ab04bc" +checksum = "23285ad32269793932e830392f2fe2f83e26488fd3ec778883a93c8323735780" dependencies = [ "arrayref", "arrayvec 0.7.4", - "constant_time_eq 0.2.6", + "constant_time_eq 0.3.0", ] [[package]] @@ -1042,17 +1066,18 @@ dependencies = 
[ [[package]] name = "blocking" -version = "1.3.1" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77231a1c8f801696fc0123ec6150ce92cffb8e164a02afb9c8ddee0e9b65ad65" +checksum = "6a37913e8dc4ddcc604f0c6d3bf2887c995153af3611de9e23c352b44c1b9118" dependencies = [ - "async-channel", - "async-lock", + "async-channel 2.2.0", + "async-lock 3.3.0", "async-task", - "atomic-waker", - "fastrand 1.9.0", - "futures-lite", - "log", + "fastrand 2.0.2", + "futures-io", + "futures-lite 2.3.0", + "piper", + "tracing", ] [[package]] @@ -1423,18 +1448,18 @@ checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" [[package]] name = "bs58" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5353f36341f7451062466f0b755b96ac3a9547e4d7f6b70d603fc721a7d7896" +checksum = "bf88ba1141d185c399bee5288d850d63b8369520c1eafc32a0430b5b6c287bf4" dependencies = [ "tinyvec", ] [[package]] name = "bumpalo" -version = "3.13.0" +version = "3.15.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" +checksum = "7ff69b9dd49fd426c69a0db9fc04dd934cdb6645ff000864d98f7e2af8830eaa" [[package]] name = "byte-slice-cast" @@ -1450,21 +1475,21 @@ checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" [[package]] name = "bytemuck" -version = "1.13.1" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea" +checksum = "5d6d68c57235a3a081186990eca2867354726650f42f7516ca50c28d6281fd15" [[package]] name = "byteorder" -version = "1.4.3" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" +checksum = 
"1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.4.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" [[package]] name = "c2-chacha" @@ -1484,18 +1509,19 @@ checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" [[package]] name = "cc" -version = "1.0.79" +version = "1.0.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5" dependencies = [ "jobserver", + "libc", ] [[package]] name = "cfg-expr" -version = "0.15.5" +version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03915af431787e6ffdcc74c645077518c6b6e01f80b761e0fbbfa288536311b3" +checksum = "fa50868b64a9a6fda9d593ce778849ea8715cd2a3d2cc17ffdb4a2f2f2f1961d" dependencies = [ "smallvec", ] @@ -1524,41 +1550,40 @@ dependencies = [ [[package]] name = "chacha20" -version = "0.8.2" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c80e5460aa66fe3b91d40bcbdab953a597b60053e34d684ac6903f863b680a6" +checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818" dependencies = [ "cfg-if 1.0.0", - "cipher 0.3.0", + "cipher 0.4.4", "cpufeatures", - "zeroize", ] [[package]] name = "chacha20poly1305" -version = "0.9.1" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a18446b09be63d457bbec447509e85f662f32952b035ce892290396bc0b0cff5" +checksum = "10cd79432192d1c0f4e1a0fef9527696cc039165d729fb41b3f4f4f354c2dc35" dependencies = [ - "aead 0.4.3", + "aead", "chacha20", - "cipher 0.3.0", + "cipher 0.4.4", "poly1305", "zeroize", ] [[package]] 
name = "chrono" -version = "0.4.31" +version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" +checksum = "8a0d04d43504c61aa6c7531f1871dd0d418d91130162063b789da00fd7057a5e" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "wasm-bindgen", - "windows-targets 0.48.1", + "windows-targets 0.52.4", ] [[package]] @@ -1570,15 +1595,6 @@ dependencies = [ "generic-array 0.14.7", ] -[[package]] -name = "cipher" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ee52072ec15386f770805afd189a01c8841be8696bed250fa2f13c4c0d6dfb7" -dependencies = [ - "generic-array 0.14.7", -] - [[package]] name = "cipher" version = "0.4.4" @@ -1587,6 +1603,7 @@ checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" dependencies = [ "crypto-common", "inout", + "zeroize", ] [[package]] @@ -1652,38 +1669,36 @@ checksum = "2382f75942f4b3be3690fe4f86365e9c853c1587d6ee58212cebf6e2a9ccd101" [[package]] name = "concurrent-queue" -version = "2.2.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62ec6771ecfa0762d24683ee5a32ad78487a3d3afdc0fb8cae19d2c5deb50b7c" +checksum = "d16048cd947b08fa32c24458a22f5dc5e835264f689f4f5653210c69fd107363" dependencies = [ "crossbeam-utils", ] [[package]] name = "const-oid" -version = "0.9.4" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "795bc6e66a8e340f075fcf6227e417a2dc976b92b91f3cdc778bb858778b6747" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "const-random" -version = "0.1.15" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368a7a772ead6ce7e1de82bfb04c485f3db8ec744f72925af5735e29a22cc18e" +checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" 
dependencies = [ "const-random-macro", - "proc-macro-hack", ] [[package]] name = "const-random-macro" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d7d6ab3c3a2282db210df5f02c4dab6e0a7057af0fb7ebd4070f30fe05c0ddb" +checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.12", "once_cell", - "proc-macro-hack", "tiny-keccak", ] @@ -1695,9 +1710,9 @@ checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" [[package]] name = "constant_time_eq" -version = "0.2.6" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21a53c0a4d288377e7415b53dcfc3c04da5cdc2cc95c8d5ac178b58f0b861ad6" +checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" [[package]] name = "constcat" @@ -1713,9 +1728,9 @@ checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" [[package]] name = "core-foundation" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", @@ -1723,9 +1738,9 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" [[package]] name = "core2" @@ -1747,9 +1762,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.9" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +checksum = 
"53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" dependencies = [ "libc", ] @@ -1775,7 +1790,7 @@ dependencies = [ "cranelift-codegen-shared", "cranelift-entity", "cranelift-isle", - "gimli", + "gimli 0.27.3", "hashbrown 0.13.2", "log", "regalloc2", @@ -1854,55 +1869,46 @@ dependencies = [ [[package]] name = "crc32fast" -version = "1.3.2" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" dependencies = [ "cfg-if 1.0.0", ] [[package]] name = "crossbeam-deque" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" dependencies = [ - "cfg-if 1.0.0", "crossbeam-epoch", "crossbeam-utils", ] [[package]] name = "crossbeam-epoch" -version = "0.9.15" +version = "0.9.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ - "autocfg", - "cfg-if 1.0.0", "crossbeam-utils", - "memoffset 0.9.0", - "scopeguard", ] [[package]] name = "crossbeam-queue" -version = "0.3.8" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" dependencies = [ - "cfg-if 1.0.0", "crossbeam-utils", ] [[package]] name = "crossbeam-utils" -version = "0.8.16" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" 
-dependencies = [ - "cfg-if 1.0.0", -] +checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" [[package]] name = "crunchy" @@ -1912,13 +1918,13 @@ checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" [[package]] name = "crypto-bigint" -version = "0.5.2" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4c2f4e1afd912bc40bfd6fed5d9dc1f288e0ba01bfcc835cc5bc3eb13efe15" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array 0.14.7", "rand_core 0.6.4", - "subtle 2.4.1", + "subtle 2.5.0", "zeroize", ] @@ -1950,16 +1956,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" dependencies = [ "generic-array 0.14.7", - "subtle 2.4.1", -] - -[[package]] -name = "ctr" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "049bb91fb4aaf0e3c7efa6cd5ef877dbbbd15b39dad06d9948de4ec8a75761ea" -dependencies = [ - "cipher 0.3.0", + "subtle 2.5.0", ] [[package]] @@ -1973,24 +1970,24 @@ dependencies = [ [[package]] name = "curl" -version = "0.4.44" +version = "0.4.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "509bd11746c7ac09ebd19f0b17782eae80aadee26237658a6b4808afb5c11a22" +checksum = "1e2161dd6eba090ff1594084e95fd67aeccf04382ffea77999ea94ed42ec67b6" dependencies = [ "curl-sys", "libc", "openssl-probe", "openssl-sys", "schannel", - "socket2 0.4.9", - "winapi", + "socket2 0.5.6", + "windows-sys 0.52.0", ] [[package]] name = "curl-sys" -version = "0.4.63+curl-8.1.2" +version = "0.4.72+curl-8.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aeb0fef7046022a1e2ad67a004978f0e3cacb9e3123dc62ce768f92197b771dc" +checksum = "29cbdc8314c447d11e8fd156dcdd031d9e02a7a976163e396b548c03153bc9ea" dependencies = [ "cc", "libc", @@ -1999,7 +1996,7 @@ 
dependencies = [ "openssl-sys", "pkg-config", "vcpkg", - "winapi", + "windows-sys 0.52.0", ] [[package]] @@ -2011,7 +2008,7 @@ dependencies = [ "byteorder", "digest 0.9.0", "rand_core 0.5.1", - "subtle 2.4.1", + "subtle 2.5.0", "zeroize", ] @@ -2028,19 +2025,19 @@ dependencies = [ "fiat-crypto", "platforms", "rustc_version", - "subtle 2.4.1", + "subtle 2.5.0", "zeroize", ] [[package]] name = "curve25519-dalek-derive" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83fdaf97f4804dcebfa5862639bc9ce4121e82140bec2a987ac5140294865b5b" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -2068,12 +2065,12 @@ dependencies = [ [[package]] name = "darling" -version = "0.20.3" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0209d94da627ab5605dcccf08bb18afa5009cfbef48d8a8b7d7bdbc79be25c5e" +checksum = "54e36fcd13ed84ffdfda6f5be89b31287cbb80c439841fe69e04841435464391" dependencies = [ - "darling_core 0.20.3", - "darling_macro 0.20.3", + "darling_core 0.20.8", + "darling_macro 0.20.8", ] [[package]] @@ -2084,7 +2081,7 @@ checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "strsim 0.10.0", "syn 1.0.109", @@ -2092,16 +2089,16 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.3" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "177e3443818124b357d8e76f53be906d60937f0d3a90773a664fa63fa253e621" +checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "strsim 0.10.0", - "syn 2.0.53", + "syn 2.0.57", ] 
[[package]] @@ -2117,26 +2114,26 @@ dependencies = [ [[package]] name = "darling_macro" -version = "0.20.3" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" +checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" dependencies = [ - "darling_core 0.20.3", + "darling_core 0.20.8", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] name = "data-encoding" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" +checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" [[package]] name = "data-encoding-macro" -version = "0.1.13" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c904b33cc60130e1aeea4956ab803d08a3f4a0ca82d64ed757afac3891f2bb99" +checksum = "20c01c06f5f429efdf2bae21eb67c28b3df3cf85b7dd2d8ef09c0838dac5d33e" dependencies = [ "data-encoding", "data-encoding-macro-internal", @@ -2144,9 +2141,9 @@ dependencies = [ [[package]] name = "data-encoding-macro-internal" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fdf3fce3ce863539ec1d7fd1b6dcc3c645663376b43ed376bbf887733e4f772" +checksum = "0047d07f2c89b17dd631c80450d69841a6b5d7fb17278cbc43d7e4cfcf2576f3" dependencies = [ "data-encoding", "syn 1.0.109", @@ -2154,9 +2151,9 @@ dependencies = [ [[package]] name = "der" -version = "0.7.7" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7ed52955ce76b1554f509074bb357d3fb8ac9b51288a65a3fd480d1dfba946" +checksum = "fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c" dependencies = [ "const-oid", "zeroize", @@ -2178,9 +2175,9 @@ dependencies = [ [[package]] name = "deranged" -version = "0.3.9" +version = 
"0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f32d04922c60427da6f9fef14d042d9edddef64cb9d4ce0d64d0685fbeb1fd3" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" dependencies = [ "powerfmt", ] @@ -2191,7 +2188,7 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -2202,11 +2199,22 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e79116f119dd1dba1abf1f3405f03b9b0e79a27a3883864bfebded8a3dc768cd" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] +[[package]] +name = "derive-syn-parse" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d65d7ce8132b7c0e54497a4d9a55a1c2a0912a0d786cf894472ba818fba45762" +dependencies = [ + "proc-macro2 1.0.79", + "quote 1.0.35", + "syn 2.0.57", +] + [[package]] name = "derive_more" version = "0.99.17" @@ -2214,7 +2222,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ "convert_case", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "rustc_version", "syn 1.0.109", @@ -2253,7 +2261,7 @@ dependencies = [ "block-buffer 0.10.4", "const-oid", "crypto-common", - "subtle 2.4.1", + "subtle 2.5.0", ] [[package]] @@ -2283,9 +2291,9 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -2306,28 +2314,28 @@ dependencies = [ [[package]] name = "docify" -version = "0.2.7" 
+version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cc4fd38aaa9fb98ac70794c82a00360d1e165a87fbf96a8a91f9dfc602aaee2" +checksum = "43a2f138ad521dc4a2ced1a4576148a6a610b4c5923933b062a263130a6802ce" dependencies = [ "docify_macros", ] [[package]] name = "docify_macros" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63fa215f3a0d40fb2a221b3aa90d8e1fbb8379785a990cb60d62ac71ebdc6460" +checksum = "1a081e51fb188742f5a7a1164ad752121abcb22874b21e2c3b0dd040c515fdad" dependencies = [ "common-path", - "derive-syn-parse", + "derive-syn-parse 0.2.0", "once_cell", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "regex", - "syn 2.0.53", + "syn 2.0.57", "termcolor", - "toml 0.8.11", + "toml 0.8.12", "walkdir", ] @@ -2365,16 +2373,16 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "558e40ea573c374cf53507fd240b7ee2f5477df7cfebdb97323ec61c719399c5" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] [[package]] name = "dyn-clone" -version = "1.0.12" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "304e6508efa593091e97a9abbc10f90aa7ca635b6d2784feff3c89d41dd12272" +checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" [[package]] name = "ecdsa" @@ -2393,9 +2401,9 @@ dependencies = [ [[package]] name = "ed25519" -version = "2.2.2" +version = "2.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60f6d271ca33075c88028be6f04d502853d63a5ece419d269c15315d4fc1cf1d" +checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" dependencies = [ "pkcs8", "signature", @@ -2411,8 +2419,8 @@ dependencies = [ "ed25519", "rand_core 0.6.4", "serde", - "sha2 0.10.7", - "subtle 2.4.1", + "sha2 0.10.8", + "subtle 2.5.0", "zeroize", ] @@ -2432,9 +2440,9 @@ 
dependencies = [ [[package]] name = "either" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" +checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" [[package]] name = "elliptic-curve" @@ -2452,15 +2460,15 @@ dependencies = [ "rand_core 0.6.4", "sec1", "serdect", - "subtle 2.4.1", + "subtle 2.5.0", "zeroize", ] [[package]] name = "encoding_rs" -version = "0.8.32" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" +checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" dependencies = [ "cfg-if 1.0.0", ] @@ -2472,7 +2480,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9720bba047d567ffc8a3cba48bf19126600e249ab7f128e9233e6376976a116" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -2583,17 +2591,71 @@ version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" +[[package]] +name = "event-listener" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d93877bcde0eb80ca09131a08d23f0a5c18a620b01db137dba666d18cd9b30c2" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite 0.2.14", +] + +[[package]] +name = "event-listener" +version = "4.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b215c49b2b248c855fb73579eb1f4f26c38ffdc12973e20e07b91d78d5646e" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite 0.2.14", +] + +[[package]] +name = "event-listener" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2b5fb89194fa3cad959b833185b3063ba881dbfc7030680b314250779fb4cc91" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite 0.2.14", +] + +[[package]] +name = "event-listener-strategy" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "958e4d70b6d5e81971bebec42271ec641e7ff4e170a6fa605f2b8a8b65cb97d3" +dependencies = [ + "event-listener 4.0.3", + "pin-project-lite 0.2.14", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "332f51cb23d20b0de8458b86580878211da09bcd4503cb579c225b3d124cabb3" +dependencies = [ + "event-listener 5.2.0", + "pin-project-lite 0.2.14", +] + [[package]] name = "expander" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f86a749cf851891866c10515ef6c299b5c69661465e9c3bbe7e07a2b77fb0f7" +checksum = "00e83c02035136f1592a47964ea60c05a50e4ed8b5892cfac197063850898d4d" dependencies = [ "blake2 0.10.6", "fs-err", - "proc-macro2 1.0.76", + "prettier-please", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -2613,9 +2675,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.0.1" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" +checksum = "658bd65b1cf4c852a3cc96f18a8ce7b5640f6b703f905c7d74532294c2a63984" [[package]] name = "ff" @@ -2624,7 +2686,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" dependencies = [ "rand_core 0.6.4", - "subtle 2.4.1", + "subtle 2.5.0", ] [[package]] @@ -2642,9 +2704,9 @@ dependencies = [ [[package]] name = "fiat-crypto" -version = "0.2.5" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"27573eac26f4dd11e2b1916c3fe1baa56407c83c71a773a8ba17ec0bca03b6b7" +checksum = "c007b1ae3abe1cb6f85a16305acd418b7ca6343b953633fee2b76d8f108b830f" [[package]] name = "file-per-thread-logger" @@ -2707,9 +2769,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.26" +version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" +checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" dependencies = [ "crc32fast", "libz-sys", @@ -2733,9 +2795,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "form_urlencoded" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] @@ -2749,7 +2811,7 @@ checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" [[package]] name = "frame-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "frame-support", "frame-support-procedural", @@ -2797,10 +2859,10 @@ dependencies = [ [[package]] name = "frame-support" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "aquamarine", - "array-bytes 6.1.0", + "array-bytes 6.2.2", "bitflags 1.3.2", "docify", "environmental", @@ -2838,48 +2900,48 @@ dependencies = [ 
[[package]] name = "frame-support-procedural" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "Inflector", "cfg-expr", - "derive-syn-parse", + "derive-syn-parse 0.1.5", "expander", "frame-support-procedural-tools", "itertools", "macro_magic", "proc-macro-warning", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "sp-crypto-hashing", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] name = "frame-support-procedural-tools" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "frame-support-procedural-tools-derive", "proc-macro-crate 3.1.0", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] name = "frame-support-procedural-tools-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] name = "frame-system" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "cfg-if 1.0.0", "docify", @@ -2898,9 +2960,12 @@ dependencies = [ [[package]] name = "fs-err" -version = "2.9.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "0845fa252299212f0389d64ba26f34fa32cfe41588355f21ed507c59a0f64541" +checksum = "88a41f105fe1d5b6b34b2055e3dc59bb79b46b48b2040b9e6c7b4b5de097aa41" +dependencies = [ + "autocfg", +] [[package]] name = "funty" @@ -2968,19 +3033,32 @@ dependencies = [ "futures-io", "memchr", "parking", - "pin-project-lite 0.2.12", + "pin-project-lite 0.2.14", "waker-fn", ] +[[package]] +name = "futures-lite" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" +dependencies = [ + "fastrand 2.0.2", + "futures-core", + "futures-io", + "parking", + "pin-project-lite 0.2.14", +] + [[package]] name = "futures-macro" version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -2990,7 +3068,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2411eed028cdf8c8034eaf21f9915f956b6c3abec4d4c7949ee67f0721127bd" dependencies = [ "futures-io", - "rustls 0.20.8", + "rustls 0.20.9", "webpki", ] @@ -3008,9 +3086,9 @@ checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" [[package]] name = "futures-timer" -version = "3.0.2" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" [[package]] name = "futures-util" @@ -3025,7 +3103,7 @@ dependencies = [ "futures-sink", "futures-task", "memchr", - "pin-project-lite 0.2.12", + "pin-project-lite 0.2.14", "pin-utils", "slab", ] @@ -3072,9 +3150,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.10" +version = "0.2.12" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" dependencies = [ "cfg-if 1.0.0", "libc", @@ -3093,22 +3171,12 @@ dependencies = [ [[package]] name = "ghash" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1583cc1656d7839fd3732b80cf4f38850336cdb9b8ded1cd399ca62958de3c99" -dependencies = [ - "opaque-debug 0.3.0", - "polyval 0.5.3", -] - -[[package]] -name = "ghash" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d930750de5717d2dd0b8c0d42c076c0e884c81a73e6cab859bbd2339c71e3e40" +checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1" dependencies = [ - "opaque-debug 0.3.0", - "polyval 0.6.1", + "opaque-debug 0.3.1", + "polyval", ] [[package]] @@ -3122,6 +3190,12 @@ dependencies = [ "stable_deref_trait", ] +[[package]] +name = "gimli" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + [[package]] name = "gloo-timers" version = "0.2.6" @@ -3142,14 +3216,14 @@ checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff", "rand_core 0.6.4", - "subtle 2.4.1", + "subtle 2.5.0", ] [[package]] name = "h2" -version = "0.3.20" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049" +checksum = "4fbd2820c5e49886948654ab546d0688ff24530286bdcf8fca3cefb16d4618eb" dependencies = [ "bytes", "fnv", @@ -3157,7 +3231,7 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap 1.9.3", + "indexmap 2.2.6", "slab", "tokio", "tokio-util", @@ -3185,7 +3259,7 @@ version = "0.12.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ - "ahash 0.7.6", + "ahash 0.7.8", ] [[package]] @@ -3194,7 +3268,7 @@ version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ - "ahash 0.8.7", + "ahash 0.8.11", ] [[package]] @@ -3203,7 +3277,7 @@ version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" dependencies = [ - "ahash 0.8.7", + "ahash 0.8.11", "allocator-api2", "serde", ] @@ -3243,9 +3317,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.3.2" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hex" @@ -3267,9 +3341,9 @@ checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46" [[package]] name = "hkdf" -version = "0.12.3" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" dependencies = [ "hmac 0.12.1", ] @@ -3304,6 +3378,15 @@ dependencies = [ "hmac 0.8.1", ] +[[package]] +name = "home" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +dependencies = [ + "windows-sys 0.52.0", +] + [[package]] name = "hostname" version = "0.3.1" @@ -3317,9 +3400,9 @@ dependencies = [ [[package]] name = "http" -version = "0.2.9" +version = "0.2.12" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ "bytes", "fnv", @@ -3328,13 +3411,13 @@ dependencies = [ [[package]] name = "http-body" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", "http", - "pin-project-lite 0.2.12", + "pin-project-lite 0.2.14", ] [[package]] @@ -3345,9 +3428,9 @@ checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" [[package]] name = "httpdate" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "humantime" @@ -3357,9 +3440,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.27" +version = "0.14.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" +checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" dependencies = [ "bytes", "futures-channel", @@ -3371,8 +3454,8 @@ dependencies = [ "httparse", "httpdate", "itoa", - "pin-project-lite 0.2.12", - "socket2 0.4.9", + "pin-project-lite 0.2.14", + "socket2 0.5.6", "tokio", "tower-service", "tracing", @@ -3381,32 +3464,32 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.24.1" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http", "hyper", "log", - "rustls 0.21.5", - "rustls-native-certs", + "rustls 0.21.10", + "rustls-native-certs 0.6.3", "tokio", - "tokio-rustls", + "tokio-rustls 0.24.1", ] [[package]] name = "iana-time-zone" -version = "0.1.57" +version = "0.1.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", - "windows 0.48.0", + "windows-core 0.52.0", ] [[package]] @@ -3437,9 +3520,9 @@ dependencies = [ [[package]] name = "idna" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" dependencies = [ "unicode-bidi", "unicode-normalization", @@ -3447,21 +3530,21 @@ dependencies = [ [[package]] name = "if-addrs" -version = "0.7.0" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbc0fa01ffc752e9dbc72818cdb072cd028b86be5e09dd04c5a643704fe101a9" +checksum = "cabb0019d51a643781ff15c9c8a3e5dedc365c47211270f4e8f82812fedd8f0a" dependencies = [ "libc", - "winapi", + "windows-sys 0.48.0", ] [[package]] name = "if-watch" -version = "3.0.1" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9465340214b296cd17a0009acdb890d6160010b8adf8f78a00d0d7ab270f79f" +checksum = "d6b0422c86d7ce0e97169cc42e04ae643caf278874a7a3c87b8150a220dc7e1e" dependencies = [ - "async-io", + "async-io 2.3.2", "core-foundation", "fnv", "futures", @@ -3471,7 
+3554,7 @@ dependencies = [ "rtnetlink", "system-configuration", "tokio", - "windows 0.34.0", + "windows 0.51.1", ] [[package]] @@ -3507,7 +3590,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -3527,7 +3610,7 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b139284b5cf57ecfa712bcc66950bb635b31aff41c188e8a4cfc758eca374a3f" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", ] @@ -3544,9 +3627,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.2.2" +version = "2.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "824b2ae422412366ba479e8111fd301f7b5faece8149317bb81925979a53f520" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", "hashbrown 0.14.3", @@ -3597,7 +3680,7 @@ version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" dependencies = [ - "hermit-abi 0.3.2", + "hermit-abi 0.3.9", "libc", "windows-sys 0.48.0", ] @@ -3614,7 +3697,7 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" dependencies = [ - "socket2 0.5.5", + "socket2 0.5.6", "widestring", "windows-sys 0.48.0", "winreg", @@ -3622,19 +3705,19 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.8.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" [[package]] name = "is-terminal" -version = "0.4.9" 
+version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" +checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" dependencies = [ - "hermit-abi 0.3.2", - "rustix 0.38.31", - "windows-sys 0.48.0", + "hermit-abi 0.3.9", + "libc", + "windows-sys 0.52.0", ] [[package]] @@ -3643,19 +3726,19 @@ version = "1.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "334e04b4d781f436dc315cb1e7515bd96826426345d498149e4bde36b67f8ee9" dependencies = [ - "async-channel", + "async-channel 1.9.0", "castaway", "crossbeam-utils", "curl", "curl-sys", "encoding_rs", - "event-listener", - "futures-lite", + "event-listener 2.5.3", + "futures-lite 1.13.0", "http", "log", "mime", "once_cell", - "polling", + "polling 2.8.0", "slab", "sluice", "tracing", @@ -3675,24 +3758,24 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.9" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jobserver" -version = "0.1.26" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "936cfd212a0155903bcbc060e316fb6cc7cbf2e1907329391ebadc1fe0ce77c2" +checksum = "ab46a6e9526ddef3ae7f787c06f0f2600639ba80ea3eade3d8e670a2230f51d6" dependencies = [ "libc", ] [[package]] name = "js-sys" -version = "0.3.64" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" dependencies = [ "wasm-bindgen", ] @@ -3710,77 +3793,67 @@ dependencies = [ [[package]] name = "jsonrpsee" -version = "0.17.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b971ce0f6cd1521ede485afc564b95b2c8e7079b9da41d4273bd9b55140a55d" -dependencies = [ - "jsonrpsee-core 0.17.1", - "jsonrpsee-proc-macros 0.17.1", - "jsonrpsee-types 0.17.1", - "jsonrpsee-ws-client", - "tracing", -] - -[[package]] -name = "jsonrpsee" -version = "0.20.1" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ad9b31183a8bcbe843e32ca8554ad2936633548d95a7bb6a8e14c767dea6b05" +checksum = "affdc52f7596ccb2d7645231fc6163bb314630c989b64998f3699a28b4d5d4dc" dependencies = [ - "jsonrpsee-client-transport 0.20.1", - "jsonrpsee-core 0.20.1", + "jsonrpsee-client-transport 0.20.3", + "jsonrpsee-core 0.20.3", "jsonrpsee-http-client", - "jsonrpsee-types 0.20.1", + "jsonrpsee-types 0.20.3", ] [[package]] name = "jsonrpsee" -version = "0.22.2" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f3ae45a64cfc0882934f963be9431b2a165d667f53140358181f262aca0702" +checksum = "3cdbb7cb6f3ba28f5b212dd250ab4483105efc3e381f5c8bb90340f14f0a2cc3" dependencies = [ - "jsonrpsee-core 0.22.2", - "jsonrpsee-proc-macros 0.22.2", + "jsonrpsee-core 0.22.3", + "jsonrpsee-proc-macros", "jsonrpsee-server", - "jsonrpsee-types 0.22.2", + "jsonrpsee-types 0.22.3", + "jsonrpsee-ws-client", "tokio", "tracing", ] [[package]] name = "jsonrpsee-client-transport" -version = "0.17.1" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ca00d975eda834826b04ad57d4e690c67439bb51b02eb0f8b7e4c30fcef8ab9" +checksum = "b5b005c793122d03217da09af68ba9383363caa950b90d3436106df8cabce935" dependencies = [ "futures-util", "http", - "jsonrpsee-core 0.17.1", + "jsonrpsee-core 0.20.3", "pin-project", - "rustls-native-certs", + "rustls-native-certs 0.6.3", "soketto", "thiserror", "tokio", - "tokio-rustls", + "tokio-rustls 0.24.1", "tokio-util", "tracing", + "url", ] [[package]] name = "jsonrpsee-client-transport" 
-version = "0.20.1" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97f2743cad51cc86b0dbfe316309eeb87a9d96a3d7f4dd7a99767c4b5f065335" +checksum = "9ab2e14e727d2faf388c99d9ca5210566ed3b044f07d92c29c3611718d178380" dependencies = [ "futures-util", "http", - "jsonrpsee-core 0.20.1", + "jsonrpsee-core 0.22.3", "pin-project", - "rustls-native-certs", + "rustls-native-certs 0.7.0", + "rustls-pki-types", "soketto", "thiserror", "tokio", - "tokio-rustls", + "tokio-rustls 0.25.0", "tokio-util", "tracing", "url", @@ -3788,40 +3861,18 @@ dependencies = [ [[package]] name = "jsonrpsee-core" -version = "0.17.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b83cca7a5a7899eed8b2935d5f755c8c4052ad66ab5b328bd34ac2b3ffd3515f" -dependencies = [ - "anyhow", - "async-lock", - "async-trait", - "beef", - "futures-timer", - "futures-util", - "jsonrpsee-types 0.17.1", - "rustc-hash", - "serde", - "serde_json", - "thiserror", - "tokio", - "tokio-stream", - "tracing", -] - -[[package]] -name = "jsonrpsee-core" -version = "0.20.1" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35dc957af59ce98373bcdde0c1698060ca6c2d2e9ae357b459c7158b6df33330" +checksum = "da2327ba8df2fdbd5e897e2b5ed25ce7f299d345b9736b6828814c3dbd1fd47b" dependencies = [ "anyhow", - "async-lock", + "async-lock 2.8.0", "async-trait", "beef", "futures-timer", "futures-util", "hyper", - "jsonrpsee-types 0.20.1", + "jsonrpsee-types 0.20.3", "rustc-hash", "serde", "serde_json", @@ -3832,37 +3883,41 @@ dependencies = [ [[package]] name = "jsonrpsee-core" -version = "0.22.2" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b75568f4f9696e3a47426e1985b548e1a9fcb13372a5e320372acaf04aca30d1" +checksum = "71962a1c49af43adf81d337e4ebc93f3c915faf6eccaa14d74e255107dfd7723" dependencies = [ "anyhow", + "async-lock 3.3.0", "async-trait", "beef", + "futures-timer", 
"futures-util", "hyper", - "jsonrpsee-types 0.22.2", + "jsonrpsee-types 0.22.3", "parking_lot 0.12.1", + "pin-project", "rand", "rustc-hash", "serde", "serde_json", "thiserror", "tokio", + "tokio-stream", "tracing", ] [[package]] name = "jsonrpsee-http-client" -version = "0.20.1" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd865d0072764cb937b0110a92b5f53e995f7101cb346beca03d93a2dea79de" +checksum = "5f80c17f62c7653ce767e3d7288b793dfec920f97067ceb189ebdd3570f2bc20" dependencies = [ "async-trait", "hyper", "hyper-rustls", - "jsonrpsee-core 0.20.1", - "jsonrpsee-types 0.20.1", + "jsonrpsee-core 0.20.3", + "jsonrpsee-types 0.20.3", "serde", "serde_json", "thiserror", @@ -3874,41 +3929,28 @@ dependencies = [ [[package]] name = "jsonrpsee-proc-macros" -version = "0.17.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d814a21d9a819f8de1a41b819a263ffd68e4bb5f043d936db1c49b54684bde0a" -dependencies = [ - "heck 0.4.1", - "proc-macro-crate 1.3.1", - "proc-macro2 1.0.76", - "quote 1.0.35", - "syn 1.0.109", -] - -[[package]] -name = "jsonrpsee-proc-macros" -version = "0.22.2" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30ca066e73dd70294aebc5c2675d8ffae43be944af027c857ce0d4c51785f014" +checksum = "1d7c2416c400c94b2e864603c51a5bbd5b103386da1f5e58cbf01e7bb3ef0833" dependencies = [ "heck 0.4.1", "proc-macro-crate 3.1.0", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] name = "jsonrpsee-server" -version = "0.22.2" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e29c1bd1f9bba83c864977c73404e505f74f730fa0db89dd490ec174e36d7f0" +checksum = "4882e640e70c2553e3d9487e6f4dddd5fd11918f25e40fa45218f9fe29ed2152" dependencies = [ "futures-util", "http", "hyper", - "jsonrpsee-core 0.22.2", - "jsonrpsee-types 0.22.2", + "jsonrpsee-core 0.22.3", + 
"jsonrpsee-types 0.22.3", "pin-project", "route-recognizer", "serde", @@ -3924,23 +3966,9 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.17.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd301ccc3e08718393432d1961539d78c4580dcca86014dfe6769c308b2c08b2" -dependencies = [ - "anyhow", - "beef", - "serde", - "serde_json", - "thiserror", - "tracing", -] - -[[package]] -name = "jsonrpsee-types" -version = "0.20.1" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa9e25aec855b2a7d3ed90fded6c41e8c3fb72b63f071e1be3f0004eba19b625" +checksum = "5be0be325642e850ed0bdff426674d2e66b2b7117c9be23a7caef68a2902b7d9" dependencies = [ "anyhow", "beef", @@ -3952,9 +3980,9 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.22.2" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3467fd35feeee179f71ab294516bdf3a81139e7aeebdd860e46897c12e1a3368" +checksum = "1e53c72de6cd2ad6ac1aa6e848206ef8b736f92ed02354959130373dfa5b3cbd" dependencies = [ "anyhow", "beef", @@ -3965,14 +3993,15 @@ dependencies = [ [[package]] name = "jsonrpsee-ws-client" -version = "0.17.1" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89a69852133d549b07cb37ff2d0ec540eae0d20abb75ae923f5d39bc7536d987" +checksum = "c8a07ab8da9a283b906f6735ddd17d3680158bb72259e853441d1dd0167079ec" dependencies = [ "http", - "jsonrpsee-client-transport 0.17.1", - "jsonrpsee-core 0.17.1", - "jsonrpsee-types 0.17.1", + "jsonrpsee-client-transport 0.22.3", + "jsonrpsee-core 0.22.3", + "jsonrpsee-types 0.22.3", + "url", ] [[package]] @@ -3986,14 +4015,14 @@ dependencies = [ "elliptic-curve", "once_cell", "serdect", - "sha2 0.10.7", + "sha2 0.10.8", ] [[package]] name = "keccak" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8f6d5ed8676d904364de097082f4e7d240b571b67989ced0240f08b7f966f940" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" dependencies = [ "cpufeatures", ] @@ -4036,15 +4065,15 @@ checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" [[package]] name = "libm" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "libnghttp2-sys" -version = "0.1.7+1.45.0" +version = "0.1.9+1.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57ed28aba195b38d5ff02b9170cbff627e336a20925e43b4945390401c5dc93f" +checksum = "b57e858af2798e167e709b9d969325b6d8e9d50232fcbc494d7d54f976854a64" dependencies = [ "cc", "libc", @@ -4059,7 +4088,7 @@ dependencies = [ "bytes", "futures", "futures-timer", - "getrandom 0.2.10", + "getrandom 0.2.12", "instant", "libp2p-allow-block-list", "libp2p-connection-limits", @@ -4184,7 +4213,7 @@ dependencies = [ "multihash", "quick-protobuf", "rand", - "sha2 0.10.7", + "sha2 0.10.8", "thiserror", "zeroize", ] @@ -4209,7 +4238,7 @@ dependencies = [ "log", "quick-protobuf", "rand", - "sha2 0.10.7", + "sha2 0.10.8", "smallvec", "thiserror", "uint", @@ -4232,7 +4261,7 @@ dependencies = [ "log", "rand", "smallvec", - "socket2 0.4.9", + "socket2 0.4.10", "tokio", "trust-dns-proto", "void", @@ -4267,7 +4296,7 @@ dependencies = [ "once_cell", "quick-protobuf", "rand", - "sha2 0.10.7", + "sha2 0.10.8", "snow", "static_assertions", "thiserror", @@ -4309,7 +4338,7 @@ dependencies = [ "parking_lot 0.12.1", "quinn-proto", "rand", - "rustls 0.20.8", + "rustls 0.20.9", "thiserror", "tokio", ] @@ -4374,7 +4403,7 @@ dependencies = [ "libc", "libp2p-core", "log", - "socket2 0.4.9", + "socket2 0.4.10", "tokio", ] @@ -4390,7 +4419,7 @@ dependencies = [ "libp2p-identity", 
"rcgen", "ring 0.16.20", - "rustls 0.20.8", + "rustls 0.20.9", "thiserror", "webpki", "x509-parser", @@ -4443,6 +4472,16 @@ dependencies = [ "yamux", ] +[[package]] +name = "libredox" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags 2.5.0", + "libc", +] + [[package]] name = "libsecp256k1" version = "0.7.1" @@ -4470,7 +4509,7 @@ checksum = "5be9b9bb642d8522a44d533eab56c16c738301965504753b03ad1de3425d5451" dependencies = [ "crunchy", "digest 0.9.0", - "subtle 2.4.1", + "subtle 2.5.0", ] [[package]] @@ -4493,9 +4532,9 @@ dependencies = [ [[package]] name = "libz-sys" -version = "1.1.9" +version = "1.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56ee889ecc9568871456d42f603d6a0ce59ff328d291063a45cbdf0036baf6db" +checksum = "5e143b5e666b2695d28f6bca6497720813f699c9602dd7f5cac91008b8ada7f9" dependencies = [ "cc", "libc", @@ -4520,9 +4559,9 @@ dependencies = [ [[package]] name = "linregress" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4de0b5f52a9f84544d268f5fabb71b38962d6aa3c6600b8bcd27d44ccf9c9c45" +checksum = "4de04dcecc58d366391f9920245b85ffa684558a5ef6e7736e754347c3aea9c2" dependencies = [ "nalgebra", ] @@ -4541,9 +4580,9 @@ checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" [[package]] name = "linux-raw-sys" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" [[package]] name = "lioness" @@ -4559,9 +4598,9 @@ dependencies = [ [[package]] name = "lock_api" -version = "0.4.10" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" dependencies = [ "autocfg", "scopeguard", @@ -4621,7 +4660,7 @@ dependencies = [ "macro_magic_core", "macro_magic_macros", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -4631,11 +4670,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "468155613a44cfd825f1fb0ffa532b018253920d404e6fca1e8d43155198a46d" dependencies = [ "const-random", - "derive-syn-parse", + "derive-syn-parse 0.1.5", "macro_magic_core_macros", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -4644,9 +4683,9 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ea73aa640dc01d62a590d48c0c3521ed739d53b27f919b25c3551e233481654" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -4657,7 +4696,7 @@ checksum = "ef9d79ae96aaba821963320eb2b6e34d17df1e5a83d8a1985c29cc5be59577b3" dependencies = [ "macro_magic_core", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -4683,9 +4722,9 @@ checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" [[package]] name = "matrixmultiply" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "090126dc04f95dc0d1c1c91f61bdd474b3930ca064c1edc8a849da2c6cbe1e77" +checksum = "7574c1cf36da4798ab73da5b215bbf444f50718207754cb522201d78d1cd0ff2" dependencies = [ "autocfg", "rawpointer", @@ -4693,17 +4732,17 @@ dependencies = [ [[package]] name = "memchr" -version = "2.5.0" +version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" 
[[package]] name = "memfd" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffc89ccdc6e10d6907450f753537ebc5c5d3460d2e4e62ea74bd571db62c0f9e" +checksum = "b2cffa4ad52c6f791f4f8b15f0c05f9824b2ced1160e88cc393d64fff9a8ac64" dependencies = [ - "rustix 0.37.27", + "rustix 0.38.32", ] [[package]] @@ -4724,15 +4763,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "memoffset" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" -dependencies = [ - "autocfg", -] - [[package]] name = "memory-db" version = "0.32.0" @@ -4786,9 +4816,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" dependencies = [ "adler", ] @@ -4824,7 +4854,7 @@ dependencies = [ "rand", "rand_chacha", "rand_distr", - "subtle 2.4.1", + "subtle 2.5.0", "thiserror", "zeroize", ] @@ -4851,7 +4881,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ce75669015c4f47b289fd4d4f56e894e4c96003ffdf3ac51313126f94c6cbb" dependencies = [ "cfg-if 1.0.0", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -4899,13 +4929,13 @@ dependencies = [ [[package]] name = "multihash-derive" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc076939022111618a5026d3be019fd8b366e76314538ff9a1b59ffbcbf98bcd" +checksum = "1d6d4752e6230d8ef7adf7bd5d8c4b1f6561c1014c5ba9a37445ccefe18aa1db" dependencies = [ - "proc-macro-crate 1.3.1", + "proc-macro-crate 1.1.3", "proc-macro-error", - "proc-macro2 1.0.76", + 
"proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", "synstructure", @@ -4933,9 +4963,9 @@ dependencies = [ [[package]] name = "nalgebra" -version = "0.32.3" +version = "0.32.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "307ed9b18cc2423f29e83f84fd23a8e73628727990181f18641a8b5dc2ab1caa" +checksum = "3ea4908d4f23254adda3daa60ffef0f1ac7b8c3e9a864cf3cc154b251908a2ef" dependencies = [ "approx", "matrixmultiply", @@ -4953,7 +4983,7 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91761aed67d03ad966ef783ae962ef9bbaca728d2dd7ceb7939ec110fffad998" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -5080,9 +5110,9 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" dependencies = [ "autocfg", "num-integer", @@ -5091,9 +5121,9 @@ dependencies = [ [[package]] name = "num-complex" -version = "0.4.3" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02e0d21255c828d6f128a1e41534206671e8c3ea0c62f32291e808dc82cff17d" +checksum = "23c6602fda94a57c990fe0df199a035d83576b496aa29f4e634a8ac6004e68a6" dependencies = [ "num-traits", ] @@ -5116,11 +5146,10 @@ dependencies = [ [[package]] name = "num-integer" -version = "0.1.45" +version = "0.1.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" dependencies = [ - "autocfg", "num-traits", ] @@ -5152,15 +5181,15 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.3.2", + "hermit-abi 0.3.9", "libc", ] [[package]] name = "num_threads" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44" +checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" dependencies = [ "libc", ] @@ -5179,9 +5208,9 @@ dependencies = [ [[package]] name = "object" -version = "0.31.1" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" dependencies = [ "memchr", ] @@ -5209,9 +5238,9 @@ checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" [[package]] name = "opaque-debug" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" +checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" [[package]] name = "openssl-probe" @@ -5221,9 +5250,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.90" +version = "0.9.102" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "374533b0e45f3a7ced10fcaeccca020e66656bc03dac384f852e4e5a7a8104a6" +checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" dependencies = [ "cc", "libc", @@ -5234,7 +5263,7 @@ dependencies = [ [[package]] name = "pallet-authorship" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "frame-support", "frame-system", @@ -5248,7 +5277,7 @@ dependencies = [ [[package]] name = "pallet-balances" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "docify", "frame-benchmarking", @@ -5264,7 +5293,7 @@ dependencies = [ [[package]] name = "pallet-beefy" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "frame-support", "frame-system", @@ -5284,9 +5313,9 @@ dependencies = [ [[package]] name = "pallet-beefy-mmr" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ - "array-bytes 6.1.0", + "array-bytes 6.2.2", "binary-merkle-tree", "frame-support", "frame-system", @@ -5419,7 +5448,7 @@ dependencies = [ [[package]] name = "pallet-grandpa" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "frame-benchmarking", "frame-support", @@ -5442,7 +5471,7 @@ dependencies = [ [[package]] name = "pallet-mmr" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "frame-benchmarking", "frame-support", @@ -5460,7 +5489,7 @@ dependencies = [ [[package]] name = "pallet-session" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "frame-support", "frame-system", @@ -5482,7 +5511,7 @@ dependencies = [ [[package]] name = "pallet-timestamp" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "docify", "frame-benchmarking", @@ -5502,7 +5531,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "frame-support", "frame-system", @@ -5518,7 +5547,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc-runtime-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "pallet-transaction-payment", "parity-scale-codec", @@ -5530,7 +5559,7 @@ dependencies = [ [[package]] name = "pallet-utility" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "frame-benchmarking", "frame-support", @@ -5610,16 +5639,16 @@ checksum = "4e69bf016dc406eff7d53a7d3f7cf1c2e72c82b9088aac1118591e36dd2cd3e9" dependencies = [ "bitcoin_hashes 0.13.0", "rand", - "rand_core 0.6.4", + "rand_core 0.5.1", "serde", "unicode-normalization", ] [[package]] name = "parity-scale-codec" -version = "3.6.4" +version = "3.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8e946cc0cc711189c0b0249fb8b599cbeeab9784d83c415719368bb8d4ac64" +checksum = "881331e34fa842a2fb61cc2db9643a8fedc615e47cfcc52597d1af0db9a7e8fe" dependencies = [ "arrayvec 0.7.4", "bitvec", @@ -5632,12 +5661,12 @@ dependencies = [ [[package]] name = "parity-scale-codec-derive" -version = "3.6.4" +version = "3.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a296c3079b5fefbc499e1de58dc26c09b1b9a5952d26694ee89f04a43ebbb3e" +checksum = "be30eaf4b0a9fba5336683b38de57bb86d179a35862ba6bfcf57625d006bde5b" dependencies = [ - "proc-macro-crate 1.3.1", - "proc-macro2 1.0.76", + "proc-macro-crate 2.0.0", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -5672,7 +5701,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f557c32c6d268a07c921471619c0295f5efad3a0e76d4f97a05c091a51d110b2" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "syn 1.0.109", "synstructure", ] @@ -5685,9 +5714,9 @@ checksum = "e1ad0aff30c1da14b1254fcb2af73e1fa9a28670e584a626f53a369d0e157304" [[package]] name = "parking" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e" +checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" [[package]] name = "parking_lot" @@ -5707,7 +5736,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core 0.9.8", + "parking_lot_core 0.9.9", ] [[package]] @@ -5726,15 +5755,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.8" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ "cfg-if 1.0.0", "libc", - "redox_syscall 0.3.5", + "redox_syscall 0.4.1", "smallvec", - "windows-targets 0.48.1", + "windows-targets 0.48.5", ] [[package]] @@ -5751,7 +5780,7 @@ checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" dependencies = [ "base64ct", "rand_core 0.6.4", - "subtle 2.4.1", + "subtle 2.5.0", ] [[package]] @@ -5781,38 +5810,38 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "petgraph" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" +checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 1.9.3", + "indexmap 2.2.6", ] [[package]] name = "pin-project" -version = "1.1.3" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" dependencies = [ "pin-project-internal", ] [[package]] 
name = "pin-project-internal" -version = "1.1.3" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -5823,15 +5852,26 @@ checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" [[package]] name = "pin-project-lite" -version = "0.2.12" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12cc1b0bf1727a77a54b6654e7b5f1af8604923edc8b81885f8ec92f9e3f0a05" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" [[package]] name = "pin-utils" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "piper" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "668d31b1c4eba19242f2088b2bf3316b82ca31082a8335764db4e083db7485d4" +dependencies = [ + "atomic-waker", + "fastrand 2.0.2", + "futures-io", +] [[package]] name = "pkcs8" @@ -5845,20 +5885,20 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.27" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" [[package]] name = "platforms" -version = "3.0.2" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d7ddaed09e0eb771a79ab0fd64609ba0afb0a8366421957936ad14cbd13630" +checksum = 
"db23d408679286588f4d4644f965003d056e3dd5abcaaa938116871d7ce2fee7" [[package]] name = "polkadot-core-primitives" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "parity-scale-codec", "scale-info", @@ -5870,7 +5910,7 @@ dependencies = [ [[package]] name = "polkadot-parachain-primitives" version = "6.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "bounded-collections", "derive_more", @@ -5887,7 +5927,7 @@ dependencies = [ [[package]] name = "polkadot-primitives" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "bitvec", "hex-literal", @@ -5958,9 +5998,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c4fdfc49717fb9a196e74a5d28e0bc764eb394a2c803eb11133a31ac996c60c" dependencies = [ "polkavm-common", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -5970,7 +6010,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ba81f7b5faac81e528eb6158a6f3c9e0bb1008e0ffa19653bc8dea925ecb429" dependencies = [ "polkavm-derive-impl", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -5991,43 +6031,46 @@ dependencies = [ "concurrent-queue", "libc", "log", - "pin-project-lite 0.2.12", + "pin-project-lite 0.2.14", "windows-sys 0.48.0", ] [[package]] -name = "poly1305" -version = "0.7.2" +name = "polling" +version = "3.6.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "048aeb476be11a4b6ca432ca569e375810de9294ae78f4774e78ea98a9246ede" +checksum = "e0c976a60b2d7e99d6f229e414670a9b85d13ac305cc6d1e9c134de58c5aaaf6" dependencies = [ - "cpufeatures", - "opaque-debug 0.3.0", - "universal-hash 0.4.1", + "cfg-if 1.0.0", + "concurrent-queue", + "hermit-abi 0.3.9", + "pin-project-lite 0.2.14", + "rustix 0.38.32", + "tracing", + "windows-sys 0.52.0", ] [[package]] -name = "polyval" -version = "0.5.3" +name = "poly1305" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8419d2b623c7c0896ff2d5d96e2cb4ede590fed28fcc34934f4c33c036e620a1" +checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf" dependencies = [ - "cfg-if 1.0.0", "cpufeatures", - "opaque-debug 0.3.0", - "universal-hash 0.4.1", + "opaque-debug 0.3.1", + "universal-hash", ] [[package]] name = "polyval" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52cff9d1d4dee5fe6d03729099f4a310a41179e0a10dbf542039873f2e826fb" +checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" dependencies = [ "cfg-if 1.0.0", "cpufeatures", - "opaque-debug 0.3.0", - "universal-hash 0.5.1", + "opaque-debug 0.3.1", + "universal-hash", ] [[package]] @@ -6072,21 +6115,31 @@ dependencies = [ "termtree", ] +[[package]] +name = "prettier-please" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22020dfcf177fcc7bf5deaf7440af371400c67c0de14c399938d8ed4fb4645d3" +dependencies = [ + "proc-macro2 1.0.79", + "syn 2.0.57", +] + [[package]] name = "prettyplease" -version = "0.1.25" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86" +checksum = "f28f53e8b192565862cf99343194579a022eb9c7dd3a8d03134734803c7b3125" dependencies = [ - "proc-macro2 
1.0.76", + "proc-macro2 1.0.79", "syn 1.0.109", ] [[package]] name = "primitive-types" -version = "0.12.1" +version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f3486ccba82358b11a77516035647c34ba167dfa53312630de83b12bd4f3d66" +checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" dependencies = [ "fixed-hash", "impl-codec", @@ -6098,12 +6151,21 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "1.3.1" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +checksum = "e17d47ce914bf4de440332250b0edd23ce48c005f59fab39d3335866b114f11a" dependencies = [ - "once_cell", - "toml_edit 0.19.14", + "thiserror", + "toml 0.5.11", +] + +[[package]] +name = "proc-macro-crate" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8" +dependencies = [ + "toml_edit 0.20.7", ] [[package]] @@ -6112,7 +6174,7 @@ version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" dependencies = [ - "toml_edit 0.21.0", + "toml_edit 0.21.1", ] [[package]] @@ -6122,7 +6184,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", "version_check", @@ -6134,26 +6196,20 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "version_check", ] -[[package]] -name = "proc-macro-hack" -version = 
"0.5.20+deprecated" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" - [[package]] name = "proc-macro-warning" -version = "1.0.0" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b698b0b09d40e9b7c1a47b132d66a8b54bcd20583d9b6d06e4535e383b4405c" +checksum = "834da187cfe638ae8abb0203f0b33e5ccdb02a28e7199f2f47b3e2754f50edca" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -6167,9 +6223,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.76" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" +checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" dependencies = [ "unicode-ident", ] @@ -6202,13 +6258,13 @@ dependencies = [ [[package]] name = "prometheus-client-derive-encode" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b6a5217beb0ad503ee7fa752d451c905113d70721b937126158f3106a48cc1" +checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 1.0.109", + "syn 2.0.57", ] [[package]] @@ -6251,7 +6307,7 @@ checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" dependencies = [ "anyhow", "itertools", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -6315,15 +6371,15 @@ dependencies = [ [[package]] name = "quinn-proto" -version = "0.9.3" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67c10f662eee9c94ddd7135043e544f3c82fa839a1e7b865911331961b53186c" +checksum = 
"94b0b33c13a79f669c85defaf4c275dc86a0c0372807d0ca3d78e0bb87274863" dependencies = [ "bytes", "rand", "ring 0.16.20", "rustc-hash", - "rustls 0.20.8", + "rustls 0.20.9", "slab", "thiserror", "tinyvec", @@ -6346,7 +6402,7 @@ version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", ] [[package]] @@ -6391,7 +6447,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.12", ] [[package]] @@ -6412,9 +6468,9 @@ checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3" [[package]] name = "rayon" -version = "1.8.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" dependencies = [ "either", "rayon-core", @@ -6422,9 +6478,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.12.0" +version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" dependencies = [ "crossbeam-deque", "crossbeam-utils", @@ -6473,42 +6529,42 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.3.5" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" dependencies = [ "bitflags 1.3.2", ] [[package]] name = "redox_users" -version = "0.4.3" +version = "0.4.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" +checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" dependencies = [ - "getrandom 0.2.10", - "redox_syscall 0.2.16", + "getrandom 0.2.12", + "libredox", "thiserror", ] [[package]] name = "ref-cast" -version = "1.0.18" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1641819477c319ef452a075ac34a4be92eb9ba09f6841f62d594d50fdcf0bf6b" +checksum = "c4846d4c50d1721b1a3bef8af76924eef20d5e723647333798c1b519b3a9473f" dependencies = [ "ref-cast-impl", ] [[package]] name = "ref-cast-impl" -version = "1.0.18" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68bf53dad9b6086826722cdc99140793afd9f62faa14a1ad07eb4f955e7a7216" +checksum = "5fddb4f8d99b0a2ebafc65a87a69a7b9875e4b1ae1f00db265d300ef7f28bccc" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -6525,14 +6581,14 @@ dependencies = [ [[package]] name = "regex" -version = "1.9.1" +version = "1.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" +checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.3.3", - "regex-syntax 0.7.4", + "regex-automata 0.4.6", + "regex-syntax 0.8.3", ] [[package]] @@ -6546,13 +6602,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.3" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" +checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.7.4", + "regex-syntax 0.8.3", 
] [[package]] @@ -6563,9 +6619,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.7.4" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" +checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" [[package]] name = "relay-bridge-hub-kusama-client" @@ -6743,7 +6799,7 @@ dependencies = [ "frame-support", "frame-system", "futures", - "jsonrpsee 0.17.1", + "jsonrpsee 0.22.3", "log", "num-traits", "pallet-balances", @@ -6831,7 +6887,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" dependencies = [ "hmac 0.12.1", - "subtle 2.4.1", + "subtle 2.5.0", ] [[package]] @@ -6860,11 +6916,26 @@ dependencies = [ "libc", "once_cell", "spin 0.5.2", - "untrusted", + "untrusted 0.7.1", "web-sys", "winapi", ] +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if 1.0.0", + "getrandom 0.2.12", + "libc", + "spin 0.9.8", + "untrusted 0.9.0", + "windows-sys 0.52.0", +] + [[package]] name = "rlp" version = "0.5.2" @@ -6934,9 +7005,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.36.15" +version = "0.36.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c37f1bd5ef1b5422177b7646cba67430579cfe2ace80f284fee876bca52ad941" +checksum = "305efbd14fde4139eb501df5f136994bb520b033fa9fbdce287507dc23b8c7ed" dependencies = [ "bitflags 1.3.2", "errno", @@ -6962,22 +7033,22 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.31" +version = "0.38.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949" +checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.5.0", "errno", "libc", - "linux-raw-sys 0.4.12", + "linux-raw-sys 0.4.13", "windows-sys 0.52.0", ] [[package]] name = "rustls" -version = "0.20.8" +version = "0.20.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" dependencies = [ "log", "ring 0.16.20", @@ -6987,16 +7058,30 @@ dependencies = [ [[package]] name = "rustls" -version = "0.21.5" +version = "0.21.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79ea77c539259495ce8ca47f53e66ae0330a8819f67e23ac96ca02f50e7b7d36" +checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" dependencies = [ "log", - "ring 0.16.20", - "rustls-webpki", + "ring 0.17.8", + "rustls-webpki 0.101.7", "sct", ] +[[package]] +name = "rustls" +version = "0.22.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99008d7ad0bbbea527ec27bddbc0e432c5b87d8175178cee68d2eec9c4a1813c" +dependencies = [ + "log", + "ring 0.17.8", + "rustls-pki-types", + "rustls-webpki 0.102.2", + "subtle 2.5.0", + "zeroize", +] + [[package]] name = "rustls-native-certs" version = "0.6.3" @@ -7004,28 +7089,68 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" dependencies = [ "openssl-probe", - "rustls-pemfile", + "rustls-pemfile 1.0.4", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-native-certs" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792" +dependencies = [ + "openssl-probe", + 
"rustls-pemfile 2.1.1", + "rustls-pki-types", "schannel", "security-framework", ] [[package]] name = "rustls-pemfile" -version = "1.0.3" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-pemfile" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" +checksum = "f48172685e6ff52a556baa527774f61fcaa884f59daf3375c62a3f1cd2549dab" dependencies = [ - "base64 0.21.2", + "base64 0.21.7", + "rustls-pki-types", ] +[[package]] +name = "rustls-pki-types" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecd36cc4259e3e4514335c4a138c6b43171a8d61d8f5c9348f9fc7529416f247" + [[package]] name = "rustls-webpki" -version = "0.101.4" +version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d93931baf2d282fff8d3a532bbfd7653f734643161b87e3e01e59a04439bf0d" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring 0.16.20", - "untrusted", + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "rustls-webpki" +version = "0.102.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faaa0a62740bedb9b2ef5afa303da42764c012f743917351dc9a237ea1663610" +dependencies = [ + "ring 0.17.8", + "rustls-pki-types", + "untrusted 0.9.0", ] [[package]] @@ -7058,9 +7183,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.15" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" +checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" [[package]] name = "safe_arch" @@ -7083,7 +7208,7 @@ dependencies = [ 
[[package]] name = "sc-allocator" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "log", "sp-core", @@ -7094,9 +7219,9 @@ dependencies = [ [[package]] name = "sc-chain-spec" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ - "array-bytes 6.1.0", + "array-bytes 6.2.2", "docify", "log", "memmap2", @@ -7120,18 +7245,18 @@ dependencies = [ [[package]] name = "sc-chain-spec-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "proc-macro-crate 3.1.0", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] name = "sc-client-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "fnv", "futures", @@ -7158,7 +7283,7 @@ dependencies = [ [[package]] name = "sc-consensus" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "async-trait", "futures", @@ -7183,7 +7308,7 @@ dependencies = [ [[package]] name = "sc-executor" version = "0.32.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "parity-scale-codec", "parking_lot 0.12.1", @@ -7206,7 +7331,7 @@ dependencies = [ [[package]] name = "sc-executor-common" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "polkavm", "sc-allocator", @@ -7219,7 +7344,7 @@ dependencies = [ [[package]] name = "sc-executor-polkavm" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "log", "polkavm", @@ -7230,14 +7355,14 @@ dependencies = [ [[package]] name = "sc-executor-wasmtime" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "anyhow", "cfg-if 1.0.0", "libc", "log", "parking_lot 0.12.1", - "rustix 0.36.15", + "rustix 0.36.17", "sc-allocator", "sc-executor-common", "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", @@ -7248,7 +7373,7 @@ dependencies = [ [[package]] name = "sc-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "array-bytes 4.2.0", "arrayvec 0.7.4", @@ -7277,10 +7402,10 @@ 
dependencies = [ [[package]] name = "sc-network" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ - "array-bytes 6.1.0", - "async-channel", + "array-bytes 6.2.2", + "async-channel 1.9.0", "async-trait", "asynchronous-codec", "bytes", @@ -7320,7 +7445,7 @@ dependencies = [ [[package]] name = "sc-network-common" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "async-trait", "bitflags 1.3.2", @@ -7337,9 +7462,9 @@ dependencies = [ [[package]] name = "sc-rpc-api" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ - "jsonrpsee 0.22.2", + "jsonrpsee 0.22.3", "parity-scale-codec", "sc-chain-spec", "sc-mixnet", @@ -7357,7 +7482,7 @@ dependencies = [ [[package]] name = "sc-telemetry" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "chrono", "futures", @@ -7376,7 +7501,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "async-trait", "futures", @@ -7392,9 
+7517,9 @@ dependencies = [ [[package]] name = "sc-utils" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ - "async-channel", + "async-channel 1.9.0", "futures", "futures-timer", "lazy_static", @@ -7437,8 +7562,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "27873eb6005868f8cc72dcfe109fae664cf51223d35387bc2f28be4c28d94c47" dependencies = [ "darling 0.14.4", - "proc-macro-crate 1.3.1", - "proc-macro2 1.0.76", + "proc-macro-crate 1.1.3", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -7465,8 +7590,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "995491f110efdc6bea96d6a746140e32bfceb4ea47510750a5467295a4707a25" dependencies = [ "darling 0.14.4", - "proc-macro-crate 1.3.1", - "proc-macro2 1.0.76", + "proc-macro-crate 1.1.3", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -7491,8 +7616,8 @@ version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7dc2f4e8bc344b9fc3d5f74f72c2e55bfc38d28dc2ebc69c194a3df424e4d9ac" dependencies = [ - "proc-macro-crate 1.3.1", - "proc-macro2 1.0.76", + "proc-macro-crate 1.1.3", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -7519,11 +7644,11 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" +checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -7532,7 +7657,7 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"772575a524feeb803e5b0fcbc6dd9f367e579488197c94c6e4023aad2305774d" dependencies = [ - "ahash 0.8.7", + "ahash 0.8.11", "cfg-if 1.0.0", "hashbrown 0.13.2", ] @@ -7559,7 +7684,7 @@ version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8de18f6d8ba0aad7045f5feae07ec29899c1112584a38509a84ad7b04451eaa0" dependencies = [ - "aead 0.5.2", + "aead", "arrayref", "arrayvec 0.7.4", "curve25519-dalek 4.1.1", @@ -7567,8 +7692,8 @@ dependencies = [ "merlin", "rand_core 0.6.4", "serde_bytes", - "sha2 0.10.7", - "subtle 2.4.1", + "sha2 0.10.8", + "subtle 2.5.0", "zeroize", ] @@ -7580,12 +7705,12 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sct" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring 0.16.20", - "untrusted", + "ring 0.17.8", + "untrusted 0.9.0", ] [[package]] @@ -7599,24 +7724,24 @@ dependencies = [ "generic-array 0.14.7", "pkcs8", "serdect", - "subtle 2.4.1", + "subtle 2.5.0", "zeroize", ] [[package]] name = "secp256k1" -version = "0.28.0" +version = "0.28.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2acea373acb8c21ecb5a23741452acd2593ed44ee3d343e72baaa143bc89d0d5" +checksum = "d24b59d129cdadea20aea4fb2352fa053712e5d713eee47d700cd4b2bc002f10" dependencies = [ "secp256k1-sys", ] [[package]] name = "secp256k1-sys" -version = "0.9.0" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09e67c467c38fd24bd5499dc9a18183b31575c12ee549197e3e20d57aa4fe3b7" +checksum = "e5d1746aae42c19d583c3c1a8c646bfad910498e2051c551a7f2e3c0c9fbb7eb" dependencies = [ "cc", ] @@ -7632,9 +7757,9 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.9.1" +version = "2.10.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc758eb7bffce5b308734e9b0c1468893cae9ff70ebf13e7090be8dcbcc83a8" +checksum = "770452e37cad93e0a50d5abc3990d2bc351c36d0328f86cefec2f2fb206eaef6" dependencies = [ "bitflags 1.3.2", "core-foundation", @@ -7645,9 +7770,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.9.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f51d0c0d83bec45f16480d0ce0058397a69e48fcdc52d1dc8855fb68acbd31a7" +checksum = "41f3cc463c0ef97e11c3461a9d3787412d30e8e7eb907c79180c4a57bf7c04ef" dependencies = [ "core-foundation-sys", "libc", @@ -7655,9 +7780,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.18" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918" +checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" [[package]] name = "serde" @@ -7683,9 +7808,9 @@ version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -7694,7 +7819,7 @@ version = "1.0.115" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "12dc5c46daa8e9fdf4f5e71b6cf9a53f2487da0e86e55808e2d35539666497dd" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.6", "itoa", "ryu", "serde", @@ -7729,7 +7854,7 @@ dependencies = [ "cfg-if 1.0.0", "cpufeatures", "digest 0.9.0", - "opaque-debug 0.3.0", + "opaque-debug 0.3.1", ] [[package]] @@ -7742,14 +7867,14 @@ dependencies = [ "cfg-if 1.0.0", "cpufeatures", "digest 0.9.0", - "opaque-debug 0.3.0", + "opaque-debug 0.3.1", ] [[package]] name = "sha2" -version = "0.10.7" +version = "0.10.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if 1.0.0", "cpufeatures", @@ -7768,9 +7893,9 @@ dependencies = [ [[package]] name = "sharded-slab" -version = "0.1.4" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" dependencies = [ "lazy_static", ] @@ -7791,8 +7916,8 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c4aa94397e2023af5b7cff5b8d4785e935cfb77f0e4aab0cae3b26258ace556" dependencies = [ - "async-io", - "futures-lite", + "async-io 1.13.0", + "futures-lite 1.13.0", "libc", "signal-hook", ] @@ -7808,9 +7933,9 @@ dependencies = [ [[package]] name = "signature" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest 0.10.7", "rand_core 0.6.4", @@ -7837,15 +7962,15 @@ checksum = "620a1d43d70e142b1d46a929af51d44f383db9c7a2ec122de2cd992ccfcf3c18" [[package]] name = "siphasher" -version = "0.3.10" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" [[package]] name = "slab" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" 
dependencies = [ "autocfg", ] @@ -7862,16 +7987,16 @@ version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d7400c0eff44aa2fcb5e31a5f24ba9716ed90138769e4977a2ba6014ae63eb5" dependencies = [ - "async-channel", + "async-channel 1.9.0", "futures-core", "futures-io", ] [[package]] name = "smallvec" -version = "1.11.0" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "smol" @@ -7879,15 +8004,15 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13f2b548cd8447f8de0fdf1c592929f70f4fc7039a05e47404b0d096ec6987a1" dependencies = [ - "async-channel", + "async-channel 1.9.0", "async-executor", "async-fs", - "async-io", - "async-lock", + "async-io 1.13.0", + "async-lock 2.8.0", "async-net", "async-process", "blocking", - "futures-lite", + "futures-lite 1.13.0", ] [[package]] @@ -7897,17 +8022,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1cce5e2881b30bad7ef89f383a816ad0b22c45915911f28499026de4a76d20ee" dependencies = [ "arrayvec 0.7.4", - "async-lock", + "async-lock 2.8.0", "atomic", - "base64 0.21.2", + "base64 0.21.7", "bip39", "blake2-rfc", - "bs58 0.5.0", + "bs58 0.5.1", "crossbeam-queue", "derive_more", "ed25519-zebra", "either", - "event-listener", + "event-listener 2.5.3", "fnv", "futures-channel", "futures-util", @@ -7930,7 +8055,7 @@ dependencies = [ "schnorrkel 0.10.2", "serde", "serde_json", - "sha2 0.10.7", + "sha2 0.10.8", "siphasher", "slab", "smallvec", @@ -7948,11 +8073,11 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b2f7b4687b83ff244ef6137735ed5716ad37dcdf3ee16c4eb1a32fb9808fa47" dependencies = [ - "async-lock", + "async-lock 2.8.0", "blake2-rfc", "derive_more", "either", - 
"event-listener", + "event-listener 2.5.3", "fnv", "futures-channel", "futures-util", @@ -7973,26 +8098,26 @@ dependencies = [ [[package]] name = "snow" -version = "0.9.3" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c9d1425eb528a21de2755c75af4c9b5d57f50a0d4c3b7f1828a4cd03f8ba155" +checksum = "850948bee068e713b8ab860fe1adc4d109676ab4c3b621fd8147f06b261f2f85" dependencies = [ - "aes-gcm 0.9.4", + "aes-gcm", "blake2 0.10.6", "chacha20poly1305", "curve25519-dalek 4.1.1", "rand_core 0.6.4", - "ring 0.16.20", + "ring 0.17.8", "rustc_version", - "sha2 0.10.7", - "subtle 2.4.1", + "sha2 0.10.8", + "subtle 2.5.0", ] [[package]] name = "socket2" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" dependencies = [ "libc", "winapi", @@ -8000,12 +8125,12 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.5" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +checksum = "05ffd9c0a93b7543e062e759284fcf5f5e3b098501104bfbdde4d404db792871" dependencies = [ "libc", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -8028,7 +8153,7 @@ dependencies = [ [[package]] name = "sp-api" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "hash-db", "log", @@ -8050,21 +8175,21 @@ dependencies = [ [[package]] name = "sp-api-proc-macro" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "Inflector", "blake2 0.10.6", "expander", "proc-macro-crate 3.1.0", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] name = "sp-application-crypto" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "parity-scale-codec", "scale-info", @@ -8077,7 +8202,7 @@ dependencies = [ [[package]] name = "sp-arithmetic" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "docify", "integer-sqrt", @@ -8110,7 +8235,7 @@ dependencies = [ [[package]] name = "sp-authority-discovery" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "parity-scale-codec", "scale-info", @@ -8122,7 +8247,7 @@ dependencies = [ [[package]] name = "sp-blockchain" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "futures", "log", @@ -8140,7 +8265,7 @@ dependencies = [ [[package]] name = "sp-consensus" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "async-trait", "futures", @@ -8155,7 +8280,7 @@ dependencies = [ [[package]] name = "sp-consensus-beefy" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "lazy_static", "parity-scale-codec", @@ -8175,7 +8300,7 @@ dependencies = [ [[package]] name = "sp-consensus-grandpa" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "finality-grandpa", "log", @@ -8192,7 +8317,7 @@ dependencies = [ [[package]] name = "sp-consensus-slots" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "parity-scale-codec", "scale-info", @@ -8203,14 +8328,14 @@ dependencies = [ [[package]] name = "sp-core" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ - "array-bytes 6.1.0", + "array-bytes 6.2.2", "bandersnatch_vrfs", "bitflags 1.3.2", "blake2 0.10.6", "bounded-collections", - "bs58 0.5.0", + "bs58 0.5.1", "dyn-clonable", "ed25519-zebra", "futures", @@ -8256,7 +8381,7 @@ dependencies = [ "blake2b_simd", "byteorder", "digest 0.10.7", - "sha2 0.10.7", + "sha2 0.10.8", "sha3", "sp-std 8.0.0", "twox-hash", @@ -8265,7 +8390,7 @@ 
dependencies = [ [[package]] name = "sp-crypto-ec-utils" version = "0.10.0" -source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "ark-bls12-377", "ark-bls12-377-ext", @@ -8285,12 +8410,12 @@ dependencies = [ [[package]] name = "sp-crypto-hashing" version = "0.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "blake2b_simd", "byteorder", "digest 0.10.7", - "sha2 0.10.7", + "sha2 0.10.8", "sha3", "twox-hash", ] @@ -8298,17 +8423,17 @@ dependencies = [ [[package]] name = "sp-crypto-hashing-proc-macro" version = "0.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "quote 1.0.35", "sp-crypto-hashing", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] name = "sp-database" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "kvdb", "parking_lot 0.12.1", @@ -8317,27 +8442,27 @@ dependencies = [ [[package]] name = "sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] name = 
"sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] name = "sp-externalities" version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "environmental", "parity-scale-codec", @@ -8347,7 +8472,7 @@ dependencies = [ [[package]] name = "sp-externalities" version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "environmental", "parity-scale-codec", @@ -8357,7 +8482,7 @@ dependencies = [ [[package]] name = "sp-genesis-builder" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "serde_json", "sp-api", @@ -8367,7 +8492,7 @@ dependencies = [ [[package]] name = "sp-inherents" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "async-trait", "impl-trait-for-tuples", @@ -8380,7 +8505,7 @@ dependencies = [ [[package]] name = "sp-io" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "bytes", "ed25519-dalek", @@ -8406,7 +8531,7 @@ dependencies = [ [[package]] name = "sp-keyring" version = "31.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "sp-core", "sp-runtime", @@ -8416,7 +8541,7 @@ dependencies = [ [[package]] name = "sp-keystore" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "parity-scale-codec", "parking_lot 0.12.1", @@ -8427,7 +8552,7 @@ dependencies = [ [[package]] name = "sp-maybe-compressed-blob" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "thiserror", "zstd 0.12.4", @@ -8436,7 +8561,7 @@ dependencies = [ [[package]] name = "sp-metadata-ir" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "frame-metadata 16.0.0", "parity-scale-codec", @@ -8446,7 +8571,7 @@ dependencies = [ [[package]] name = "sp-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ 
"parity-scale-codec", "scale-info", @@ -8457,7 +8582,7 @@ dependencies = [ [[package]] name = "sp-mmr-primitives" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "ckb-merkle-mountain-range 0.5.2", "log", @@ -8474,7 +8599,7 @@ dependencies = [ [[package]] name = "sp-panic-handler" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "backtrace", "lazy_static", @@ -8484,7 +8609,7 @@ dependencies = [ [[package]] name = "sp-rpc" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "rustc-hash", "serde", @@ -8494,7 +8619,7 @@ dependencies = [ [[package]] name = "sp-runtime" version = "31.0.1" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "docify", "either", @@ -8518,7 +8643,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface" version = "24.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "bytes", "impl-trait-for-tuples", @@ -8537,7 +8662,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface" version = "24.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "bytes", "impl-trait-for-tuples", @@ -8556,33 +8681,33 @@ dependencies = [ [[package]] name = "sp-runtime-interface-proc-macro" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "Inflector", "expander", "proc-macro-crate 3.1.0", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] name = "sp-runtime-interface-proc-macro" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "Inflector", "expander", "proc-macro-crate 3.1.0", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] name = "sp-session" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "parity-scale-codec", "scale-info", @@ -8596,7 +8721,7 @@ dependencies = [ [[package]] name = "sp-staking" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", @@ -8609,7 +8734,7 @@ dependencies = [ [[package]] name = "sp-state-machine" version = "0.35.0" 
-source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "hash-db", "log", @@ -8629,16 +8754,16 @@ dependencies = [ [[package]] name = "sp-statement-store" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ - "aes-gcm 0.10.2", + "aes-gcm", "curve25519-dalek 4.1.1", "ed25519-dalek", "hkdf", "parity-scale-codec", "rand", "scale-info", - "sha2 0.10.7", + "sha2 0.10.8", "sp-api", "sp-application-crypto", "sp-core", @@ -8647,7 +8772,7 @@ dependencies = [ "sp-runtime", "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "thiserror", - "x25519-dalek 2.0.0", + "x25519-dalek 2.0.1", ] [[package]] @@ -8659,17 +8784,17 @@ checksum = "53458e3c57df53698b3401ec0934bea8e8cfce034816873c0b0abbd83d7bac0d" [[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" [[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" [[package]] name = "sp-storage" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "impl-serde", "parity-scale-codec", @@ 
-8681,7 +8806,7 @@ dependencies = [ [[package]] name = "sp-storage" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "impl-serde", "parity-scale-codec", @@ -8693,7 +8818,7 @@ dependencies = [ [[package]] name = "sp-timestamp" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "async-trait", "parity-scale-codec", @@ -8705,7 +8830,7 @@ dependencies = [ [[package]] name = "sp-tracing" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "parity-scale-codec", "tracing", @@ -8716,7 +8841,7 @@ dependencies = [ [[package]] name = "sp-tracing" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "parity-scale-codec", "tracing", @@ -8727,9 +8852,9 @@ dependencies = [ [[package]] name = "sp-trie" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ - "ahash 0.8.7", + "ahash 0.8.11", "hash-db", "lazy_static", "memory-db", @@ -8750,7 +8875,7 @@ dependencies = [ [[package]] name = "sp-version" version = "29.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "impl-serde", "parity-scale-codec", @@ -8767,18 +8892,18 @@ dependencies = [ [[package]] name = "sp-version-proc-macro" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "parity-scale-codec", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] name = "sp-wasm-interface" version = "20.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "anyhow", "impl-trait-for-tuples", @@ -8790,7 +8915,7 @@ dependencies = [ [[package]] name = "sp-wasm-interface" version = "20.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "anyhow", "impl-trait-for-tuples", @@ -8802,7 +8927,7 @@ dependencies = [ [[package]] name = "sp-weights" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "bounded-collections", "parity-scale-codec", @@ -8827,9 +8952,9 @@ checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" [[package]] name = "spki" -version = "0.7.2" +version = "0.7.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", "der", @@ -8837,13 +8962,13 @@ dependencies = [ [[package]] name = "ss58-registry" -version = "1.41.0" +version = "1.47.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfc443bad666016e012538782d9e3006213a7db43e9fb1dda91657dc06a6fa08" +checksum = "4743ce898933fbff7bbf414f497c459a782d496269644b3d650a398ae6a487ba" dependencies = [ "Inflector", "num-format", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "serde", "serde_json", @@ -8859,9 +8984,9 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "staging-xcm" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ - "array-bytes 6.1.0", + "array-bytes 6.2.2", "bounded-collections", "derivative", "environmental", @@ -8877,7 +9002,7 @@ dependencies = [ [[package]] name = "staging-xcm-builder" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "frame-support", "frame-system", @@ -8899,7 +9024,7 @@ dependencies = [ [[package]] name = "staging-xcm-executor" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "environmental", "frame-benchmarking", @@ -8954,7 +9079,7 @@ 
checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" dependencies = [ "heck 0.3.3", "proc-macro-error", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -8974,7 +9099,7 @@ version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d8cec3501a5194c432b2b7976db6b7d10ec95c253208b45f83f7136aa985e29" dependencies = [ - "strum_macros 0.26.1", + "strum_macros 0.26.2", ] [[package]] @@ -8984,7 +9109,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "rustversion", "syn 1.0.109", @@ -8992,33 +9117,33 @@ dependencies = [ [[package]] name = "strum_macros" -version = "0.26.1" +version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a3417fc93d76740d974a01654a09777cb500428cc874ca9f45edfe0c4d4cd18" +checksum = "c6cf59daf282c0a494ba14fd21610a0325f9f90ec9d1231dea26bcb1d696c946" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "rustversion", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] name = "substrate-bip39" version = "0.4.7" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "hmac 0.12.1", "pbkdf2", "schnorrkel 0.11.4", - "sha2 0.10.7", + "sha2 0.10.8", "zeroize", ] [[package]] name = "substrate-prometheus-endpoint" version = "0.17.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "hyper", "log", @@ -9132,9 +9257,9 @@ 
checksum = "2d67a5a62ba6e01cb2192ff309324cb4875d0c451d55fe2319433abe7a05a8ee" [[package]] name = "subtle" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "subtle-ng" @@ -9157,7 +9282,7 @@ dependencies = [ "futures", "hex", "impl-serde", - "jsonrpsee 0.20.1", + "jsonrpsee 0.20.3", "parity-scale-codec", "primitive-types", "scale-bits", @@ -9184,13 +9309,13 @@ dependencies = [ "frame-metadata 16.0.0", "heck 0.4.1", "hex", - "jsonrpsee 0.20.1", + "jsonrpsee 0.20.3", "parity-scale-codec", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "scale-info", "subxt-metadata", - "syn 2.0.53", + "syn 2.0.57", "thiserror", "tokio", ] @@ -9218,10 +9343,10 @@ version = "0.32.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "12e8be9ab6fe88b8c13edbe15911e148482cfb905a8b8d5b8d766a64c54be0bd" dependencies = [ - "darling 0.20.3", + "darling 0.20.8", "proc-macro-error", "subxt-codegen", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -9254,18 +9379,18 @@ version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.53" +version = "2.0.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7383cd0e49fff4b6b90ca5670bfd3e9d6a733b3f90c686605aa7eec8c4996032" +checksum = "11a6ae1e52eb25aab8f3fb9fca13be982a373b8f1157ca14b897a825ba4a2d35" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "unicode-ident", ] @@ -9276,7 +9401,7 @@ version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", "unicode-xid 0.2.4", @@ -9326,9 +9451,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "target-lexicon" -version = "0.12.10" +version = "0.12.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d2faeef5759ab89935255b1a4cd98e0baf99d1085e37d36599c625dac49ae8e" +checksum = "e1fc403891a21bcfb7c37834ba66a547a8f402146eba7265b5a6d88059c9ff2f" [[package]] name = "tempfile" @@ -9337,16 +9462,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ "cfg-if 1.0.0", - "fastrand 2.0.1", - "rustix 0.38.31", + "fastrand 2.0.2", + "rustix 0.38.32", "windows-sys 0.52.0", ] [[package]] name = "termcolor" -version = "1.2.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" dependencies = [ "winapi-util", ] @@ -9377,22 +9502,22 @@ dependencies = [ [[package]] name = "thiserror-core" -version = "1.0.38" +version = "1.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d97345f6437bb2004cd58819d8a9ef8e36cdd7661c2abc4bbde0a7c40d9f497" +checksum = "c001ee18b7e5e3f62cbf58c7fe220119e68d902bb7443179c0c8aef30090e999" dependencies = [ "thiserror-core-impl", ] [[package]] name = "thiserror-core-impl" -version = "1.0.38" +version = "1.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10ac1c5050e43014d16b2f94d0d2ce79e65ffdd8b38d8048f9c8f6a8a6da62ac" +checksum = "e4c60d69f36615a077cc7663b9cb8e42275722d23e58a7fa3d2c7f2915d09d04" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", 
"quote 1.0.35", - "syn 1.0.109", + "syn 2.0.57", ] [[package]] @@ -9401,16 +9526,16 @@ version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] name = "thread_local" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" dependencies = [ "cfg-if 1.0.0", "once_cell", @@ -9485,8 +9610,8 @@ dependencies = [ "mio", "num_cpus", "parking_lot 0.12.1", - "pin-project-lite 0.2.12", - "socket2 0.5.5", + "pin-project-lite 0.2.14", + "socket2 0.5.6", "tokio-macros", "windows-sys 0.48.0", ] @@ -9497,9 +9622,9 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -9508,33 +9633,44 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls 0.21.5", + "rustls 0.21.10", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" +dependencies = [ + "rustls 0.22.3", + "rustls-pki-types", "tokio", ] [[package]] name = "tokio-stream" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +checksum = 
"267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" dependencies = [ "futures-core", - "pin-project-lite 0.2.12", + "pin-project-lite 0.2.14", "tokio", "tokio-util", ] [[package]] name = "tokio-util" -version = "0.7.8" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" dependencies = [ "bytes", "futures-core", "futures-io", "futures-sink", - "pin-project-lite 0.2.12", + "pin-project-lite 0.2.14", "tokio", "tracing", ] @@ -9550,14 +9686,14 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.11" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af06656561d28735e9c1cd63dfd57132c8155426aa6af24f36a00a351f88c48e" +checksum = "e9dd1545e8208b4a5af1aa9bbd0b4cf7e9ea08fabc5d0a5c67fcaafa17433aa3" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.7", + "toml_edit 0.22.9", ] [[package]] @@ -9571,33 +9707,33 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.19.14" +version = "0.20.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a" +checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.6", "toml_datetime", - "winnow 0.5.0", + "winnow 0.5.40", ] [[package]] name = "toml_edit" -version = "0.21.0" +version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34d383cd00a163b4a5b85053df514d45bc330f6de7737edfe0a93311d1eaa03" +checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.6", "toml_datetime", - "winnow 0.5.0", + "winnow 0.5.40", ] [[package]] name = "toml_edit" -version = "0.22.7" +version = 
"0.22.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18769cd1cec395d70860ceb4d932812a0b4d06b1a4bb336745a4d21b9496e992" +checksum = "8e40bb779c5187258fd7aad0eb68cb8706a0a81fa712fbea808ab43c4b8374c4" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.6", "serde", "serde_spanned", "toml_datetime", @@ -9613,7 +9749,7 @@ dependencies = [ "futures-core", "futures-util", "pin-project", - "pin-project-lite 0.2.12", + "pin-project-lite 0.2.14", "tower-layer", "tower-service", "tracing", @@ -9633,26 +9769,25 @@ checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.37" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ - "cfg-if 1.0.0", "log", - "pin-project-lite 0.2.12", + "pin-project-lite 0.2.14", "tracing-attributes", "tracing-core", ] [[package]] name = "tracing-attributes" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -9677,12 +9812,12 @@ dependencies = [ [[package]] name = "tracing-log" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" dependencies = [ - "lazy_static", "log", + "once_cell", "tracing-core", ] @@ -9758,7 +9893,7 @@ dependencies = [ "lazy_static", "rand", "smallvec", - "socket2 0.4.9", + "socket2 
0.4.10", "thiserror", "tinyvec", "tokio", @@ -9788,9 +9923,9 @@ dependencies = [ [[package]] name = "try-lock" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "tt-call" @@ -9804,7 +9939,7 @@ version = "1.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" dependencies = [ - "cfg-if 1.0.0", + "cfg-if 0.1.10", "digest 0.10.7", "rand", "static_assertions", @@ -9812,9 +9947,9 @@ dependencies = [ [[package]] name = "typenum" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "uint" @@ -9830,15 +9965,15 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.13" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" [[package]] name = "unicode-ident" -version = "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-normalization" @@ -9851,15 +9986,15 @@ dependencies = [ [[package]] name = "unicode-segmentation" -version = "1.10.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" +checksum = 
"d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" [[package]] name = "unicode-width" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" [[package]] name = "unicode-xid" @@ -9873,16 +10008,6 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" -[[package]] -name = "universal-hash" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f214e8f697e925001e66ec2c6e37a4ef93f0f78c2eed7814394e10c62025b05" -dependencies = [ - "generic-array 0.14.7", - "subtle 2.4.1", -] - [[package]] name = "universal-hash" version = "0.5.1" @@ -9890,14 +10015,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" dependencies = [ "crypto-common", - "subtle 2.4.1", + "subtle 2.5.0", ] [[package]] name = "unsigned-varint" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d86a8dc7f45e4c1b0d30e43038c38f274e77af056aa5f74b93c2cf9eb3c1c836" +checksum = "6889a77d49f1f013504cec6bf97a2c730394adedaeb1deb5ea08949a50541105" dependencies = [ "asynchronous-codec", "bytes", @@ -9911,14 +10036,20 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + [[package]] name = "url" -version = "2.4.0" +version = "2.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" dependencies = [ "form_urlencoded", - "idna 0.4.0", + "idna 0.5.0", "percent-encoding", ] @@ -9936,9 +10067,9 @@ checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" [[package]] name = "value-bag" -version = "1.7.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126e423afe2dd9ac52142e7e9d5ce4135d7e13776c529d27fd6bc49f19e3280b" +checksum = "74797339c3b98616c009c7c3eb53a0ce41e85c8ec66bd3db96ed132d20cfdee8" [[package]] name = "vcpkg" @@ -9982,7 +10113,7 @@ dependencies = [ "rand", "rand_chacha", "rand_core 0.6.4", - "sha2 0.10.7", + "sha2 0.10.8", "sha3", "thiserror", "zeroize", @@ -9990,15 +10121,15 @@ dependencies = [ [[package]] name = "waker-fn" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" +checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690" [[package]] name = "walkdir" -version = "2.3.3" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", @@ -10027,9 +10158,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.87" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" dependencies = [ "cfg-if 1.0.0", "wasm-bindgen-macro", @@ 
-10037,24 +10168,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.87" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" dependencies = [ "bumpalo", "log", "once_cell", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.37" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" +checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -10064,9 +10195,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.87" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" dependencies = [ "quote 1.0.35", "wasm-bindgen-macro-support", @@ -10074,22 +10205,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.87" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.87" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" [[package]] name = "wasm-instrument" @@ -10131,9 +10262,9 @@ dependencies = [ [[package]] name = "wasmi_arena" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "401c1f35e413fac1846d4843745589d9ec678977ab35a384db8ae7830525d468" +checksum = "104a7f73be44570cac297b3035d76b169d6599637631cf37a1703326a0727073" [[package]] name = "wasmi_core" @@ -10210,14 +10341,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c86437fa68626fe896e5afc69234bb2b5894949083586535f200385adfd71213" dependencies = [ "anyhow", - "base64 0.21.2", + "base64 0.21.7", "bincode", "directories-next", "file-per-thread-logger", "log", - "rustix 0.36.15", + "rustix 0.36.17", "serde", - "sha2 0.10.7", + "sha2 0.10.8", "toml 0.5.11", "windows-sys 0.45.0", "zstd 0.11.2+zstd.1.5.2", @@ -10235,7 +10366,7 @@ dependencies = [ "cranelift-frontend", "cranelift-native", "cranelift-wasm", - "gimli", + "gimli 0.27.3", "log", "object 0.30.4", "target-lexicon", @@ -10254,7 +10385,7 @@ dependencies = [ "anyhow", "cranelift-codegen", "cranelift-native", - "gimli", + "gimli 0.27.3", "object 0.30.4", "target-lexicon", "wasmtime-environ", @@ -10268,7 +10399,7 @@ checksum = "a990198cee4197423045235bf89d3359e69bd2ea031005f4c2d901125955c949" dependencies = [ "anyhow", "cranelift-entity", - "gimli", + "gimli 0.27.3", "indexmap 1.9.3", "log", "object 0.30.4", @@ -10290,7 +10421,7 @@ dependencies = [ "bincode", "cfg-if 1.0.0", "cpp_demangle", - "gimli", + "gimli 0.27.3", "log", "object 0.30.4", "rustc-demangle", @@ -10311,7 +10442,7 @@ checksum = "6e0554b84c15a27d76281d06838aed94e13a77d7bf604bbbaf548aa20eb93846" dependencies = [ "object 0.30.4", "once_cell", - "rustix 0.36.15", + "rustix 0.36.17", ] [[package]] @@ -10339,10 +10470,10 @@ dependencies = [ "log", "mach", "memfd", - "memoffset 
0.8.0", + "memoffset", "paste", "rand", - "rustix 0.36.15", + "rustix 0.36.17", "wasmtime-asm-macros", "wasmtime-environ", "wasmtime-jit-debug", @@ -10363,9 +10494,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.64" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" dependencies = [ "js-sys", "wasm-bindgen", @@ -10373,12 +10504,12 @@ dependencies = [ [[package]] name = "webpki" -version = "0.22.2" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07ecc0cd7cac091bf682ec5efa18b1cff79d617b84181f38b3951dbe135f607f" +checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" dependencies = [ - "ring 0.16.20", - "untrusted", + "ring 0.17.8", + "untrusted 0.9.0", ] [[package]] @@ -10392,20 +10523,21 @@ dependencies = [ [[package]] name = "which" -version = "4.4.0" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" dependencies = [ "either", - "libc", + "home", "once_cell", + "rustix 0.38.32", ] [[package]] name = "wide" -version = "0.7.11" +version = "0.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa469ffa65ef7e0ba0f164183697b89b854253fd31aeb92358b7b6155177d62f" +checksum = "89beec544f246e679fc25490e3f8e08003bc4bf612068f325120dad4cea02c1c" dependencies = [ "bytemuck", "safe_arch", @@ -10435,9 +10567,9 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" dependencies = [ "winapi", ] @@ -10450,34 +10582,31 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows" -version = "0.34.0" +version = "0.51.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45296b64204227616fdbf2614cefa4c236b98ee64dfaaaa435207ed99fe7829f" +checksum = "ca229916c5ee38c2f2bc1e9d8f04df975b4bd93f9955dc69fabb5d91270045c9" dependencies = [ - "windows_aarch64_msvc 0.34.0", - "windows_i686_gnu 0.34.0", - "windows_i686_msvc 0.34.0", - "windows_x86_64_gnu 0.34.0", - "windows_x86_64_msvc 0.34.0", + "windows-core 0.51.1", + "windows-targets 0.48.5", ] [[package]] name = "windows" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" dependencies = [ - "windows-targets 0.48.1", + "windows-core 0.52.0", + "windows-targets 0.52.4", ] [[package]] -name = "windows" -version = "0.52.0" +name = "windows-core" +version = "0.51.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" +checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64" dependencies = [ - "windows-core", - "windows-targets 0.52.0", + "windows-targets 0.48.5", ] [[package]] @@ -10486,7 +10615,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.52.0", + "windows-targets 0.52.4", ] [[package]] @@ -10504,7 +10633,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets 0.48.1", + "windows-targets 0.48.5", ] [[package]] @@ -10513,7 +10642,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.0", + "windows-targets 0.52.4", ] [[package]] @@ -10533,32 +10662,32 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.48.1" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ - "windows_aarch64_gnullvm 0.48.0", - "windows_aarch64_msvc 0.48.0", - "windows_i686_gnu 0.48.0", - "windows_i686_msvc 0.48.0", - "windows_x86_64_gnu 0.48.0", - "windows_x86_64_gnullvm 0.48.0", - "windows_x86_64_msvc 0.48.0", + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", ] [[package]] name = "windows-targets" -version = "0.52.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" dependencies = [ - "windows_aarch64_gnullvm 0.52.0", - "windows_aarch64_msvc 0.52.0", - "windows_i686_gnu 0.52.0", - "windows_i686_msvc 0.52.0", - "windows_x86_64_gnu 0.52.0", - "windows_x86_64_gnullvm 0.52.0", - "windows_x86_64_msvc 0.52.0", + "windows_aarch64_gnullvm 0.52.4", + "windows_aarch64_msvc 0.52.4", + "windows_i686_gnu 0.52.4", + "windows_i686_msvc 0.52.4", + "windows_x86_64_gnu 0.52.4", + "windows_x86_64_gnullvm 0.52.4", + 
"windows_x86_64_msvc 0.52.4", ] [[package]] @@ -10569,21 +10698,15 @@ checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.34.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17cffbe740121affb56fad0fc0e421804adf0ae00891205213b5cecd30db881d" +checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" [[package]] name = "windows_aarch64_msvc" @@ -10593,21 +10716,15 @@ checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" [[package]] name = "windows_aarch64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" - -[[package]] -name = "windows_i686_gnu" -version = "0.34.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2564fde759adb79129d9b4f54be42b32c89970c18ebf93124ca8870a498688ed" +checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" [[package]] name = "windows_i686_gnu" @@ -10617,21 +10734,15 
@@ checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" [[package]] name = "windows_i686_gnu" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" - -[[package]] -name = "windows_i686_msvc" -version = "0.34.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cd9d32ba70453522332c14d38814bceeb747d80b3958676007acadd7e166956" +checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" [[package]] name = "windows_i686_msvc" @@ -10641,21 +10752,15 @@ checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" [[package]] name = "windows_i686_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.34.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfce6deae227ee8d356d19effc141a509cc503dfd1f850622ec4b0f84428e1f4" +checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" [[package]] name = "windows_x86_64_gnu" @@ -10665,15 +10770,15 @@ checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" 
[[package]] name = "windows_x86_64_gnu" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" +checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" [[package]] name = "windows_x86_64_gnullvm" @@ -10683,21 +10788,15 @@ checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.34.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d19538ccc21819d01deaf88d6a17eae6596a12e9aafdbb97916fb49896d89de9" +checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" [[package]] name = "windows_x86_64_msvc" @@ -10707,21 +10806,21 @@ checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" [[package]] name = "windows_x86_64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +checksum = 
"ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" +checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" [[package]] name = "winnow" -version = "0.5.0" +version = "0.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fac9742fd1ad1bd9643b991319f72dd031016d44b77039a26977eb667141e7" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" dependencies = [ "memchr", ] @@ -10767,9 +10866,9 @@ dependencies = [ [[package]] name = "x25519-dalek" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb66477291e7e8d2b0ff1bcb900bf29489a9692816d79874bea351e7a8b6de96" +checksum = "c7e468321c81fb07fa7f4c636c3972b9100f0346e5b6a9f2bd0603a52f7ed277" dependencies = [ "curve25519-dalek 4.1.1", "rand_core 0.6.4", @@ -10798,12 +10897,12 @@ dependencies = [ [[package]] name = "xcm-procedural" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#9d2963c29d9b7ea949851a166e0cb2792fc66fff" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ "Inflector", - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -10850,9 +10949,9 @@ version = "0.7.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -10870,9 +10969,9 @@ version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ - "proc-macro2 1.0.76", + "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.57", ] [[package]] @@ -10890,7 +10989,7 @@ version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a27595e173641171fc74a1232b7b1c7a7cb6e18222c11e9dfb9888fa424c53c" dependencies = [ - "zstd-safe 6.0.5+zstd.1.5.4", + "zstd-safe 6.0.6", ] [[package]] @@ -10905,9 +11004,9 @@ dependencies = [ [[package]] name = "zstd-safe" -version = "6.0.5+zstd.1.5.4" +version = "6.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d56d9e60b4b1758206c238a10165fbcae3ca37b01744e394c463463f6529d23b" +checksum = "ee98ffd0b48ee95e6c5168188e44a54550b1564d9d530ee21d5f0eaed1069581" dependencies = [ "libc", "zstd-sys", @@ -10915,11 +11014,10 @@ dependencies = [ [[package]] name = "zstd-sys" -version = "2.0.8+zstd.1.5.5" +version = "2.0.10+zstd.1.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5556e6ee25d32df2586c098bbfa278803692a20d0ab9565e049480d52707ec8c" +checksum = "c253a4914af5bafc8fa8c86ee400827e83cf6ec01195ec1f1ed8441bf00d65aa" dependencies = [ "cc", - "libc", "pkg-config", ] diff --git a/Dockerfile b/Dockerfile index 99831af41..ed18e4abe 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,7 +8,7 @@ # # See the `deployments/README.md` for all the available `PROJECT` values. 
-FROM docker.io/paritytech/bridges-ci:production as builder +FROM docker.io/paritytech/ci-unified:latest as builder USER root WORKDIR /parity-bridges-common diff --git a/README.md b/README.md index a2ce213d2..8bfa39841 100644 --- a/README.md +++ b/README.md @@ -38,10 +38,10 @@ cargo test --all ``` Also you can build the repo with [Parity CI Docker -image](https://github.com/paritytech/scripts/tree/master/dockerfiles/bridges-ci): +image](https://github.com/paritytech/scripts/tree/master/dockerfiles/ci-unified): ```bash -docker pull paritytech/bridges-ci:production +docker pull paritytech/ci-unified:latest mkdir ~/cache chown 1000:1000 ~/cache #processes in the container runs as "nonroot" user with UID 1000 docker run --rm -it -w /shellhere/parity-bridges-common \ @@ -49,7 +49,7 @@ docker run --rm -it -w /shellhere/parity-bridges-common \ -v "$(pwd)":/shellhere/parity-bridges-common \ -e CARGO_HOME=/cache/cargo/ \ -e SCCACHE_DIR=/cache/sccache/ \ - -e CARGO_TARGET_DIR=/cache/target/ paritytech/bridges-ci:production cargo build --all + -e CARGO_TARGET_DIR=/cache/target/ paritytech/ci-unified:latest cargo build --all #artifacts can be found in ~/cache/target ``` diff --git a/bin/runtime-common/Cargo.toml b/bin/runtime-common/Cargo.toml index d71e9f6ba..6304c83b9 100644 --- a/bin/runtime-common/Cargo.toml +++ b/bin/runtime-common/Cargo.toml @@ -11,7 +11,7 @@ license = "GPL-3.0-or-later WITH Classpath-exception-2.0" workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } hash-db = { version = "0.16.0", default-features = false } log = { workspace = true } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } diff --git a/chains/chain-asset-hub-rococo/Cargo.toml b/chains/chain-asset-hub-rococo/Cargo.toml index ae1f05e0e..660f0f4db 100644 --- 
a/chains/chain-asset-hub-rococo/Cargo.toml +++ b/chains/chain-asset-hub-rococo/Cargo.toml @@ -5,12 +5,13 @@ version = "0.4.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } # Substrate Dependencies diff --git a/chains/chain-asset-hub-westend/Cargo.toml b/chains/chain-asset-hub-westend/Cargo.toml index 14c049f13..4022258ac 100644 --- a/chains/chain-asset-hub-westend/Cargo.toml +++ b/chains/chain-asset-hub-westend/Cargo.toml @@ -5,12 +5,13 @@ version = "0.3.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } # Substrate Dependencies diff --git a/chains/chain-bridge-hub-cumulus/Cargo.toml b/chains/chain-bridge-hub-cumulus/Cargo.toml index 82c87d8bc..b87b5fefd 100644 --- a/chains/chain-bridge-hub-cumulus/Cargo.toml +++ b/chains/chain-bridge-hub-cumulus/Cargo.toml @@ -5,6 +5,7 @@ version = "0.7.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true diff --git a/chains/chain-bridge-hub-kusama/Cargo.toml b/chains/chain-bridge-hub-kusama/Cargo.toml index 2075fe254..71ee785d4 100644 --- a/chains/chain-bridge-hub-kusama/Cargo.toml +++ b/chains/chain-bridge-hub-kusama/Cargo.toml @@ -5,6 +5,7 @@ 
version = "0.6.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true diff --git a/chains/chain-bridge-hub-polkadot/Cargo.toml b/chains/chain-bridge-hub-polkadot/Cargo.toml index edef6e612..dd4729673 100644 --- a/chains/chain-bridge-hub-polkadot/Cargo.toml +++ b/chains/chain-bridge-hub-polkadot/Cargo.toml @@ -5,6 +5,7 @@ version = "0.6.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true diff --git a/chains/chain-bridge-hub-rococo/Cargo.toml b/chains/chain-bridge-hub-rococo/Cargo.toml index 0370fa1c6..a8e0003ee 100644 --- a/chains/chain-bridge-hub-rococo/Cargo.toml +++ b/chains/chain-bridge-hub-rococo/Cargo.toml @@ -5,6 +5,7 @@ version = "0.7.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true diff --git a/chains/chain-bridge-hub-westend/Cargo.toml b/chains/chain-bridge-hub-westend/Cargo.toml index ea452d89d..09bf743c6 100644 --- a/chains/chain-bridge-hub-westend/Cargo.toml +++ b/chains/chain-bridge-hub-westend/Cargo.toml @@ -5,6 +5,7 @@ version = "0.3.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true diff --git a/chains/chain-kusama/Cargo.toml b/chains/chain-kusama/Cargo.toml index 56a4386af..2a59937da 100644 --- a/chains/chain-kusama/Cargo.toml +++ b/chains/chain-kusama/Cargo.toml @@ -5,6 +5,7 @@ version = "0.5.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true diff --git a/chains/chain-polkadot-bulletin/Cargo.toml b/chains/chain-polkadot-bulletin/Cargo.toml index 121f0c57c..c20a94cfd 100644 --- 
a/chains/chain-polkadot-bulletin/Cargo.toml +++ b/chains/chain-polkadot-bulletin/Cargo.toml @@ -5,12 +5,13 @@ version = "0.4.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } # Bridge Dependencies diff --git a/chains/chain-polkadot/Cargo.toml b/chains/chain-polkadot/Cargo.toml index 5faee3bd3..f942e4fe8 100644 --- a/chains/chain-polkadot/Cargo.toml +++ b/chains/chain-polkadot/Cargo.toml @@ -5,6 +5,7 @@ version = "0.5.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true diff --git a/chains/chain-rococo/Cargo.toml b/chains/chain-rococo/Cargo.toml index 401611beb..a86e87551 100644 --- a/chains/chain-rococo/Cargo.toml +++ b/chains/chain-rococo/Cargo.toml @@ -5,6 +5,7 @@ version = "0.6.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true diff --git a/chains/chain-westend/Cargo.toml b/chains/chain-westend/Cargo.toml index d697e2b36..6f5c48139 100644 --- a/chains/chain-westend/Cargo.toml +++ b/chains/chain-westend/Cargo.toml @@ -5,6 +5,7 @@ version = "0.3.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true diff --git a/modules/beefy/Cargo.toml b/modules/beefy/Cargo.toml index ab3447f65..d0e5204dc 100644 --- a/modules/beefy/Cargo.toml +++ b/modules/beefy/Cargo.toml @@ -2,16 +2,17 @@ name = "pallet-bridge-beefy" 
version = "0.1.0" description = "Module implementing BEEFY on-chain light client used for bridging consensus of substrate-based chains." -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true publish = false [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } log = { workspace = true } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } serde = { optional = true, workspace = true } @@ -34,7 +35,7 @@ sp-consensus-beefy = { git = "https://github.com/paritytech/polkadot-sdk", branc mmr-lib = { package = "ckb-merkle-mountain-range", version = "0.3.2" } pallet-beefy-mmr = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } pallet-mmr = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -rand = "0.8" +rand = "0.8.5" sp-io = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } bp-test-utils = { path = "../../primitives/test-utils" } diff --git a/modules/grandpa/Cargo.toml b/modules/grandpa/Cargo.toml index 2388767f9..b3deefc87 100644 --- a/modules/grandpa/Cargo.toml +++ b/modules/grandpa/Cargo.toml @@ -5,6 +5,7 @@ description = "Module implementing GRANDPA on-chain light client used for bridgi authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true @@ -12,7 +13,7 @@ workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } 
finality-grandpa = { version = "0.16.2", default-features = false } log = { workspace = true } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } diff --git a/modules/messages/Cargo.toml b/modules/messages/Cargo.toml index ebe7db408..24ad437be 100644 --- a/modules/messages/Cargo.toml +++ b/modules/messages/Cargo.toml @@ -5,12 +5,13 @@ version = "0.7.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } log = { workspace = true } num-traits = { version = "0.2", default-features = false } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } diff --git a/modules/parachains/Cargo.toml b/modules/parachains/Cargo.toml index 068e9a907..6352b21b8 100644 --- a/modules/parachains/Cargo.toml +++ b/modules/parachains/Cargo.toml @@ -5,12 +5,13 @@ description = "Module that allows bridged relay chains to exchange information o authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } log = { workspace = true } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } diff --git a/modules/relayers/Cargo.toml b/modules/relayers/Cargo.toml index 9073b9bbe..ae57e36f7 100644 --- a/modules/relayers/Cargo.toml +++ b/modules/relayers/Cargo.toml @@ -5,12 +5,13 @@ version = "0.7.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" 
+repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } log = { workspace = true } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } diff --git a/modules/xcm-bridge-hub-router/Cargo.toml b/modules/xcm-bridge-hub-router/Cargo.toml index 89c02f706..af130c5e7 100644 --- a/modules/xcm-bridge-hub-router/Cargo.toml +++ b/modules/xcm-bridge-hub-router/Cargo.toml @@ -5,12 +5,13 @@ version = "0.5.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } log = { workspace = true } scale-info = { version = "2.11.1", default-features = false, features = ["bit-vec", "derive", "serde"] } diff --git a/modules/xcm-bridge-hub/Cargo.toml b/modules/xcm-bridge-hub/Cargo.toml index 8654c65f6..d7e562530 100644 --- a/modules/xcm-bridge-hub/Cargo.toml +++ b/modules/xcm-bridge-hub/Cargo.toml @@ -5,12 +5,13 @@ version = "0.2.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } log = { workspace = true } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } diff --git a/primitives/beefy/Cargo.toml b/primitives/beefy/Cargo.toml index eef3dcf96..f1992e59b 100644 --- a/primitives/beefy/Cargo.toml +++ b/primitives/beefy/Cargo.toml @@ -2,16 +2,17 @@ 
name = "bp-beefy" description = "Primitives of pallet-bridge-beefy module." version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true publish = false [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["bit-vec", "derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["bit-vec", "derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["bit-vec", "derive"] } serde = { default-features = false, features = ["alloc", "derive"], workspace = true } diff --git a/primitives/header-chain/Cargo.toml b/primitives/header-chain/Cargo.toml index 8dc9a8ae7..f38d75454 100644 --- a/primitives/header-chain/Cargo.toml +++ b/primitives/header-chain/Cargo.toml @@ -5,12 +5,13 @@ version = "0.7.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } finality-grandpa = { version = "0.16.2", default-features = false } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } serde = { features = ["alloc", "derive"], workspace = true } diff --git a/primitives/messages/Cargo.toml b/primitives/messages/Cargo.toml index 93b0efa27..8bacff709 100644 --- a/primitives/messages/Cargo.toml +++ b/primitives/messages/Cargo.toml @@ -5,12 +5,13 @@ version = "0.7.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = 
"parity-scale-codec", version = "3.1.5", default-features = false, features = ["bit-vec", "derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["bit-vec", "derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["bit-vec", "derive"] } serde = { features = ["alloc", "derive"], workspace = true } diff --git a/primitives/parachains/Cargo.toml b/primitives/parachains/Cargo.toml index 610b7b336..1606dbfcd 100644 --- a/primitives/parachains/Cargo.toml +++ b/primitives/parachains/Cargo.toml @@ -5,12 +5,13 @@ version = "0.7.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } impl-trait-for-tuples = "0.2" scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } diff --git a/primitives/polkadot-core/Cargo.toml b/primitives/polkadot-core/Cargo.toml index 6c20d64da..b85586405 100644 --- a/primitives/polkadot-core/Cargo.toml +++ b/primitives/polkadot-core/Cargo.toml @@ -5,12 +5,13 @@ version = "0.7.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } parity-util-mem = { version = "0.12.0", optional = true } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } serde = { optional = true, features = ["derive"], workspace = true, default-features = 
true } diff --git a/primitives/relayers/Cargo.toml b/primitives/relayers/Cargo.toml index 441d24060..46bc034ef 100644 --- a/primitives/relayers/Cargo.toml +++ b/primitives/relayers/Cargo.toml @@ -5,12 +5,13 @@ version = "0.7.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["bit-vec", "derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["bit-vec", "derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["bit-vec", "derive"] } # Bridge Dependencies diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index c2b2a794b..258b57682 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -5,12 +5,13 @@ version = "0.7.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } hash-db = { version = "0.16.0", default-features = false } impl-trait-for-tuples = "0.2.2" log = { workspace = true } diff --git a/primitives/test-utils/Cargo.toml b/primitives/test-utils/Cargo.toml index 1b3ac9ee6..b46868a0a 100644 --- a/primitives/test-utils/Cargo.toml +++ b/primitives/test-utils/Cargo.toml @@ -5,6 +5,7 @@ description = "Utilities for testing substrate-based runtime bridge code" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true @@ -14,7 +15,7 @@ bp-header-chain = { path = "../header-chain", default-features = false } 
bp-parachains = { path = "../parachains", default-features = false } bp-polkadot-core = { path = "../polkadot-core", default-features = false } bp-runtime = { path = "../runtime", default-features = false } -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } ed25519-dalek = { version = "2.1", default-features = false } finality-grandpa = { version = "0.16.2", default-features = false } sp-application-crypto = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } diff --git a/primitives/xcm-bridge-hub-router/Cargo.toml b/primitives/xcm-bridge-hub-router/Cargo.toml index bc0ff5c60..c3fe409b6 100644 --- a/primitives/xcm-bridge-hub-router/Cargo.toml +++ b/primitives/xcm-bridge-hub-router/Cargo.toml @@ -5,12 +5,13 @@ version = "0.6.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["bit-vec", "derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["bit-vec", "derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["bit-vec", "derive"] } # Substrate Dependencies diff --git a/primitives/xcm-bridge-hub/Cargo.toml b/primitives/xcm-bridge-hub/Cargo.toml index 1a5bb742e..904307100 100644 --- a/primitives/xcm-bridge-hub/Cargo.toml +++ b/primitives/xcm-bridge-hub/Cargo.toml @@ -5,6 +5,7 @@ version = "0.2.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true diff --git a/relay-clients/client-bridge-hub-kusama/Cargo.toml b/relay-clients/client-bridge-hub-kusama/Cargo.toml index 
2fc45b5fb..b1bffa0f6 100644 --- a/relay-clients/client-bridge-hub-kusama/Cargo.toml +++ b/relay-clients/client-bridge-hub-kusama/Cargo.toml @@ -1,15 +1,16 @@ [package] name = "relay-bridge-hub-kusama-client" version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } #relay-substrate-client = { path = "../client-substrate" } subxt = { version = "0.32.1", default-features = false, features = ["native"] } diff --git a/relay-clients/client-bridge-hub-polkadot/Cargo.toml b/relay-clients/client-bridge-hub-polkadot/Cargo.toml index ea546cdd4..c7de55c89 100644 --- a/relay-clients/client-bridge-hub-polkadot/Cargo.toml +++ b/relay-clients/client-bridge-hub-polkadot/Cargo.toml @@ -1,15 +1,16 @@ [package] name = "relay-bridge-hub-polkadot-client" version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } diff --git a/relay-clients/client-bridge-hub-rococo/Cargo.toml b/relay-clients/client-bridge-hub-rococo/Cargo.toml index dba2aecf0..b354773a0 100644 --- a/relay-clients/client-bridge-hub-rococo/Cargo.toml +++ 
b/relay-clients/client-bridge-hub-rococo/Cargo.toml @@ -1,15 +1,16 @@ [package] name = "relay-bridge-hub-rococo-client" version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } diff --git a/relay-clients/client-bridge-hub-westend/Cargo.toml b/relay-clients/client-bridge-hub-westend/Cargo.toml index 410e096be..b7fc3bcb1 100644 --- a/relay-clients/client-bridge-hub-westend/Cargo.toml +++ b/relay-clients/client-bridge-hub-westend/Cargo.toml @@ -1,15 +1,16 @@ [package] name = "relay-bridge-hub-westend-client" version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } diff --git a/relay-clients/client-kusama/Cargo.toml b/relay-clients/client-kusama/Cargo.toml index a1dbb36e3..ea3e9f305 100644 --- a/relay-clients/client-kusama/Cargo.toml +++ b/relay-clients/client-kusama/Cargo.toml @@ -1,15 +1,16 @@ [package] name = "relay-kusama-client" version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = 
true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } diff --git a/relay-clients/client-polkadot-bulletin/Cargo.toml b/relay-clients/client-polkadot-bulletin/Cargo.toml index 8b3d42050..6a1b61525 100644 --- a/relay-clients/client-polkadot-bulletin/Cargo.toml +++ b/relay-clients/client-polkadot-bulletin/Cargo.toml @@ -1,15 +1,16 @@ [package] name = "relay-polkadot-bulletin-client" version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } diff --git a/relay-clients/client-polkadot/Cargo.toml b/relay-clients/client-polkadot/Cargo.toml index 440db33c9..969fc37a8 100644 --- a/relay-clients/client-polkadot/Cargo.toml +++ b/relay-clients/client-polkadot/Cargo.toml @@ -1,15 +1,16 @@ [package] name = "relay-polkadot-client" version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = 
"3.1.5", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } diff --git a/relay-clients/client-rococo/Cargo.toml b/relay-clients/client-rococo/Cargo.toml index ecdf33fdd..445707f5a 100644 --- a/relay-clients/client-rococo/Cargo.toml +++ b/relay-clients/client-rococo/Cargo.toml @@ -1,15 +1,16 @@ [package] name = "relay-rococo-client" version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = ["native"] } diff --git a/relay-clients/client-westend/Cargo.toml b/relay-clients/client-westend/Cargo.toml index 0f7188cc3..b69ddd990 100644 --- a/relay-clients/client-westend/Cargo.toml +++ b/relay-clients/client-westend/Cargo.toml @@ -1,15 +1,16 @@ [package] name = "relay-westend-client" version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true [dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } subxt = { version = "0.32.1", default-features = false, features = 
["native"] } diff --git a/relays/client-substrate/Cargo.toml b/relays/client-substrate/Cargo.toml index 51e3edefa..85ebce1f9 100644 --- a/relays/client-substrate/Cargo.toml +++ b/relays/client-substrate/Cargo.toml @@ -1,23 +1,24 @@ [package] name = "relay-substrate-client" version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true publish = false [lints] workspace = true [dependencies] -async-std = { version = "1.6.5", features = ["attributes"] } +async-std = { version = "1.9.0", features = ["attributes"] } async-trait = "0.1.79" -codec = { package = "parity-scale-codec", version = "3.1.5" } +codec = { package = "parity-scale-codec", version = "3.6.1" } futures = "0.3.30" -jsonrpsee = { version = "0.17", features = ["macros", "ws-client"] } +jsonrpsee = { version = "0.22", features = ["macros", "ws-client"] } log = { workspace = true } num-traits = "0.2" -rand = "0.8" +rand = "0.8.5" scale-info = { version = "2.11.1", features = ["derive"] } tokio = { version = "1.37", features = ["rt-multi-thread"] } thiserror = { workspace = true } diff --git a/relays/client-substrate/src/error.rs b/relays/client-substrate/src/error.rs index 257771b70..0b4466818 100644 --- a/relays/client-substrate/src/error.rs +++ b/relays/client-substrate/src/error.rs @@ -18,7 +18,7 @@ use crate::SimpleRuntimeVersion; use bp_polkadot_core::parachains::ParaId; -use jsonrpsee::core::Error as RpcError; +use jsonrpsee::core::ClientError as RpcError; use relay_utils::MaybeConnectionError; use sc_rpc_api::system::Health; use sp_core::storage::StorageKey; diff --git a/relays/client-substrate/src/rpc.rs b/relays/client-substrate/src/rpc.rs index 35ab08c0f..60c29cdeb 100644 --- a/relays/client-substrate/src/rpc.rs +++ b/relays/client-substrate/src/rpc.rs @@ -21,7 +21,7 @@ use async_trait::async_trait; use crate::{Chain, ChainWithGrandpa, 
TransactionStatusOf}; use jsonrpsee::{ - core::{client::Subscription, RpcResult}, + core::{client::Subscription, ClientError}, proc_macros::rpc, ws_client::WsClient, }; @@ -110,7 +110,9 @@ pub(crate) trait SubstrateState { #[async_trait] pub trait SubstrateFinalityClient { /// Subscribe to finality justifications. - async fn subscribe_justifications(client: &WsClient) -> RpcResult>; + async fn subscribe_justifications( + client: &WsClient, + ) -> Result, ClientError>; } /// RPC methods of Substrate `grandpa` namespace, that we are using. @@ -125,7 +127,9 @@ pub(crate) trait SubstrateGrandpa { pub struct SubstrateGrandpaFinalityClient; #[async_trait] impl SubstrateFinalityClient for SubstrateGrandpaFinalityClient { - async fn subscribe_justifications(client: &WsClient) -> RpcResult> { + async fn subscribe_justifications( + client: &WsClient, + ) -> Result, ClientError> { SubstrateGrandpaClient::::subscribe_justifications(client).await } } @@ -144,7 +148,9 @@ pub struct SubstrateBeefyFinalityClient; // TODO: Use `ChainWithBeefy` instead of `Chain` after #1606 is merged #[async_trait] impl SubstrateFinalityClient for SubstrateBeefyFinalityClient { - async fn subscribe_justifications(client: &WsClient) -> RpcResult> { + async fn subscribe_justifications( + client: &WsClient, + ) -> Result, ClientError> { SubstrateBeefyClient::::subscribe_justifications(client).await } } diff --git a/relays/equivocation/Cargo.toml b/relays/equivocation/Cargo.toml index 3b99e4349..e7146e05f 100644 --- a/relays/equivocation/Cargo.toml +++ b/relays/equivocation/Cargo.toml @@ -1,9 +1,10 @@ [package] name = "equivocation-detector" version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true description = "Equivocation detector" publish = false @@ -11,7 +12,7 @@ publish = false workspace = true [dependencies] -async-std = { version = "1.6.5", 
features = ["attributes"] } +async-std = { version = "1.9.0", features = ["attributes"] } async-trait = "0.1.79" bp-header-chain = { path = "../../primitives/header-chain" } finality-relay = { path = "../finality" } diff --git a/relays/finality/Cargo.toml b/relays/finality/Cargo.toml index 53d2ce579..5ee4b10fa 100644 --- a/relays/finality/Cargo.toml +++ b/relays/finality/Cargo.toml @@ -1,9 +1,10 @@ [package] name = "finality-relay" version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true description = "Finality proofs relay" publish = false @@ -11,7 +12,7 @@ publish = false workspace = true [dependencies] -async-std = "1.6.5" +async-std = "1.9.0" async-trait = "0.1.79" backoff = "0.4" bp-header-chain = { path = "../../primitives/header-chain" } diff --git a/relays/lib-substrate-relay/Cargo.toml b/relays/lib-substrate-relay/Cargo.toml index 76be5a92a..7e7e774d7 100644 --- a/relays/lib-substrate-relay/Cargo.toml +++ b/relays/lib-substrate-relay/Cargo.toml @@ -1,9 +1,10 @@ [package] name = "substrate-relay-helper" version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true publish = false [lints] @@ -13,7 +14,7 @@ workspace = true anyhow = "1.0" async-std = "1.9.0" async-trait = "0.1.79" -codec = { package = "parity-scale-codec", version = "3.1.5" } +codec = { package = "parity-scale-codec", version = "3.6.1" } futures = "0.3.30" hex = "0.4" log = { workspace = true } diff --git a/relays/messages/Cargo.toml b/relays/messages/Cargo.toml index b3f0a9fd1..8a411e508 100644 --- a/relays/messages/Cargo.toml +++ b/relays/messages/Cargo.toml @@ -1,16 +1,17 @@ [package] name = "messages-relay" version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2021" 
+authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true publish = false [lints] workspace = true [dependencies] -async-std = { version = "1.6.5", features = ["attributes"] } +async-std = { version = "1.9.0", features = ["attributes"] } async-trait = "0.1.79" env_logger = "0.11" futures = "0.3.30" diff --git a/relays/messages/src/message_race_loop.rs b/relays/messages/src/message_race_loop.rs index f28be7884..31341a9a0 100644 --- a/relays/messages/src/message_race_loop.rs +++ b/relays/messages/src/message_race_loop.rs @@ -313,7 +313,7 @@ where } fn nonces_to_submit(&self) -> Option> { - self.nonces_to_submit.as_ref().map(|(_, nonces, _)| nonces.clone()) + self.nonces_to_submit.clone().map(|(_, nonces, _)| nonces) } fn reset_nonces_to_submit(&mut self) { diff --git a/relays/parachains/Cargo.toml b/relays/parachains/Cargo.toml index f37e636a9..e691168e7 100644 --- a/relays/parachains/Cargo.toml +++ b/relays/parachains/Cargo.toml @@ -1,16 +1,17 @@ [package] name = "parachains-relay" version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2018" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true publish = false [lints] workspace = true [dependencies] -async-std = "1.6.5" +async-std = "1.9.0" async-trait = "0.1.79" futures = "0.3.30" log = { workspace = true } @@ -22,6 +23,6 @@ bp-polkadot-core = { path = "../../primitives/polkadot-core" } relay-substrate-client = { path = "../client-substrate" } [dev-dependencies] -codec = { package = "parity-scale-codec", version = "3.1.5" } +codec = { package = "parity-scale-codec", version = "3.6.1" } relay-substrate-client = { path = "../client-substrate", features = ["test-helpers"] } sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } diff --git a/relays/utils/Cargo.toml b/relays/utils/Cargo.toml index 13fda4773..8d9addb9b 
100644 --- a/relays/utils/Cargo.toml +++ b/relays/utils/Cargo.toml @@ -1,9 +1,10 @@ [package] name = "relay-utils" version = "0.1.0" -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true publish = false [lints] @@ -12,7 +13,7 @@ workspace = true [dependencies] ansi_term = "0.12" anyhow = "1.0" -async-std = "1.6.5" +async-std = "1.9.0" async-trait = "0.1.79" backoff = "0.4" isahc = "1.2" diff --git a/substrate-relay/Cargo.toml b/substrate-relay/Cargo.toml index 8f9489ab2..3a428099e 100644 --- a/substrate-relay/Cargo.toml +++ b/substrate-relay/Cargo.toml @@ -1,9 +1,10 @@ [package] name = "substrate-relay" version = "1.2.1" -authors = ["Parity Technologies "] -edition = "2021" +authors.workspace = true +edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository.workspace = true [lints] workspace = true @@ -12,7 +13,7 @@ workspace = true anyhow = "1.0" async-std = "1.9.0" async-trait = "0.1.79" -codec = { package = "parity-scale-codec", version = "3.1.5" } +codec = { package = "parity-scale-codec", version = "3.6.1" } env_logger = "0.11" futures = "0.3.30" hex = "0.4" diff --git a/tools/runtime-codegen/Cargo.lock b/tools/runtime-codegen/Cargo.lock index 0307d37cb..0a92d9c9b 100644 --- a/tools/runtime-codegen/Cargo.lock +++ b/tools/runtime-codegen/Cargo.lock @@ -104,9 +104,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.4" +version = "0.6.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ab91ebe16eb252986481c5b62f6098f3b698a45e34b5b98200cf20dd2484a44" +checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb" dependencies = [ "anstyle", "anstyle-parse", @@ -756,9 +756,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.4.7" +version = "4.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "ac495e00dcec98c83465d5ad66c5c4fabd652fd6686e7c6269b117e729a6f17b" +checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" dependencies = [ "clap_builder", "clap_derive", @@ -766,9 +766,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.4.7" +version = "4.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c77ed9a32a62e6ca27175d00d29d05ca32e396ea1eb5fb01d8256b669cec7663" +checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" dependencies = [ "anstream", "anstyle", @@ -778,11 +778,11 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.4.7" +version = "4.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" +checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" dependencies = [ - "heck", + "heck 0.5.0", "proc-macro2", "quote", "syn 2.0.38", @@ -790,9 +790,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" +checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" [[package]] name = "color-eyre" @@ -1738,6 +1738,12 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + [[package]] name = "hermit-abi" version = "0.3.3" @@ -3955,9 +3961,9 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "strsim" -version = "0.10.0" +version = "0.11.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" +checksum = "5ee073c9e4cd00e28217186dbe12796d692868f432bf2e97ee73bed0c56dfa01" [[package]] name = "subrpcer" @@ -4011,7 +4017,7 @@ version = "0.32.1" source = "git+https://github.com/paritytech/subxt?branch=master#40aca5ba65f1181e8496eb91615d73c0d3c01502" dependencies = [ "frame-metadata 16.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "heck", + "heck 0.4.1", "hex", "jsonrpsee", "parity-scale-codec", diff --git a/tools/runtime-codegen/Cargo.toml b/tools/runtime-codegen/Cargo.toml index 24fe717f3..3787d173a 100644 --- a/tools/runtime-codegen/Cargo.toml +++ b/tools/runtime-codegen/Cargo.toml @@ -5,14 +5,15 @@ description = "Tool for generating bridge runtime code from metadata" authors = ["Parity Technologies "] edition = "2021" license = "GPL-3.0-or-later WITH Classpath-exception-2.0" +repository = "https://github.com/paritytech/parity-bridges-common.git" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [workspace] [dependencies] -clap = { version = "4.4.6", features = ["derive", "cargo"] } -codec = { package = "parity-scale-codec", version = "3.1.5", default-features = false, features = ["derive"] } +clap = { version = "4.5.3", features = ["derive", "cargo"] } +codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } color-eyre = "0.6.1" proc-macro2 = "1.0.56" quote = { version = "1.0.33" } -- GitLab From bea13eab99af22f943b06397324c14fb2d7149e1 Mon Sep 17 00:00:00 2001 From: Serban Iorga Date: Tue, 2 Apr 2024 13:55:07 +0300 Subject: [PATCH 23/39] ckb-merkle-mountain-range -> 0.5.2 (#2911) --- Cargo.lock | 13 ++----------- modules/beefy/Cargo.toml | 2 +- modules/beefy/src/mock_chain.rs | 4 ++-- 3 files changed, 5 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5050cc63f..fc3fb032f 100644 
--- a/Cargo.lock +++ b/Cargo.lock @@ -1606,15 +1606,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "ckb-merkle-mountain-range" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f061f97d64fd1822664bdfb722f7ae5469a97b77567390f7442be5b5dc82a5b" -dependencies = [ - "cfg-if 0.1.10", -] - [[package]] name = "ckb-merkle-mountain-range" version = "0.5.2" @@ -5342,7 +5333,7 @@ dependencies = [ "bp-beefy", "bp-runtime", "bp-test-utils", - "ckb-merkle-mountain-range 0.3.2", + "ckb-merkle-mountain-range", "frame-support", "frame-system", "log", @@ -8584,7 +8575,7 @@ name = "sp-mmr-primitives" version = "26.0.0" source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" dependencies = [ - "ckb-merkle-mountain-range 0.5.2", + "ckb-merkle-mountain-range", "log", "parity-scale-codec", "scale-info", diff --git a/modules/beefy/Cargo.toml b/modules/beefy/Cargo.toml index d0e5204dc..2c552430c 100644 --- a/modules/beefy/Cargo.toml +++ b/modules/beefy/Cargo.toml @@ -32,7 +32,7 @@ sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" [dev-dependencies] sp-consensus-beefy = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -mmr-lib = { package = "ckb-merkle-mountain-range", version = "0.3.2" } +mmr-lib = { package = "ckb-merkle-mountain-range", version = "0.5.2" } pallet-beefy-mmr = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } pallet-mmr = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } rand = "0.8.5" diff --git a/modules/beefy/src/mock_chain.rs b/modules/beefy/src/mock_chain.rs index c4fa74915..c83907f83 100644 --- a/modules/beefy/src/mock_chain.rs +++ b/modules/beefy/src/mock_chain.rs @@ -290,10 +290,10 @@ pub struct BridgedMmrHashMerge; impl mmr_lib::Merge for BridgedMmrHashMerge { type Item = TestBridgedMmrNode; - fn merge(left: &Self::Item, right: &Self::Item) -> 
Self::Item { + fn merge(left: &Self::Item, right: &Self::Item) -> mmr_lib::Result { let mut concat = left.hash().as_ref().to_vec(); concat.extend_from_slice(right.hash().as_ref()); - TestBridgedMmrNode::Hash(TestBridgedMmrHashing::hash(&concat)) + Ok(TestBridgedMmrNode::Hash(TestBridgedMmrHashing::hash(&concat))) } } -- GitLab From cfe1e7ded5bbd640ed3c531908ac9362e435bb01 Mon Sep 17 00:00:00 2001 From: Serban Iorga Date: Tue, 9 Apr 2024 15:54:33 +0300 Subject: [PATCH 24/39] Backport changes from polakdot-sdk (#2920) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Migrate fee payment from `Currency` to `fungible` (#2292) Part of https://github.com/paritytech/polkadot-sdk/issues/226 Related https://github.com/paritytech/polkadot-sdk/issues/1833 - Deprecate `CurrencyAdapter` and introduce `FungibleAdapter` - Deprecate `ToStakingPot` and replace usage with `ResolveTo` - Required creating a new `StakingPotAccountId` struct that implements `TypedGet` for the staking pot account ID - Update parachain common utils `DealWithFees`, `ToAuthor` and `AssetsToBlockAuthor` implementations to use `fungible` - Update runtime XCM Weight Traders to use `ResolveTo` instead of `ToStakingPot` - Update runtime Transaction Payment pallets to use `FungibleAdapter` instead of `CurrencyAdapter` - [x] Blocked by https://github.com/paritytech/polkadot-sdk/pull/1296, needs the `Unbalanced::decrease_balance` fix (cherry picked from commit bda4e75ac49786a7246531cf729b25c208cd38e6) * Upgrade `trie-db` from `0.28.0` to `0.29.0` (#3982) - What does this PR do? 1. Upgrades `trie-db`'s version to the latest release. This release includes, among others, an implementation of `DoubleEndedIterator` for the `TrieDB` struct, allowing to iterate both backwards and forwards within the leaves of a trie. 2. Upgrades `trie-bench` to `0.39.0` for compatibility. 3. Upgrades `criterion` to `0.5.1` for compatibility. - Why are these changes needed? 
Besides keeping up with the upgrade of `trie-db`, this specifically adds the functionality of iterating back on the leafs of a trie, with `sp-trie`. In a project we're currently working on, this comes very handy to verify a Merkle proof that is the response to a challenge. The challenge is a random hash that (most likely) will not be an existing leaf in the trie. So the challenged user, has to provide a Merkle proof of the previous and next existing leafs in the trie, that surround the random challenged hash. Without having DoubleEnded iterators, we're forced to iterate until we find the first existing leaf, like so: ```rust // ************* VERIFIER (RUNTIME) ************* // Verify proof. This generates a partial trie based on the proof and // checks that the root hash matches the `expected_root`. let (memdb, root) = proof.to_memory_db(Some(&root)).unwrap(); let trie = TrieDBBuilder::>::new(&memdb, &root).build(); // Print all leaf node keys and values. println!("\nPrinting leaf nodes of partial tree..."); for key in trie.key_iter().unwrap() { if key.is_ok() { println!("Leaf node key: {:?}", key.clone().unwrap()); let val = trie.get(&key.unwrap()); if val.is_ok() { println!("Leaf node value: {:?}", val.unwrap()); } else { println!("Leaf node value: None"); } } } println!("RECONSTRUCTED TRIE {:#?}", trie); // Create an iterator over the leaf nodes. let mut iter = trie.iter().unwrap(); // First element with a value should be the previous existing leaf to the challenged hash. let mut prev_key = None; for element in &mut iter { if element.is_ok() { let (key, _) = element.unwrap(); prev_key = Some(key); break; } } assert!(prev_key.is_some()); // Since hashes are `Vec` ordered in big-endian, we can compare them directly. assert!(prev_key.unwrap() <= challenge_hash.to_vec()); // The next element should exist (meaning there is no other existing leaf between the // previous and next leaf) and it should be greater than the challenged hash. 
let next_key = iter.next().unwrap().unwrap().0; assert!(next_key >= challenge_hash.to_vec()); ``` With DoubleEnded iterators, we can avoid that, like this: ```rust // ************* VERIFIER (RUNTIME) ************* // Verify proof. This generates a partial trie based on the proof and // checks that the root hash matches the `expected_root`. let (memdb, root) = proof.to_memory_db(Some(&root)).unwrap(); let trie = TrieDBBuilder::>::new(&memdb, &root).build(); // Print all leaf node keys and values. println!("\nPrinting leaf nodes of partial tree..."); for key in trie.key_iter().unwrap() { if key.is_ok() { println!("Leaf node key: {:?}", key.clone().unwrap()); let val = trie.get(&key.unwrap()); if val.is_ok() { println!("Leaf node value: {:?}", val.unwrap()); } else { println!("Leaf node value: None"); } } } // println!("RECONSTRUCTED TRIE {:#?}", trie); println!("\nChallenged key: {:?}", challenge_hash); // Create an iterator over the leaf nodes. let mut double_ended_iter = trie.into_double_ended_iter().unwrap(); // First element with a value should be the previous existing leaf to the challenged hash. double_ended_iter.seek(&challenge_hash.to_vec()).unwrap(); let next_key = double_ended_iter.next_back().unwrap().unwrap().0; let prev_key = double_ended_iter.next_back().unwrap().unwrap().0; // Since hashes are `Vec` ordered in big-endian, we can compare them directly. println!("Prev key: {:?}", prev_key); assert!(prev_key <= challenge_hash.to_vec()); println!("Next key: {:?}", next_key); assert!(next_key >= challenge_hash.to_vec()); ``` - How were these changes implemented and what do they affect? All that is needed for this functionality to be exposed is changing the version number of `trie-db` in all the `Cargo.toml`s applicable, and re-exporting some additional structs from `trie-db` in `sp-trie`. 
--------- Co-authored-by: Bastian Köcher (cherry picked from commit 4e73c0fcd37e4e8c14aeb83b5c9e680981e16079) * Update polkadot-sdk refs * Fix Cargo.lock --------- Co-authored-by: Liam Aharon Co-authored-by: Facundo Farall <37149322+ffarall@users.noreply.github.com> --- Cargo.lock | 1304 ++++++++++++++++++++++---------- bin/runtime-common/src/mock.rs | 2 +- primitives/runtime/Cargo.toml | 2 +- 3 files changed, 918 insertions(+), 390 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fc3fb032f..e1d46834c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -52,7 +52,7 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cipher 0.4.4", "cpufeatures", ] @@ -77,7 +77,7 @@ version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" dependencies = [ - "getrandom 0.2.12", + "getrandom 0.2.14", "once_cell", "version_check", ] @@ -88,8 +88,8 @@ version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ - "cfg-if 1.0.0", - "getrandom 0.2.12", + "cfg-if", + "getrandom 0.2.14", "once_cell", "version_check", "zerocopy", @@ -208,7 +208,7 @@ dependencies = [ "proc-macro-error", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -481,7 +481,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" dependencies = [ "num-traits", - "rand", + "rand 0.8.5", "rayon", ] @@ -598,7 +598,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f28243a43d821d11341ab73c80bed182dc015c514b951616cf79bd4af39af0c3" dependencies = [ "concurrent-queue", - "event-listener 5.2.0", + 
"event-listener 5.3.0", "event-listener-strategy 0.5.1", "futures-core", "pin-project-lite 0.2.14", @@ -606,9 +606,9 @@ dependencies = [ [[package]] name = "async-executor" -version = "1.9.1" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10b3e585719c2358d2660232671ca8ca4ddb4be4ce8a1842d6c2dc8685303316" +checksum = "5f98c37cf288e302c16ef6c8472aad1e034c6c84ce5ea7b8101c98eb4a802fee" dependencies = [ "async-lock 3.3.0", "async-task", @@ -653,7 +653,7 @@ checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" dependencies = [ "async-lock 2.8.0", "autocfg", - "cfg-if 1.0.0", + "cfg-if", "concurrent-queue", "futures-lite 1.13.0", "log", @@ -672,7 +672,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcccb0f599cfa2f8ace422d3555572f47424da5648a4382a9dd0310ff8210884" dependencies = [ "async-lock 3.3.0", - "cfg-if 1.0.0", + "cfg-if", "concurrent-queue", "futures-io", "futures-lite 2.3.0", @@ -725,7 +725,7 @@ dependencies = [ "async-lock 2.8.0", "async-signal", "blocking", - "cfg-if 1.0.0", + "cfg-if", "event-listener 3.1.0", "futures-lite 1.13.0", "rustix 0.38.32", @@ -741,7 +741,7 @@ dependencies = [ "async-io 2.3.2", "async-lock 2.8.0", "atomic-waker", - "cfg-if 1.0.0", + "cfg-if", "futures-core", "futures-io", "rustix 0.38.32", @@ -791,7 +791,7 @@ checksum = "a507401cad91ec6a857ed5513a2073c82a9b9048762b885bb98655b306964681" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -842,9 +842,9 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b62ddb9cb1ec0a098ad4bbf9344d0713fa193ae1a80af55febcff2627b6a00c1" dependencies = [ - "getrandom 0.2.12", + "getrandom 0.2.14", "instant", - "rand", + "rand 0.8.5", ] [[package]] @@ -855,7 +855,7 @@ checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" dependencies = [ "addr2line 0.21.0", "cc", - "cfg-if 1.0.0", + 
"cfg-if", "libc", "miniz_oxide", "object 0.32.2", @@ -876,7 +876,7 @@ dependencies = [ "dleq_vrf", "fflonk", "merlin", - "rand_chacha", + "rand_chacha 0.3.1", "rand_core 0.6.4", "ring 0.1.0", "sha2 0.10.8", @@ -915,6 +915,12 @@ version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" +[[package]] +name = "base64" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51" + [[package]] name = "base64ct" version = "1.6.0" @@ -933,7 +939,7 @@ dependencies = [ [[package]] name = "binary-merkle-tree" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "hash-db", "log", @@ -1046,6 +1052,30 @@ dependencies = [ "constant_time_eq 0.3.0", ] +[[package]] +name = "blake2s_simd" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94230421e395b9920d23df13ea5d77a20e1725331f90fbbf6df6040b33f756ae" +dependencies = [ + "arrayref", + "arrayvec 0.7.4", + "constant_time_eq 0.3.0", +] + +[[package]] +name = "blake3" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30cca6d3674597c30ddf2c587bf8d9d65c9a84d2326d941cc79c9842dfe0ef52" +dependencies = [ + "arrayref", + "arrayvec 0.7.4", + "cc", + "cfg-if", + "constant_time_eq 0.3.0", +] + [[package]] name = "block-buffer" version = "0.9.0" @@ -1363,7 +1393,7 @@ dependencies = [ "bp-parachains", "bp-polkadot-core", "bp-runtime", - "ed25519-dalek", + "ed25519-dalek 2.1.1", "finality-grandpa", "parity-scale-codec", "sp-application-crypto", @@ -1457,9 +1487,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.15.4" +version = 
"3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ff69b9dd49fd426c69a0db9fc04dd934cdb6645ff000864d98f7e2af8830eaa" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "byte-slice-cast" @@ -1509,9 +1539,9 @@ checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" [[package]] name = "cc" -version = "1.0.90" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5" +checksum = "2678b2e3449475e95b0aa6f9b506a28e61b3dc8996592b983695e8ebb58a8b41" dependencies = [ "jobserver", "libc", @@ -1526,12 +1556,6 @@ dependencies = [ "smallvec", ] -[[package]] -name = "cfg-if" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" - [[package]] name = "cfg-if" version = "1.0.0" @@ -1554,7 +1578,7 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cipher 0.4.4", "cpufeatures", ] @@ -1586,6 +1610,32 @@ dependencies = [ "windows-targets 0.52.4", ] +[[package]] +name = "cid" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9b68e3193982cd54187d71afdb2a271ad4cf8af157858e9cb911b91321de143" +dependencies = [ + "core2", + "multibase", + "multihash 0.17.0", + "serde", + "unsigned-varint", +] + +[[package]] +name = "cid" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd94671561e36e4e7de75f753f577edafb0e7c05d6e4547229fdf7938fbcd2c3" +dependencies = [ + "core2", + "multibase", + "multihash 0.18.1", + "serde", + "unsigned-varint", +] + [[package]] name = "cipher" version = "0.2.5" @@ -1612,7 +1662,7 @@ version = "0.5.2" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56ccb671c5921be8a84686e6212ca184cb1d7c51cadcdbfcbd1cc3f042f5dfb8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1636,6 +1686,16 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" +[[package]] +name = "combine" +version = "4.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" +dependencies = [ + "bytes", + "memchr", +] + [[package]] name = "common" version = "0.1.0" @@ -1649,7 +1709,7 @@ dependencies = [ "fflonk", "getrandom_or_panic", "merlin", - "rand_chacha", + "rand_chacha 0.3.1", ] [[package]] @@ -1688,7 +1748,7 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" dependencies = [ - "getrandom 0.2.12", + "getrandom 0.2.14", "once_cell", "tiny-keccak", ] @@ -1748,7 +1808,7 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eeaa953eaad386a53111e47172c2fedba671e5684c8dd601a5f474f4f118710f" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1858,13 +1918,28 @@ dependencies = [ "wasmtime-types", ] +[[package]] +name = "crc" +version = "3.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + [[package]] name = "crc32fast" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -2009,7 +2084,7 @@ version = "4.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "curve25519-dalek-derive", "digest 0.10.7", @@ -2028,7 +2103,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -2089,7 +2164,7 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "strsim 0.10.0", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -2111,7 +2186,7 @@ checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" dependencies = [ "darling_core 0.20.8", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -2142,9 +2217,9 @@ dependencies = [ [[package]] name = "der" -version = "0.7.8" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c" +checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" dependencies = [ "const-oid", "zeroize", @@ -2203,7 +2278,7 @@ checksum = "d65d7ce8132b7c0e54497a4d9a55a1c2a0912a0d786cf894472ba818fba45762" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -2261,7 +2336,7 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "339ee130d97a610ea5a5872d2bbb130fdf68884ff09d3028b81bec8a1ac23bbc" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "dirs-sys-next", ] @@ -2284,7 +2359,7 @@ checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -2324,7 +2399,7 @@ 
dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "regex", - "syn 2.0.57", + "syn 2.0.58", "termcolor", "toml 0.8.12", "walkdir", @@ -2338,9 +2413,9 @@ checksum = "1435fa1053d8b2fbbe9be7e97eca7f33d37b28409959813daefc1446a14247f1" [[package]] name = "downcast-rs" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ea835d29036a4087793836fa931b08837ad5e957da9e23886b29586fb9b6650" +checksum = "75b325c5dbd37f80359721ad39aca5a29fb04c89279657cffdda8736d0c0b9d2" [[package]] name = "dtoa" @@ -2386,10 +2461,19 @@ dependencies = [ "elliptic-curve", "rfc6979", "serdect", - "signature", + "signature 2.2.0", "spki", ] +[[package]] +name = "ed25519" +version = "1.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91cff35c70bba8a626e3185d8cd48cc11b5437e1a5bcd15b9b5fa3c64b6dfee7" +dependencies = [ + "signature 1.6.4", +] + [[package]] name = "ed25519" version = "2.2.3" @@ -2397,7 +2481,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" dependencies = [ "pkcs8", - "signature", + "signature 2.2.0", +] + +[[package]] +name = "ed25519-dalek" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" +dependencies = [ + "curve25519-dalek 3.2.0", + "ed25519 1.5.3", + "rand 0.7.3", + "serde", + "sha2 0.9.9", + "zeroize", ] [[package]] @@ -2407,7 +2505,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871" dependencies = [ "curve25519-dalek 4.1.1", - "ed25519", + "ed25519 2.2.3", "rand_core 0.6.4", "serde", "sha2 0.10.8", @@ -2461,7 +2559,7 @@ version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -2476,6 +2574,18 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "enum-as-inner" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" +dependencies = [ + "heck 0.4.1", + "proc-macro2 1.0.79", + "quote 1.0.35", + "syn 2.0.58", +] + [[package]] name = "env_filter" version = "0.1.0" @@ -2606,9 +2716,9 @@ dependencies = [ [[package]] name = "event-listener" -version = "5.2.0" +version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b5fb89194fa3cad959b833185b3063ba881dbfc7030680b314250779fb4cc91" +checksum = "6d9944b8ca13534cdfb2800775f8dd4902ff3fc75a50101466decadfdf322a24" dependencies = [ "concurrent-queue", "parking", @@ -2631,7 +2741,7 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "332f51cb23d20b0de8458b86580878211da09bcd4503cb579c225b3d124cabb3" dependencies = [ - "event-listener 5.2.0", + "event-listener 5.3.0", "pin-project-lite 0.2.14", ] @@ -2646,7 +2756,7 @@ dependencies = [ "prettier-please", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -2747,7 +2857,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" dependencies = [ "byteorder", - "rand", + "rand 0.8.5", "rustc-hex", "static_assertions", ] @@ -2784,6 +2894,21 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ 
+ "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + [[package]] name = "form_urlencoded" version = "1.2.1" @@ -2802,7 +2927,7 @@ checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" [[package]] name = "frame-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "frame-support", "frame-support-procedural", @@ -2830,7 +2955,7 @@ version = "15.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "878babb0b136e731cc77ec2fd883ff02745ff21e6fb662729953d44923df009c" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "parity-scale-codec", "scale-info", ] @@ -2841,7 +2966,7 @@ version = "16.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87cf1549fba25a6fcac22785b61698317d958e96cac72a59102ea45b9ae64692" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "parity-scale-codec", "scale-info", "serde", @@ -2850,7 +2975,7 @@ dependencies = [ [[package]] name = "frame-support" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "aquamarine", "array-bytes 6.2.2", @@ -2891,11 +3016,11 @@ dependencies = [ [[package]] name = "frame-support-procedural" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" 
dependencies = [ "Inflector", "cfg-expr", - "derive-syn-parse 0.1.5", + "derive-syn-parse 0.2.0", "expander", "frame-support-procedural-tools", "itertools", @@ -2904,37 +3029,37 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "sp-crypto-hashing", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] name = "frame-support-procedural-tools" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "frame-support-procedural-tools-derive", "proc-macro-crate 3.1.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] name = "frame-support-procedural-tools-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] name = "frame-system" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "docify", "frame-support", "log", @@ -3049,7 +3174,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -3134,18 +3259,18 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "wasi 0.9.0+wasi-snapshot-preview1", ] [[package]] 
name = "getrandom" -version = "0.2.12" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" +checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "wasi 0.11.0+wasi-snapshot-preview1", ] @@ -3156,7 +3281,7 @@ version = "0.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ea1015b5a70616b688dc230cfe50c8af89d972cb132d5a622814d29773b10b9" dependencies = [ - "rand", + "rand 0.8.5", "rand_core 0.6.4", ] @@ -3212,9 +3337,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.25" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fbd2820c5e49886948654ab546d0688ff24530286bdcf8fca3cefb16d4618eb" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" dependencies = [ "bytes", "fnv", @@ -3509,6 +3634,16 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "idna" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "idna" version = "0.5.0" @@ -3647,7 +3782,7 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -3796,14 +3931,14 @@ dependencies = [ [[package]] name = "jsonrpsee" -version = "0.22.3" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cdbb7cb6f3ba28f5b212dd250ab4483105efc3e381f5c8bb90340f14f0a2cc3" +checksum = "c4b0e68d9af1f066c06d6e2397583795b912d78537d7d907c561e82c13d69fa1" dependencies = [ - "jsonrpsee-core 0.22.3", + "jsonrpsee-core 
0.22.4", "jsonrpsee-proc-macros", "jsonrpsee-server", - "jsonrpsee-types 0.22.3", + "jsonrpsee-types 0.22.4", "jsonrpsee-ws-client", "tokio", "tracing", @@ -3831,13 +3966,13 @@ dependencies = [ [[package]] name = "jsonrpsee-client-transport" -version = "0.22.3" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ab2e14e727d2faf388c99d9ca5210566ed3b044f07d92c29c3611718d178380" +checksum = "92f254f56af1ae84815b9b1325094743dcf05b92abb5e94da2e81a35cff0cada" dependencies = [ "futures-util", "http", - "jsonrpsee-core 0.22.3", + "jsonrpsee-core 0.22.4", "pin-project", "rustls-native-certs 0.7.0", "rustls-pki-types", @@ -3874,21 +4009,20 @@ dependencies = [ [[package]] name = "jsonrpsee-core" -version = "0.22.3" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71962a1c49af43adf81d337e4ebc93f3c915faf6eccaa14d74e255107dfd7723" +checksum = "274d68152c24aa78977243bb56f28d7946e6aa309945b37d33174a3f92d89a3a" dependencies = [ "anyhow", - "async-lock 3.3.0", "async-trait", "beef", "futures-timer", "futures-util", "hyper", - "jsonrpsee-types 0.22.3", + "jsonrpsee-types 0.22.4", "parking_lot 0.12.1", "pin-project", - "rand", + "rand 0.8.5", "rustc-hash", "serde", "serde_json", @@ -3920,28 +4054,28 @@ dependencies = [ [[package]] name = "jsonrpsee-proc-macros" -version = "0.22.3" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d7c2416c400c94b2e864603c51a5bbd5b103386da1f5e58cbf01e7bb3ef0833" +checksum = "2c326f9e95aeff7d707b2ffde72c22a52acc975ba1c48587776c02b90c4747a6" dependencies = [ "heck 0.4.1", "proc-macro-crate 3.1.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] name = "jsonrpsee-server" -version = "0.22.3" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4882e640e70c2553e3d9487e6f4dddd5fd11918f25e40fa45218f9fe29ed2152" +checksum = 
"3b5bfbda5f8fb63f997102fd18f73e35e34c84c6dcdbdbbe72c6e48f6d2c959b" dependencies = [ "futures-util", "http", "hyper", - "jsonrpsee-core 0.22.3", - "jsonrpsee-types 0.22.3", + "jsonrpsee-core 0.22.4", + "jsonrpsee-types 0.22.4", "pin-project", "route-recognizer", "serde", @@ -3971,9 +4105,9 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.22.3" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e53c72de6cd2ad6ac1aa6e848206ef8b736f92ed02354959130373dfa5b3cbd" +checksum = "3dc828e537868d6b12bbb07ec20324909a22ced6efca0057c825c3e1126b2c6d" dependencies = [ "anyhow", "beef", @@ -3984,14 +4118,14 @@ dependencies = [ [[package]] name = "jsonrpsee-ws-client" -version = "0.22.3" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8a07ab8da9a283b906f6735ddd17d3680158bb72259e853441d1dd0167079ec" +checksum = "32f00abe918bf34b785f87459b9205790e5361a3f7437adb50e928dc243f27eb" dependencies = [ "http", - "jsonrpsee-client-transport 0.22.3", - "jsonrpsee-core 0.22.3", - "jsonrpsee-types 0.22.3", + "jsonrpsee-client-transport 0.22.4", + "jsonrpsee-core 0.22.4", + "jsonrpsee-types 0.22.4", "url", ] @@ -4001,7 +4135,7 @@ version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "956ff9b67e26e1a6a866cb758f12c6f8746208489e3e4a4b5580802f2f0a587b" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "ecdsa", "elliptic-curve", "once_cell", @@ -4079,7 +4213,7 @@ dependencies = [ "bytes", "futures", "futures-timer", - "getrandom 0.2.12", + "getrandom 0.2.14", "instant", "libp2p-allow-block-list", "libp2p-connection-limits", @@ -4141,13 +4275,13 @@ dependencies = [ "libp2p-identity", "log", "multiaddr", - "multihash", + "multihash 0.17.0", "multistream-select", "once_cell", "parking_lot 0.12.1", "pin-project", "quick-protobuf", - "rand", + "rand 0.8.5", "rw-stream-sink", "smallvec", "thiserror", @@ -4166,7 +4300,7 @@ dependencies = [ "log", 
"parking_lot 0.12.1", "smallvec", - "trust-dns-resolver", + "trust-dns-resolver 0.22.0", ] [[package]] @@ -4198,12 +4332,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "276bb57e7af15d8f100d3c11cbdd32c6752b7eef4ba7a18ecf464972c07abcce" dependencies = [ "bs58 0.4.0", - "ed25519-dalek", + "ed25519-dalek 2.1.1", "log", "multiaddr", - "multihash", + "multihash 0.17.0", "quick-protobuf", - "rand", + "rand 0.8.5", "sha2 0.10.8", "thiserror", "zeroize", @@ -4228,7 +4362,7 @@ dependencies = [ "libp2p-swarm", "log", "quick-protobuf", - "rand", + "rand 0.8.5", "sha2 0.10.8", "smallvec", "thiserror", @@ -4250,11 +4384,11 @@ dependencies = [ "libp2p-identity", "libp2p-swarm", "log", - "rand", + "rand 0.8.5", "smallvec", "socket2 0.4.10", "tokio", - "trust-dns-proto", + "trust-dns-proto 0.22.0", "void", ] @@ -4286,7 +4420,7 @@ dependencies = [ "log", "once_cell", "quick-protobuf", - "rand", + "rand 0.8.5", "sha2 0.10.8", "snow", "static_assertions", @@ -4308,7 +4442,7 @@ dependencies = [ "libp2p-core", "libp2p-swarm", "log", - "rand", + "rand 0.8.5", "void", ] @@ -4328,7 +4462,7 @@ dependencies = [ "log", "parking_lot 0.12.1", "quinn-proto", - "rand", + "rand 0.8.5", "rustls 0.20.9", "thiserror", "tokio", @@ -4346,7 +4480,7 @@ dependencies = [ "libp2p-core", "libp2p-identity", "libp2p-swarm", - "rand", + "rand 0.8.5", "smallvec", ] @@ -4365,7 +4499,7 @@ dependencies = [ "libp2p-identity", "libp2p-swarm-derive", "log", - "rand", + "rand 0.8.5", "smallvec", "tokio", "void", @@ -4413,7 +4547,7 @@ dependencies = [ "rustls 0.20.9", "thiserror", "webpki", - "x509-parser", + "x509-parser 0.14.0", "yasna", ] @@ -4486,7 +4620,7 @@ dependencies = [ "libsecp256k1-core", "libsecp256k1-gen-ecmult", "libsecp256k1-gen-genmult", - "rand", + "rand 0.8.5", "serde", "sha2 0.9.9", "typenum", @@ -4587,6 +4721,60 @@ dependencies = [ "keystream", ] +[[package]] +name = "litep2p" +version = "0.3.0" +source = 
"git+https://github.com/paritytech/litep2p?branch=master#b142c9eb611fb2fe78d2830266a3675b37299ceb" +dependencies = [ + "async-trait", + "bs58 0.4.0", + "bytes", + "cid 0.10.1", + "ed25519-dalek 1.0.1", + "futures", + "futures-timer", + "hex-literal", + "indexmap 2.2.6", + "libc", + "mockall", + "multiaddr", + "multihash 0.17.0", + "network-interface", + "nohash-hasher", + "parking_lot 0.12.1", + "pin-project", + "prost", + "prost-build", + "quinn", + "rand 0.8.5", + "rcgen", + "ring 0.16.20", + "rustls 0.20.9", + "serde", + "sha2 0.10.8", + "simple-dns", + "smallvec", + "snow", + "socket2 0.5.6", + "static_assertions", + "str0m", + "thiserror", + "tokio", + "tokio-stream", + "tokio-tungstenite", + "tokio-util", + "tracing", + "trust-dns-resolver 0.23.2", + "uint", + "unsigned-varint", + "url", + "webpki", + "x25519-dalek 2.0.1", + "x509-parser 0.15.1", + "yasna", + "zeroize", +] + [[package]] name = "lock_api" version = "0.4.11" @@ -4651,7 +4839,7 @@ dependencies = [ "macro_magic_core", "macro_magic_macros", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -4665,7 +4853,7 @@ dependencies = [ "macro_magic_core_macros", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -4676,7 +4864,7 @@ checksum = "9ea73aa640dc01d62a590d48c0c3521ed739d53b27f919b25c3551e233481654" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -4687,7 +4875,7 @@ checksum = "ef9d79ae96aaba821963320eb2b6e34d17df1e5a83d8a1985c29cc5be59577b3" dependencies = [ "macro_magic_core", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -4698,9 +4886,9 @@ checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" [[package]] name = "matchers" -version = "0.0.1" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f099785f7595cc4b4553a174ce30dd7589ef93391ff414dbb67f62392b9e0ce1" +checksum = 
"8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" dependencies = [ "regex-automata 0.1.10", ] @@ -4842,8 +5030,8 @@ dependencies = [ "lioness", "log", "parking_lot 0.12.1", - "rand", - "rand_chacha", + "rand 0.8.5", + "rand_chacha 0.3.1", "rand_distr", "subtle 2.5.0", "thiserror", @@ -4856,7 +5044,7 @@ version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c84490118f2ee2d74570d114f3d0493cbf02790df303d2707606c3e14e07c96" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "downcast", "fragile", "lazy_static", @@ -4871,7 +5059,7 @@ version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ce75669015c4f47b289fd4d4f56e894e4c96003ffdf3ac51313126f94c6cbb" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", @@ -4888,7 +5076,7 @@ dependencies = [ "data-encoding", "log", "multibase", - "multihash", + "multihash 0.17.0", "percent-encoding", "serde", "static_assertions", @@ -4913,8 +5101,31 @@ version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "835d6ff01d610179fbce3de1694d007e500bf33a7f29689838941d6bf783ae40" dependencies = [ + "blake2b_simd", + "blake2s_simd", + "blake3", "core2", + "digest 0.10.7", "multihash-derive", + "sha2 0.10.8", + "sha3", + "unsigned-varint", +] + +[[package]] +name = "multihash" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfd8a792c1694c6da4f68db0a9d707c72bd260994da179e6030a5dcee00bb815" +dependencies = [ + "blake2b_simd", + "blake2s_simd", + "blake3", + "core2", + "digest 0.10.7", + "multihash-derive", + "sha2 0.10.8", + "sha3", "unsigned-varint", ] @@ -5034,9 +5245,9 @@ dependencies = [ [[package]] name = "netlink-sys" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6471bf08e7ac0135876a9581bf3217ef0333c191c128d34878079f42ee150411" +checksum = 
"416060d346fbaf1f23f9512963e3e878f1a78e707cb699ba9215761754244307" dependencies = [ "bytes", "futures", @@ -5045,6 +5256,18 @@ dependencies = [ "tokio", ] +[[package]] +name = "network-interface" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae72fd9dbd7f55dda80c00d66acc3b2130436fcba9ea89118fc508eaae48dfb0" +dependencies = [ + "cc", + "libc", + "thiserror", + "winapi", +] + [[package]] name = "nix" version = "0.24.3" @@ -5052,7 +5275,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa52e972a9a719cecb6864fb88568781eb706bac2cd1d4f04a648542dbf78069" dependencies = [ "bitflags 1.3.2", - "cfg-if 1.0.0", + "cfg-if", "libc", ] @@ -5099,6 +5322,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + [[package]] name = "num-bigint" version = "0.4.4" @@ -5233,12 +5466,47 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" +[[package]] +name = "openssl" +version = "0.10.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" +dependencies = [ + "bitflags 2.5.0", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2 1.0.79", + "quote 1.0.35", + "syn 2.0.58", +] + [[package]] name = "openssl-probe" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" +[[package]] +name = "openssl-src" +version = "300.2.3+3.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cff92b6f71555b61bb9315f7c64da3ca43d87531622120fea0195fc761b4843" +dependencies = [ + "cc", +] + [[package]] name = "openssl-sys" version = "0.9.102" @@ -5247,14 +5515,21 @@ checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" dependencies = [ "cc", "libc", + "openssl-src", "pkg-config", "vcpkg", ] +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "pallet-authorship" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "frame-support", "frame-system", @@ -5268,7 +5543,7 @@ dependencies = [ [[package]] name = "pallet-balances" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "docify", "frame-benchmarking", @@ -5284,7 +5559,7 @@ dependencies = [ [[package]] name = "pallet-beefy" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "frame-support", "frame-system", @@ -5304,7 +5579,7 @@ dependencies = [ [[package]] name = "pallet-beefy-mmr" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "array-bytes 6.2.2", "binary-merkle-tree", @@ -5340,7 +5615,7 @@ dependencies = [ "pallet-beefy-mmr", "pallet-mmr", "parity-scale-codec", - "rand", + "rand 0.8.5", "scale-info", "serde", "sp-consensus-beefy", @@ -5439,7 +5714,7 @@ dependencies = [ [[package]] name = "pallet-grandpa" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "frame-benchmarking", "frame-support", @@ -5462,7 +5737,7 @@ dependencies = [ [[package]] name = "pallet-mmr" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "frame-benchmarking", "frame-support", @@ -5480,7 +5755,7 @@ dependencies = [ [[package]] name = "pallet-session" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "frame-support", "frame-system", @@ -5502,7 +5777,7 @@ dependencies = [ [[package]] name = "pallet-timestamp" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "docify", "frame-benchmarking", @@ -5522,7 +5797,7 @@ dependencies = [ [[package]] name = 
"pallet-transaction-payment" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "frame-support", "frame-system", @@ -5538,7 +5813,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc-runtime-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "pallet-transaction-payment", "parity-scale-codec", @@ -5550,7 +5825,7 @@ dependencies = [ [[package]] name = "pallet-utility" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "frame-benchmarking", "frame-support", @@ -5629,7 +5904,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4e69bf016dc406eff7d53a7d3f7cf1c2e72c82b9088aac1118591e36dd2cd3e9" dependencies = [ "bitcoin_hashes 0.13.0", - "rand", + "rand 0.8.5", "rand_core 0.5.1", "serde", "unicode-normalization", @@ -5674,7 +5949,7 @@ version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d32c34f4f5ca7f9196001c0aba5a1f9a5a12382c8944b8b0f90233282d1e8f8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "ethereum-types", "hashbrown 0.12.3", "impl-trait-for-tuples", @@ -5736,7 +6011,7 @@ version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "instant", "libc", "redox_syscall 0.2.16", @@ -5750,7 +6025,7 @@ 
version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "redox_syscall 0.4.1", "smallvec", @@ -5832,7 +6107,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -5889,7 +6164,7 @@ checksum = "db23d408679286588f4d4644f965003d056e3dd5abcaaa938116871d7ce2fee7" [[package]] name = "polkadot-core-primitives" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "parity-scale-codec", "scale-info", @@ -5901,7 +6176,7 @@ dependencies = [ [[package]] name = "polkadot-parachain-primitives" version = "6.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "bounded-collections", "derive_more", @@ -5918,7 +6193,7 @@ dependencies = [ [[package]] name = "polkadot-primitives" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "bitvec", "hex-literal", @@ -5991,7 +6266,7 @@ dependencies = [ "polkavm-common", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -6001,7 +6276,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ba81f7b5faac81e528eb6158a6f3c9e0bb1008e0ffa19653bc8dea925ecb429" dependencies = [ 
"polkavm-derive-impl", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -6018,7 +6293,7 @@ checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" dependencies = [ "autocfg", "bitflags 1.3.2", - "cfg-if 1.0.0", + "cfg-if", "concurrent-queue", "libc", "log", @@ -6032,7 +6307,7 @@ version = "3.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e0c976a60b2d7e99d6f229e414670a9b85d13ac305cc6d1e9c134de58c5aaaf6" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "concurrent-queue", "hermit-abi 0.3.9", "pin-project-lite 0.2.14", @@ -6058,7 +6333,7 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "opaque-debug 0.3.1", "universal-hash", @@ -6113,7 +6388,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22020dfcf177fcc7bf5deaf7440af371400c67c0de14c399938d8ed4fb4645d3" dependencies = [ "proc-macro2 1.0.79", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -6200,7 +6475,7 @@ checksum = "834da187cfe638ae8abb0203f0b33e5ccdb02a28e7199f2f47b3e2754f50edca" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -6227,7 +6502,7 @@ version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "449811d15fbdf5ceb5c1144416066429cf82316e2ec8ce0c1f6f8a02e7bbcf8c" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "fnv", "lazy_static", "memchr", @@ -6255,7 +6530,7 @@ checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -6360,6 +6635,24 @@ dependencies = [ "pin-project-lite 0.1.12", ] +[[package]] +name = "quinn" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2e8b432585672228923edbbf64b8b12c14e1112f62e88737655b4a083dbcd78e" +dependencies = [ + "bytes", + "pin-project-lite 0.2.14", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls 0.20.9", + "thiserror", + "tokio", + "tracing", + "webpki", +] + [[package]] name = "quinn-proto" version = "0.9.6" @@ -6367,7 +6660,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94b0b33c13a79f669c85defaf4c275dc86a0c0372807d0ca3d78e0bb87274863" dependencies = [ "bytes", - "rand", + "rand 0.8.5", "ring 0.16.20", "rustc-hash", "rustls 0.20.9", @@ -6378,6 +6671,19 @@ dependencies = [ "webpki", ] +[[package]] +name = "quinn-udp" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "641538578b21f5e5c8ea733b736895576d0fe329bb883b937db6f4d163dbaaf4" +dependencies = [ + "libc", + "quinn-proto", + "socket2 0.4.10", + "tracing", + "windows-sys 0.42.0", +] + [[package]] name = "quote" version = "0.6.13" @@ -6402,6 +6708,19 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc", +] + [[package]] name = "rand" version = "0.8.5" @@ -6409,10 +6728,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha", + "rand_chacha 0.3.1", "rand_core 0.6.4", ] +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 
0.5.1", +] + [[package]] name = "rand_chacha" version = "0.3.1" @@ -6438,7 +6767,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.12", + "getrandom 0.2.14", ] [[package]] @@ -6448,7 +6777,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32cb0b9bc82b0a0876c2dd994a7e7a2683d3e7390ca40e6886785ef0c7e3ee31" dependencies = [ "num-traits", - "rand", + "rand 0.8.5", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", ] [[package]] @@ -6533,7 +6871,7 @@ version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" dependencies = [ - "getrandom 0.2.12", + "getrandom 0.2.14", "libredox", "thiserror", ] @@ -6555,7 +6893,7 @@ checksum = "5fddb4f8d99b0a2ebafc65a87a69a7b9875e4b1ae1f00db265d300ef7f28bccc" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -6790,7 +7128,7 @@ dependencies = [ "frame-support", "frame-system", "futures", - "jsonrpsee 0.22.3", + "jsonrpsee 0.22.4", "log", "num-traits", "pallet-balances", @@ -6799,7 +7137,7 @@ dependencies = [ "pallet-transaction-payment-rpc-runtime-api", "pallet-utility", "parity-scale-codec", - "rand", + "rand 0.8.5", "relay-utils", "sc-chain-spec", "sc-rpc-api", @@ -6919,8 +7257,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", - "cfg-if 1.0.0", - "getrandom 0.2.12", + "cfg-if", + "getrandom 0.2.14", "libc", "spin 0.9.8", "untrusted 0.9.0", @@ -7092,7 +7430,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792" dependencies = [ "openssl-probe", - "rustls-pemfile 2.1.1", + "rustls-pemfile 2.1.2", "rustls-pki-types", "schannel", "security-framework", @@ -7109,11 +7447,11 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "2.1.1" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f48172685e6ff52a556baa527774f61fcaa884f59daf3375c62a3f1cd2549dab" +checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" dependencies = [ - "base64 0.21.7", + "base64 0.22.0", "rustls-pki-types", ] @@ -7146,9 +7484,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.14" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" +checksum = "80af6f9131f277a45a3fba6ce8e2258037bb0477a67e610d3c1fe046ab31de47" [[package]] name = "ruzstd" @@ -7199,7 +7537,7 @@ dependencies = [ [[package]] name = "sc-allocator" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "log", "sp-core", @@ -7209,8 +7547,8 @@ dependencies = [ [[package]] name = "sc-chain-spec" -version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +version = "28.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "array-bytes 6.2.2", "docify", @@ -7231,23 +7569,24 @@ dependencies = [ "sp-io", "sp-runtime", "sp-state-machine", + "sp-tracing 16.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] [[package]] name = 
"sc-chain-spec-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "proc-macro-crate 3.1.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] name = "sc-client-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "fnv", "futures", @@ -7274,16 +7613,16 @@ dependencies = [ [[package]] name = "sc-consensus" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "async-trait", "futures", "futures-timer", - "libp2p-identity", "log", "mockall", "parking_lot 0.12.1", "sc-client-api", + "sc-network-types", "sc-utils", "serde", "sp-api", @@ -7299,7 +7638,7 @@ dependencies = [ [[package]] name = "sc-executor" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "parity-scale-codec", "parking_lot 0.12.1", @@ -7322,7 +7661,7 @@ dependencies = [ [[package]] name = "sc-executor-common" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "polkavm", "sc-allocator", @@ -7335,7 +7674,7 @@ dependencies 
= [ [[package]] name = "sc-executor-polkavm" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "log", "polkavm", @@ -7346,10 +7685,10 @@ dependencies = [ [[package]] name = "sc-executor-wasmtime" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "anyhow", - "cfg-if 1.0.0", + "cfg-if", "libc", "log", "parking_lot 0.12.1", @@ -7364,7 +7703,7 @@ dependencies = [ [[package]] name = "sc-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "array-bytes 4.2.0", "arrayvec 0.7.4", @@ -7372,7 +7711,6 @@ dependencies = [ "bytes", "futures", "futures-timer", - "libp2p-identity", "log", "mixnet", "multiaddr", @@ -7380,6 +7718,7 @@ dependencies = [ "parking_lot 0.12.1", "sc-client-api", "sc-network", + "sc-network-types", "sc-transaction-pool-api", "sp-api", "sp-consensus", @@ -7393,13 +7732,14 @@ dependencies = [ [[package]] name = "sc-network" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "array-bytes 6.2.2", "async-channel 1.9.0", "async-trait", "asynchronous-codec", "bytes", + "cid 0.9.0", "either", "fnv", "futures", @@ -7407,16 +7747,22 @@ dependencies = [ "ip_network", "libp2p", "linked_hash_set", + "litep2p", "log", 
"mockall", + "once_cell", "parity-scale-codec", "parking_lot 0.12.1", "partial_sort", "pin-project", - "rand", + "prost", + "prost-build", + "rand 0.8.5", "sc-client-api", "sc-network-common", + "sc-network-types", "sc-utils", + "schnellru", "serde", "serde_json", "smallvec", @@ -7429,6 +7775,7 @@ dependencies = [ "tokio", "tokio-stream", "unsigned-varint", + "void", "wasm-timer", "zeroize", ] @@ -7436,7 +7783,7 @@ dependencies = [ [[package]] name = "sc-network-common" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "async-trait", "bitflags 1.3.2", @@ -7445,17 +7792,32 @@ dependencies = [ "parity-scale-codec", "prost-build", "sc-consensus", + "sc-network-types", "sp-consensus", "sp-consensus-grandpa", "sp-runtime", ] +[[package]] +name = "sc-network-types" +version = "0.10.0-dev" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +dependencies = [ + "bs58 0.4.0", + "libp2p-identity", + "litep2p", + "multiaddr", + "multihash 0.17.0", + "rand 0.8.5", + "thiserror", +] + [[package]] name = "sc-rpc-api" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ - "jsonrpsee 0.22.3", + "jsonrpsee 0.22.4", "parity-scale-codec", "sc-chain-spec", "sc-mixnet", @@ -7473,7 +7835,7 @@ dependencies = [ [[package]] name = "sc-telemetry" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ 
"chrono", "futures", @@ -7481,7 +7843,8 @@ dependencies = [ "log", "parking_lot 0.12.1", "pin-project", - "rand", + "rand 0.8.5", + "sc-network", "sc-utils", "serde", "serde_json", @@ -7492,7 +7855,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "async-trait", "futures", @@ -7508,7 +7871,7 @@ dependencies = [ [[package]] name = "sc-utils" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "async-channel 1.9.0", "futures", @@ -7589,12 +7952,12 @@ dependencies = [ [[package]] name = "scale-info" -version = "2.11.1" +version = "2.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "788745a868b0e751750388f4e6546eb921ef714a4317fa6954f7cde114eb2eb7" +checksum = "7c453e59a955f81fb62ee5d596b450383d699f152d350e9d23a0db2adb78e4c0" dependencies = [ "bitvec", - "cfg-if 1.0.0", + "cfg-if", "derive_more", "parity-scale-codec", "scale-info-derive", @@ -7603,9 +7966,9 @@ dependencies = [ [[package]] name = "scale-info-derive" -version = "2.11.1" +version = "2.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dc2f4e8bc344b9fc3d5f74f72c2e55bfc38d28dc2ebc69c194a3df424e4d9ac" +checksum = "18cf6c6447f813ef19eb450e985bcce6705f9ce7660db221b59093d15c79c4b7" dependencies = [ "proc-macro-crate 1.1.3", "proc-macro2 1.0.79", @@ -7649,7 +8012,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "772575a524feeb803e5b0fcbc6dd9f367e579488197c94c6e4023aad2305774d" dependencies = [ "ahash 0.8.11", - "cfg-if 1.0.0", + 
"cfg-if", "hashbrown 0.13.2", ] @@ -7704,6 +8067,21 @@ dependencies = [ "untrusted 0.9.0", ] +[[package]] +name = "sctp-proto" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f64cef148d3295c730c3cb340b0b252a4d570b1c7d4bf0808f88540b0a888bc" +dependencies = [ + "bytes", + "crc", + "fxhash", + "log", + "rand 0.8.5", + "slab", + "thiserror", +] + [[package]] name = "sec1" version = "0.7.3" @@ -7801,7 +8179,7 @@ checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -7842,12 +8220,44 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" dependencies = [ "block-buffer 0.9.0", - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.9.0", "opaque-debug 0.3.1", ] +[[package]] +name = "sha-1" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", + "sha1-asm", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sha1-asm" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ba6947745e7f86be3b8af00b7355857085dbdf8901393c89514510eb61f4e21" +dependencies = [ + "cc", +] + [[package]] name = "sha2" version = "0.9.9" @@ -7855,7 +8265,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" dependencies = [ "block-buffer 0.9.0", - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", 
"digest 0.9.0", "opaque-debug 0.3.1", @@ -7867,7 +8277,7 @@ version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] @@ -7922,6 +8332,12 @@ dependencies = [ "libc", ] +[[package]] +name = "signature" +version = "1.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" + [[package]] name = "signature" version = "2.2.0" @@ -7945,6 +8361,15 @@ dependencies = [ "wide", ] +[[package]] +name = "simple-dns" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cae9a3fcdadafb6d97f4c0e007e4247b114ee0f119f650c3cbf3a8b3a1479694" +dependencies = [ + "bitflags 2.5.0", +] + [[package]] name = "simple-mermaid" version = "0.1.1" @@ -8040,8 +8465,8 @@ dependencies = [ "num-traits", "pbkdf2", "pin-project", - "rand", - "rand_chacha", + "rand 0.8.5", + "rand_chacha 0.3.1", "ruzstd", "schnorrkel 0.10.2", "serde", @@ -8078,7 +8503,7 @@ dependencies = [ "log", "lru 0.10.1", "parking_lot 0.12.1", - "rand", + "rand 0.8.5", "serde", "serde_json", "siphasher", @@ -8137,14 +8562,14 @@ dependencies = [ "http", "httparse", "log", - "rand", - "sha-1", + "rand 0.8.5", + "sha-1 0.9.8", ] [[package]] name = "sp-api" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "hash-db", "log", @@ -8166,7 +8591,7 @@ dependencies = [ [[package]] name = "sp-api-proc-macro" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "Inflector", "blake2 0.10.6", @@ -8174,13 +8599,13 @@ dependencies = [ "proc-macro-crate 3.1.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] name = "sp-application-crypto" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "parity-scale-codec", "scale-info", @@ -8193,7 +8618,7 @@ dependencies = [ [[package]] name = "sp-arithmetic" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "docify", "integer-sqrt", @@ -8226,7 +8651,7 @@ dependencies = [ [[package]] name = "sp-authority-discovery" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "parity-scale-codec", "scale-info", @@ -8238,7 +8663,7 @@ dependencies = [ [[package]] name = "sp-blockchain" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "futures", "log", @@ -8256,7 +8681,7 @@ dependencies = [ [[package]] name = "sp-consensus" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "async-trait", "futures", @@ -8271,7 +8696,7 @@ dependencies = [ [[package]] name = "sp-consensus-beefy" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "lazy_static", "parity-scale-codec", @@ -8285,13 +8710,13 @@ dependencies = [ "sp-keystore", "sp-mmr-primitives", "sp-runtime", - "strum 0.24.1", + "strum", ] [[package]] name = "sp-consensus-grandpa" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "finality-grandpa", "log", @@ -8308,7 +8733,7 @@ dependencies = [ [[package]] name = "sp-consensus-slots" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "parity-scale-codec", "scale-info", @@ -8319,7 +8744,7 @@ dependencies = [ [[package]] name = "sp-core" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "array-bytes 6.2.2", "bandersnatch_vrfs", @@ -8343,7 +8768,7 @@ dependencies = [ "parking_lot 0.12.1", "paste", "primitive-types", - "rand", + "rand 0.8.5", "scale-info", "schnorrkel 0.11.4", "secp256k1", @@ -8381,7 +8806,7 @@ dependencies = [ [[package]] name = "sp-crypto-ec-utils" version = "0.10.0" 
-source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "ark-bls12-377", "ark-bls12-377-ext", @@ -8400,8 +8825,8 @@ dependencies = [ [[package]] name = "sp-crypto-hashing" -version = "0.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +version = "0.1.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "blake2b_simd", "byteorder", @@ -8413,18 +8838,18 @@ dependencies = [ [[package]] name = "sp-crypto-hashing-proc-macro" -version = "0.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +version = "0.1.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "quote 1.0.35", "sp-crypto-hashing", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] name = "sp-database" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "kvdb", "parking_lot 0.12.1", @@ -8433,27 +8858,27 @@ dependencies = [ [[package]] name = "sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] name = "sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = 
"git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] name = "sp-externalities" version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "environmental", "parity-scale-codec", @@ -8463,7 +8888,7 @@ dependencies = [ [[package]] name = "sp-externalities" version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "environmental", "parity-scale-codec", @@ -8472,9 +8897,11 @@ dependencies = [ [[package]] name = "sp-genesis-builder" -version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +version = "0.8.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ + "parity-scale-codec", + "scale-info", "serde_json", "sp-api", "sp-runtime", @@ -8483,7 +8910,7 @@ dependencies = [ [[package]] name = "sp-inherents" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "async-trait", "impl-trait-for-tuples", @@ -8496,10 +8923,10 @@ dependencies = [ [[package]] name = "sp-io" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "bytes", - "ed25519-dalek", + "ed25519-dalek 2.1.1", "libsecp256k1", "log", "parity-scale-codec", @@ -8522,17 +8949,17 @@ dependencies = [ [[package]] name = "sp-keyring" version = "31.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "sp-core", "sp-runtime", - "strum 0.24.1", + "strum", ] [[package]] name = "sp-keystore" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "parity-scale-codec", "parking_lot 0.12.1", @@ -8543,7 +8970,7 @@ dependencies = [ [[package]] name = "sp-maybe-compressed-blob" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "thiserror", "zstd 0.12.4", @@ -8552,7 +8979,7 @@ dependencies = [ [[package]] name = "sp-metadata-ir" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "frame-metadata 16.0.0", "parity-scale-codec", @@ -8562,7 +8989,7 @@ dependencies = [ [[package]] name = "sp-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "parity-scale-codec", "scale-info", @@ -8573,7 +9000,7 @@ dependencies = [ [[package]] name = "sp-mmr-primitives" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "ckb-merkle-mountain-range", "log", @@ -8590,7 +9017,7 @@ dependencies = [ [[package]] name = "sp-panic-handler" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "backtrace", "lazy_static", @@ -8600,7 +9027,7 @@ dependencies = [ [[package]] name = "sp-rpc" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "rustc-hash", "serde", @@ -8610,7 +9037,7 @@ dependencies = [ [[package]] name = "sp-runtime" version = "31.0.1" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "docify", "either", @@ -8619,7 +9046,7 @@ dependencies = [ "log", "parity-scale-codec", "paste", - "rand", + "rand 0.8.5", "scale-info", "serde", "simple-mermaid", @@ -8634,7 +9061,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface" version = "24.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "bytes", "impl-trait-for-tuples", @@ -8653,7 +9080,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface" version = "24.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "bytes", "impl-trait-for-tuples", @@ -8672,33 +9099,33 @@ dependencies = [ [[package]] name = "sp-runtime-interface-proc-macro" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "Inflector", "expander", "proc-macro-crate 3.1.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] name = "sp-runtime-interface-proc-macro" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "Inflector", "expander", "proc-macro-crate 3.1.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] name = "sp-session" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "parity-scale-codec", "scale-info", @@ -8712,7 +9139,7 @@ dependencies = [ [[package]] name = "sp-staking" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", @@ -8725,13 +9152,13 @@ dependencies = [ [[package]] name = "sp-state-machine" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "hash-db", "log", "parity-scale-codec", "parking_lot 0.12.1", - "rand", + "rand 0.8.5", "smallvec", "sp-core", "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", @@ -8745,14 +9172,14 @@ dependencies = [ [[package]] name = "sp-statement-store" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "aes-gcm", "curve25519-dalek 4.1.1", - "ed25519-dalek", + "ed25519-dalek 2.1.1", "hkdf", "parity-scale-codec", - "rand", + "rand 0.8.5", "scale-info", "sha2 0.10.8", "sp-api", @@ -8775,17 +9202,17 @@ checksum = "53458e3c57df53698b3401ec0934bea8e8cfce034816873c0b0abbd83d7bac0d" [[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" [[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" [[package]] name = "sp-storage" version = "19.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "impl-serde", "parity-scale-codec", @@ -8797,7 +9224,7 @@ dependencies = [ [[package]] name = "sp-storage" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "impl-serde", "parity-scale-codec", @@ -8809,7 +9236,7 @@ dependencies = [ [[package]] name = "sp-timestamp" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "async-trait", "parity-scale-codec", @@ -8821,7 +9248,7 @@ dependencies = [ [[package]] name = "sp-tracing" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "parity-scale-codec", "tracing", @@ -8832,7 +9259,7 @@ dependencies = [ [[package]] name = "sp-tracing" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "parity-scale-codec", "tracing", @@ -8843,7 +9270,7 @@ dependencies = [ [[package]] name = "sp-trie" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "ahash 0.8.11", "hash-db", @@ -8852,7 +9279,7 @@ dependencies = [ "nohash-hasher", "parity-scale-codec", "parking_lot 0.12.1", - "rand", + "rand 0.8.5", "scale-info", "schnellru", "sp-core", @@ -8866,7 +9293,7 @@ dependencies = [ [[package]] name = "sp-version" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "impl-serde", "parity-scale-codec", @@ -8883,18 +9310,18 @@ dependencies = [ [[package]] name = "sp-version-proc-macro" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "parity-scale-codec", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] name = "sp-wasm-interface" version = "20.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "anyhow", "impl-trait-for-tuples", @@ -8906,19 +9333,17 @@ dependencies = [ [[package]] name = "sp-wasm-interface" version = "20.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ - "anyhow", "impl-trait-for-tuples", "log", "parity-scale-codec", - "wasmtime", ] [[package]] name = "sp-weights" version = "27.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "bounded-collections", "parity-scale-codec", @@ -8975,7 +9400,7 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "staging-xcm" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "array-bytes 6.2.2", "bounded-collections", @@ -8993,7 +9418,7 @@ dependencies = [ [[package]] name = "staging-xcm-builder" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "frame-support", "frame-system", @@ -9015,7 +9440,7 @@ dependencies = [ [[package]] name = "staging-xcm-executor" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "environmental", "frame-benchmarking", @@ -9039,6 +9464,26 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" +[[package]] +name = "str0m" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee48572247f422dcbe68630c973f8296fbd5157119cd36a3223e48bf83d47727" +dependencies = [ + "combine", + "crc", + "hmac 0.12.1", + "once_cell", + "openssl", + "openssl-sys", + "rand 0.8.5", + "sctp-proto", + 
"serde", + "sha-1 0.10.1", + "thiserror", + "tracing", +] + [[package]] name = "strsim" version = "0.8.0" @@ -9075,35 +9520,13 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "strum" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" -dependencies = [ - "strum_macros 0.24.3", -] - [[package]] name = "strum" version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d8cec3501a5194c432b2b7976db6b7d10ec95c253208b45f83f7136aa985e29" dependencies = [ - "strum_macros 0.26.2", -] - -[[package]] -name = "strum_macros" -version = "0.24.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" -dependencies = [ - "heck 0.4.1", - "proc-macro2 1.0.79", - "quote 1.0.35", - "rustversion", - "syn 1.0.109", + "strum_macros", ] [[package]] @@ -9116,13 +9539,13 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "rustversion", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] name = "substrate-bip39" version = "0.4.7" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "hmac 0.12.1", "pbkdf2", @@ -9134,7 +9557,7 @@ dependencies = [ [[package]] name = "substrate-prometheus-endpoint" version = "0.17.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ "hyper", "log", @@ -9192,7 +9615,7 @@ dependencies = [ "sp-keyring", "sp-runtime", "structopt", - "strum 0.26.2", + "strum", "substrate-relay-helper", "tempfile", ] @@ -9236,7 +9659,7 @@ dependencies = [ 
"sp-core", "sp-runtime", "structopt", - "strum 0.26.2", + "strum", "thiserror", ] @@ -9306,7 +9729,7 @@ dependencies = [ "quote 1.0.35", "scale-info", "subxt-metadata", - "syn 2.0.57", + "syn 2.0.58", "thiserror", "tokio", ] @@ -9337,7 +9760,7 @@ dependencies = [ "darling 0.20.8", "proc-macro-error", "subxt-codegen", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -9377,9 +9800,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.57" +version = "2.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11a6ae1e52eb25aab8f3fb9fca13be982a373b8f1157ca14b897a825ba4a2d35" +checksum = "44cfb93f38070beee36b3fef7d4f5a16f27751d94b187b666a5cc5e9b0d30687" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", @@ -9400,11 +9823,11 @@ dependencies = [ [[package]] name = "sysinfo" -version = "0.30.7" +version = "0.30.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c385888ef380a852a16209afc8cfad22795dd8873d69c9a14d2e2088f118d18" +checksum = "e9a84fe4cfc513b41cb2596b624e561ec9e7e1c4b46328e496ed56a53514ef2a" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "core-foundation-sys", "libc", "ntapi", @@ -9452,7 +9875,7 @@ version = "3.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "fastrand 2.0.2", "rustix 0.38.32", "windows-sys 0.52.0", @@ -9508,7 +9931,7 @@ checksum = "e4c60d69f36615a077cc7663b9cb8e42275722d23e58a7fa3d2c7f2915d09d04" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -9519,7 +9942,7 @@ checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -9528,7 +9951,7 @@ version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "once_cell", ] @@ -9615,7 +10038,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -9651,6 +10074,21 @@ dependencies = [ "tokio-util", ] +[[package]] +name = "tokio-tungstenite" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" +dependencies = [ + "futures-util", + "log", + "rustls 0.21.10", + "rustls-native-certs 0.6.3", + "tokio", + "tokio-rustls 0.24.1", + "tungstenite", +] + [[package]] name = "tokio-util" version = "0.7.10" @@ -9778,7 +10216,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -9803,55 +10241,40 @@ dependencies = [ [[package]] name = "tracing-log" -version = "0.1.4" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" dependencies = [ "log", "once_cell", "tracing-core", ] -[[package]] -name = "tracing-serde" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" -dependencies = [ - "serde", - "tracing-core", -] - [[package]] name = "tracing-subscriber" -version = "0.2.25" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e0d2eaa99c3c2e41547cfa109e910a68ea03823cccad4a0525dcbc9b01e8c71" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ - "ansi_term", - "chrono", - 
"lazy_static", "matchers", + "nu-ansi-term", + "once_cell", "regex", - "serde", - "serde_json", "sharded-slab", "smallvec", "thread_local", "tracing", "tracing-core", "tracing-log", - "tracing-serde", ] [[package]] name = "trie-db" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff28e0f815c2fea41ebddf148e008b077d2faddb026c9555b29696114d602642" +checksum = "65ed83be775d85ebb0e272914fff6462c39b3ddd6dc67b5c1c41271aad280c69" dependencies = [ "hash-db", - "hashbrown 0.13.2", "log", "rustc-hex", "smallvec", @@ -9873,16 +10296,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4f7f83d1e4a0e4358ac54c5c3681e5d7da5efc5a7a632c90bb6d6669ddd9bc26" dependencies = [ "async-trait", - "cfg-if 1.0.0", + "cfg-if", "data-encoding", - "enum-as-inner", + "enum-as-inner 0.5.1", "futures-channel", "futures-io", "futures-util", "idna 0.2.3", "ipnet", "lazy_static", - "rand", + "rand 0.8.5", "smallvec", "socket2 0.4.10", "thiserror", @@ -9892,13 +10315,38 @@ dependencies = [ "url", ] +[[package]] +name = "trust-dns-proto" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3119112651c157f4488931a01e586aa459736e9d6046d3bd9105ffb69352d374" +dependencies = [ + "async-trait", + "cfg-if", + "data-encoding", + "enum-as-inner 0.6.0", + "futures-channel", + "futures-io", + "futures-util", + "idna 0.4.0", + "ipnet", + "once_cell", + "rand 0.8.5", + "smallvec", + "thiserror", + "tinyvec", + "tokio", + "tracing", + "url", +] + [[package]] name = "trust-dns-resolver" version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aff21aa4dcefb0a1afbfac26deb0adc93888c7d295fb63ab273ef276ba2b7cfe" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "futures-util", "ipconfig", "lazy_static", @@ -9909,7 +10357,28 @@ dependencies = [ "thiserror", "tokio", "tracing", - "trust-dns-proto", + "trust-dns-proto 0.22.0", +] + +[[package]] 
+name = "trust-dns-resolver" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10a3e6c3aff1718b3c73e395d1f35202ba2ffa847c6a62eea0db8fb4cfe30be6" +dependencies = [ + "cfg-if", + "futures-util", + "ipconfig", + "lru-cache", + "once_cell", + "parking_lot 0.12.1", + "rand 0.8.5", + "resolv-conf", + "smallvec", + "thiserror", + "tokio", + "tracing", + "trust-dns-proto 0.23.2", ] [[package]] @@ -9924,15 +10393,35 @@ version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4f195fd851901624eee5a58c4bb2b4f06399148fcd0ed336e6f1cb60a9881df" +[[package]] +name = "tungstenite" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" +dependencies = [ + "byteorder", + "bytes", + "data-encoding", + "http", + "httparse", + "log", + "rand 0.8.5", + "rustls 0.21.10", + "sha1", + "thiserror", + "url", + "utf-8", +] + [[package]] name = "twox-hash" version = "1.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" dependencies = [ - "cfg-if 0.1.10", + "cfg-if", "digest 0.10.7", - "rand", + "rand 0.8.5", "static_assertions", ] @@ -10019,6 +10508,7 @@ dependencies = [ "bytes", "futures-io", "futures-util", + "tokio-util", ] [[package]] @@ -10044,6 +10534,12 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + [[package]] name = "utf8parse" version = "0.2.1" @@ -10101,8 +10597,8 @@ dependencies = [ "arrayref", "constcat", "digest 0.10.7", - "rand", - "rand_chacha", + "rand 0.8.5", + "rand_chacha 0.3.1", "rand_core 0.6.4", "sha2 0.10.8", "sha3", @@ -10153,7 +10649,7 @@ version = "0.2.92" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "wasm-bindgen-macro", ] @@ -10168,7 +10664,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", "wasm-bindgen-shared", ] @@ -10178,7 +10674,7 @@ version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "wasm-bindgen", "web-sys", @@ -10202,7 +10698,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -10296,7 +10792,7 @@ checksum = "f907fdead3153cb9bfb7a93bbd5b62629472dc06dee83605358c64c52ed3dda9" dependencies = [ "anyhow", "bincode", - "cfg-if 1.0.0", + "cfg-if", "indexmap 1.9.3", "libc", "log", @@ -10322,7 +10818,7 @@ version = "8.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3b9daa7c14cd4fa3edbf69de994408d5f4b7b0959ac13fa69d465f6597f810d" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -10410,7 +10906,7 @@ dependencies = [ "addr2line 0.19.0", "anyhow", "bincode", - "cfg-if 1.0.0", + "cfg-if", "cpp_demangle", "gimli 0.27.3", "log", @@ -10442,7 +10938,7 @@ version = "8.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aecae978b13f7f67efb23bd827373ace4578f2137ec110bbf6a4a7cde4121bbd" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "windows-sys 0.45.0", ] @@ -10455,7 +10951,7 @@ checksum = "658cf6f325232b6760e202e5255d823da5e348fdea827eff0a2a22319000b441" dependencies = [ "anyhow", "cc", - "cfg-if 1.0.0", + "cfg-if", "indexmap 1.9.3", "libc", "log", @@ -10463,7 +10959,7 @@ dependencies = [ "memfd", "memoffset", "paste", - 
"rand", + "rand 0.8.5", "rustix 0.36.17", "wasmtime-asm-macros", "wasmtime-environ", @@ -10536,9 +11032,9 @@ dependencies = [ [[package]] name = "widestring" -version = "1.0.2" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "653f141f39ec16bba3c5abe400a0c60da7468261cc2cbf36805022876bc721a8" +checksum = "7219d36b6eac893fa81e84ebe06485e7dcbb616177469b142df14f1f4deb1311" [[package]] name = "winapi" @@ -10609,6 +11105,21 @@ dependencies = [ "windows-targets 0.52.4", ] +[[package]] +name = "windows-sys" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + [[package]] name = "windows-sys" version = "0.45.0" @@ -10831,7 +11342,7 @@ version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "windows-sys 0.48.0", ] @@ -10885,15 +11396,32 @@ dependencies = [ "time", ] +[[package]] +name = "x509-parser" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7069fba5b66b9193bd2c5d3d4ff12b839118f6bcbef5328efafafb5395cf63da" +dependencies = [ + "asn1-rs", + "data-encoding", + "der-parser", + "lazy_static", + "nom", + "oid-registry", + "rusticata-macros", + "thiserror", + "time", +] + [[package]] name = "xcm-procedural" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#e6bd9205432bb524e94c9bd13048d645ec9aa5c7" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" dependencies = [ 
"Inflector", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -10906,7 +11434,7 @@ dependencies = [ "log", "nohash-hasher", "parking_lot 0.12.1", - "rand", + "rand 0.8.5", "static_assertions", ] @@ -10942,7 +11470,7 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] @@ -10962,7 +11490,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.57", + "syn 2.0.58", ] [[package]] diff --git a/bin/runtime-common/src/mock.rs b/bin/runtime-common/src/mock.rs index 8c4cb2233..ad71cd0d4 100644 --- a/bin/runtime-common/src/mock.rs +++ b/bin/runtime-common/src/mock.rs @@ -166,7 +166,7 @@ impl pallet_balances::Config for TestRuntime { #[derive_impl(pallet_transaction_payment::config_preludes::TestDefaultConfig)] impl pallet_transaction_payment::Config for TestRuntime { - type OnChargeTransaction = pallet_transaction_payment::CurrencyAdapter; + type OnChargeTransaction = pallet_transaction_payment::FungibleAdapter; type OperationalFeeMultiplier = ConstU8<5>; type WeightToFee = IdentityFee; type LengthToFee = ConstantMultiplier; diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 258b57682..d8e293cb6 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -29,7 +29,7 @@ sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "mas sp-state-machine = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } sp-trie = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -trie-db = { version = "0.28.0", default-features = false } +trie-db = { version = 
"0.29.0", default-features = false } [dev-dependencies] hex-literal = "0.4" -- GitLab From a174cfa9f8c83cb50b1985b73333049c39bcc659 Mon Sep 17 00:00:00 2001 From: Serban Iorga Date: Wed, 10 Apr 2024 14:49:36 +0300 Subject: [PATCH 25/39] Consume migrated crates from `polkadot-sdk` (#2921) * Remove migrated crates * Reference polkadot-sdk for the migrated crates * Leftovers * Fixes --- .gitlab-ci.yml | 4 +- Cargo.lock | 462 +-- Cargo.toml | 37 - bin/runtime-common/Cargo.toml | 100 - bin/runtime-common/src/integrity.rs | 348 --- bin/runtime-common/src/lib.rs | 223 -- bin/runtime-common/src/messages.rs | 701 ----- bin/runtime-common/src/messages_api.rs | 66 - .../src/messages_benchmarking.rs | 314 -- bin/runtime-common/src/messages_call_ext.rs | 692 ----- bin/runtime-common/src/messages_generation.rs | 150 - .../src/messages_xcm_extension.rs | 502 ---- bin/runtime-common/src/mock.rs | 427 --- .../src/parachains_benchmarking.rs | 88 - bin/runtime-common/src/priority_calculator.rs | 202 -- .../src/refund_relayer_extension.rs | 2585 ----------------- chains/chain-asset-hub-rococo/Cargo.toml | 30 - chains/chain-asset-hub-rococo/src/lib.rs | 48 - chains/chain-asset-hub-westend/Cargo.toml | 30 - chains/chain-asset-hub-westend/src/lib.rs | 48 - chains/chain-bridge-hub-cumulus/Cargo.toml | 41 - chains/chain-bridge-hub-cumulus/src/lib.rs | 170 -- chains/chain-bridge-hub-kusama/Cargo.toml | 37 - chains/chain-bridge-hub-kusama/src/lib.rs | 93 - chains/chain-bridge-hub-polkadot/Cargo.toml | 38 - chains/chain-bridge-hub-polkadot/src/lib.rs | 85 - chains/chain-bridge-hub-rococo/Cargo.toml | 37 - chains/chain-bridge-hub-rococo/src/lib.rs | 111 - chains/chain-bridge-hub-westend/Cargo.toml | 38 - chains/chain-bridge-hub-westend/src/lib.rs | 102 - chains/chain-kusama/Cargo.toml | 36 - chains/chain-kusama/src/lib.rs | 78 - chains/chain-polkadot-bulletin/Cargo.toml | 46 - chains/chain-polkadot-bulletin/src/lib.rs | 227 -- chains/chain-polkadot/Cargo.toml | 36 - 
chains/chain-polkadot/src/lib.rs | 80 - chains/chain-rococo/Cargo.toml | 36 - chains/chain-rococo/src/lib.rs | 78 - chains/chain-westend/Cargo.toml | 36 - chains/chain-westend/src/lib.rs | 78 - docs/bridge-relayers-claim-rewards.png | Bin 35621 -> 0 bytes docs/bridge-relayers-deregister.png | Bin 10115 -> 0 bytes docs/bridge-relayers-register.png | Bin 51026 -> 0 bytes docs/complex-relay.html | 85 - docs/grandpa-finality-relay.html | 47 - docs/high-level-overview.md | 184 -- docs/messages-relay.html | 78 - docs/parachains-finality-relay.html | 55 - docs/polkadot-kusama-bridge-overview.md | 129 - docs/polkadot-kusama-bridge.html | 67 - docs/running-relayer.md | 343 --- modules/beefy/Cargo.toml | 63 - modules/beefy/src/lib.rs | 651 ----- modules/beefy/src/mock.rs | 193 -- modules/beefy/src/mock_chain.rs | 299 -- modules/beefy/src/utils.rs | 361 --- modules/grandpa/Cargo.toml | 72 - modules/grandpa/README.md | 101 - modules/grandpa/src/benchmarking.rs | 142 - modules/grandpa/src/call_ext.rs | 426 --- modules/grandpa/src/lib.rs | 1527 ---------- modules/grandpa/src/mock.rs | 112 - modules/grandpa/src/storage_types.rs | 136 - modules/grandpa/src/weights.rs | 167 -- modules/messages/Cargo.toml | 64 - modules/messages/README.md | 201 -- modules/messages/src/benchmarking.rs | 461 --- modules/messages/src/inbound_lane.rs | 556 ---- modules/messages/src/lib.rs | 2117 -------------- modules/messages/src/mock.rs | 461 --- modules/messages/src/outbound_lane.rs | 424 --- modules/messages/src/weights.rs | 525 ---- modules/messages/src/weights_ext.rs | 488 ---- modules/parachains/Cargo.toml | 71 - modules/parachains/README.md | 90 - modules/parachains/src/benchmarking.rs | 116 - modules/parachains/src/call_ext.rs | 263 -- modules/parachains/src/lib.rs | 1650 ----------- modules/parachains/src/mock.rs | 328 --- modules/parachains/src/weights.rs | 273 -- modules/parachains/src/weights_ext.rs | 107 - modules/relayers/Cargo.toml | 71 - modules/relayers/README.md | 14 - 
modules/relayers/src/benchmarking.rs | 131 - modules/relayers/src/lib.rs | 922 ------ modules/relayers/src/mock.rs | 149 - modules/relayers/src/payment_adapter.rs | 158 - modules/relayers/src/stake_adapter.rs | 186 -- modules/relayers/src/weights.rs | 259 -- modules/relayers/src/weights_ext.rs | 49 - modules/xcm-bridge-hub-router/Cargo.toml | 67 - .../xcm-bridge-hub-router/src/benchmarking.rs | 95 - modules/xcm-bridge-hub-router/src/lib.rs | 568 ---- modules/xcm-bridge-hub-router/src/mock.rs | 148 - modules/xcm-bridge-hub-router/src/weights.rs | 208 -- modules/xcm-bridge-hub/Cargo.toml | 78 - modules/xcm-bridge-hub/src/exporter.rs | 206 -- modules/xcm-bridge-hub/src/lib.rs | 118 - modules/xcm-bridge-hub/src/mock.rs | 317 -- primitives/beefy/Cargo.toml | 47 - primitives/beefy/src/lib.rs | 151 - primitives/header-chain/Cargo.toml | 49 - .../header-chain/src/justification/mod.rs | 132 - .../verification/equivocation.rs | 200 -- .../src/justification/verification/mod.rs | 333 --- .../justification/verification/optimizer.rs | 142 - .../src/justification/verification/strict.rs | 108 - primitives/header-chain/src/lib.rs | 388 --- primitives/header-chain/src/storage_keys.rs | 104 - .../tests/implementation_match.rs | 411 --- .../tests/justification/equivocation.rs | 124 - .../tests/justification/optimizer.rs | 196 -- .../tests/justification/strict.rs | 202 -- primitives/header-chain/tests/tests.rs | 23 - primitives/messages/Cargo.toml | 44 - primitives/messages/src/lib.rs | 567 ---- primitives/messages/src/source_chain.rs | 179 -- primitives/messages/src/storage_keys.rs | 128 - primitives/messages/src/target_chain.rs | 212 -- primitives/parachains/Cargo.toml | 43 - primitives/parachains/src/lib.rs | 184 -- primitives/polkadot-core/Cargo.toml | 49 - primitives/polkadot-core/src/lib.rs | 384 --- primitives/polkadot-core/src/parachains.rs | 106 - primitives/relayers/Cargo.toml | 42 - primitives/relayers/src/lib.rs | 206 -- primitives/relayers/src/registration.rs | 121 - 
primitives/runtime/Cargo.toml | 55 - primitives/runtime/src/chain.rs | 414 --- primitives/runtime/src/extensions.rs | 154 - primitives/runtime/src/lib.rs | 545 ---- primitives/runtime/src/messages.rs | 36 - primitives/runtime/src/storage_proof.rs | 272 -- primitives/runtime/src/storage_types.rs | 91 - primitives/test-utils/Cargo.toml | 44 - primitives/test-utils/src/keyring.rs | 94 - primitives/test-utils/src/lib.rs | 347 --- primitives/xcm-bridge-hub-router/Cargo.toml | 23 - primitives/xcm-bridge-hub-router/src/lib.rs | 66 - primitives/xcm-bridge-hub/Cargo.toml | 20 - primitives/xcm-bridge-hub/src/lib.rs | 24 - .../client-bridge-hub-kusama/Cargo.toml | 17 +- .../client-bridge-hub-polkadot/Cargo.toml | 20 +- .../client-bridge-hub-rococo/Cargo.toml | 16 +- .../client-bridge-hub-westend/Cargo.toml | 18 +- relay-clients/client-kusama/Cargo.toml | 10 +- .../client-polkadot-bulletin/Cargo.toml | 16 +- relay-clients/client-polkadot/Cargo.toml | 10 +- relay-clients/client-rococo/Cargo.toml | 8 +- relay-clients/client-westend/Cargo.toml | 10 +- relays/client-substrate/Cargo.toml | 61 - relays/client-substrate/src/calls.rs | 59 - relays/client-substrate/src/chain.rs | 286 -- relays/client-substrate/src/client.rs | 990 ------- relays/client-substrate/src/error.rs | 165 -- relays/client-substrate/src/guard.rs | 196 -- relays/client-substrate/src/lib.rs | 101 - .../src/metrics/float_storage_value.rs | 133 - relays/client-substrate/src/metrics/mod.rs | 21 - relays/client-substrate/src/rpc.rs | 176 -- relays/client-substrate/src/sync_header.rs | 61 - relays/client-substrate/src/test_chain.rs | 132 - .../src/transaction_tracker.rs | 447 --- relays/equivocation/Cargo.toml | 23 - relays/equivocation/src/block_checker.rs | 471 --- relays/equivocation/src/equivocation_loop.rs | 308 -- relays/equivocation/src/lib.rs | 137 - relays/equivocation/src/mock.rs | 285 -- relays/equivocation/src/reporter.rs | 129 - relays/finality/Cargo.toml | 25 - relays/finality/README.md | 60 - 
relays/finality/src/base.rs | 51 - relays/finality/src/finality_loop.rs | 698 ----- relays/finality/src/finality_proofs.rs | 222 -- relays/finality/src/headers.rs | 238 -- relays/finality/src/lib.rs | 91 - relays/finality/src/mock.rs | 213 -- relays/finality/src/sync_loop_metrics.rs | 95 - relays/lib-substrate-relay/Cargo.toml | 62 - relays/lib-substrate-relay/src/cli/bridge.rs | 110 - .../src/cli/chain_schema.rs | 261 -- .../src/cli/detect_equivocations.rs | 65 - .../src/cli/init_bridge.rs | 85 - relays/lib-substrate-relay/src/cli/mod.rs | 192 -- .../src/cli/relay_headers.rs | 76 - .../src/cli/relay_headers_and_messages/mod.rs | 492 ---- .../parachain_to_parachain.rs | 217 -- .../relay_to_parachain.rs | 199 -- .../relay_to_relay.rs | 169 -- .../src/cli/relay_messages.rs | 89 - .../src/cli/relay_parachains.rs | 91 - .../src/equivocation/mod.rs | 223 -- .../src/equivocation/source.rs | 109 - .../src/equivocation/target.rs | 111 - relays/lib-substrate-relay/src/error.rs | 63 - .../src/finality/initialize.rs | 163 -- .../lib-substrate-relay/src/finality/mod.rs | 270 -- .../src/finality/source.rs | 259 -- .../src/finality/target.rs | 130 - .../src/finality_base/engine.rs | 464 --- .../src/finality_base/mod.rs | 107 - relays/lib-substrate-relay/src/lib.rs | 129 - .../lib-substrate-relay/src/messages_lane.rs | 587 ---- .../src/messages_metrics.rs | 190 -- .../src/messages_source.rs | 713 ----- .../src/messages_target.rs | 300 -- .../src/on_demand/headers.rs | 550 ---- .../lib-substrate-relay/src/on_demand/mod.rs | 48 - .../src/on_demand/parachains.rs | 1033 ------- .../lib-substrate-relay/src/parachains/mod.rs | 108 - .../src/parachains/source.rs | 181 -- .../src/parachains/target.rs | 148 - relays/messages/Cargo.toml | 29 - relays/messages/src/lib.rs | 37 - relays/messages/src/message_lane.rs | 71 - relays/messages/src/message_lane_loop.rs | 1277 -------- relays/messages/src/message_race_delivery.rs | 1405 --------- relays/messages/src/message_race_limits.rs | 206 -- 
relays/messages/src/message_race_loop.rs | 835 ------ relays/messages/src/message_race_receiving.rs | 235 -- relays/messages/src/message_race_strategy.rs | 628 ---- relays/messages/src/metrics.rs | 148 - relays/parachains/Cargo.toml | 28 - relays/parachains/README.md | 50 - relays/parachains/src/lib.rs | 32 - relays/parachains/src/parachains_loop.rs | 985 ------- .../parachains/src/parachains_loop_metrics.rs | 86 - relays/utils/Cargo.toml | 38 - relays/utils/src/error.rs | 46 - relays/utils/src/initialize.rs | 136 - relays/utils/src/lib.rs | 318 -- relays/utils/src/metrics.rs | 192 -- relays/utils/src/metrics/float_json_value.rs | 147 - relays/utils/src/metrics/global.rs | 118 - relays/utils/src/relay_loop.rs | 262 -- scripts/verify-pallets-build.sh | 141 - substrate-relay/Cargo.toml | 34 +- 237 files changed, 203 insertions(+), 54051 deletions(-) delete mode 100644 bin/runtime-common/Cargo.toml delete mode 100644 bin/runtime-common/src/integrity.rs delete mode 100644 bin/runtime-common/src/lib.rs delete mode 100644 bin/runtime-common/src/messages.rs delete mode 100644 bin/runtime-common/src/messages_api.rs delete mode 100644 bin/runtime-common/src/messages_benchmarking.rs delete mode 100644 bin/runtime-common/src/messages_call_ext.rs delete mode 100644 bin/runtime-common/src/messages_generation.rs delete mode 100644 bin/runtime-common/src/messages_xcm_extension.rs delete mode 100644 bin/runtime-common/src/mock.rs delete mode 100644 bin/runtime-common/src/parachains_benchmarking.rs delete mode 100644 bin/runtime-common/src/priority_calculator.rs delete mode 100644 bin/runtime-common/src/refund_relayer_extension.rs delete mode 100644 chains/chain-asset-hub-rococo/Cargo.toml delete mode 100644 chains/chain-asset-hub-rococo/src/lib.rs delete mode 100644 chains/chain-asset-hub-westend/Cargo.toml delete mode 100644 chains/chain-asset-hub-westend/src/lib.rs delete mode 100644 chains/chain-bridge-hub-cumulus/Cargo.toml delete mode 100644 
chains/chain-bridge-hub-cumulus/src/lib.rs delete mode 100644 chains/chain-bridge-hub-kusama/Cargo.toml delete mode 100644 chains/chain-bridge-hub-kusama/src/lib.rs delete mode 100644 chains/chain-bridge-hub-polkadot/Cargo.toml delete mode 100644 chains/chain-bridge-hub-polkadot/src/lib.rs delete mode 100644 chains/chain-bridge-hub-rococo/Cargo.toml delete mode 100644 chains/chain-bridge-hub-rococo/src/lib.rs delete mode 100644 chains/chain-bridge-hub-westend/Cargo.toml delete mode 100644 chains/chain-bridge-hub-westend/src/lib.rs delete mode 100644 chains/chain-kusama/Cargo.toml delete mode 100644 chains/chain-kusama/src/lib.rs delete mode 100644 chains/chain-polkadot-bulletin/Cargo.toml delete mode 100644 chains/chain-polkadot-bulletin/src/lib.rs delete mode 100644 chains/chain-polkadot/Cargo.toml delete mode 100644 chains/chain-polkadot/src/lib.rs delete mode 100644 chains/chain-rococo/Cargo.toml delete mode 100644 chains/chain-rococo/src/lib.rs delete mode 100644 chains/chain-westend/Cargo.toml delete mode 100644 chains/chain-westend/src/lib.rs delete mode 100644 docs/bridge-relayers-claim-rewards.png delete mode 100644 docs/bridge-relayers-deregister.png delete mode 100644 docs/bridge-relayers-register.png delete mode 100644 docs/complex-relay.html delete mode 100644 docs/grandpa-finality-relay.html delete mode 100644 docs/high-level-overview.md delete mode 100644 docs/messages-relay.html delete mode 100644 docs/parachains-finality-relay.html delete mode 100644 docs/polkadot-kusama-bridge-overview.md delete mode 100644 docs/polkadot-kusama-bridge.html delete mode 100644 docs/running-relayer.md delete mode 100644 modules/beefy/Cargo.toml delete mode 100644 modules/beefy/src/lib.rs delete mode 100644 modules/beefy/src/mock.rs delete mode 100644 modules/beefy/src/mock_chain.rs delete mode 100644 modules/beefy/src/utils.rs delete mode 100644 modules/grandpa/Cargo.toml delete mode 100644 modules/grandpa/README.md delete mode 100644 
modules/grandpa/src/benchmarking.rs delete mode 100644 modules/grandpa/src/call_ext.rs delete mode 100644 modules/grandpa/src/lib.rs delete mode 100644 modules/grandpa/src/mock.rs delete mode 100644 modules/grandpa/src/storage_types.rs delete mode 100644 modules/grandpa/src/weights.rs delete mode 100644 modules/messages/Cargo.toml delete mode 100644 modules/messages/README.md delete mode 100644 modules/messages/src/benchmarking.rs delete mode 100644 modules/messages/src/inbound_lane.rs delete mode 100644 modules/messages/src/lib.rs delete mode 100644 modules/messages/src/mock.rs delete mode 100644 modules/messages/src/outbound_lane.rs delete mode 100644 modules/messages/src/weights.rs delete mode 100644 modules/messages/src/weights_ext.rs delete mode 100644 modules/parachains/Cargo.toml delete mode 100644 modules/parachains/README.md delete mode 100644 modules/parachains/src/benchmarking.rs delete mode 100644 modules/parachains/src/call_ext.rs delete mode 100644 modules/parachains/src/lib.rs delete mode 100644 modules/parachains/src/mock.rs delete mode 100644 modules/parachains/src/weights.rs delete mode 100644 modules/parachains/src/weights_ext.rs delete mode 100644 modules/relayers/Cargo.toml delete mode 100644 modules/relayers/README.md delete mode 100644 modules/relayers/src/benchmarking.rs delete mode 100644 modules/relayers/src/lib.rs delete mode 100644 modules/relayers/src/mock.rs delete mode 100644 modules/relayers/src/payment_adapter.rs delete mode 100644 modules/relayers/src/stake_adapter.rs delete mode 100644 modules/relayers/src/weights.rs delete mode 100644 modules/relayers/src/weights_ext.rs delete mode 100644 modules/xcm-bridge-hub-router/Cargo.toml delete mode 100644 modules/xcm-bridge-hub-router/src/benchmarking.rs delete mode 100644 modules/xcm-bridge-hub-router/src/lib.rs delete mode 100644 modules/xcm-bridge-hub-router/src/mock.rs delete mode 100644 modules/xcm-bridge-hub-router/src/weights.rs delete mode 100644 modules/xcm-bridge-hub/Cargo.toml 
delete mode 100644 modules/xcm-bridge-hub/src/exporter.rs delete mode 100644 modules/xcm-bridge-hub/src/lib.rs delete mode 100644 modules/xcm-bridge-hub/src/mock.rs delete mode 100644 primitives/beefy/Cargo.toml delete mode 100644 primitives/beefy/src/lib.rs delete mode 100644 primitives/header-chain/Cargo.toml delete mode 100644 primitives/header-chain/src/justification/mod.rs delete mode 100644 primitives/header-chain/src/justification/verification/equivocation.rs delete mode 100644 primitives/header-chain/src/justification/verification/mod.rs delete mode 100644 primitives/header-chain/src/justification/verification/optimizer.rs delete mode 100644 primitives/header-chain/src/justification/verification/strict.rs delete mode 100644 primitives/header-chain/src/lib.rs delete mode 100644 primitives/header-chain/src/storage_keys.rs delete mode 100644 primitives/header-chain/tests/implementation_match.rs delete mode 100644 primitives/header-chain/tests/justification/equivocation.rs delete mode 100644 primitives/header-chain/tests/justification/optimizer.rs delete mode 100644 primitives/header-chain/tests/justification/strict.rs delete mode 100644 primitives/header-chain/tests/tests.rs delete mode 100644 primitives/messages/Cargo.toml delete mode 100644 primitives/messages/src/lib.rs delete mode 100644 primitives/messages/src/source_chain.rs delete mode 100644 primitives/messages/src/storage_keys.rs delete mode 100644 primitives/messages/src/target_chain.rs delete mode 100644 primitives/parachains/Cargo.toml delete mode 100644 primitives/parachains/src/lib.rs delete mode 100644 primitives/polkadot-core/Cargo.toml delete mode 100644 primitives/polkadot-core/src/lib.rs delete mode 100644 primitives/polkadot-core/src/parachains.rs delete mode 100644 primitives/relayers/Cargo.toml delete mode 100644 primitives/relayers/src/lib.rs delete mode 100644 primitives/relayers/src/registration.rs delete mode 100644 primitives/runtime/Cargo.toml delete mode 100644 
primitives/runtime/src/chain.rs delete mode 100644 primitives/runtime/src/extensions.rs delete mode 100644 primitives/runtime/src/lib.rs delete mode 100644 primitives/runtime/src/messages.rs delete mode 100644 primitives/runtime/src/storage_proof.rs delete mode 100644 primitives/runtime/src/storage_types.rs delete mode 100644 primitives/test-utils/Cargo.toml delete mode 100644 primitives/test-utils/src/keyring.rs delete mode 100644 primitives/test-utils/src/lib.rs delete mode 100644 primitives/xcm-bridge-hub-router/Cargo.toml delete mode 100644 primitives/xcm-bridge-hub-router/src/lib.rs delete mode 100644 primitives/xcm-bridge-hub/Cargo.toml delete mode 100644 primitives/xcm-bridge-hub/src/lib.rs delete mode 100644 relays/client-substrate/Cargo.toml delete mode 100644 relays/client-substrate/src/calls.rs delete mode 100644 relays/client-substrate/src/chain.rs delete mode 100644 relays/client-substrate/src/client.rs delete mode 100644 relays/client-substrate/src/error.rs delete mode 100644 relays/client-substrate/src/guard.rs delete mode 100644 relays/client-substrate/src/lib.rs delete mode 100644 relays/client-substrate/src/metrics/float_storage_value.rs delete mode 100644 relays/client-substrate/src/metrics/mod.rs delete mode 100644 relays/client-substrate/src/rpc.rs delete mode 100644 relays/client-substrate/src/sync_header.rs delete mode 100644 relays/client-substrate/src/test_chain.rs delete mode 100644 relays/client-substrate/src/transaction_tracker.rs delete mode 100644 relays/equivocation/Cargo.toml delete mode 100644 relays/equivocation/src/block_checker.rs delete mode 100644 relays/equivocation/src/equivocation_loop.rs delete mode 100644 relays/equivocation/src/lib.rs delete mode 100644 relays/equivocation/src/mock.rs delete mode 100644 relays/equivocation/src/reporter.rs delete mode 100644 relays/finality/Cargo.toml delete mode 100644 relays/finality/README.md delete mode 100644 relays/finality/src/base.rs delete mode 100644 
relays/finality/src/finality_loop.rs delete mode 100644 relays/finality/src/finality_proofs.rs delete mode 100644 relays/finality/src/headers.rs delete mode 100644 relays/finality/src/lib.rs delete mode 100644 relays/finality/src/mock.rs delete mode 100644 relays/finality/src/sync_loop_metrics.rs delete mode 100644 relays/lib-substrate-relay/Cargo.toml delete mode 100644 relays/lib-substrate-relay/src/cli/bridge.rs delete mode 100644 relays/lib-substrate-relay/src/cli/chain_schema.rs delete mode 100644 relays/lib-substrate-relay/src/cli/detect_equivocations.rs delete mode 100644 relays/lib-substrate-relay/src/cli/init_bridge.rs delete mode 100644 relays/lib-substrate-relay/src/cli/mod.rs delete mode 100644 relays/lib-substrate-relay/src/cli/relay_headers.rs delete mode 100644 relays/lib-substrate-relay/src/cli/relay_headers_and_messages/mod.rs delete mode 100644 relays/lib-substrate-relay/src/cli/relay_headers_and_messages/parachain_to_parachain.rs delete mode 100644 relays/lib-substrate-relay/src/cli/relay_headers_and_messages/relay_to_parachain.rs delete mode 100644 relays/lib-substrate-relay/src/cli/relay_headers_and_messages/relay_to_relay.rs delete mode 100644 relays/lib-substrate-relay/src/cli/relay_messages.rs delete mode 100644 relays/lib-substrate-relay/src/cli/relay_parachains.rs delete mode 100644 relays/lib-substrate-relay/src/equivocation/mod.rs delete mode 100644 relays/lib-substrate-relay/src/equivocation/source.rs delete mode 100644 relays/lib-substrate-relay/src/equivocation/target.rs delete mode 100644 relays/lib-substrate-relay/src/error.rs delete mode 100644 relays/lib-substrate-relay/src/finality/initialize.rs delete mode 100644 relays/lib-substrate-relay/src/finality/mod.rs delete mode 100644 relays/lib-substrate-relay/src/finality/source.rs delete mode 100644 relays/lib-substrate-relay/src/finality/target.rs delete mode 100644 relays/lib-substrate-relay/src/finality_base/engine.rs delete mode 100644 
relays/lib-substrate-relay/src/finality_base/mod.rs delete mode 100644 relays/lib-substrate-relay/src/lib.rs delete mode 100644 relays/lib-substrate-relay/src/messages_lane.rs delete mode 100644 relays/lib-substrate-relay/src/messages_metrics.rs delete mode 100644 relays/lib-substrate-relay/src/messages_source.rs delete mode 100644 relays/lib-substrate-relay/src/messages_target.rs delete mode 100644 relays/lib-substrate-relay/src/on_demand/headers.rs delete mode 100644 relays/lib-substrate-relay/src/on_demand/mod.rs delete mode 100644 relays/lib-substrate-relay/src/on_demand/parachains.rs delete mode 100644 relays/lib-substrate-relay/src/parachains/mod.rs delete mode 100644 relays/lib-substrate-relay/src/parachains/source.rs delete mode 100644 relays/lib-substrate-relay/src/parachains/target.rs delete mode 100644 relays/messages/Cargo.toml delete mode 100644 relays/messages/src/lib.rs delete mode 100644 relays/messages/src/message_lane.rs delete mode 100644 relays/messages/src/message_lane_loop.rs delete mode 100644 relays/messages/src/message_race_delivery.rs delete mode 100644 relays/messages/src/message_race_limits.rs delete mode 100644 relays/messages/src/message_race_loop.rs delete mode 100644 relays/messages/src/message_race_receiving.rs delete mode 100644 relays/messages/src/message_race_strategy.rs delete mode 100644 relays/messages/src/metrics.rs delete mode 100644 relays/parachains/Cargo.toml delete mode 100644 relays/parachains/README.md delete mode 100644 relays/parachains/src/lib.rs delete mode 100644 relays/parachains/src/parachains_loop.rs delete mode 100644 relays/parachains/src/parachains_loop_metrics.rs delete mode 100644 relays/utils/Cargo.toml delete mode 100644 relays/utils/src/error.rs delete mode 100644 relays/utils/src/initialize.rs delete mode 100644 relays/utils/src/lib.rs delete mode 100644 relays/utils/src/metrics.rs delete mode 100644 relays/utils/src/metrics/float_json_value.rs delete mode 100644 relays/utils/src/metrics/global.rs 
delete mode 100644 relays/utils/src/relay_loop.rs delete mode 100755 scripts/verify-pallets-build.sh diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d6a2644b0..3c5b4aa6f 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -121,7 +121,7 @@ check: <<: *docker-env <<: *test-refs script: &check-script - - SKIP_WASM_BUILD=1 time cargo check --locked --verbose --workspace --features runtime-benchmarks + - SKIP_WASM_BUILD=1 time cargo check --locked --verbose --workspace check-nightly: stage: test @@ -142,7 +142,7 @@ test: # Enable this, when you see: "`cargo metadata` can not fail on project `Cargo.toml`" #- time cargo fetch --manifest-path=`cargo metadata --format-version=1 | jq --compact-output --raw-output ".packages[] | select(.name == \"polkadot-runtime\").manifest_path"` #- time cargo fetch --manifest-path=`cargo metadata --format-version=1 | jq --compact-output --raw-output ".packages[] | select(.name == \"kusama-runtime\").manifest_path"` - - CARGO_NET_OFFLINE=true SKIP_WASM_BUILD=1 time cargo test --verbose --workspace --features runtime-benchmarks + - CARGO_NET_OFFLINE=true SKIP_WASM_BUILD=1 time cargo test --verbose --workspace test-nightly: stage: test diff --git a/Cargo.lock b/Cargo.lock index e1d46834c..3097d7422 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -936,15 +936,6 @@ dependencies = [ "serde", ] -[[package]] -name = "binary-merkle-tree" -version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" -dependencies = [ - "hash-db", - "log", -] - [[package]] name = "bincode" version = "1.3.3" @@ -1122,46 +1113,10 @@ dependencies = [ "serde", ] -[[package]] -name = "bp-asset-hub-rococo" -version = "0.4.0" -dependencies = [ - "bp-xcm-bridge-hub-router", - "frame-support", - "parity-scale-codec", - "scale-info", -] - -[[package]] -name = "bp-asset-hub-westend" -version = "0.3.0" -dependencies = [ - "bp-xcm-bridge-hub-router", - "frame-support", - "parity-scale-codec", - "scale-info", 
-] - -[[package]] -name = "bp-beefy" -version = "0.1.0" -dependencies = [ - "binary-merkle-tree", - "bp-runtime", - "frame-support", - "pallet-beefy-mmr", - "pallet-mmr", - "parity-scale-codec", - "scale-info", - "serde", - "sp-consensus-beefy", - "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", -] - [[package]] name = "bp-bridge-hub-cumulus" version = "0.7.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-messages", "bp-polkadot-core", @@ -1176,6 +1131,7 @@ dependencies = [ [[package]] name = "bp-bridge-hub-kusama" version = "0.6.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-bridge-hub-cumulus", "bp-messages", @@ -1189,6 +1145,7 @@ dependencies = [ [[package]] name = "bp-bridge-hub-polkadot" version = "0.6.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-bridge-hub-cumulus", "bp-messages", @@ -1202,6 +1159,7 @@ dependencies = [ [[package]] name = "bp-bridge-hub-rococo" version = "0.7.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-bridge-hub-cumulus", "bp-messages", @@ -1215,6 +1173,7 @@ dependencies = [ [[package]] name = "bp-bridge-hub-westend" version = "0.3.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-bridge-hub-cumulus", "bp-messages", @@ -1228,13 +1187,11 @@ dependencies = [ [[package]] name = "bp-header-chain" version = "0.7.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-runtime", - "bp-test-utils", "finality-grandpa", "frame-support", - "hex", - "hex-literal", 
"parity-scale-codec", "scale-info", "serde", @@ -1247,6 +1204,7 @@ dependencies = [ [[package]] name = "bp-kusama" version = "0.5.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-header-chain", "bp-polkadot-core", @@ -1259,12 +1217,11 @@ dependencies = [ [[package]] name = "bp-messages" version = "0.7.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-header-chain", "bp-runtime", "frame-support", - "hex", - "hex-literal", "parity-scale-codec", "scale-info", "serde", @@ -1275,6 +1232,7 @@ dependencies = [ [[package]] name = "bp-parachains" version = "0.7.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-header-chain", "bp-polkadot-core", @@ -1291,6 +1249,7 @@ dependencies = [ [[package]] name = "bp-polkadot" version = "0.5.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-header-chain", "bp-polkadot-core", @@ -1303,6 +1262,7 @@ dependencies = [ [[package]] name = "bp-polkadot-bulletin" version = "0.4.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-header-chain", "bp-messages", @@ -1320,12 +1280,12 @@ dependencies = [ [[package]] name = "bp-polkadot-core" version = "0.7.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-messages", "bp-runtime", "frame-support", "frame-system", - "hex", "parity-scale-codec", "parity-util-mem", "scale-info", @@ -1338,12 +1298,11 @@ dependencies = [ [[package]] name = "bp-relayers" version = "0.7.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" 
dependencies = [ "bp-messages", "bp-runtime", "frame-support", - "hex", - "hex-literal", "parity-scale-codec", "scale-info", "sp-runtime", @@ -1353,6 +1312,7 @@ dependencies = [ [[package]] name = "bp-rococo" version = "0.6.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-header-chain", "bp-polkadot-core", @@ -1365,11 +1325,11 @@ dependencies = [ [[package]] name = "bp-runtime" version = "0.7.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "frame-support", "frame-system", "hash-db", - "hex-literal", "impl-trait-for-tuples", "log", "num-traits", @@ -1388,6 +1348,7 @@ dependencies = [ [[package]] name = "bp-test-utils" version = "0.7.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-header-chain", "bp-parachains", @@ -1407,6 +1368,7 @@ dependencies = [ [[package]] name = "bp-westend" version = "0.3.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-header-chain", "bp-polkadot-core", @@ -1419,6 +1381,7 @@ dependencies = [ [[package]] name = "bp-xcm-bridge-hub" version = "0.2.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] @@ -1426,6 +1389,7 @@ dependencies = [ [[package]] name = "bp-xcm-bridge-hub-router" version = "0.6.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "parity-scale-codec", "scale-info", @@ -1436,6 +1400,7 @@ dependencies = [ [[package]] name = "bridge-runtime-common" version = "0.7.0" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-header-chain", "bp-messages", @@ -1443,14 +1408,12 @@ dependencies = [ "bp-polkadot-core", "bp-relayers", "bp-runtime", - "bp-test-utils", "bp-xcm-bridge-hub", "bp-xcm-bridge-hub-router", "frame-support", "frame-system", "hash-db", "log", - "pallet-balances", "pallet-bridge-grandpa", "pallet-bridge-messages", "pallet-bridge-parachains", @@ -1467,7 +1430,6 @@ dependencies = [ "sp-trie", "staging-xcm", "staging-xcm-builder", - "static_assertions", ] [[package]] @@ -1656,15 +1618,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "ckb-merkle-mountain-range" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56ccb671c5921be8a84686e6212ca184cb1d7c51cadcdbfcbd1cc3f042f5dfb8" -dependencies = [ - "cfg-if", -] - [[package]] name = "clap" version = "2.34.0" @@ -2637,6 +2590,7 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "equivocation-detector" version = "0.1.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "async-std", "async-trait", @@ -2838,6 +2792,7 @@ dependencies = [ [[package]] name = "finality-relay" version = "0.1.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "async-std", "async-trait", @@ -2846,7 +2801,6 @@ dependencies = [ "futures", "log", "num-traits", - "parking_lot 0.12.1", "relay-utils", ] @@ -2927,7 +2881,7 @@ checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" [[package]] name = "frame-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "frame-support", "frame-support-procedural", @@ -2975,7 +2929,7 @@ dependencies = [ [[package]] name = "frame-support" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "aquamarine", "array-bytes 6.2.2", @@ -3016,7 +2970,7 @@ dependencies = [ [[package]] name = "frame-support-procedural" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "Inflector", "cfg-expr", @@ -3035,7 +2989,7 @@ dependencies = [ [[package]] name = "frame-support-procedural-tools" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "frame-support-procedural-tools-derive", "proc-macro-crate 3.1.0", @@ -3047,7 +3001,7 @@ dependencies = [ [[package]] name = "frame-support-procedural-tools-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", @@ -3057,7 +3011,7 @@ dependencies = [ [[package]] name = "frame-system" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "cfg-if", "docify", @@ -4966,6 +4920,7 @@ dependencies = [ [[package]] name = "messages-relay" version = "0.1.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "async-std", "async-trait", @@ -5529,7 +5484,7 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "pallet-authorship" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "frame-support", "frame-system", @@ -5543,7 +5498,7 @@ dependencies = [ [[package]] name = "pallet-balances" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "docify", "frame-benchmarking", @@ -5556,78 +5511,10 @@ dependencies = [ "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] -[[package]] -name = "pallet-beefy" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" -dependencies = [ - "frame-support", - "frame-system", - "log", - "pallet-authorship", - "pallet-session", - "parity-scale-codec", - "scale-info", - "serde", - "sp-consensus-beefy", - "sp-runtime", - "sp-session", - "sp-staking", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", -] - -[[package]] -name = "pallet-beefy-mmr" -version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" -dependencies 
= [ - "array-bytes 6.2.2", - "binary-merkle-tree", - "frame-support", - "frame-system", - "log", - "pallet-beefy", - "pallet-mmr", - "pallet-session", - "parity-scale-codec", - "scale-info", - "serde", - "sp-api", - "sp-consensus-beefy", - "sp-core", - "sp-io", - "sp-runtime", - "sp-state-machine", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", -] - -[[package]] -name = "pallet-bridge-beefy" -version = "0.1.0" -dependencies = [ - "bp-beefy", - "bp-runtime", - "bp-test-utils", - "ckb-merkle-mountain-range", - "frame-support", - "frame-system", - "log", - "pallet-beefy-mmr", - "pallet-mmr", - "parity-scale-codec", - "rand 0.8.5", - "scale-info", - "serde", - "sp-consensus-beefy", - "sp-core", - "sp-io", - "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", -] - [[package]] name = "pallet-bridge-grandpa" version = "0.7.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-header-chain", "bp-runtime", @@ -5640,8 +5527,6 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-consensus-grandpa", - "sp-core", - "sp-io", "sp-runtime", "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "sp-trie", @@ -5650,19 +5535,17 @@ dependencies = [ [[package]] name = "pallet-bridge-messages" version = "0.7.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-messages", "bp-runtime", - "bp-test-utils", "frame-benchmarking", "frame-support", "frame-system", "log", "num-traits", - "pallet-balances", "parity-scale-codec", "scale-info", - "sp-io", "sp-runtime", "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] @@ -5670,12 +5553,12 @@ dependencies = [ [[package]] name = "pallet-bridge-parachains" version = "0.7.0" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-header-chain", "bp-parachains", "bp-polkadot-core", "bp-runtime", - "bp-test-utils", "frame-benchmarking", "frame-support", "frame-system", @@ -5683,8 +5566,6 @@ dependencies = [ "pallet-bridge-grandpa", "parity-scale-codec", "scale-info", - "sp-core", - "sp-io", "sp-runtime", "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "sp-trie", @@ -5693,6 +5574,7 @@ dependencies = [ [[package]] name = "pallet-bridge-relayers" version = "0.7.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bp-messages", "bp-relayers", @@ -5701,12 +5583,10 @@ dependencies = [ "frame-support", "frame-system", "log", - "pallet-balances", "pallet-bridge-messages", "parity-scale-codec", "scale-info", "sp-arithmetic", - "sp-io", "sp-runtime", "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] @@ -5714,7 +5594,7 @@ dependencies = [ [[package]] name = "pallet-grandpa" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "frame-benchmarking", "frame-support", @@ -5734,28 +5614,10 @@ dependencies = [ "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] -[[package]] -name = "pallet-mmr" -version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" -dependencies = [ - "frame-benchmarking", - "frame-support", - "frame-system", - "log", - "parity-scale-codec", - "scale-info", - "sp-core", - "sp-io", - "sp-mmr-primitives", - "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", -] - 
[[package]] name = "pallet-session" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "frame-support", "frame-system", @@ -5777,7 +5639,7 @@ dependencies = [ [[package]] name = "pallet-timestamp" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "docify", "frame-benchmarking", @@ -5797,7 +5659,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "frame-support", "frame-system", @@ -5813,7 +5675,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc-runtime-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "pallet-transaction-payment", "parity-scale-codec", @@ -5825,7 +5687,7 @@ dependencies = [ [[package]] name = "pallet-utility" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "frame-benchmarking", "frame-support", @@ -5838,63 +5700,18 @@ dependencies = [ "sp-std 14.0.0 
(git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] -[[package]] -name = "pallet-xcm-bridge-hub" -version = "0.2.0" -dependencies = [ - "bp-header-chain", - "bp-messages", - "bp-runtime", - "bp-xcm-bridge-hub", - "bridge-runtime-common", - "frame-support", - "frame-system", - "log", - "pallet-balances", - "pallet-bridge-messages", - "parity-scale-codec", - "scale-info", - "sp-core", - "sp-io", - "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", - "staging-xcm", - "staging-xcm-builder", - "staging-xcm-executor", -] - -[[package]] -name = "pallet-xcm-bridge-hub-router" -version = "0.5.0" -dependencies = [ - "bp-xcm-bridge-hub-router", - "frame-benchmarking", - "frame-support", - "frame-system", - "log", - "parity-scale-codec", - "scale-info", - "sp-core", - "sp-io", - "sp-runtime", - "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", - "staging-xcm", - "staging-xcm-builder", -] - [[package]] name = "parachains-relay" version = "0.1.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "async-std", "async-trait", "bp-polkadot-core", "futures", "log", - "parity-scale-codec", "relay-substrate-client", "relay-utils", - "sp-core", ] [[package]] @@ -5905,7 +5722,7 @@ checksum = "4e69bf016dc406eff7d53a7d3f7cf1c2e72c82b9088aac1118591e36dd2cd3e9" dependencies = [ "bitcoin_hashes 0.13.0", "rand 0.8.5", - "rand_core 0.5.1", + "rand_core 0.6.4", "serde", "unicode-normalization", ] @@ -6164,7 +5981,7 @@ checksum = "db23d408679286588f4d4644f965003d056e3dd5abcaaa938116871d7ce2fee7" [[package]] name = "polkadot-core-primitives" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "parity-scale-codec", 
"scale-info", @@ -6176,7 +5993,7 @@ dependencies = [ [[package]] name = "polkadot-parachain-primitives" version = "6.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bounded-collections", "derive_more", @@ -6193,7 +6010,7 @@ dependencies = [ [[package]] name = "polkadot-primitives" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bitvec", "hex-literal", @@ -7117,6 +6934,7 @@ dependencies = [ [[package]] name = "relay-substrate-client" version = "0.1.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "async-std", "async-trait", @@ -7158,6 +6976,7 @@ dependencies = [ [[package]] name = "relay-utils" version = "0.1.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "ansi_term", "anyhow", @@ -7537,7 +7356,7 @@ dependencies = [ [[package]] name = "sc-allocator" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "log", "sp-core", @@ -7548,7 +7367,7 @@ dependencies = [ [[package]] name = "sc-chain-spec" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "array-bytes 6.2.2", 
"docify", @@ -7575,7 +7394,7 @@ dependencies = [ [[package]] name = "sc-chain-spec-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "proc-macro-crate 3.1.0", "proc-macro2 1.0.79", @@ -7586,7 +7405,7 @@ dependencies = [ [[package]] name = "sc-client-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "fnv", "futures", @@ -7613,7 +7432,7 @@ dependencies = [ [[package]] name = "sc-consensus" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "async-trait", "futures", @@ -7638,7 +7457,7 @@ dependencies = [ [[package]] name = "sc-executor" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "parity-scale-codec", "parking_lot 0.12.1", @@ -7661,7 +7480,7 @@ dependencies = [ [[package]] name = "sc-executor-common" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "polkavm", "sc-allocator", @@ -7674,7 +7493,7 @@ dependencies = [ [[package]] name = "sc-executor-polkavm" version = "0.29.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "log", "polkavm", @@ -7685,7 +7504,7 @@ dependencies = [ [[package]] name = "sc-executor-wasmtime" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "anyhow", "cfg-if", @@ -7703,7 +7522,7 @@ dependencies = [ [[package]] name = "sc-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "array-bytes 4.2.0", "arrayvec 0.7.4", @@ -7732,7 +7551,7 @@ dependencies = [ [[package]] name = "sc-network" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "array-bytes 6.2.2", "async-channel 1.9.0", @@ -7783,7 +7602,7 @@ dependencies = [ [[package]] name = "sc-network-common" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "async-trait", "bitflags 1.3.2", @@ -7801,7 +7620,7 @@ dependencies = [ [[package]] name = "sc-network-types" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bs58 0.4.0", "libp2p-identity", @@ -7815,7 +7634,7 @@ dependencies = [ [[package]] name = "sc-rpc-api" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "jsonrpsee 0.22.4", "parity-scale-codec", @@ -7835,7 +7654,7 @@ dependencies = [ [[package]] name = "sc-telemetry" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "chrono", "futures", @@ -7855,7 +7674,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "async-trait", "futures", @@ -7871,7 +7690,7 @@ dependencies = [ [[package]] name = "sc-utils" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "async-channel 1.9.0", "futures", @@ -8569,7 +8388,7 @@ dependencies = [ [[package]] name = "sp-api" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "hash-db", "log", @@ -8591,7 +8410,7 
@@ dependencies = [ [[package]] name = "sp-api-proc-macro" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "Inflector", "blake2 0.10.6", @@ -8605,7 +8424,7 @@ dependencies = [ [[package]] name = "sp-application-crypto" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "parity-scale-codec", "scale-info", @@ -8618,7 +8437,7 @@ dependencies = [ [[package]] name = "sp-arithmetic" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "docify", "integer-sqrt", @@ -8651,7 +8470,7 @@ dependencies = [ [[package]] name = "sp-authority-discovery" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "parity-scale-codec", "scale-info", @@ -8663,7 +8482,7 @@ dependencies = [ [[package]] name = "sp-blockchain" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "futures", "log", @@ -8681,7 +8500,7 @@ dependencies = [ [[package]] name = "sp-consensus" version = "0.32.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "async-trait", "futures", @@ -8693,30 +8512,10 @@ dependencies = [ "thiserror", ] -[[package]] -name = "sp-consensus-beefy" -version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" -dependencies = [ - "lazy_static", - "parity-scale-codec", - "scale-info", - "serde", - "sp-api", - "sp-application-crypto", - "sp-core", - "sp-crypto-hashing", - "sp-io", - "sp-keystore", - "sp-mmr-primitives", - "sp-runtime", - "strum", -] - [[package]] name = "sp-consensus-grandpa" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "finality-grandpa", "log", @@ -8733,7 +8532,7 @@ dependencies = [ [[package]] name = "sp-consensus-slots" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "parity-scale-codec", "scale-info", @@ -8744,7 +8543,7 @@ dependencies = [ [[package]] name = "sp-core" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "array-bytes 6.2.2", "bandersnatch_vrfs", @@ -8806,7 +8605,7 @@ dependencies = [ [[package]] name = "sp-crypto-ec-utils" version = "0.10.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "ark-bls12-377", "ark-bls12-377-ext", @@ -8826,7 +8625,7 @@ dependencies = [ [[package]] name = "sp-crypto-hashing" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "blake2b_simd", "byteorder", @@ -8839,7 +8638,7 @@ dependencies = [ [[package]] name = "sp-crypto-hashing-proc-macro" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "quote 1.0.35", "sp-crypto-hashing", @@ -8849,7 +8648,7 @@ dependencies = [ [[package]] name = "sp-database" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "kvdb", "parking_lot 0.12.1", @@ -8858,7 +8657,7 @@ dependencies = [ [[package]] name = "sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", @@ -8868,7 +8667,7 @@ dependencies = [ [[package]] name = "sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = 
"git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", @@ -8878,7 +8677,7 @@ dependencies = [ [[package]] name = "sp-externalities" version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "environmental", "parity-scale-codec", @@ -8888,7 +8687,7 @@ dependencies = [ [[package]] name = "sp-externalities" version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "environmental", "parity-scale-codec", @@ -8898,7 +8697,7 @@ dependencies = [ [[package]] name = "sp-genesis-builder" version = "0.8.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "parity-scale-codec", "scale-info", @@ -8910,7 +8709,7 @@ dependencies = [ [[package]] name = "sp-inherents" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "async-trait", "impl-trait-for-tuples", @@ -8923,7 +8722,7 @@ dependencies = [ [[package]] name = "sp-io" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bytes", "ed25519-dalek 2.1.1", @@ 
-8949,7 +8748,7 @@ dependencies = [ [[package]] name = "sp-keyring" version = "31.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "sp-core", "sp-runtime", @@ -8959,7 +8758,7 @@ dependencies = [ [[package]] name = "sp-keystore" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "parity-scale-codec", "parking_lot 0.12.1", @@ -8970,7 +8769,7 @@ dependencies = [ [[package]] name = "sp-maybe-compressed-blob" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "thiserror", "zstd 0.12.4", @@ -8979,7 +8778,7 @@ dependencies = [ [[package]] name = "sp-metadata-ir" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "frame-metadata 16.0.0", "parity-scale-codec", @@ -8989,7 +8788,7 @@ dependencies = [ [[package]] name = "sp-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "parity-scale-codec", "scale-info", @@ -8997,27 +8796,10 @@ dependencies = [ "sp-application-crypto", ] -[[package]] -name = "sp-mmr-primitives" -version = "26.0.0" 
-source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" -dependencies = [ - "ckb-merkle-mountain-range", - "log", - "parity-scale-codec", - "scale-info", - "serde", - "sp-api", - "sp-core", - "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", - "sp-runtime", - "thiserror", -] - [[package]] name = "sp-panic-handler" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "backtrace", "lazy_static", @@ -9027,7 +8809,7 @@ dependencies = [ [[package]] name = "sp-rpc" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "rustc-hash", "serde", @@ -9037,7 +8819,7 @@ dependencies = [ [[package]] name = "sp-runtime" version = "31.0.1" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "docify", "either", @@ -9061,7 +8843,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface" version = "24.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bytes", "impl-trait-for-tuples", @@ -9080,7 +8862,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface" version = "24.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" 
+source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bytes", "impl-trait-for-tuples", @@ -9099,7 +8881,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface-proc-macro" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "Inflector", "expander", @@ -9112,7 +8894,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface-proc-macro" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "Inflector", "expander", @@ -9125,7 +8907,7 @@ dependencies = [ [[package]] name = "sp-session" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "parity-scale-codec", "scale-info", @@ -9139,7 +8921,7 @@ dependencies = [ [[package]] name = "sp-staking" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", @@ -9152,7 +8934,7 @@ dependencies = [ [[package]] name = "sp-state-machine" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "hash-db", "log", @@ 
-9172,7 +8954,7 @@ dependencies = [ [[package]] name = "sp-statement-store" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "aes-gcm", "curve25519-dalek 4.1.1", @@ -9202,17 +8984,17 @@ checksum = "53458e3c57df53698b3401ec0934bea8e8cfce034816873c0b0abbd83d7bac0d" [[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" [[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" [[package]] name = "sp-storage" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "impl-serde", "parity-scale-codec", @@ -9224,7 +9006,7 @@ dependencies = [ [[package]] name = "sp-storage" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "impl-serde", "parity-scale-codec", @@ -9236,7 +9018,7 @@ dependencies = [ [[package]] name = "sp-timestamp" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" 
dependencies = [ "async-trait", "parity-scale-codec", @@ -9248,7 +9030,7 @@ dependencies = [ [[package]] name = "sp-tracing" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "parity-scale-codec", "tracing", @@ -9259,7 +9041,7 @@ dependencies = [ [[package]] name = "sp-tracing" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "parity-scale-codec", "tracing", @@ -9270,7 +9052,7 @@ dependencies = [ [[package]] name = "sp-trie" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "ahash 0.8.11", "hash-db", @@ -9293,7 +9075,7 @@ dependencies = [ [[package]] name = "sp-version" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "impl-serde", "parity-scale-codec", @@ -9310,7 +9092,7 @@ dependencies = [ [[package]] name = "sp-version-proc-macro" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "parity-scale-codec", "proc-macro2 1.0.79", @@ -9321,7 +9103,7 @@ dependencies = [ [[package]] name = "sp-wasm-interface" version = "20.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "anyhow", "impl-trait-for-tuples", @@ -9333,7 +9115,7 @@ dependencies = [ [[package]] name = "sp-wasm-interface" version = "20.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "impl-trait-for-tuples", "log", @@ -9343,7 +9125,7 @@ dependencies = [ [[package]] name = "sp-weights" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "bounded-collections", "parity-scale-codec", @@ -9400,7 +9182,7 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "staging-xcm" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "array-bytes 6.2.2", "bounded-collections", @@ -9418,7 +9200,7 @@ dependencies = [ [[package]] name = "staging-xcm-builder" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "frame-support", "frame-system", @@ -9440,7 +9222,7 @@ dependencies = [ [[package]] name = "staging-xcm-executor" version = "7.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "environmental", "frame-benchmarking", @@ -9545,7 +9327,7 @@ dependencies = [ [[package]] name = "substrate-bip39" version = "0.4.7" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "hmac 0.12.1", "pbkdf2", @@ -9557,7 +9339,7 @@ dependencies = [ [[package]] name = "substrate-prometheus-endpoint" version = "0.17.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "hyper", "log", @@ -9623,6 +9405,7 @@ dependencies = [ [[package]] name = "substrate-relay-helper" version = "0.1.0" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "anyhow", "async-std", @@ -9649,7 +9432,6 @@ dependencies = [ "pallet-bridge-messages", "pallet-bridge-parachains", "pallet-grandpa", - "pallet-transaction-payment", "parachains-relay", "parity-scale-codec", "rbtag", @@ -11416,7 +11198,7 @@ dependencies = [ [[package]] name = "xcm-procedural" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#74a42cebc1a9fd4e4a7713d5e41caba77a0fa172" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "Inflector", "proc-macro2 1.0.79", diff --git a/Cargo.toml b/Cargo.toml index d79967875..2666706d3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,36 +7,6 @@ license = "GPL-3.0-only" [workspace] resolver 
= "2" members = [ - "bin/runtime-common", - "chains/chain-asset-hub-rococo", - "chains/chain-asset-hub-westend", - "chains/chain-bridge-hub-cumulus", - "chains/chain-bridge-hub-kusama", - "chains/chain-bridge-hub-polkadot", - "chains/chain-bridge-hub-rococo", - "chains/chain-bridge-hub-westend", - "chains/chain-kusama", - "chains/chain-polkadot", - "chains/chain-polkadot-bulletin", - "chains/chain-rococo", - "chains/chain-westend", - "modules/beefy", - "modules/grandpa", - "modules/messages", - "modules/parachains", - "modules/relayers", - "modules/xcm-bridge-hub", - "modules/xcm-bridge-hub-router", - "primitives/beefy", - "primitives/header-chain", - "primitives/messages", - "primitives/parachains", - "primitives/polkadot-core", - "primitives/relayers", - "primitives/runtime", - "primitives/test-utils", - "primitives/xcm-bridge-hub", - "primitives/xcm-bridge-hub-router", "relay-clients/client-bridge-hub-kusama", "relay-clients/client-bridge-hub-polkadot", "relay-clients/client-bridge-hub-rococo", @@ -46,13 +16,6 @@ members = [ "relay-clients/client-polkadot-bulletin", "relay-clients/client-rococo", "relay-clients/client-westend", - "relays/client-substrate", - "relays/equivocation", - "relays/finality", - "relays/lib-substrate-relay", - "relays/messages", - "relays/parachains", - "relays/utils", "substrate-relay", ] diff --git a/bin/runtime-common/Cargo.toml b/bin/runtime-common/Cargo.toml deleted file mode 100644 index 6304c83b9..000000000 --- a/bin/runtime-common/Cargo.toml +++ /dev/null @@ -1,100 +0,0 @@ -[package] -name = "bridge-runtime-common" -version = "0.7.0" -description = "Common types and functions that may be used by substrate-based runtimes of all bridged chains" -authors.workspace = true -edition.workspace = true -repository.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = 
["derive"] } -hash-db = { version = "0.16.0", default-features = false } -log = { workspace = true } -scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -static_assertions = { version = "1.1", optional = true } - -# Bridge dependencies - -bp-header-chain = { path = "../../primitives/header-chain", default-features = false } -bp-messages = { path = "../../primitives/messages", default-features = false } -bp-parachains = { path = "../../primitives/parachains", default-features = false } -bp-polkadot-core = { path = "../../primitives/polkadot-core", default-features = false } -bp-relayers = { path = "../../primitives/relayers", default-features = false } -bp-runtime = { path = "../../primitives/runtime", default-features = false } -bp-xcm-bridge-hub = { path = "../../primitives/xcm-bridge-hub", default-features = false } -bp-xcm-bridge-hub-router = { path = "../../primitives/xcm-bridge-hub-router", default-features = false } -pallet-bridge-grandpa = { path = "../../modules/grandpa", default-features = false } -pallet-bridge-messages = { path = "../../modules/messages", default-features = false } -pallet-bridge-parachains = { path = "../../modules/parachains", default-features = false } -pallet-bridge-relayers = { path = "../../modules/relayers", default-features = false } - -# Substrate dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -frame-system = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -pallet-transaction-payment = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -pallet-utility = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-core = { git = 
"https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-io = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-trie = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -# Polkadot dependencies -xcm = { package = "staging-xcm", git = "https://github.com/paritytech/polkadot-sdk", default-features = false , branch = "master" } -xcm-builder = { package = "staging-xcm-builder", git = "https://github.com/paritytech/polkadot-sdk", default-features = false , branch = "master" } - -[dev-dependencies] -bp-test-utils = { path = "../../primitives/test-utils" } -pallet-balances = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } - -[features] -default = ["std"] -std = [ - "bp-header-chain/std", - "bp-messages/std", - "bp-parachains/std", - "bp-polkadot-core/std", - "bp-relayers/std", - "bp-runtime/std", - "bp-xcm-bridge-hub-router/std", - "bp-xcm-bridge-hub/std", - "codec/std", - "frame-support/std", - "frame-system/std", - "hash-db/std", - "log/std", - "pallet-bridge-grandpa/std", - "pallet-bridge-messages/std", - "pallet-bridge-parachains/std", - "pallet-bridge-relayers/std", - "pallet-transaction-payment/std", - "pallet-utility/std", - "scale-info/std", - "sp-api/std", - "sp-core/std", - "sp-io/std", - "sp-runtime/std", - "sp-std/std", - "sp-trie/std", - "xcm-builder/std", - "xcm/std", -] -runtime-benchmarks = [ - "frame-support/runtime-benchmarks", - "frame-system/runtime-benchmarks", - "pallet-balances/runtime-benchmarks", - "pallet-bridge-grandpa/runtime-benchmarks", - "pallet-bridge-messages/runtime-benchmarks", - "pallet-bridge-parachains/runtime-benchmarks", - 
"pallet-bridge-relayers/runtime-benchmarks", - "pallet-utility/runtime-benchmarks", - "sp-runtime/runtime-benchmarks", - "xcm-builder/runtime-benchmarks", -] -integrity-test = ["static_assertions"] diff --git a/bin/runtime-common/src/integrity.rs b/bin/runtime-common/src/integrity.rs deleted file mode 100644 index d3827a14d..000000000 --- a/bin/runtime-common/src/integrity.rs +++ /dev/null @@ -1,348 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Integrity tests for chain constants and pallets configuration. -//! -//! Most of the tests in this module assume that the bridge is using standard (see `crate::messages` -//! module for details) configuration. - -use crate::{messages, messages::MessageBridge}; - -use bp_messages::{InboundLaneData, MessageNonce}; -use bp_runtime::{Chain, ChainId}; -use codec::Encode; -use frame_support::{storage::generator::StorageValue, traits::Get, weights::Weight}; -use frame_system::limits; -use pallet_bridge_messages::WeightInfoExt as _; - -/// Macro that ensures that the runtime configuration and chain primitives crate are sharing -/// the same types (nonce, block number, hash, hasher, account id and header). -#[macro_export] -macro_rules! 
assert_chain_types( - ( runtime: $r:path, this_chain: $this:path ) => { - { - // if one of asserts fail, then either bridge isn't configured properly (or alternatively - non-standard - // configuration is used), or something has broke existing configuration (meaning that all bridged chains - // and relays will stop functioning) - use frame_system::{Config as SystemConfig, pallet_prelude::{BlockNumberFor, HeaderFor}}; - use static_assertions::assert_type_eq_all; - - assert_type_eq_all!(<$r as SystemConfig>::Nonce, bp_runtime::NonceOf<$this>); - assert_type_eq_all!(BlockNumberFor<$r>, bp_runtime::BlockNumberOf<$this>); - assert_type_eq_all!(<$r as SystemConfig>::Hash, bp_runtime::HashOf<$this>); - assert_type_eq_all!(<$r as SystemConfig>::Hashing, bp_runtime::HasherOf<$this>); - assert_type_eq_all!(<$r as SystemConfig>::AccountId, bp_runtime::AccountIdOf<$this>); - assert_type_eq_all!(HeaderFor<$r>, bp_runtime::HeaderOf<$this>); - } - } -); - -/// Macro that ensures that the bridge GRANDPA pallet is configured properly to bridge with given -/// chain. -#[macro_export] -macro_rules! assert_bridge_grandpa_pallet_types( - ( runtime: $r:path, with_bridged_chain_grandpa_instance: $i:path, bridged_chain: $bridged:path ) => { - { - // if one of asserts fail, then either bridge isn't configured properly (or alternatively - non-standard - // configuration is used), or something has broke existing configuration (meaning that all bridged chains - // and relays will stop functioning) - use pallet_bridge_grandpa::Config as GrandpaConfig; - use static_assertions::assert_type_eq_all; - - assert_type_eq_all!(<$r as GrandpaConfig<$i>>::BridgedChain, $bridged); - } - } -); - -/// Macro that ensures that the bridge messages pallet is configured properly to bridge using given -/// configuration. -#[macro_export] -macro_rules! 
assert_bridge_messages_pallet_types( - ( - runtime: $r:path, - with_bridged_chain_messages_instance: $i:path, - bridge: $bridge:path - ) => { - { - // if one of asserts fail, then either bridge isn't configured properly (or alternatively - non-standard - // configuration is used), or something has broke existing configuration (meaning that all bridged chains - // and relays will stop functioning) - use $crate::messages::{ - source::{FromThisChainMessagePayload, TargetHeaderChainAdapter}, - target::{FromBridgedChainMessagePayload, SourceHeaderChainAdapter}, - AccountIdOf, BalanceOf, BridgedChain, ThisChain, - }; - use pallet_bridge_messages::Config as MessagesConfig; - use static_assertions::assert_type_eq_all; - - assert_type_eq_all!(<$r as MessagesConfig<$i>>::OutboundPayload, FromThisChainMessagePayload); - - assert_type_eq_all!(<$r as MessagesConfig<$i>>::InboundRelayer, AccountIdOf>); - - assert_type_eq_all!(<$r as MessagesConfig<$i>>::TargetHeaderChain, TargetHeaderChainAdapter<$bridge>); - assert_type_eq_all!(<$r as MessagesConfig<$i>>::SourceHeaderChain, SourceHeaderChainAdapter<$bridge>); - } - } -); - -/// Macro that combines four other macro calls - `assert_chain_types`, `assert_bridge_types`, -/// `assert_bridge_grandpa_pallet_types` and `assert_bridge_messages_pallet_types`. It may be used -/// at the chain that is implementing complete standard messages bridge (i.e. with bridge GRANDPA -/// and messages pallets deployed). -#[macro_export] -macro_rules! 
assert_complete_bridge_types( - ( - runtime: $r:path, - with_bridged_chain_grandpa_instance: $gi:path, - with_bridged_chain_messages_instance: $mi:path, - bridge: $bridge:path, - this_chain: $this:path, - bridged_chain: $bridged:path, - ) => { - $crate::assert_chain_types!(runtime: $r, this_chain: $this); - $crate::assert_bridge_grandpa_pallet_types!( - runtime: $r, - with_bridged_chain_grandpa_instance: $gi, - bridged_chain: $bridged - ); - $crate::assert_bridge_messages_pallet_types!( - runtime: $r, - with_bridged_chain_messages_instance: $mi, - bridge: $bridge - ); - } -); - -/// Parameters for asserting chain-related constants. -#[derive(Debug)] -pub struct AssertChainConstants { - /// Block length limits of the chain. - pub block_length: limits::BlockLength, - /// Block weight limits of the chain. - pub block_weights: limits::BlockWeights, -} - -/// Test that our hardcoded, chain-related constants, are matching chain runtime configuration. -/// -/// In particular, this test ensures that: -/// -/// 1) block weight limits are matching; -/// 2) block size limits are matching. -pub fn assert_chain_constants(params: AssertChainConstants) -where - R: frame_system::Config, -{ - // we don't check runtime version here, because in our case we'll be building relay from one - // repo and runtime will live in another repo, along with outdated relay version. To avoid - // unneeded commits, let's not raise an error in case of version mismatch. - - // if one of following assert fails, it means that we may need to upgrade bridged chain and - // relay to use updated constants. If constants are now smaller than before, it may lead to - // undeliverable messages. - - // `BlockLength` struct is not implementing `PartialEq`, so we compare encoded values here. 
- assert_eq!( - R::BlockLength::get().encode(), - params.block_length.encode(), - "BlockLength from runtime ({:?}) differ from hardcoded: {:?}", - R::BlockLength::get(), - params.block_length, - ); - // `BlockWeights` struct is not implementing `PartialEq`, so we compare encoded values here - assert_eq!( - R::BlockWeights::get().encode(), - params.block_weights.encode(), - "BlockWeights from runtime ({:?}) differ from hardcoded: {:?}", - R::BlockWeights::get(), - params.block_weights, - ); -} - -/// Test that the constants, used in GRANDPA pallet configuration are valid. -pub fn assert_bridge_grandpa_pallet_constants() -where - R: pallet_bridge_grandpa::Config, - GI: 'static, -{ - assert!( - R::HeadersToKeep::get() > 0, - "HeadersToKeep ({}) must be larger than zero", - R::HeadersToKeep::get(), - ); -} - -/// Parameters for asserting messages pallet constants. -#[derive(Debug)] -pub struct AssertBridgeMessagesPalletConstants { - /// Maximal number of unrewarded relayer entries in a confirmation transaction at the bridged - /// chain. - pub max_unrewarded_relayers_in_bridged_confirmation_tx: MessageNonce, - /// Maximal number of unconfirmed messages in a confirmation transaction at the bridged chain. - pub max_unconfirmed_messages_in_bridged_confirmation_tx: MessageNonce, - /// Identifier of the bridged chain. - pub bridged_chain_id: ChainId, -} - -/// Test that the constants, used in messages pallet configuration are valid. 
-pub fn assert_bridge_messages_pallet_constants(params: AssertBridgeMessagesPalletConstants) -where - R: pallet_bridge_messages::Config, - MI: 'static, -{ - assert!( - !R::ActiveOutboundLanes::get().is_empty(), - "ActiveOutboundLanes ({:?}) must not be empty", - R::ActiveOutboundLanes::get(), - ); - assert!( - R::MaxUnrewardedRelayerEntriesAtInboundLane::get() <= params.max_unrewarded_relayers_in_bridged_confirmation_tx, - "MaxUnrewardedRelayerEntriesAtInboundLane ({}) must be <= than the hardcoded value for bridged chain: {}", - R::MaxUnrewardedRelayerEntriesAtInboundLane::get(), - params.max_unrewarded_relayers_in_bridged_confirmation_tx, - ); - assert!( - R::MaxUnconfirmedMessagesAtInboundLane::get() <= params.max_unconfirmed_messages_in_bridged_confirmation_tx, - "MaxUnrewardedRelayerEntriesAtInboundLane ({}) must be <= than the hardcoded value for bridged chain: {}", - R::MaxUnconfirmedMessagesAtInboundLane::get(), - params.max_unconfirmed_messages_in_bridged_confirmation_tx, - ); - assert_eq!(R::BridgedChainId::get(), params.bridged_chain_id); -} - -/// Parameters for asserting bridge pallet names. -#[derive(Debug)] -pub struct AssertBridgePalletNames<'a> { - /// Name of the messages pallet, deployed at the bridged chain and used to bridge with this - /// chain. - pub with_this_chain_messages_pallet_name: &'a str, - /// Name of the GRANDPA pallet, deployed at this chain and used to bridge with the bridged - /// chain. - pub with_bridged_chain_grandpa_pallet_name: &'a str, - /// Name of the messages pallet, deployed at this chain and used to bridge with the bridged - /// chain. - pub with_bridged_chain_messages_pallet_name: &'a str, -} - -/// Tests that bridge pallet names used in `construct_runtime!()` macro call are matching constants -/// from chain primitives crates. 
-pub fn assert_bridge_pallet_names(params: AssertBridgePalletNames) -where - B: MessageBridge, - R: pallet_bridge_grandpa::Config + pallet_bridge_messages::Config, - GI: 'static, - MI: 'static, -{ - assert_eq!(B::BRIDGED_MESSAGES_PALLET_NAME, params.with_this_chain_messages_pallet_name); - assert_eq!( - pallet_bridge_grandpa::PalletOwner::::storage_value_final_key().to_vec(), - bp_runtime::storage_value_key(params.with_bridged_chain_grandpa_pallet_name, "PalletOwner",).0, - ); - assert_eq!( - pallet_bridge_messages::PalletOwner::::storage_value_final_key().to_vec(), - bp_runtime::storage_value_key( - params.with_bridged_chain_messages_pallet_name, - "PalletOwner", - ) - .0, - ); -} - -/// Parameters for asserting complete standard messages bridge. -#[derive(Debug)] -pub struct AssertCompleteBridgeConstants<'a> { - /// Parameters to assert this chain constants. - pub this_chain_constants: AssertChainConstants, - /// Parameters to assert messages pallet constants. - pub messages_pallet_constants: AssertBridgeMessagesPalletConstants, - /// Parameters to assert pallet names constants. - pub pallet_names: AssertBridgePalletNames<'a>, -} - -/// All bridge-related constants tests for the complete standard messages bridge (i.e. with bridge -/// GRANDPA and messages pallets deployed). -pub fn assert_complete_bridge_constants(params: AssertCompleteBridgeConstants) -where - R: frame_system::Config - + pallet_bridge_grandpa::Config - + pallet_bridge_messages::Config, - GI: 'static, - MI: 'static, - B: MessageBridge, -{ - assert_chain_constants::(params.this_chain_constants); - assert_bridge_grandpa_pallet_constants::(); - assert_bridge_messages_pallet_constants::(params.messages_pallet_constants); - assert_bridge_pallet_names::(params.pallet_names); -} - -/// Check that the message lane weights are correct. 
-pub fn check_message_lane_weights< - C: Chain, - T: frame_system::Config + pallet_bridge_messages::Config, - MessagesPalletInstance: 'static, ->( - bridged_chain_extra_storage_proof_size: u32, - this_chain_max_unrewarded_relayers: MessageNonce, - this_chain_max_unconfirmed_messages: MessageNonce, - // whether `RefundBridgedParachainMessages` extension is deployed at runtime and is used for - // refunding this bridge transactions? - // - // in other words: pass true for all known production chains - runtime_includes_refund_extension: bool, -) { - type Weights = >::WeightInfo; - - // check basic weight assumptions - pallet_bridge_messages::ensure_weights_are_correct::>(); - - // check that weights allow us to receive messages - let max_incoming_message_proof_size = bridged_chain_extra_storage_proof_size - .saturating_add(messages::target::maximal_incoming_message_size(C::max_extrinsic_size())); - pallet_bridge_messages::ensure_able_to_receive_message::>( - C::max_extrinsic_size(), - C::max_extrinsic_weight(), - max_incoming_message_proof_size, - messages::target::maximal_incoming_message_dispatch_weight(C::max_extrinsic_weight()), - ); - - // check that weights allow us to receive delivery confirmations - let max_incoming_inbound_lane_data_proof_size = - InboundLaneData::<()>::encoded_size_hint_u32(this_chain_max_unrewarded_relayers as _); - pallet_bridge_messages::ensure_able_to_receive_confirmation::>( - C::max_extrinsic_size(), - C::max_extrinsic_weight(), - max_incoming_inbound_lane_data_proof_size, - this_chain_max_unrewarded_relayers, - this_chain_max_unconfirmed_messages, - ); - - // check that extra weights of delivery/confirmation transactions include the weight - // of `RefundBridgedParachainMessages` operations. This signed extension assumes the worst case - // (i.e. slashing if delivery transaction was invalid) and refunds some weight if - // assumption was wrong (i.e. if we did refund instead of slashing). 
This check - // ensures the extension will not refund weight when it doesn't need to (i.e. if pallet - // weights do not account weights of refund extension). - if runtime_includes_refund_extension { - assert_ne!( - Weights::::receive_messages_proof_overhead_from_runtime(), - Weight::zero() - ); - assert_ne!( - Weights::::receive_messages_delivery_proof_overhead_from_runtime(), - Weight::zero() - ); - } -} diff --git a/bin/runtime-common/src/lib.rs b/bin/runtime-common/src/lib.rs deleted file mode 100644 index 2722f6f1c..000000000 --- a/bin/runtime-common/src/lib.rs +++ /dev/null @@ -1,223 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Common types/functions that may be used by runtimes of all bridged chains. 
- -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -use crate::messages_call_ext::MessagesCallSubType; -use pallet_bridge_grandpa::CallSubType as GrandpaCallSubType; -use pallet_bridge_parachains::CallSubType as ParachainsCallSubtype; -use sp_runtime::transaction_validity::TransactionValidity; - -pub mod messages; -pub mod messages_api; -pub mod messages_benchmarking; -pub mod messages_call_ext; -pub mod messages_generation; -pub mod messages_xcm_extension; -pub mod parachains_benchmarking; -pub mod priority_calculator; -pub mod refund_relayer_extension; - -mod mock; - -#[cfg(feature = "integrity-test")] -pub mod integrity; - -const LOG_TARGET_BRIDGE_DISPATCH: &str = "runtime::bridge-dispatch"; - -/// A duplication of the `FilterCall` trait. -/// -/// We need this trait in order to be able to implement it for the messages pallet, -/// since the implementation is done outside of the pallet crate. -pub trait BridgeRuntimeFilterCall { - /// Checks if a runtime call is valid. - fn validate(call: &Call) -> TransactionValidity; -} - -impl BridgeRuntimeFilterCall for pallet_bridge_grandpa::Pallet -where - T: pallet_bridge_grandpa::Config, - T::RuntimeCall: GrandpaCallSubType, -{ - fn validate(call: &T::RuntimeCall) -> TransactionValidity { - GrandpaCallSubType::::check_obsolete_submit_finality_proof(call) - } -} - -impl BridgeRuntimeFilterCall - for pallet_bridge_parachains::Pallet -where - T: pallet_bridge_parachains::Config, - T::RuntimeCall: ParachainsCallSubtype, -{ - fn validate(call: &T::RuntimeCall) -> TransactionValidity { - ParachainsCallSubtype::::check_obsolete_submit_parachain_heads(call) - } -} - -impl, I: 'static> BridgeRuntimeFilterCall - for pallet_bridge_messages::Pallet -where - T::RuntimeCall: MessagesCallSubType, -{ - /// Validate messages in order to avoid "mining" messages delivery and delivery confirmation - /// transactions, that are delivering outdated messages/confirmations. 
Without this validation, - /// even honest relayers may lose their funds if there are multiple relays running and - /// submitting the same messages/confirmations. - fn validate(call: &T::RuntimeCall) -> TransactionValidity { - call.check_obsolete_call() - } -} - -/// Declares a runtime-specific `BridgeRejectObsoleteHeadersAndMessages` signed extension. -/// -/// ## Example -/// -/// ```nocompile -/// generate_bridge_reject_obsolete_headers_and_messages!{ -/// Call, AccountId -/// BridgeRococoGrandpa, BridgeRococoMessages, -/// BridgeRococoParachains -/// } -/// ``` -/// -/// The goal of this extension is to avoid "mining" transactions that provide outdated bridged -/// headers and messages. Without that extension, even honest relayers may lose their funds if -/// there are multiple relays running and submitting the same information. -#[macro_export] -macro_rules! generate_bridge_reject_obsolete_headers_and_messages { - ($call:ty, $account_id:ty, $($filter_call:ty),*) => { - #[derive(Clone, codec::Decode, Default, codec::Encode, Eq, PartialEq, sp_runtime::RuntimeDebug, scale_info::TypeInfo)] - pub struct BridgeRejectObsoleteHeadersAndMessages; - impl sp_runtime::traits::SignedExtension for BridgeRejectObsoleteHeadersAndMessages { - const IDENTIFIER: &'static str = "BridgeRejectObsoleteHeadersAndMessages"; - type AccountId = $account_id; - type Call = $call; - type AdditionalSigned = (); - type Pre = (); - - fn additional_signed(&self) -> sp_std::result::Result< - (), - sp_runtime::transaction_validity::TransactionValidityError, - > { - Ok(()) - } - - fn validate( - &self, - _who: &Self::AccountId, - call: &Self::Call, - _info: &sp_runtime::traits::DispatchInfoOf, - _len: usize, - ) -> sp_runtime::transaction_validity::TransactionValidity { - let valid = sp_runtime::transaction_validity::ValidTransaction::default(); - $( - let valid = valid - .combine_with(<$filter_call as $crate::BridgeRuntimeFilterCall<$call>>::validate(call)?); - )* - Ok(valid) - } - - fn 
pre_dispatch( - self, - who: &Self::AccountId, - call: &Self::Call, - info: &sp_runtime::traits::DispatchInfoOf, - len: usize, - ) -> Result { - self.validate(who, call, info, len).map(drop) - } - } - }; -} - -#[cfg(test)] -mod tests { - use crate::BridgeRuntimeFilterCall; - use frame_support::{assert_err, assert_ok}; - use sp_runtime::{ - traits::SignedExtension, - transaction_validity::{InvalidTransaction, TransactionValidity, ValidTransaction}, - }; - - pub struct MockCall { - data: u32, - } - - impl sp_runtime::traits::Dispatchable for MockCall { - type RuntimeOrigin = (); - type Config = (); - type Info = (); - type PostInfo = (); - - fn dispatch( - self, - _origin: Self::RuntimeOrigin, - ) -> sp_runtime::DispatchResultWithInfo { - unimplemented!() - } - } - - struct FirstFilterCall; - impl BridgeRuntimeFilterCall for FirstFilterCall { - fn validate(call: &MockCall) -> TransactionValidity { - if call.data <= 1 { - return InvalidTransaction::Custom(1).into() - } - - Ok(ValidTransaction { priority: 1, ..Default::default() }) - } - } - - struct SecondFilterCall; - impl BridgeRuntimeFilterCall for SecondFilterCall { - fn validate(call: &MockCall) -> TransactionValidity { - if call.data <= 2 { - return InvalidTransaction::Custom(2).into() - } - - Ok(ValidTransaction { priority: 2, ..Default::default() }) - } - } - - #[test] - fn test() { - generate_bridge_reject_obsolete_headers_and_messages!( - MockCall, - (), - FirstFilterCall, - SecondFilterCall - ); - - assert_err!( - BridgeRejectObsoleteHeadersAndMessages.validate(&(), &MockCall { data: 1 }, &(), 0), - InvalidTransaction::Custom(1) - ); - - assert_err!( - BridgeRejectObsoleteHeadersAndMessages.validate(&(), &MockCall { data: 2 }, &(), 0), - InvalidTransaction::Custom(2) - ); - - assert_ok!( - BridgeRejectObsoleteHeadersAndMessages.validate(&(), &MockCall { data: 3 }, &(), 0), - ValidTransaction { priority: 3, ..Default::default() } - ) - } -} diff --git a/bin/runtime-common/src/messages.rs 
b/bin/runtime-common/src/messages.rs deleted file mode 100644 index 4aca53f3b..000000000 --- a/bin/runtime-common/src/messages.rs +++ /dev/null @@ -1,701 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Types that allow runtime to act as a source/target endpoint of message lanes. -//! -//! Messages are assumed to be encoded `Call`s of the target chain. Call-dispatch -//! pallet is used to dispatch incoming messages. Message identified by a tuple -//! of to elements - message lane id and message nonce. - -pub use bp_runtime::{RangeInclusiveExt, UnderlyingChainOf, UnderlyingChainProvider}; - -use bp_header_chain::HeaderChain; -use bp_messages::{ - source_chain::TargetHeaderChain, - target_chain::{ProvedLaneMessages, ProvedMessages, SourceHeaderChain}, - InboundLaneData, LaneId, Message, MessageKey, MessageNonce, MessagePayload, OutboundLaneData, - VerificationError, -}; -use bp_runtime::{Chain, RawStorageProof, Size, StorageProofChecker}; -use codec::{Decode, Encode}; -use frame_support::{traits::Get, weights::Weight}; -use hash_db::Hasher; -use scale_info::TypeInfo; -use sp_runtime::RuntimeDebug; -use sp_std::{convert::TryFrom, marker::PhantomData, vec::Vec}; - -/// Bidirectional message bridge. 
-pub trait MessageBridge { - /// Name of the paired messages pallet instance at the Bridged chain. - /// - /// Should be the name that is used in the `construct_runtime!()` macro. - const BRIDGED_MESSAGES_PALLET_NAME: &'static str; - - /// This chain in context of message bridge. - type ThisChain: ThisChainWithMessages; - /// Bridged chain in context of message bridge. - type BridgedChain: BridgedChainWithMessages; - /// Bridged header chain. - type BridgedHeaderChain: HeaderChain>; -} - -/// This chain that has `pallet-bridge-messages` module. -pub trait ThisChainWithMessages: UnderlyingChainProvider { - /// Call origin on the chain. - type RuntimeOrigin; -} - -/// Bridged chain that has `pallet-bridge-messages` module. -pub trait BridgedChainWithMessages: UnderlyingChainProvider {} - -/// This chain in context of message bridge. -pub type ThisChain = ::ThisChain; -/// Bridged chain in context of message bridge. -pub type BridgedChain = ::BridgedChain; -/// Hash used on the chain. -pub type HashOf = bp_runtime::HashOf<::Chain>; -/// Hasher used on the chain. -pub type HasherOf = bp_runtime::HasherOf>; -/// Account id used on the chain. -pub type AccountIdOf = bp_runtime::AccountIdOf>; -/// Type of balances that is used on the chain. -pub type BalanceOf = bp_runtime::BalanceOf>; - -/// Sub-module that is declaring types required for processing This -> Bridged chain messages. -pub mod source { - use super::*; - - /// Message payload for This -> Bridged chain messages. - pub type FromThisChainMessagePayload = crate::messages_xcm_extension::XcmAsPlainPayload; - - /// Maximal size of outbound message payload. - pub struct FromThisChainMaximalOutboundPayloadSize(PhantomData); - - impl Get for FromThisChainMaximalOutboundPayloadSize { - fn get() -> u32 { - maximal_message_size::() - } - } - - /// Messages delivery proof from bridged chain: - /// - /// - hash of finalized header; - /// - storage proof of inbound lane state; - /// - lane id. 
- #[derive(Clone, Decode, Encode, Eq, PartialEq, RuntimeDebug, TypeInfo)] - pub struct FromBridgedChainMessagesDeliveryProof { - /// Hash of the bridge header the proof is for. - pub bridged_header_hash: BridgedHeaderHash, - /// Storage trie proof generated for [`Self::bridged_header_hash`]. - pub storage_proof: RawStorageProof, - /// Lane id of which messages were delivered and the proof is for. - pub lane: LaneId, - } - - impl Size for FromBridgedChainMessagesDeliveryProof { - fn size(&self) -> u32 { - u32::try_from( - self.storage_proof - .iter() - .fold(0usize, |sum, node| sum.saturating_add(node.len())), - ) - .unwrap_or(u32::MAX) - } - } - - /// 'Parsed' message delivery proof - inbound lane id and its state. - pub type ParsedMessagesDeliveryProofFromBridgedChain = - (LaneId, InboundLaneData>>); - - /// Return maximal message size of This -> Bridged chain message. - pub fn maximal_message_size() -> u32 { - super::target::maximal_incoming_message_size( - UnderlyingChainOf::>::max_extrinsic_size(), - ) - } - - /// `TargetHeaderChain` implementation that is using default types and perform default checks. - pub struct TargetHeaderChainAdapter(PhantomData); - - impl TargetHeaderChain>> - for TargetHeaderChainAdapter - { - type MessagesDeliveryProof = FromBridgedChainMessagesDeliveryProof>>; - - fn verify_message(payload: &FromThisChainMessagePayload) -> Result<(), VerificationError> { - verify_chain_message::(payload) - } - - fn verify_messages_delivery_proof( - proof: Self::MessagesDeliveryProof, - ) -> Result<(LaneId, InboundLaneData>>), VerificationError> { - verify_messages_delivery_proof::(proof) - } - } - - /// Do basic Bridged-chain specific verification of This -> Bridged chain message. - /// - /// Ok result from this function means that the delivery transaction with this message - /// may be 'mined' by the target chain. 
- pub fn verify_chain_message( - payload: &FromThisChainMessagePayload, - ) -> Result<(), VerificationError> { - // IMPORTANT: any error that is returned here is fatal for the bridge, because - // this code is executed at the bridge hub and message sender actually lives - // at some sibling parachain. So we are failing **after** the message has been - // sent and we can't report it back to sender (unless error report mechanism is - // embedded into message and its dispatcher). - - // apart from maximal message size check (see below), we should also check the message - // dispatch weight here. But we assume that the bridged chain will just push the message - // to some queue (XCMP, UMP, DMP), so the weight is constant and fits the block. - - // The maximal size of extrinsic at Substrate-based chain depends on the - // `frame_system::Config::MaximumBlockLength` and - // `frame_system::Config::AvailableBlockRatio` constants. This check is here to be sure that - // the lane won't stuck because message is too large to fit into delivery transaction. - // - // **IMPORTANT NOTE**: the delivery transaction contains storage proof of the message, not - // the message itself. The proof is always larger than the message. But unless chain state - // is enormously large, it should be several dozens/hundreds of bytes. The delivery - // transaction also contains signatures and signed extensions. Because of this, we reserve - // 1/3 of the the maximal extrinsic size for this data. - if payload.len() > maximal_message_size::() as usize { - return Err(VerificationError::MessageTooLarge) - } - - Ok(()) - } - - /// Verify proof of This -> Bridged chain messages delivery. - /// - /// This function is used when Bridged chain is directly using GRANDPA finality. For Bridged - /// parachains, please use the `verify_messages_delivery_proof_from_parachain`. 
- pub fn verify_messages_delivery_proof( - proof: FromBridgedChainMessagesDeliveryProof>>, - ) -> Result, VerificationError> { - let FromBridgedChainMessagesDeliveryProof { bridged_header_hash, storage_proof, lane } = - proof; - let mut storage = - B::BridgedHeaderChain::storage_proof_checker(bridged_header_hash, storage_proof) - .map_err(VerificationError::HeaderChain)?; - // Messages delivery proof is just proof of single storage key read => any error - // is fatal. - let storage_inbound_lane_data_key = bp_messages::storage_keys::inbound_lane_data_key( - B::BRIDGED_MESSAGES_PALLET_NAME, - &lane, - ); - let inbound_lane_data = storage - .read_and_decode_mandatory_value(storage_inbound_lane_data_key.0.as_ref()) - .map_err(VerificationError::InboundLaneStorage)?; - - // check that the storage proof doesn't have any untouched trie nodes - storage.ensure_no_unused_nodes().map_err(VerificationError::StorageProof)?; - - Ok((lane, inbound_lane_data)) - } -} - -/// Sub-module that is declaring types required for processing Bridged -> This chain messages. -pub mod target { - use super::*; - - /// Decoded Bridged -> This message payload. - pub type FromBridgedChainMessagePayload = crate::messages_xcm_extension::XcmAsPlainPayload; - - /// Messages proof from bridged chain: - /// - /// - hash of finalized header; - /// - storage proof of messages and (optionally) outbound lane state; - /// - lane id; - /// - nonces (inclusive range) of messages which are included in this proof. - #[derive(Clone, Decode, Encode, Eq, PartialEq, RuntimeDebug, TypeInfo)] - pub struct FromBridgedChainMessagesProof { - /// Hash of the finalized bridged header the proof is for. - pub bridged_header_hash: BridgedHeaderHash, - /// A storage trie proof of messages being delivered. - pub storage_proof: RawStorageProof, - /// Messages in this proof are sent over this lane. - pub lane: LaneId, - /// Nonce of the first message being delivered. 
- pub nonces_start: MessageNonce, - /// Nonce of the last message being delivered. - pub nonces_end: MessageNonce, - } - - impl Size for FromBridgedChainMessagesProof { - fn size(&self) -> u32 { - u32::try_from( - self.storage_proof - .iter() - .fold(0usize, |sum, node| sum.saturating_add(node.len())), - ) - .unwrap_or(u32::MAX) - } - } - - /// Return maximal dispatch weight of the message we're able to receive. - pub fn maximal_incoming_message_dispatch_weight(maximal_extrinsic_weight: Weight) -> Weight { - maximal_extrinsic_weight / 2 - } - - /// Return maximal message size given maximal extrinsic size. - pub fn maximal_incoming_message_size(maximal_extrinsic_size: u32) -> u32 { - maximal_extrinsic_size / 3 * 2 - } - - /// `SourceHeaderChain` implementation that is using default types and perform default checks. - pub struct SourceHeaderChainAdapter(PhantomData); - - impl SourceHeaderChain for SourceHeaderChainAdapter { - type MessagesProof = FromBridgedChainMessagesProof>>; - - fn verify_messages_proof( - proof: Self::MessagesProof, - messages_count: u32, - ) -> Result, VerificationError> { - verify_messages_proof::(proof, messages_count) - } - } - - /// Verify proof of Bridged -> This chain messages. - /// - /// This function is used when Bridged chain is directly using GRANDPA finality. For Bridged - /// parachains, please use the `verify_messages_proof_from_parachain`. - /// - /// The `messages_count` argument verification (sane limits) is supposed to be made - /// outside of this function. This function only verifies that the proof declares exactly - /// `messages_count` messages. 
- pub fn verify_messages_proof( - proof: FromBridgedChainMessagesProof>>, - messages_count: u32, - ) -> Result, VerificationError> { - let FromBridgedChainMessagesProof { - bridged_header_hash, - storage_proof, - lane, - nonces_start, - nonces_end, - } = proof; - let storage = - B::BridgedHeaderChain::storage_proof_checker(bridged_header_hash, storage_proof) - .map_err(VerificationError::HeaderChain)?; - let mut parser = StorageProofCheckerAdapter::<_, B> { storage, _dummy: Default::default() }; - let nonces_range = nonces_start..=nonces_end; - - // receiving proofs where end < begin is ok (if proof includes outbound lane state) - let messages_in_the_proof = nonces_range.checked_len().unwrap_or(0); - if messages_in_the_proof != MessageNonce::from(messages_count) { - return Err(VerificationError::MessagesCountMismatch) - } - - // Read messages first. All messages that are claimed to be in the proof must - // be in the proof. So any error in `read_value`, or even missing value is fatal. - // - // Mind that we allow proofs with no messages if outbound lane state is proved. - let mut messages = Vec::with_capacity(messages_in_the_proof as _); - for nonce in nonces_range { - let message_key = MessageKey { lane_id: lane, nonce }; - let message_payload = parser.read_and_decode_message_payload(&message_key)?; - messages.push(Message { key: message_key, payload: message_payload }); - } - - // Now let's check if proof contains outbound lane state proof. It is optional, so - // we simply ignore `read_value` errors and missing value. - let proved_lane_messages = ProvedLaneMessages { - lane_state: parser.read_and_decode_outbound_lane_data(&lane)?, - messages, - }; - - // Now we may actually check if the proof is empty or not. 
- if proved_lane_messages.lane_state.is_none() && proved_lane_messages.messages.is_empty() { - return Err(VerificationError::EmptyMessageProof) - } - - // check that the storage proof doesn't have any untouched trie nodes - parser - .storage - .ensure_no_unused_nodes() - .map_err(VerificationError::StorageProof)?; - - // We only support single lane messages in this generated_schema - let mut proved_messages = ProvedMessages::new(); - proved_messages.insert(lane, proved_lane_messages); - - Ok(proved_messages) - } - - struct StorageProofCheckerAdapter { - storage: StorageProofChecker, - _dummy: sp_std::marker::PhantomData, - } - - impl StorageProofCheckerAdapter { - fn read_and_decode_outbound_lane_data( - &mut self, - lane_id: &LaneId, - ) -> Result, VerificationError> { - let storage_outbound_lane_data_key = bp_messages::storage_keys::outbound_lane_data_key( - B::BRIDGED_MESSAGES_PALLET_NAME, - lane_id, - ); - - self.storage - .read_and_decode_opt_value(storage_outbound_lane_data_key.0.as_ref()) - .map_err(VerificationError::OutboundLaneStorage) - } - - fn read_and_decode_message_payload( - &mut self, - message_key: &MessageKey, - ) -> Result { - let storage_message_key = bp_messages::storage_keys::message_key( - B::BRIDGED_MESSAGES_PALLET_NAME, - &message_key.lane_id, - message_key.nonce, - ); - self.storage - .read_and_decode_mandatory_value(storage_message_key.0.as_ref()) - .map_err(VerificationError::MessageStorage) - } - } -} - -/// The `BridgeMessagesCall` used by a chain. 
-pub type BridgeMessagesCallOf = bp_messages::BridgeMessagesCall< - bp_runtime::AccountIdOf, - target::FromBridgedChainMessagesProof>, - source::FromBridgedChainMessagesDeliveryProof>, ->; - -#[cfg(test)] -mod tests { - use super::*; - use crate::{ - messages_generation::{ - encode_all_messages, encode_lane_data, prepare_messages_storage_proof, - }, - mock::*, - }; - use bp_header_chain::{HeaderChainError, StoredHeaderDataBuilder}; - use bp_runtime::{HeaderId, StorageProofError}; - use codec::Encode; - use sp_core::H256; - use sp_runtime::traits::Header as _; - - #[test] - fn verify_chain_message_rejects_message_with_too_large_declared_weight() { - assert!(source::verify_chain_message::(&vec![ - 42; - BRIDGED_CHAIN_MAX_EXTRINSIC_WEIGHT - - 1 - ]) - .is_err()); - } - - #[test] - fn verify_chain_message_rejects_message_too_large_message() { - assert!(source::verify_chain_message::(&vec![ - 0; - source::maximal_message_size::() - as usize + 1 - ],) - .is_err()); - } - - #[test] - fn verify_chain_message_accepts_maximal_message() { - assert_eq!( - source::verify_chain_message::(&vec![ - 0; - source::maximal_message_size::() - as _ - ],), - Ok(()), - ); - } - - fn using_messages_proof( - nonces_end: MessageNonce, - outbound_lane_data: Option, - encode_message: impl Fn(MessageNonce, &MessagePayload) -> Option>, - encode_outbound_lane_data: impl Fn(&OutboundLaneData) -> Vec, - test: impl Fn(target::FromBridgedChainMessagesProof) -> R, - ) -> R { - let (state_root, storage_proof) = prepare_messages_storage_proof::( - TEST_LANE_ID, - 1..=nonces_end, - outbound_lane_data, - bp_runtime::StorageProofSize::Minimal(0), - vec![42], - encode_message, - encode_outbound_lane_data, - ); - - sp_io::TestExternalities::new(Default::default()).execute_with(move || { - let bridged_header = BridgedChainHeader::new( - 0, - Default::default(), - state_root, - Default::default(), - Default::default(), - ); - let bridged_header_hash = bridged_header.hash(); - - 
pallet_bridge_grandpa::BestFinalized::::put(HeaderId( - 0, - bridged_header_hash, - )); - pallet_bridge_grandpa::ImportedHeaders::::insert( - bridged_header_hash, - bridged_header.build(), - ); - test(target::FromBridgedChainMessagesProof { - bridged_header_hash, - storage_proof, - lane: TEST_LANE_ID, - nonces_start: 1, - nonces_end, - }) - }) - } - - #[test] - fn messages_proof_is_rejected_if_declared_less_than_actual_number_of_messages() { - assert_eq!( - using_messages_proof(10, None, encode_all_messages, encode_lane_data, |proof| { - target::verify_messages_proof::(proof, 5) - }), - Err(VerificationError::MessagesCountMismatch), - ); - } - - #[test] - fn messages_proof_is_rejected_if_declared_more_than_actual_number_of_messages() { - assert_eq!( - using_messages_proof(10, None, encode_all_messages, encode_lane_data, |proof| { - target::verify_messages_proof::(proof, 15) - }), - Err(VerificationError::MessagesCountMismatch), - ); - } - - #[test] - fn message_proof_is_rejected_if_header_is_missing_from_the_chain() { - assert_eq!( - using_messages_proof(10, None, encode_all_messages, encode_lane_data, |proof| { - let bridged_header_hash = - pallet_bridge_grandpa::BestFinalized::::get().unwrap().1; - pallet_bridge_grandpa::ImportedHeaders::::remove(bridged_header_hash); - target::verify_messages_proof::(proof, 10) - }), - Err(VerificationError::HeaderChain(HeaderChainError::UnknownHeader)), - ); - } - - #[test] - fn message_proof_is_rejected_if_header_state_root_mismatches() { - assert_eq!( - using_messages_proof(10, None, encode_all_messages, encode_lane_data, |proof| { - let bridged_header_hash = - pallet_bridge_grandpa::BestFinalized::::get().unwrap().1; - pallet_bridge_grandpa::ImportedHeaders::::insert( - bridged_header_hash, - BridgedChainHeader::new( - 0, - Default::default(), - Default::default(), - Default::default(), - Default::default(), - ) - .build(), - ); - target::verify_messages_proof::(proof, 10) - }), - 
Err(VerificationError::HeaderChain(HeaderChainError::StorageProof( - StorageProofError::StorageRootMismatch - ))), - ); - } - - #[test] - fn message_proof_is_rejected_if_it_has_duplicate_trie_nodes() { - assert_eq!( - using_messages_proof(10, None, encode_all_messages, encode_lane_data, |mut proof| { - let node = proof.storage_proof.pop().unwrap(); - proof.storage_proof.push(node.clone()); - proof.storage_proof.push(node); - target::verify_messages_proof::(proof, 10) - },), - Err(VerificationError::HeaderChain(HeaderChainError::StorageProof( - StorageProofError::DuplicateNodesInProof - ))), - ); - } - - #[test] - fn message_proof_is_rejected_if_it_has_unused_trie_nodes() { - assert_eq!( - using_messages_proof(10, None, encode_all_messages, encode_lane_data, |mut proof| { - proof.storage_proof.push(vec![42]); - target::verify_messages_proof::(proof, 10) - },), - Err(VerificationError::StorageProof(StorageProofError::UnusedNodesInTheProof)), - ); - } - - #[test] - fn message_proof_is_rejected_if_required_message_is_missing() { - matches!( - using_messages_proof( - 10, - None, - |n, m| if n != 5 { Some(m.encode()) } else { None }, - encode_lane_data, - |proof| target::verify_messages_proof::(proof, 10) - ), - Err(VerificationError::MessageStorage(StorageProofError::StorageValueEmpty)), - ); - } - - #[test] - fn message_proof_is_rejected_if_message_decode_fails() { - matches!( - using_messages_proof( - 10, - None, - |n, m| { - let mut m = m.encode(); - if n == 5 { - m = vec![42] - } - Some(m) - }, - encode_lane_data, - |proof| target::verify_messages_proof::(proof, 10), - ), - Err(VerificationError::MessageStorage(StorageProofError::StorageValueDecodeFailed(_))), - ); - } - - #[test] - fn message_proof_is_rejected_if_outbound_lane_state_decode_fails() { - matches!( - using_messages_proof( - 10, - Some(OutboundLaneData { - oldest_unpruned_nonce: 1, - latest_received_nonce: 1, - latest_generated_nonce: 1, - }), - encode_all_messages, - |d| { - let mut d = d.encode(); - 
d.truncate(1); - d - }, - |proof| target::verify_messages_proof::(proof, 10), - ), - Err(VerificationError::OutboundLaneStorage( - StorageProofError::StorageValueDecodeFailed(_) - )), - ); - } - - #[test] - fn message_proof_is_rejected_if_it_is_empty() { - assert_eq!( - using_messages_proof(0, None, encode_all_messages, encode_lane_data, |proof| { - target::verify_messages_proof::(proof, 0) - },), - Err(VerificationError::EmptyMessageProof), - ); - } - - #[test] - fn non_empty_message_proof_without_messages_is_accepted() { - assert_eq!( - using_messages_proof( - 0, - Some(OutboundLaneData { - oldest_unpruned_nonce: 1, - latest_received_nonce: 1, - latest_generated_nonce: 1, - }), - encode_all_messages, - encode_lane_data, - |proof| target::verify_messages_proof::(proof, 0), - ), - Ok(vec![( - TEST_LANE_ID, - ProvedLaneMessages { - lane_state: Some(OutboundLaneData { - oldest_unpruned_nonce: 1, - latest_received_nonce: 1, - latest_generated_nonce: 1, - }), - messages: Vec::new(), - }, - )] - .into_iter() - .collect()), - ); - } - - #[test] - fn non_empty_message_proof_is_accepted() { - assert_eq!( - using_messages_proof( - 1, - Some(OutboundLaneData { - oldest_unpruned_nonce: 1, - latest_received_nonce: 1, - latest_generated_nonce: 1, - }), - encode_all_messages, - encode_lane_data, - |proof| target::verify_messages_proof::(proof, 1), - ), - Ok(vec![( - TEST_LANE_ID, - ProvedLaneMessages { - lane_state: Some(OutboundLaneData { - oldest_unpruned_nonce: 1, - latest_received_nonce: 1, - latest_generated_nonce: 1, - }), - messages: vec![Message { - key: MessageKey { lane_id: TEST_LANE_ID, nonce: 1 }, - payload: vec![42], - }], - }, - )] - .into_iter() - .collect()), - ); - } - - #[test] - fn verify_messages_proof_does_not_panic_if_messages_count_mismatches() { - assert_eq!( - using_messages_proof(1, None, encode_all_messages, encode_lane_data, |mut proof| { - proof.nonces_end = u64::MAX; - target::verify_messages_proof::(proof, u32::MAX) - },), - 
Err(VerificationError::MessagesCountMismatch), - ); - } -} diff --git a/bin/runtime-common/src/messages_api.rs b/bin/runtime-common/src/messages_api.rs deleted file mode 100644 index 7fbdeb366..000000000 --- a/bin/runtime-common/src/messages_api.rs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Helpers for implementing various message-related runtime API methods. - -use bp_messages::{ - InboundMessageDetails, LaneId, MessageNonce, MessagePayload, OutboundMessageDetails, -}; -use sp_std::vec::Vec; - -/// Implementation of the `To*OutboundLaneApi::message_details`. -pub fn outbound_message_details( - lane: LaneId, - begin: MessageNonce, - end: MessageNonce, -) -> Vec -where - Runtime: pallet_bridge_messages::Config, - MessagesPalletInstance: 'static, -{ - (begin..=end) - .filter_map(|nonce| { - let message_data = - pallet_bridge_messages::Pallet::::outbound_message_data(lane, nonce)?; - Some(OutboundMessageDetails { - nonce, - // dispatch message weight is always zero at the source chain, since we're paying for - // dispatch at the target chain - dispatch_weight: frame_support::weights::Weight::zero(), - size: message_data.len() as _, - }) - }) - .collect() -} - -/// Implementation of the `To*InboundLaneApi::message_details`. 
-pub fn inbound_message_details( - lane: LaneId, - messages: Vec<(MessagePayload, OutboundMessageDetails)>, -) -> Vec -where - Runtime: pallet_bridge_messages::Config, - MessagesPalletInstance: 'static, -{ - messages - .into_iter() - .map(|(payload, details)| { - pallet_bridge_messages::Pallet::::inbound_message_data( - lane, payload, details, - ) - }) - .collect() -} diff --git a/bin/runtime-common/src/messages_benchmarking.rs b/bin/runtime-common/src/messages_benchmarking.rs deleted file mode 100644 index 0c7a9ad1a..000000000 --- a/bin/runtime-common/src/messages_benchmarking.rs +++ /dev/null @@ -1,314 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Everything required to run benchmarks of messages module, based on -//! `bridge_runtime_common::messages` implementation. 
- -#![cfg(feature = "runtime-benchmarks")] - -use crate::{ - messages::{ - source::FromBridgedChainMessagesDeliveryProof, target::FromBridgedChainMessagesProof, - AccountIdOf, BridgedChain, HashOf, MessageBridge, ThisChain, - }, - messages_generation::{ - encode_all_messages, encode_lane_data, prepare_message_delivery_storage_proof, - prepare_messages_storage_proof, - }, -}; - -use bp_messages::MessagePayload; -use bp_polkadot_core::parachains::ParaHash; -use bp_runtime::{Chain, Parachain, StorageProofSize, UnderlyingChainOf}; -use codec::Encode; -use frame_support::weights::Weight; -use pallet_bridge_messages::benchmarking::{MessageDeliveryProofParams, MessageProofParams}; -use sp_runtime::traits::{Header, Zero}; -use sp_std::prelude::*; -use xcm::latest::prelude::*; - -/// Prepare inbound bridge message according to given message proof parameters. -fn prepare_inbound_message( - params: &MessageProofParams, - successful_dispatch_message_generator: impl Fn(usize) -> MessagePayload, -) -> MessagePayload { - // we only care about **this** message size when message proof needs to be `Minimal` - let expected_size = match params.size { - StorageProofSize::Minimal(size) => size as usize, - _ => 0, - }; - - // if we don't need a correct message, then we may just return some random blob - if !params.is_successful_dispatch_expected { - return vec![0u8; expected_size] - } - - // else let's prepare successful message. - let msg = successful_dispatch_message_generator(expected_size); - assert!( - msg.len() >= expected_size, - "msg.len(): {} does not match expected_size: {}", - expected_size, - msg.len() - ); - msg -} - -/// Prepare proof of messages for the `receive_messages_proof` call. -/// -/// In addition to returning valid messages proof, environment is prepared to verify this message -/// proof. -/// -/// This method is intended to be used when benchmarking pallet, linked to the chain that -/// uses GRANDPA finality. 
For parachains, please use the `prepare_message_proof_from_parachain` -/// function. -pub fn prepare_message_proof_from_grandpa_chain( - params: MessageProofParams, - message_generator: impl Fn(usize) -> MessagePayload, -) -> (FromBridgedChainMessagesProof>>, Weight) -where - R: pallet_bridge_grandpa::Config>>, - FI: 'static, - B: MessageBridge, -{ - // prepare storage proof - let (state_root, storage_proof) = prepare_messages_storage_proof::( - params.lane, - params.message_nonces.clone(), - params.outbound_lane_data.clone(), - params.size, - prepare_inbound_message(¶ms, message_generator), - encode_all_messages, - encode_lane_data, - ); - - // update runtime storage - let (_, bridged_header_hash) = insert_header_to_grandpa_pallet::(state_root); - - ( - FromBridgedChainMessagesProof { - bridged_header_hash, - storage_proof, - lane: params.lane, - nonces_start: *params.message_nonces.start(), - nonces_end: *params.message_nonces.end(), - }, - Weight::MAX / 1000, - ) -} - -/// Prepare proof of messages for the `receive_messages_proof` call. -/// -/// In addition to returning valid messages proof, environment is prepared to verify this message -/// proof. -/// -/// This method is intended to be used when benchmarking pallet, linked to the chain that -/// uses parachain finality. For GRANDPA chains, please use the -/// `prepare_message_proof_from_grandpa_chain` function. 
-pub fn prepare_message_proof_from_parachain( - params: MessageProofParams, - message_generator: impl Fn(usize) -> MessagePayload, -) -> (FromBridgedChainMessagesProof>>, Weight) -where - R: pallet_bridge_parachains::Config, - PI: 'static, - B: MessageBridge, - UnderlyingChainOf>: Chain + Parachain, -{ - // prepare storage proof - let (state_root, storage_proof) = prepare_messages_storage_proof::( - params.lane, - params.message_nonces.clone(), - params.outbound_lane_data.clone(), - params.size, - prepare_inbound_message(¶ms, message_generator), - encode_all_messages, - encode_lane_data, - ); - - // update runtime storage - let (_, bridged_header_hash) = - insert_header_to_parachains_pallet::>>(state_root); - - ( - FromBridgedChainMessagesProof { - bridged_header_hash, - storage_proof, - lane: params.lane, - nonces_start: *params.message_nonces.start(), - nonces_end: *params.message_nonces.end(), - }, - Weight::MAX / 1000, - ) -} - -/// Prepare proof of messages delivery for the `receive_messages_delivery_proof` call. -/// -/// This method is intended to be used when benchmarking pallet, linked to the chain that -/// uses GRANDPA finality. For parachains, please use the -/// `prepare_message_delivery_proof_from_parachain` function. 
-pub fn prepare_message_delivery_proof_from_grandpa_chain( - params: MessageDeliveryProofParams>>, -) -> FromBridgedChainMessagesDeliveryProof>> -where - R: pallet_bridge_grandpa::Config>>, - FI: 'static, - B: MessageBridge, -{ - // prepare storage proof - let lane = params.lane; - let (state_root, storage_proof) = prepare_message_delivery_storage_proof::( - params.lane, - params.inbound_lane_data, - params.size, - ); - - // update runtime storage - let (_, bridged_header_hash) = insert_header_to_grandpa_pallet::(state_root); - - FromBridgedChainMessagesDeliveryProof { - bridged_header_hash: bridged_header_hash.into(), - storage_proof, - lane, - } -} - -/// Prepare proof of messages delivery for the `receive_messages_delivery_proof` call. -/// -/// This method is intended to be used when benchmarking pallet, linked to the chain that -/// uses parachain finality. For GRANDPA chains, please use the -/// `prepare_message_delivery_proof_from_grandpa_chain` function. -pub fn prepare_message_delivery_proof_from_parachain( - params: MessageDeliveryProofParams>>, -) -> FromBridgedChainMessagesDeliveryProof>> -where - R: pallet_bridge_parachains::Config, - PI: 'static, - B: MessageBridge, - UnderlyingChainOf>: Chain + Parachain, -{ - // prepare storage proof - let lane = params.lane; - let (state_root, storage_proof) = prepare_message_delivery_storage_proof::( - params.lane, - params.inbound_lane_data, - params.size, - ); - - // update runtime storage - let (_, bridged_header_hash) = - insert_header_to_parachains_pallet::>>(state_root); - - FromBridgedChainMessagesDeliveryProof { - bridged_header_hash: bridged_header_hash.into(), - storage_proof, - lane, - } -} - -/// Insert header to the bridge GRANDPA pallet. 
-pub(crate) fn insert_header_to_grandpa_pallet( - state_root: bp_runtime::HashOf, -) -> (bp_runtime::BlockNumberOf, bp_runtime::HashOf) -where - R: pallet_bridge_grandpa::Config, - GI: 'static, - R::BridgedChain: bp_runtime::Chain, -{ - let bridged_block_number = Zero::zero(); - let bridged_header = bp_runtime::HeaderOf::::new( - bridged_block_number, - Default::default(), - state_root, - Default::default(), - Default::default(), - ); - let bridged_header_hash = bridged_header.hash(); - pallet_bridge_grandpa::initialize_for_benchmarks::(bridged_header); - (bridged_block_number, bridged_header_hash) -} - -/// Insert header to the bridge parachains pallet. -pub(crate) fn insert_header_to_parachains_pallet( - state_root: bp_runtime::HashOf, -) -> (bp_runtime::BlockNumberOf, bp_runtime::HashOf) -where - R: pallet_bridge_parachains::Config, - PI: 'static, - PC: Chain + Parachain, -{ - let bridged_block_number = Zero::zero(); - let bridged_header = bp_runtime::HeaderOf::::new( - bridged_block_number, - Default::default(), - state_root, - Default::default(), - Default::default(), - ); - let bridged_header_hash = bridged_header.hash(); - pallet_bridge_parachains::initialize_for_benchmarks::(bridged_header); - (bridged_block_number, bridged_header_hash) -} - -/// Returns callback which generates `BridgeMessage` from Polkadot XCM builder based on -/// `expected_message_size` for benchmark. 
-pub fn generate_xcm_builder_bridge_message_sample( - destination: InteriorLocation, -) -> impl Fn(usize) -> MessagePayload { - move |expected_message_size| -> MessagePayload { - // For XCM bridge hubs, it is the message that - // will be pushed further to some XCM queue (XCMP/UMP) - let location = xcm::VersionedInteriorLocation::V4(destination.clone()); - let location_encoded_size = location.encoded_size(); - - // we don't need to be super-precise with `expected_size` here - let xcm_size = expected_message_size.saturating_sub(location_encoded_size); - let xcm_data_size = xcm_size.saturating_sub( - // minus empty instruction size - Instruction::<()>::ExpectPallet { - index: 0, - name: vec![], - module_name: vec![], - crate_major: 0, - min_crate_minor: 0, - } - .encoded_size(), - ); - - log::trace!( - target: "runtime::bridge-benchmarks", - "generate_xcm_builder_bridge_message_sample with expected_message_size: {}, location_encoded_size: {}, xcm_size: {}, xcm_data_size: {}", - expected_message_size, location_encoded_size, xcm_size, xcm_data_size, - ); - - let xcm = xcm::VersionedXcm::<()>::V4( - vec![Instruction::<()>::ExpectPallet { - index: 0, - name: vec![42; xcm_data_size], - module_name: vec![], - crate_major: 0, - min_crate_minor: 0, - }] - .into(), - ); - - // this is the `BridgeMessage` from polkadot xcm builder, but it has no constructor - // or public fields, so just tuple - // (double encoding, because `.encode()` is called on original Xcm BLOB when it is pushed - // to the storage) - (location, xcm).encode().encode() - } -} diff --git a/bin/runtime-common/src/messages_call_ext.rs b/bin/runtime-common/src/messages_call_ext.rs deleted file mode 100644 index fb07f7b6d..000000000 --- a/bin/runtime-common/src/messages_call_ext.rs +++ /dev/null @@ -1,692 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Signed extension for the `pallet-bridge-messages` that is able to reject obsolete -//! (and some other invalid) transactions. - -use crate::messages::{ - source::FromBridgedChainMessagesDeliveryProof, target::FromBridgedChainMessagesProof, -}; -use bp_messages::{target_chain::MessageDispatch, InboundLaneData, LaneId, MessageNonce}; -use bp_runtime::OwnedBridgeModule; -use frame_support::{ - dispatch::CallableCallFor, - traits::{Get, IsSubType}, -}; -use pallet_bridge_messages::{Config, Pallet}; -use sp_runtime::{transaction_validity::TransactionValidity, RuntimeDebug}; -use sp_std::ops::RangeInclusive; - -/// Generic info about a messages delivery/confirmation proof. -#[derive(PartialEq, RuntimeDebug)] -pub struct BaseMessagesProofInfo { - /// Message lane, used by the call. - pub lane_id: LaneId, - /// Nonces of messages, included in the call. - /// - /// For delivery transaction, it is nonces of bundled messages. For confirmation - /// transaction, it is nonces that are to be confirmed during the call. - pub bundled_range: RangeInclusive, - /// Nonce of the best message, stored by this chain before the call is dispatched. - /// - /// For delivery transaction, it is the nonce of best delivered message before the call. - /// For confirmation transaction, it is the nonce of best confirmed message before the call. 
- pub best_stored_nonce: MessageNonce, -} - -impl BaseMessagesProofInfo { - /// Returns true if `bundled_range` continues the `0..=best_stored_nonce` range. - fn appends_to_stored_nonce(&self) -> bool { - Some(*self.bundled_range.start()) == self.best_stored_nonce.checked_add(1) - } -} - -/// Occupation state of the unrewarded relayers vector. -#[derive(PartialEq, RuntimeDebug)] -#[cfg_attr(test, derive(Default))] -pub struct UnrewardedRelayerOccupation { - /// The number of remaining unoccupied entries for new relayers. - pub free_relayer_slots: MessageNonce, - /// The number of messages that we are ready to accept. - pub free_message_slots: MessageNonce, -} - -/// Info about a `ReceiveMessagesProof` call which tries to update a single lane. -#[derive(PartialEq, RuntimeDebug)] -pub struct ReceiveMessagesProofInfo { - /// Base messages proof info - pub base: BaseMessagesProofInfo, - /// State of unrewarded relayers vector. - pub unrewarded_relayers: UnrewardedRelayerOccupation, -} - -impl ReceiveMessagesProofInfo { - /// Returns true if: - /// - /// - either inbound lane is ready to accept bundled messages; - /// - /// - or there are no bundled messages, but the inbound lane is blocked by too many unconfirmed - /// messages and/or unrewarded relayers. 
- fn is_obsolete(&self, is_dispatcher_active: bool) -> bool { - // if dispatcher is inactive, we don't accept any delivery transactions - if !is_dispatcher_active { - return true - } - - // transactions with zero bundled nonces are not allowed, unless they're message - // delivery transactions, which brings reward confirmations required to unblock - // the lane - if self.base.bundled_range.is_empty() { - let empty_transactions_allowed = - // we allow empty transactions when we can't accept delivery from new relayers - self.unrewarded_relayers.free_relayer_slots == 0 || - // or if we can't accept new messages at all - self.unrewarded_relayers.free_message_slots == 0; - - return !empty_transactions_allowed - } - - // otherwise we require bundled messages to continue stored range - !self.base.appends_to_stored_nonce() - } -} - -/// Info about a `ReceiveMessagesDeliveryProof` call which tries to update a single lane. -#[derive(PartialEq, RuntimeDebug)] -pub struct ReceiveMessagesDeliveryProofInfo(pub BaseMessagesProofInfo); - -impl ReceiveMessagesDeliveryProofInfo { - /// Returns true if outbound lane is ready to accept confirmations of bundled messages. - fn is_obsolete(&self) -> bool { - self.0.bundled_range.is_empty() || !self.0.appends_to_stored_nonce() - } -} - -/// Info about a `ReceiveMessagesProof` or a `ReceiveMessagesDeliveryProof` call -/// which tries to update a single lane. -#[derive(PartialEq, RuntimeDebug)] -pub enum CallInfo { - /// Messages delivery call info. - ReceiveMessagesProof(ReceiveMessagesProofInfo), - /// Messages delivery confirmation call info. - ReceiveMessagesDeliveryProof(ReceiveMessagesDeliveryProofInfo), -} - -impl CallInfo { - /// Returns range of messages, bundled with the call. 
- pub fn bundled_messages(&self) -> RangeInclusive { - match *self { - Self::ReceiveMessagesProof(ref info) => info.base.bundled_range.clone(), - Self::ReceiveMessagesDeliveryProof(ref info) => info.0.bundled_range.clone(), - } - } -} - -/// Helper struct that provides methods for working with a call supported by `CallInfo`. -pub struct CallHelper, I: 'static> { - _phantom_data: sp_std::marker::PhantomData<(T, I)>, -} - -impl, I: 'static> CallHelper { - /// Returns true if: - /// - /// - call is `receive_messages_proof` and all messages have been delivered; - /// - /// - call is `receive_messages_delivery_proof` and all messages confirmations have been - /// received. - pub fn was_successful(info: &CallInfo) -> bool { - match info { - CallInfo::ReceiveMessagesProof(info) => { - let inbound_lane_data = - pallet_bridge_messages::InboundLanes::::get(info.base.lane_id); - if info.base.bundled_range.is_empty() { - let post_occupation = - unrewarded_relayers_occupation::(&inbound_lane_data); - // we don't care about `free_relayer_slots` here - it is checked in - // `is_obsolete` and every relayer has delivered at least one message, - // so if relayer slots are released, then message slots are also - // released - return post_occupation.free_message_slots > - info.unrewarded_relayers.free_message_slots - } - - inbound_lane_data.last_delivered_nonce() == *info.base.bundled_range.end() - }, - CallInfo::ReceiveMessagesDeliveryProof(info) => { - let outbound_lane_data = - pallet_bridge_messages::OutboundLanes::::get(info.0.lane_id); - outbound_lane_data.latest_received_nonce == *info.0.bundled_range.end() - }, - } - } -} - -/// Trait representing a call that is a sub type of `pallet_bridge_messages::Call`. -pub trait MessagesCallSubType, I: 'static>: - IsSubType, T>> -{ - /// Create a new instance of `ReceiveMessagesProofInfo` from a `ReceiveMessagesProof` call. 
- fn receive_messages_proof_info(&self) -> Option; - - /// Create a new instance of `ReceiveMessagesDeliveryProofInfo` from - /// a `ReceiveMessagesDeliveryProof` call. - fn receive_messages_delivery_proof_info(&self) -> Option; - - /// Create a new instance of `CallInfo` from a `ReceiveMessagesProof` - /// or a `ReceiveMessagesDeliveryProof` call. - fn call_info(&self) -> Option; - - /// Create a new instance of `CallInfo` from a `ReceiveMessagesProof` - /// or a `ReceiveMessagesDeliveryProof` call, if the call is for the provided lane. - fn call_info_for(&self, lane_id: LaneId) -> Option; - - /// Ensures that a `ReceiveMessagesProof` or a `ReceiveMessagesDeliveryProof` call: - /// - /// - does not deliver already delivered messages. We require all messages in the - /// `ReceiveMessagesProof` call to be undelivered; - /// - /// - does not submit empty `ReceiveMessagesProof` call with zero messages, unless the lane - /// needs to be unblocked by providing relayer rewards proof; - /// - /// - brings no new delivery confirmations in a `ReceiveMessagesDeliveryProof` call. We require - /// at least one new delivery confirmation in the unrewarded relayers set; - /// - /// - does not violate some basic (easy verifiable) messages pallet rules obsolete (like - /// submitting a call when a pallet is halted or delivering messages when a dispatcher is - /// inactive). - /// - /// If one of above rules is violated, the transaction is treated as invalid. 
- fn check_obsolete_call(&self) -> TransactionValidity; -} - -impl< - BridgedHeaderHash, - SourceHeaderChain: bp_messages::target_chain::SourceHeaderChain< - MessagesProof = FromBridgedChainMessagesProof, - >, - TargetHeaderChain: bp_messages::source_chain::TargetHeaderChain< - >::OutboundPayload, - ::AccountId, - MessagesDeliveryProof = FromBridgedChainMessagesDeliveryProof, - >, - Call: IsSubType, T>>, - T: frame_system::Config - + Config, - I: 'static, - > MessagesCallSubType for T::RuntimeCall -{ - fn receive_messages_proof_info(&self) -> Option { - if let Some(pallet_bridge_messages::Call::::receive_messages_proof { - ref proof, - .. - }) = self.is_sub_type() - { - let inbound_lane_data = pallet_bridge_messages::InboundLanes::::get(proof.lane); - - return Some(ReceiveMessagesProofInfo { - base: BaseMessagesProofInfo { - lane_id: proof.lane, - // we want all messages in this range to be new for us. Otherwise transaction - // will be considered obsolete. - bundled_range: proof.nonces_start..=proof.nonces_end, - best_stored_nonce: inbound_lane_data.last_delivered_nonce(), - }, - unrewarded_relayers: unrewarded_relayers_occupation::(&inbound_lane_data), - }) - } - - None - } - - fn receive_messages_delivery_proof_info(&self) -> Option { - if let Some(pallet_bridge_messages::Call::::receive_messages_delivery_proof { - ref proof, - ref relayers_state, - .. - }) = self.is_sub_type() - { - let outbound_lane_data = pallet_bridge_messages::OutboundLanes::::get(proof.lane); - - return Some(ReceiveMessagesDeliveryProofInfo(BaseMessagesProofInfo { - lane_id: proof.lane, - // there's a time frame between message delivery, message confirmation and reward - // confirmation. Because of that, we can't assume that our state has been confirmed - // to the bridged chain. So we are accepting any proof that brings new - // confirmations. 
- bundled_range: outbound_lane_data.latest_received_nonce + 1..= - relayers_state.last_delivered_nonce, - best_stored_nonce: outbound_lane_data.latest_received_nonce, - })) - } - - None - } - - fn call_info(&self) -> Option { - if let Some(info) = self.receive_messages_proof_info() { - return Some(CallInfo::ReceiveMessagesProof(info)) - } - - if let Some(info) = self.receive_messages_delivery_proof_info() { - return Some(CallInfo::ReceiveMessagesDeliveryProof(info)) - } - - None - } - - fn call_info_for(&self, lane_id: LaneId) -> Option { - self.call_info().filter(|info| { - let actual_lane_id = match info { - CallInfo::ReceiveMessagesProof(info) => info.base.lane_id, - CallInfo::ReceiveMessagesDeliveryProof(info) => info.0.lane_id, - }; - actual_lane_id == lane_id - }) - } - - fn check_obsolete_call(&self) -> TransactionValidity { - let is_pallet_halted = Pallet::::ensure_not_halted().is_err(); - match self.call_info() { - Some(proof_info) if is_pallet_halted => { - log::trace!( - target: pallet_bridge_messages::LOG_TARGET, - "Rejecting messages transaction on halted pallet: {:?}", - proof_info - ); - - return sp_runtime::transaction_validity::InvalidTransaction::Call.into() - }, - Some(CallInfo::ReceiveMessagesProof(proof_info)) - if proof_info.is_obsolete(T::MessageDispatch::is_active()) => - { - log::trace!( - target: pallet_bridge_messages::LOG_TARGET, - "Rejecting obsolete messages delivery transaction: {:?}", - proof_info - ); - - return sp_runtime::transaction_validity::InvalidTransaction::Stale.into() - }, - Some(CallInfo::ReceiveMessagesDeliveryProof(proof_info)) - if proof_info.is_obsolete() => - { - log::trace!( - target: pallet_bridge_messages::LOG_TARGET, - "Rejecting obsolete messages confirmation transaction: {:?}", - proof_info, - ); - - return sp_runtime::transaction_validity::InvalidTransaction::Stale.into() - }, - _ => {}, - } - - Ok(sp_runtime::transaction_validity::ValidTransaction::default()) - } -} - -/// Returns occupation state of 
unrewarded relayers vector. -fn unrewarded_relayers_occupation, I: 'static>( - inbound_lane_data: &InboundLaneData, -) -> UnrewardedRelayerOccupation { - UnrewardedRelayerOccupation { - free_relayer_slots: T::MaxUnrewardedRelayerEntriesAtInboundLane::get() - .saturating_sub(inbound_lane_data.relayers.len() as MessageNonce), - free_message_slots: { - let unconfirmed_messages = inbound_lane_data - .last_delivered_nonce() - .saturating_sub(inbound_lane_data.last_confirmed_nonce); - T::MaxUnconfirmedMessagesAtInboundLane::get().saturating_sub(unconfirmed_messages) - }, - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{ - messages::{ - source::FromBridgedChainMessagesDeliveryProof, target::FromBridgedChainMessagesProof, - }, - messages_call_ext::MessagesCallSubType, - mock::{ - DummyMessageDispatch, MaxUnconfirmedMessagesAtInboundLane, - MaxUnrewardedRelayerEntriesAtInboundLane, TestRuntime, ThisChainRuntimeCall, - }, - }; - use bp_messages::{DeliveredMessages, UnrewardedRelayer, UnrewardedRelayersState}; - use sp_std::ops::RangeInclusive; - - fn fill_unrewarded_relayers() { - let mut inbound_lane_state = - pallet_bridge_messages::InboundLanes::::get(LaneId([0, 0, 0, 0])); - for n in 0..MaxUnrewardedRelayerEntriesAtInboundLane::get() { - inbound_lane_state.relayers.push_back(UnrewardedRelayer { - relayer: Default::default(), - messages: DeliveredMessages { begin: n + 1, end: n + 1 }, - }); - } - pallet_bridge_messages::InboundLanes::::insert( - LaneId([0, 0, 0, 0]), - inbound_lane_state, - ); - } - - fn fill_unrewarded_messages() { - let mut inbound_lane_state = - pallet_bridge_messages::InboundLanes::::get(LaneId([0, 0, 0, 0])); - inbound_lane_state.relayers.push_back(UnrewardedRelayer { - relayer: Default::default(), - messages: DeliveredMessages { - begin: 1, - end: MaxUnconfirmedMessagesAtInboundLane::get(), - }, - }); - pallet_bridge_messages::InboundLanes::::insert( - LaneId([0, 0, 0, 0]), - inbound_lane_state, - ); - } - - fn deliver_message_10() 
{ - pallet_bridge_messages::InboundLanes::::insert( - LaneId([0, 0, 0, 0]), - bp_messages::InboundLaneData { relayers: Default::default(), last_confirmed_nonce: 10 }, - ); - } - - fn validate_message_delivery( - nonces_start: bp_messages::MessageNonce, - nonces_end: bp_messages::MessageNonce, - ) -> bool { - ThisChainRuntimeCall::BridgeMessages( - pallet_bridge_messages::Call::::receive_messages_proof { - relayer_id_at_bridged_chain: 42, - messages_count: nonces_end.checked_sub(nonces_start).map(|x| x + 1).unwrap_or(0) - as u32, - dispatch_weight: frame_support::weights::Weight::zero(), - proof: FromBridgedChainMessagesProof { - bridged_header_hash: Default::default(), - storage_proof: vec![], - lane: LaneId([0, 0, 0, 0]), - nonces_start, - nonces_end, - }, - }, - ) - .check_obsolete_call() - .is_ok() - } - - #[test] - fn extension_rejects_obsolete_messages() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - // when current best delivered is message#10 and we're trying to deliver messages 8..=9 - // => tx is rejected - deliver_message_10(); - assert!(!validate_message_delivery(8, 9)); - }); - } - - #[test] - fn extension_rejects_same_message() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - // when current best delivered is message#10 and we're trying to import messages 10..=10 - // => tx is rejected - deliver_message_10(); - assert!(!validate_message_delivery(8, 10)); - }); - } - - #[test] - fn extension_rejects_call_with_some_obsolete_messages() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - // when current best delivered is message#10 and we're trying to deliver messages - // 10..=15 => tx is rejected - deliver_message_10(); - assert!(!validate_message_delivery(10, 15)); - }); - } - - #[test] - fn extension_rejects_call_with_future_messages() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - // when current best delivered is message#10 and we're trying to deliver 
messages - // 13..=15 => tx is rejected - deliver_message_10(); - assert!(!validate_message_delivery(13, 15)); - }); - } - - #[test] - fn extension_reject_call_when_dispatcher_is_inactive() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - // when current best delivered is message#10 and we're trying to deliver message 11..=15 - // => tx is accepted, but we have inactive dispatcher, so... - deliver_message_10(); - - DummyMessageDispatch::deactivate(); - assert!(!validate_message_delivery(11, 15)); - }); - } - - #[test] - fn extension_rejects_empty_delivery_with_rewards_confirmations_if_there_are_free_relayer_and_message_slots( - ) { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - deliver_message_10(); - assert!(!validate_message_delivery(10, 9)); - }); - } - - #[test] - fn extension_accepts_empty_delivery_with_rewards_confirmations_if_there_are_no_free_relayer_slots( - ) { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - deliver_message_10(); - fill_unrewarded_relayers(); - assert!(validate_message_delivery(10, 9)); - }); - } - - #[test] - fn extension_accepts_empty_delivery_with_rewards_confirmations_if_there_are_no_free_message_slots( - ) { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - fill_unrewarded_messages(); - assert!(validate_message_delivery( - MaxUnconfirmedMessagesAtInboundLane::get(), - MaxUnconfirmedMessagesAtInboundLane::get() - 1 - )); - }); - } - - #[test] - fn extension_accepts_new_messages() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - // when current best delivered is message#10 and we're trying to deliver message 11..=15 - // => tx is accepted - deliver_message_10(); - assert!(validate_message_delivery(11, 15)); - }); - } - - fn confirm_message_10() { - pallet_bridge_messages::OutboundLanes::::insert( - LaneId([0, 0, 0, 0]), - bp_messages::OutboundLaneData { - oldest_unpruned_nonce: 0, - latest_received_nonce: 10, - 
latest_generated_nonce: 10, - }, - ); - } - - fn validate_message_confirmation(last_delivered_nonce: bp_messages::MessageNonce) -> bool { - ThisChainRuntimeCall::BridgeMessages( - pallet_bridge_messages::Call::::receive_messages_delivery_proof { - proof: FromBridgedChainMessagesDeliveryProof { - bridged_header_hash: Default::default(), - storage_proof: Vec::new(), - lane: LaneId([0, 0, 0, 0]), - }, - relayers_state: UnrewardedRelayersState { - last_delivered_nonce, - ..Default::default() - }, - }, - ) - .check_obsolete_call() - .is_ok() - } - - #[test] - fn extension_rejects_obsolete_confirmations() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - // when current best confirmed is message#10 and we're trying to confirm message#5 => tx - // is rejected - confirm_message_10(); - assert!(!validate_message_confirmation(5)); - }); - } - - #[test] - fn extension_rejects_same_confirmation() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - // when current best confirmed is message#10 and we're trying to confirm message#10 => - // tx is rejected - confirm_message_10(); - assert!(!validate_message_confirmation(10)); - }); - } - - #[test] - fn extension_rejects_empty_confirmation_even_if_there_are_no_free_unrewarded_entries() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - confirm_message_10(); - fill_unrewarded_relayers(); - assert!(!validate_message_confirmation(10)); - }); - } - - #[test] - fn extension_accepts_new_confirmation() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - // when current best confirmed is message#10 and we're trying to confirm message#15 => - // tx is accepted - confirm_message_10(); - assert!(validate_message_confirmation(15)); - }); - } - - fn was_message_delivery_successful( - bundled_range: RangeInclusive, - is_empty: bool, - ) -> bool { - CallHelper::::was_successful(&CallInfo::ReceiveMessagesProof( - ReceiveMessagesProofInfo { - base: 
BaseMessagesProofInfo { - lane_id: LaneId([0, 0, 0, 0]), - bundled_range, - best_stored_nonce: 0, // doesn't matter for `was_successful` - }, - unrewarded_relayers: UnrewardedRelayerOccupation { - free_relayer_slots: 0, // doesn't matter for `was_successful` - free_message_slots: if is_empty { - 0 - } else { - MaxUnconfirmedMessagesAtInboundLane::get() - }, - }, - }, - )) - } - - #[test] - #[allow(clippy::reversed_empty_ranges)] - fn was_successful_returns_false_for_failed_reward_confirmation_transaction() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - fill_unrewarded_messages(); - assert!(!was_message_delivery_successful(10..=9, true)); - }); - } - - #[test] - #[allow(clippy::reversed_empty_ranges)] - fn was_successful_returns_true_for_successful_reward_confirmation_transaction() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - assert!(was_message_delivery_successful(10..=9, true)); - }); - } - - #[test] - fn was_successful_returns_false_for_failed_delivery() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - deliver_message_10(); - assert!(!was_message_delivery_successful(10..=12, false)); - }); - } - - #[test] - fn was_successful_returns_false_for_partially_successful_delivery() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - deliver_message_10(); - assert!(!was_message_delivery_successful(9..=12, false)); - }); - } - - #[test] - fn was_successful_returns_true_for_successful_delivery() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - deliver_message_10(); - assert!(was_message_delivery_successful(9..=10, false)); - }); - } - - fn was_message_confirmation_successful(bundled_range: RangeInclusive) -> bool { - CallHelper::::was_successful(&CallInfo::ReceiveMessagesDeliveryProof( - ReceiveMessagesDeliveryProofInfo(BaseMessagesProofInfo { - lane_id: LaneId([0, 0, 0, 0]), - bundled_range, - best_stored_nonce: 0, // doesn't matter for 
`was_successful` - }), - )) - } - - #[test] - fn was_successful_returns_false_for_failed_confirmation() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - confirm_message_10(); - assert!(!was_message_confirmation_successful(10..=12)); - }); - } - - #[test] - fn was_successful_returns_false_for_partially_successful_confirmation() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - confirm_message_10(); - assert!(!was_message_confirmation_successful(9..=12)); - }); - } - - #[test] - fn was_successful_returns_true_for_successful_confirmation() { - sp_io::TestExternalities::new(Default::default()).execute_with(|| { - confirm_message_10(); - assert!(was_message_confirmation_successful(9..=10)); - }); - } -} diff --git a/bin/runtime-common/src/messages_generation.rs b/bin/runtime-common/src/messages_generation.rs deleted file mode 100644 index c37aaa5d4..000000000 --- a/bin/runtime-common/src/messages_generation.rs +++ /dev/null @@ -1,150 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Helpers for generating message storage proofs, that are used by tests and by benchmarks. 
- -use crate::messages::{AccountIdOf, BridgedChain, HashOf, HasherOf, MessageBridge, ThisChain}; - -use bp_messages::{ - storage_keys, InboundLaneData, LaneId, MessageKey, MessageNonce, MessagePayload, - OutboundLaneData, -}; -use bp_runtime::{record_all_trie_keys, RawStorageProof, StorageProofSize}; -use codec::Encode; -use sp_std::{ops::RangeInclusive, prelude::*}; -use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, TrieMut}; - -/// Simple and correct message data encode function. -pub fn encode_all_messages(_: MessageNonce, m: &MessagePayload) -> Option> { - Some(m.encode()) -} - -/// Simple and correct outbound lane data encode function. -pub fn encode_lane_data(d: &OutboundLaneData) -> Vec { - d.encode() -} - -/// Prepare storage proof of given messages. -/// -/// Returns state trie root and nodes with prepared messages. -pub fn prepare_messages_storage_proof( - lane: LaneId, - message_nonces: RangeInclusive, - outbound_lane_data: Option, - size: StorageProofSize, - message_payload: MessagePayload, - encode_message: impl Fn(MessageNonce, &MessagePayload) -> Option>, - encode_outbound_lane_data: impl Fn(&OutboundLaneData) -> Vec, -) -> (HashOf>, RawStorageProof) -where - B: MessageBridge, - HashOf>: Copy + Default, -{ - // prepare Bridged chain storage with messages and (optionally) outbound lane state - let message_count = message_nonces.end().saturating_sub(*message_nonces.start()) + 1; - let mut storage_keys = Vec::with_capacity(message_count as usize + 1); - let mut root = Default::default(); - let mut mdb = MemoryDB::default(); - { - let mut trie = - TrieDBMutBuilderV1::>>::new(&mut mdb, &mut root).build(); - - // insert messages - for (i, nonce) in message_nonces.into_iter().enumerate() { - let message_key = MessageKey { lane_id: lane, nonce }; - let message_payload = match encode_message(nonce, &message_payload) { - Some(message_payload) => - if i == 0 { - grow_trie_leaf_value(message_payload, size) - } else { - message_payload - }, - None 
=> continue, - }; - let storage_key = storage_keys::message_key( - B::BRIDGED_MESSAGES_PALLET_NAME, - &message_key.lane_id, - message_key.nonce, - ) - .0; - trie.insert(&storage_key, &message_payload) - .map_err(|_| "TrieMut::insert has failed") - .expect("TrieMut::insert should not fail in benchmarks"); - storage_keys.push(storage_key); - } - - // insert outbound lane state - if let Some(outbound_lane_data) = outbound_lane_data.as_ref().map(encode_outbound_lane_data) - { - let storage_key = - storage_keys::outbound_lane_data_key(B::BRIDGED_MESSAGES_PALLET_NAME, &lane).0; - trie.insert(&storage_key, &outbound_lane_data) - .map_err(|_| "TrieMut::insert has failed") - .expect("TrieMut::insert should not fail in benchmarks"); - storage_keys.push(storage_key); - } - } - - // generate storage proof to be delivered to This chain - let storage_proof = record_all_trie_keys::>>, _>(&mdb, &root) - .map_err(|_| "record_all_trie_keys has failed") - .expect("record_all_trie_keys should not fail in benchmarks"); - (root, storage_proof) -} - -/// Prepare storage proof of given messages delivery. -/// -/// Returns state trie root and nodes with prepared messages. 
-pub fn prepare_message_delivery_storage_proof( - lane: LaneId, - inbound_lane_data: InboundLaneData>>, - size: StorageProofSize, -) -> (HashOf>, RawStorageProof) -where - B: MessageBridge, -{ - // prepare Bridged chain storage with inbound lane state - let storage_key = storage_keys::inbound_lane_data_key(B::BRIDGED_MESSAGES_PALLET_NAME, &lane).0; - let mut root = Default::default(); - let mut mdb = MemoryDB::default(); - { - let mut trie = - TrieDBMutBuilderV1::>>::new(&mut mdb, &mut root).build(); - let inbound_lane_data = grow_trie_leaf_value(inbound_lane_data.encode(), size); - trie.insert(&storage_key, &inbound_lane_data) - .map_err(|_| "TrieMut::insert has failed") - .expect("TrieMut::insert should not fail in benchmarks"); - } - - // generate storage proof to be delivered to This chain - let storage_proof = record_all_trie_keys::>>, _>(&mdb, &root) - .map_err(|_| "record_all_trie_keys has failed") - .expect("record_all_trie_keys should not fail in benchmarks"); - - (root, storage_proof) -} - -/// Add extra data to the trie leaf value so that it'll be of given size. -pub fn grow_trie_leaf_value(mut value: Vec, size: StorageProofSize) -> Vec { - match size { - StorageProofSize::Minimal(_) => (), - StorageProofSize::HasLargeLeaf(size) if size as usize > value.len() => { - value.extend(sp_std::iter::repeat(42u8).take(size as usize - value.len())); - }, - StorageProofSize::HasLargeLeaf(_) => (), - } - value -} diff --git a/bin/runtime-common/src/messages_xcm_extension.rs b/bin/runtime-common/src/messages_xcm_extension.rs deleted file mode 100644 index 46ed4da0d..000000000 --- a/bin/runtime-common/src/messages_xcm_extension.rs +++ /dev/null @@ -1,502 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Module provides utilities for easier XCM handling, e.g: -//! `XcmExecutor` -> `MessageSender` -> `OutboundMessageQueue` -//! | -//! `Relayer` -//! | -//! `XcmRouter` <- `MessageDispatch` <- `InboundMessageQueue` - -use bp_messages::{ - source_chain::OnMessagesDelivered, - target_chain::{DispatchMessage, MessageDispatch}, - LaneId, MessageNonce, -}; -use bp_runtime::messages::MessageDispatchResult; -pub use bp_xcm_bridge_hub::XcmAsPlainPayload; -use bp_xcm_bridge_hub_router::XcmChannelStatusProvider; -use codec::{Decode, Encode}; -use frame_support::{traits::Get, weights::Weight, CloneNoBound, EqNoBound, PartialEqNoBound}; -use pallet_bridge_messages::{ - Config as MessagesConfig, OutboundLanesCongestedSignals, WeightInfoExt as MessagesPalletWeights, -}; -use scale_info::TypeInfo; -use sp_runtime::SaturatedConversion; -use sp_std::{fmt::Debug, marker::PhantomData}; -use xcm::prelude::*; -use xcm_builder::{DispatchBlob, DispatchBlobError}; - -/// Message dispatch result type for single message. -#[derive(CloneNoBound, EqNoBound, PartialEqNoBound, Encode, Decode, Debug, TypeInfo)] -pub enum XcmBlobMessageDispatchResult { - /// We've been unable to decode message payload. - InvalidPayload, - /// Message has been dispatched. - Dispatched, - /// Message has **NOT** been dispatched because of given error. 
- NotDispatched(#[codec(skip)] Option), -} - -/// [`XcmBlobMessageDispatch`] is responsible for dispatching received messages -/// -/// It needs to be used at the target bridge hub. -pub struct XcmBlobMessageDispatch { - _marker: sp_std::marker::PhantomData<(DispatchBlob, Weights, Channel)>, -} - -impl< - BlobDispatcher: DispatchBlob, - Weights: MessagesPalletWeights, - Channel: XcmChannelStatusProvider, - > MessageDispatch for XcmBlobMessageDispatch -{ - type DispatchPayload = XcmAsPlainPayload; - type DispatchLevelResult = XcmBlobMessageDispatchResult; - - fn is_active() -> bool { - !Channel::is_congested() - } - - fn dispatch_weight(message: &mut DispatchMessage) -> Weight { - match message.data.payload { - Ok(ref payload) => { - let payload_size = payload.encoded_size().saturated_into(); - Weights::message_dispatch_weight(payload_size) - }, - Err(_) => Weight::zero(), - } - } - - fn dispatch( - message: DispatchMessage, - ) -> MessageDispatchResult { - let payload = match message.data.payload { - Ok(payload) => payload, - Err(e) => { - log::error!( - target: crate::LOG_TARGET_BRIDGE_DISPATCH, - "[XcmBlobMessageDispatch] payload error: {:?} - message_nonce: {:?}", - e, - message.key.nonce - ); - return MessageDispatchResult { - unspent_weight: Weight::zero(), - dispatch_level_result: XcmBlobMessageDispatchResult::InvalidPayload, - } - }, - }; - let dispatch_level_result = match BlobDispatcher::dispatch_blob(payload) { - Ok(_) => { - log::debug!( - target: crate::LOG_TARGET_BRIDGE_DISPATCH, - "[XcmBlobMessageDispatch] DispatchBlob::dispatch_blob was ok - message_nonce: {:?}", - message.key.nonce - ); - XcmBlobMessageDispatchResult::Dispatched - }, - Err(e) => { - log::error!( - target: crate::LOG_TARGET_BRIDGE_DISPATCH, - "[XcmBlobMessageDispatch] DispatchBlob::dispatch_blob failed, error: {:?} - message_nonce: {:?}", - e, message.key.nonce - ); - XcmBlobMessageDispatchResult::NotDispatched(Some(e)) - }, - }; - MessageDispatchResult { unspent_weight: 
Weight::zero(), dispatch_level_result } - } -} - -/// A pair of sending chain location and message lane, used by this chain to send messages -/// over the bridge. -#[cfg_attr(feature = "std", derive(Debug, Eq, PartialEq))] -pub struct SenderAndLane { - /// Sending chain relative location. - pub location: Location, - /// Message lane, used by the sending chain. - pub lane: LaneId, -} - -impl SenderAndLane { - /// Create new object using provided location and lane. - pub fn new(location: Location, lane: LaneId) -> Self { - SenderAndLane { location, lane } - } -} - -/// [`XcmBlobHauler`] is responsible for sending messages to the bridge "point-to-point link" from -/// one side, where on the other it can be dispatched by [`XcmBlobMessageDispatch`]. -pub trait XcmBlobHauler { - /// Runtime that has messages pallet deployed. - type Runtime: MessagesConfig; - /// Instance of the messages pallet that is used to send messages. - type MessagesInstance: 'static; - - /// Actual XCM message sender (`HRMP` or `UMP`) to the source chain - /// location (`Self::SenderAndLane::get().location`). - type ToSourceChainSender: SendXcm; - /// An XCM message that is sent to the sending chain when the bridge queue becomes congested. - type CongestedMessage: Get>>; - /// An XCM message that is sent to the sending chain when the bridge queue becomes not - /// congested. - type UncongestedMessage: Get>>; - - /// Returns `true` if we want to handle congestion. - fn supports_congestion_detection() -> bool { - Self::CongestedMessage::get().is_some() || Self::UncongestedMessage::get().is_some() - } -} - -/// XCM bridge adapter which connects [`XcmBlobHauler`] with [`pallet_bridge_messages`] and -/// makes sure that XCM blob is sent to the outbound lane to be relayed. -/// -/// It needs to be used at the source bridge hub. 
-pub struct XcmBlobHaulerAdapter( - sp_std::marker::PhantomData<(XcmBlobHauler, Lanes)>, -); - -impl< - H: XcmBlobHauler, - Lanes: Get>, - > OnMessagesDelivered for XcmBlobHaulerAdapter -{ - fn on_messages_delivered(lane: LaneId, enqueued_messages: MessageNonce) { - if let Some(sender_and_lane) = - Lanes::get().iter().find(|link| link.0.lane == lane).map(|link| &link.0) - { - // notify XCM queue manager about updated lane state - LocalXcmQueueManager::::on_bridge_messages_delivered( - sender_and_lane, - enqueued_messages, - ); - } - } -} - -/// Manager of local XCM queues (and indirectly - underlying transport channels) that -/// controls the queue state. -/// -/// It needs to be used at the source bridge hub. -pub struct LocalXcmQueueManager(PhantomData); - -/// Maximal number of messages in the outbound bridge queue. Once we reach this limit, we -/// send a "congestion" XCM message to the sending chain. -const OUTBOUND_LANE_CONGESTED_THRESHOLD: MessageNonce = 8_192; - -/// After we have sent "congestion" XCM message to the sending chain, we wait until number -/// of messages in the outbound bridge queue drops to this count, before sending `uncongestion` -/// XCM message. -const OUTBOUND_LANE_UNCONGESTED_THRESHOLD: MessageNonce = 1_024; - -impl LocalXcmQueueManager { - /// Must be called whenever we push a message to the bridge lane. 
- pub fn on_bridge_message_enqueued( - sender_and_lane: &SenderAndLane, - enqueued_messages: MessageNonce, - ) { - // skip if we dont want to handle congestion - if !H::supports_congestion_detection() { - return - } - - // if we have already sent the congestion signal, we don't want to do anything - if Self::is_congested_signal_sent(sender_and_lane.lane) { - return - } - - // if the bridge queue is not congested, we don't want to do anything - let is_congested = enqueued_messages > OUTBOUND_LANE_CONGESTED_THRESHOLD; - if !is_congested { - return - } - - log::info!( - target: crate::LOG_TARGET_BRIDGE_DISPATCH, - "Sending 'congested' XCM message to {:?} to avoid overloading lane {:?}: there are\ - {} messages queued at the bridge queue", - sender_and_lane.location, - sender_and_lane.lane, - enqueued_messages, - ); - - if let Err(e) = Self::send_congested_signal(sender_and_lane) { - log::info!( - target: crate::LOG_TARGET_BRIDGE_DISPATCH, - "Failed to send the 'congested' XCM message to {:?}: {:?}", - sender_and_lane.location, - e, - ); - } - } - - /// Must be called whenever we receive a message delivery confirmation. - pub fn on_bridge_messages_delivered( - sender_and_lane: &SenderAndLane, - enqueued_messages: MessageNonce, - ) { - // skip if we don't want to handle congestion - if !H::supports_congestion_detection() { - return - } - - // if we have not sent the congestion signal before, we don't want to do anything - if !Self::is_congested_signal_sent(sender_and_lane.lane) { - return - } - - // if the bridge queue is still congested, we don't want to do anything - let is_congested = enqueued_messages > OUTBOUND_LANE_UNCONGESTED_THRESHOLD; - if is_congested { - return - } - - log::info!( - target: crate::LOG_TARGET_BRIDGE_DISPATCH, - "Sending 'uncongested' XCM message to {:?}. 
Lane {:?}: there are\ - {} messages queued at the bridge queue", - sender_and_lane.location, - sender_and_lane.lane, - enqueued_messages, - ); - - if let Err(e) = Self::send_uncongested_signal(sender_and_lane) { - log::info!( - target: crate::LOG_TARGET_BRIDGE_DISPATCH, - "Failed to send the 'uncongested' XCM message to {:?}: {:?}", - sender_and_lane.location, - e, - ); - } - } - - /// Returns true if we have sent "congested" signal to the `sending_chain_location`. - fn is_congested_signal_sent(lane: LaneId) -> bool { - OutboundLanesCongestedSignals::::get(lane) - } - - /// Send congested signal to the `sending_chain_location`. - fn send_congested_signal(sender_and_lane: &SenderAndLane) -> Result<(), SendError> { - if let Some(msg) = H::CongestedMessage::get() { - send_xcm::(sender_and_lane.location.clone(), msg)?; - OutboundLanesCongestedSignals::::insert( - sender_and_lane.lane, - true, - ); - } - Ok(()) - } - - /// Send `uncongested` signal to the `sending_chain_location`. - fn send_uncongested_signal(sender_and_lane: &SenderAndLane) -> Result<(), SendError> { - if let Some(msg) = H::UncongestedMessage::get() { - send_xcm::(sender_and_lane.location.clone(), msg)?; - OutboundLanesCongestedSignals::::remove( - sender_and_lane.lane, - ); - } - Ok(()) - } -} - -/// Adapter for the implementation of `GetVersion`, which attempts to find the minimal -/// configured XCM version between the destination `dest` and the bridge hub location provided as -/// `Get`. 
-pub struct XcmVersionOfDestAndRemoteBridge( - sp_std::marker::PhantomData<(Version, RemoteBridge)>, -); -impl> GetVersion - for XcmVersionOfDestAndRemoteBridge -{ - fn get_version_for(dest: &Location) -> Option { - let dest_version = Version::get_version_for(dest); - let bridge_hub_version = Version::get_version_for(&RemoteBridge::get()); - - match (dest_version, bridge_hub_version) { - (Some(dv), Some(bhv)) => Some(sp_std::cmp::min(dv, bhv)), - (Some(dv), None) => Some(dv), - (None, Some(bhv)) => Some(bhv), - (None, None) => None, - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::mock::*; - - use bp_messages::OutboundLaneData; - use frame_support::parameter_types; - use pallet_bridge_messages::OutboundLanes; - - parameter_types! { - pub TestSenderAndLane: SenderAndLane = SenderAndLane { - location: Location::new(1, [Parachain(1000)]), - lane: TEST_LANE_ID, - }; - pub TestLanes: sp_std::vec::Vec<(SenderAndLane, (NetworkId, InteriorLocation))> = sp_std::vec![ - (TestSenderAndLane::get(), (NetworkId::ByGenesis([0; 32]), InteriorLocation::Here)) - ]; - pub DummyXcmMessage: Xcm<()> = Xcm::new(); - } - - struct DummySendXcm; - - impl DummySendXcm { - fn messages_sent() -> u32 { - frame_support::storage::unhashed::get(b"DummySendXcm").unwrap_or(0) - } - } - - impl SendXcm for DummySendXcm { - type Ticket = (); - - fn validate( - _destination: &mut Option, - _message: &mut Option>, - ) -> SendResult { - Ok(((), Default::default())) - } - - fn deliver(_ticket: Self::Ticket) -> Result { - let messages_sent: u32 = Self::messages_sent(); - frame_support::storage::unhashed::put(b"DummySendXcm", &(messages_sent + 1)); - Ok(XcmHash::default()) - } - } - - struct TestBlobHauler; - - impl XcmBlobHauler for TestBlobHauler { - type Runtime = TestRuntime; - type MessagesInstance = (); - - type ToSourceChainSender = DummySendXcm; - type CongestedMessage = DummyXcmMessage; - type UncongestedMessage = DummyXcmMessage; - } - - type TestBlobHaulerAdapter = 
XcmBlobHaulerAdapter; - - fn fill_up_lane_to_congestion() -> MessageNonce { - let latest_generated_nonce = OUTBOUND_LANE_CONGESTED_THRESHOLD; - OutboundLanes::::insert( - TEST_LANE_ID, - OutboundLaneData { - oldest_unpruned_nonce: 0, - latest_received_nonce: 0, - latest_generated_nonce, - }, - ); - latest_generated_nonce - } - - #[test] - fn congested_signal_is_not_sent_twice() { - run_test(|| { - let enqueued = fill_up_lane_to_congestion(); - - // next sent message leads to congested signal - LocalXcmQueueManager::::on_bridge_message_enqueued( - &TestSenderAndLane::get(), - enqueued + 1, - ); - assert_eq!(DummySendXcm::messages_sent(), 1); - - // next sent message => we don't sent another congested signal - LocalXcmQueueManager::::on_bridge_message_enqueued( - &TestSenderAndLane::get(), - enqueued, - ); - assert_eq!(DummySendXcm::messages_sent(), 1); - }); - } - - #[test] - fn congested_signal_is_not_sent_when_outbound_lane_is_not_congested() { - run_test(|| { - LocalXcmQueueManager::::on_bridge_message_enqueued( - &TestSenderAndLane::get(), - 1, - ); - assert_eq!(DummySendXcm::messages_sent(), 0); - }); - } - - #[test] - fn congested_signal_is_sent_when_outbound_lane_is_congested() { - run_test(|| { - let enqueued = fill_up_lane_to_congestion(); - - // next sent message leads to congested signal - LocalXcmQueueManager::::on_bridge_message_enqueued( - &TestSenderAndLane::get(), - enqueued + 1, - ); - assert_eq!(DummySendXcm::messages_sent(), 1); - assert!(LocalXcmQueueManager::::is_congested_signal_sent(TEST_LANE_ID)); - }); - } - - #[test] - fn uncongested_signal_is_not_sent_when_messages_are_delivered_at_other_lane() { - run_test(|| { - LocalXcmQueueManager::::send_congested_signal(&TestSenderAndLane::get()).unwrap(); - assert_eq!(DummySendXcm::messages_sent(), 1); - - // when we receive a delivery report for other lane, we don't send an uncongested signal - TestBlobHaulerAdapter::on_messages_delivered(LaneId([42, 42, 42, 42]), 0); - 
assert_eq!(DummySendXcm::messages_sent(), 1); - }); - } - - #[test] - fn uncongested_signal_is_not_sent_when_we_havent_send_congested_signal_before() { - run_test(|| { - TestBlobHaulerAdapter::on_messages_delivered(TEST_LANE_ID, 0); - assert_eq!(DummySendXcm::messages_sent(), 0); - }); - } - - #[test] - fn uncongested_signal_is_not_sent_if_outbound_lane_is_still_congested() { - run_test(|| { - LocalXcmQueueManager::::send_congested_signal(&TestSenderAndLane::get()).unwrap(); - assert_eq!(DummySendXcm::messages_sent(), 1); - - TestBlobHaulerAdapter::on_messages_delivered( - TEST_LANE_ID, - OUTBOUND_LANE_UNCONGESTED_THRESHOLD + 1, - ); - assert_eq!(DummySendXcm::messages_sent(), 1); - }); - } - - #[test] - fn uncongested_signal_is_sent_if_outbound_lane_is_uncongested() { - run_test(|| { - LocalXcmQueueManager::::send_congested_signal(&TestSenderAndLane::get()).unwrap(); - assert_eq!(DummySendXcm::messages_sent(), 1); - - TestBlobHaulerAdapter::on_messages_delivered( - TEST_LANE_ID, - OUTBOUND_LANE_UNCONGESTED_THRESHOLD, - ); - assert_eq!(DummySendXcm::messages_sent(), 2); - }); - } -} diff --git a/bin/runtime-common/src/mock.rs b/bin/runtime-common/src/mock.rs deleted file mode 100644 index ad71cd0d4..000000000 --- a/bin/runtime-common/src/mock.rs +++ /dev/null @@ -1,427 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! A mock runtime for testing different stuff in the crate. - -#![cfg(test)] - -use crate::messages::{ - source::{ - FromThisChainMaximalOutboundPayloadSize, FromThisChainMessagePayload, - TargetHeaderChainAdapter, - }, - target::{FromBridgedChainMessagePayload, SourceHeaderChainAdapter}, - BridgedChainWithMessages, HashOf, MessageBridge, ThisChainWithMessages, -}; - -use bp_header_chain::{ChainWithGrandpa, HeaderChain}; -use bp_messages::{ - target_chain::{DispatchMessage, MessageDispatch}, - LaneId, MessageNonce, -}; -use bp_parachains::SingleParaStoredHeaderDataBuilder; -use bp_relayers::PayRewardFromAccount; -use bp_runtime::{ - messages::MessageDispatchResult, Chain, ChainId, Parachain, UnderlyingChainProvider, -}; -use codec::{Decode, Encode}; -use frame_support::{ - derive_impl, parameter_types, - weights::{ConstantMultiplier, IdentityFee, RuntimeDbWeight, Weight}, -}; -use pallet_transaction_payment::Multiplier; -use sp_runtime::{ - testing::H256, - traits::{BlakeTwo256, ConstU32, ConstU64, ConstU8}, - FixedPointNumber, Perquintill, -}; - -/// Account identifier at `ThisChain`. -pub type ThisChainAccountId = u64; -/// Balance at `ThisChain`. -pub type ThisChainBalance = u64; -/// Block number at `ThisChain`. -pub type ThisChainBlockNumber = u32; -/// Hash at `ThisChain`. -pub type ThisChainHash = H256; -/// Hasher at `ThisChain`. -pub type ThisChainHasher = BlakeTwo256; -/// Runtime call at `ThisChain`. -pub type ThisChainRuntimeCall = RuntimeCall; -/// Runtime call origin at `ThisChain`. -pub type ThisChainCallOrigin = RuntimeOrigin; -/// Header of `ThisChain`. -pub type ThisChainHeader = sp_runtime::generic::Header; -/// Block of `ThisChain`. -pub type ThisChainBlock = frame_system::mocking::MockBlockU32; - -/// Account identifier at the `BridgedChain`. -pub type BridgedChainAccountId = u128; -/// Balance at the `BridgedChain`. 
-pub type BridgedChainBalance = u128; -/// Block number at the `BridgedChain`. -pub type BridgedChainBlockNumber = u32; -/// Hash at the `BridgedChain`. -pub type BridgedChainHash = H256; -/// Hasher at the `BridgedChain`. -pub type BridgedChainHasher = BlakeTwo256; -/// Header of the `BridgedChain`. -pub type BridgedChainHeader = - sp_runtime::generic::Header; - -/// Rewards payment procedure. -pub type TestPaymentProcedure = PayRewardFromAccount; -/// Stake that we are using in tests. -pub type TestStake = ConstU64<5_000>; -/// Stake and slash mechanism to use in tests. -pub type TestStakeAndSlash = pallet_bridge_relayers::StakeAndSlashNamed< - ThisChainAccountId, - ThisChainBlockNumber, - Balances, - ReserveId, - TestStake, - ConstU32<8>, ->; - -/// Message lane used in tests. -pub const TEST_LANE_ID: LaneId = LaneId([0, 0, 0, 0]); -/// Bridged chain id used in tests. -pub const TEST_BRIDGED_CHAIN_ID: ChainId = *b"brdg"; -/// Maximal extrinsic weight at the `BridgedChain`. -pub const BRIDGED_CHAIN_MAX_EXTRINSIC_WEIGHT: usize = 2048; -/// Maximal extrinsic size at the `BridgedChain`. -pub const BRIDGED_CHAIN_MAX_EXTRINSIC_SIZE: u32 = 1024; - -frame_support::construct_runtime! { - pub enum TestRuntime - { - System: frame_system::{Pallet, Call, Config, Storage, Event}, - Utility: pallet_utility, - Balances: pallet_balances::{Pallet, Call, Storage, Config, Event}, - TransactionPayment: pallet_transaction_payment::{Pallet, Storage, Event}, - BridgeRelayers: pallet_bridge_relayers::{Pallet, Call, Storage, Event}, - BridgeGrandpa: pallet_bridge_grandpa::{Pallet, Call, Storage, Event}, - BridgeParachains: pallet_bridge_parachains::{Pallet, Call, Storage, Event}, - BridgeMessages: pallet_bridge_messages::{Pallet, Call, Storage, Event, Config}, - } -} - -crate::generate_bridge_reject_obsolete_headers_and_messages! { - ThisChainRuntimeCall, ThisChainAccountId, - BridgeGrandpa, BridgeParachains, BridgeMessages -} - -parameter_types! 
{ - pub const ActiveOutboundLanes: &'static [LaneId] = &[TEST_LANE_ID]; - pub const BridgedChainId: ChainId = TEST_BRIDGED_CHAIN_ID; - pub const BridgedParasPalletName: &'static str = "Paras"; - pub const ExistentialDeposit: ThisChainBalance = 500; - pub const DbWeight: RuntimeDbWeight = RuntimeDbWeight { read: 1, write: 2 }; - pub const TargetBlockFullness: Perquintill = Perquintill::from_percent(25); - pub const TransactionBaseFee: ThisChainBalance = 0; - pub const TransactionByteFee: ThisChainBalance = 1; - pub AdjustmentVariable: Multiplier = Multiplier::saturating_from_rational(3, 100_000); - pub MinimumMultiplier: Multiplier = Multiplier::saturating_from_rational(1, 1_000_000u128); - pub MaximumMultiplier: Multiplier = sp_runtime::traits::Bounded::max_value(); - pub const MaxUnrewardedRelayerEntriesAtInboundLane: MessageNonce = 16; - pub const MaxUnconfirmedMessagesAtInboundLane: MessageNonce = 1_000; - pub const ReserveId: [u8; 8] = *b"brdgrlrs"; -} - -#[derive_impl(frame_system::config_preludes::TestDefaultConfig)] -impl frame_system::Config for TestRuntime { - type Hash = ThisChainHash; - type Hashing = ThisChainHasher; - type AccountId = ThisChainAccountId; - type Block = ThisChainBlock; - type AccountData = pallet_balances::AccountData; - type BlockHashCount = ConstU32<250>; -} - -impl pallet_utility::Config for TestRuntime { - type RuntimeEvent = RuntimeEvent; - type RuntimeCall = RuntimeCall; - type PalletsOrigin = OriginCaller; - type WeightInfo = (); -} - -#[derive_impl(pallet_balances::config_preludes::TestDefaultConfig)] -impl pallet_balances::Config for TestRuntime { - type ReserveIdentifier = [u8; 8]; - type AccountStore = System; -} - -#[derive_impl(pallet_transaction_payment::config_preludes::TestDefaultConfig)] -impl pallet_transaction_payment::Config for TestRuntime { - type OnChargeTransaction = pallet_transaction_payment::FungibleAdapter; - type OperationalFeeMultiplier = ConstU8<5>; - type WeightToFee = IdentityFee; - type LengthToFee = 
ConstantMultiplier; - type FeeMultiplierUpdate = pallet_transaction_payment::TargetedFeeAdjustment< - TestRuntime, - TargetBlockFullness, - AdjustmentVariable, - MinimumMultiplier, - MaximumMultiplier, - >; - type RuntimeEvent = RuntimeEvent; -} - -impl pallet_bridge_grandpa::Config for TestRuntime { - type RuntimeEvent = RuntimeEvent; - type BridgedChain = BridgedUnderlyingChain; - type MaxFreeMandatoryHeadersPerBlock = ConstU32<4>; - type HeadersToKeep = ConstU32<8>; - type WeightInfo = pallet_bridge_grandpa::weights::BridgeWeight; -} - -impl pallet_bridge_parachains::Config for TestRuntime { - type RuntimeEvent = RuntimeEvent; - type BridgesGrandpaPalletInstance = (); - type ParasPalletName = BridgedParasPalletName; - type ParaStoredHeaderDataBuilder = - SingleParaStoredHeaderDataBuilder; - type HeadsToKeep = ConstU32<8>; - type MaxParaHeadDataSize = ConstU32<1024>; - type WeightInfo = pallet_bridge_parachains::weights::BridgeWeight; -} - -impl pallet_bridge_messages::Config for TestRuntime { - type RuntimeEvent = RuntimeEvent; - type WeightInfo = pallet_bridge_messages::weights::BridgeWeight; - type ActiveOutboundLanes = ActiveOutboundLanes; - type MaxUnrewardedRelayerEntriesAtInboundLane = MaxUnrewardedRelayerEntriesAtInboundLane; - type MaxUnconfirmedMessagesAtInboundLane = MaxUnconfirmedMessagesAtInboundLane; - - type MaximalOutboundPayloadSize = FromThisChainMaximalOutboundPayloadSize; - type OutboundPayload = FromThisChainMessagePayload; - - type InboundPayload = FromBridgedChainMessagePayload; - type InboundRelayer = BridgedChainAccountId; - type DeliveryPayments = (); - - type TargetHeaderChain = TargetHeaderChainAdapter; - type DeliveryConfirmationPayments = pallet_bridge_relayers::DeliveryConfirmationPaymentsAdapter< - TestRuntime, - (), - ConstU64<100_000>, - >; - type OnMessagesDelivered = (); - - type SourceHeaderChain = SourceHeaderChainAdapter; - type MessageDispatch = DummyMessageDispatch; - type BridgedChainId = BridgedChainId; -} - -impl 
pallet_bridge_relayers::Config for TestRuntime { - type RuntimeEvent = RuntimeEvent; - type Reward = ThisChainBalance; - type PaymentProcedure = TestPaymentProcedure; - type StakeAndSlash = TestStakeAndSlash; - type WeightInfo = (); -} - -/// Dummy message dispatcher. -pub struct DummyMessageDispatch; - -impl DummyMessageDispatch { - pub fn deactivate() { - frame_support::storage::unhashed::put(&b"inactive"[..], &false); - } -} - -impl MessageDispatch for DummyMessageDispatch { - type DispatchPayload = Vec; - type DispatchLevelResult = (); - - fn is_active() -> bool { - frame_support::storage::unhashed::take::(&b"inactive"[..]) != Some(false) - } - - fn dispatch_weight(_message: &mut DispatchMessage) -> Weight { - Weight::zero() - } - - fn dispatch( - _: DispatchMessage, - ) -> MessageDispatchResult { - MessageDispatchResult { unspent_weight: Weight::zero(), dispatch_level_result: () } - } -} - -/// Bridge that is deployed on `ThisChain` and allows sending/receiving messages to/from -/// `BridgedChain`. -#[derive(Debug, PartialEq, Eq)] -pub struct OnThisChainBridge; - -impl MessageBridge for OnThisChainBridge { - const BRIDGED_MESSAGES_PALLET_NAME: &'static str = ""; - - type ThisChain = ThisChain; - type BridgedChain = BridgedChain; - type BridgedHeaderChain = pallet_bridge_grandpa::GrandpaChainHeaders; -} - -/// Bridge that is deployed on `BridgedChain` and allows sending/receiving messages to/from -/// `ThisChain`. -#[derive(Debug, PartialEq, Eq)] -pub struct OnBridgedChainBridge; - -impl MessageBridge for OnBridgedChainBridge { - const BRIDGED_MESSAGES_PALLET_NAME: &'static str = ""; - - type ThisChain = BridgedChain; - type BridgedChain = ThisChain; - type BridgedHeaderChain = ThisHeaderChain; -} - -/// Dummy implementation of `HeaderChain` for `ThisChain` at the `BridgedChain`. 
-pub struct ThisHeaderChain; - -impl HeaderChain for ThisHeaderChain { - fn finalized_header_state_root(_hash: HashOf) -> Option> { - unreachable!() - } -} - -/// Call origin at `BridgedChain`. -#[derive(Clone, Debug)] -pub struct BridgedChainOrigin; - -impl From - for Result, BridgedChainOrigin> -{ - fn from( - _origin: BridgedChainOrigin, - ) -> Result, BridgedChainOrigin> { - unreachable!() - } -} - -/// Underlying chain of `ThisChain`. -pub struct ThisUnderlyingChain; - -impl Chain for ThisUnderlyingChain { - const ID: ChainId = *b"tuch"; - - type BlockNumber = ThisChainBlockNumber; - type Hash = ThisChainHash; - type Hasher = ThisChainHasher; - type Header = ThisChainHeader; - type AccountId = ThisChainAccountId; - type Balance = ThisChainBalance; - type Nonce = u32; - type Signature = sp_runtime::MultiSignature; - - fn max_extrinsic_size() -> u32 { - BRIDGED_CHAIN_MAX_EXTRINSIC_SIZE - } - - fn max_extrinsic_weight() -> Weight { - Weight::zero() - } -} - -/// The chain where we are in tests. -pub struct ThisChain; - -impl UnderlyingChainProvider for ThisChain { - type Chain = ThisUnderlyingChain; -} - -impl ThisChainWithMessages for ThisChain { - type RuntimeOrigin = ThisChainCallOrigin; -} - -impl BridgedChainWithMessages for ThisChain {} - -/// Underlying chain of `BridgedChain`. -pub struct BridgedUnderlyingChain; -/// Some parachain under `BridgedChain` consensus. -pub struct BridgedUnderlyingParachain; -/// Runtime call of the `BridgedChain`. 
-#[derive(Decode, Encode)] -pub struct BridgedChainCall; - -impl Chain for BridgedUnderlyingChain { - const ID: ChainId = *b"buch"; - - type BlockNumber = BridgedChainBlockNumber; - type Hash = BridgedChainHash; - type Hasher = BridgedChainHasher; - type Header = BridgedChainHeader; - type AccountId = BridgedChainAccountId; - type Balance = BridgedChainBalance; - type Nonce = u32; - type Signature = sp_runtime::MultiSignature; - - fn max_extrinsic_size() -> u32 { - BRIDGED_CHAIN_MAX_EXTRINSIC_SIZE - } - fn max_extrinsic_weight() -> Weight { - Weight::zero() - } -} - -impl ChainWithGrandpa for BridgedUnderlyingChain { - const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = ""; - const MAX_AUTHORITIES_COUNT: u32 = 16; - const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = 8; - const MAX_MANDATORY_HEADER_SIZE: u32 = 256; - const AVERAGE_HEADER_SIZE: u32 = 64; -} - -impl Chain for BridgedUnderlyingParachain { - const ID: ChainId = *b"bupc"; - - type BlockNumber = BridgedChainBlockNumber; - type Hash = BridgedChainHash; - type Hasher = BridgedChainHasher; - type Header = BridgedChainHeader; - type AccountId = BridgedChainAccountId; - type Balance = BridgedChainBalance; - type Nonce = u32; - type Signature = sp_runtime::MultiSignature; - - fn max_extrinsic_size() -> u32 { - BRIDGED_CHAIN_MAX_EXTRINSIC_SIZE - } - fn max_extrinsic_weight() -> Weight { - Weight::zero() - } -} - -impl Parachain for BridgedUnderlyingParachain { - const PARACHAIN_ID: u32 = 42; -} - -/// The other, bridged chain, used in tests. -pub struct BridgedChain; - -impl UnderlyingChainProvider for BridgedChain { - type Chain = BridgedUnderlyingChain; -} - -impl ThisChainWithMessages for BridgedChain { - type RuntimeOrigin = BridgedChainOrigin; -} - -impl BridgedChainWithMessages for BridgedChain {} - -/// Run test within test externalities. 
-pub fn run_test(test: impl FnOnce()) { - sp_io::TestExternalities::new(Default::default()).execute_with(test) -} diff --git a/bin/runtime-common/src/parachains_benchmarking.rs b/bin/runtime-common/src/parachains_benchmarking.rs deleted file mode 100644 index b3050b9ac..000000000 --- a/bin/runtime-common/src/parachains_benchmarking.rs +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Everything required to run benchmarks of parachains finality module. - -#![cfg(feature = "runtime-benchmarks")] - -use crate::{ - messages_benchmarking::insert_header_to_grandpa_pallet, - messages_generation::grow_trie_leaf_value, -}; - -use bp_parachains::parachain_head_storage_key_at_source; -use bp_polkadot_core::parachains::{ParaHash, ParaHead, ParaHeadsProof, ParaId}; -use bp_runtime::{record_all_trie_keys, StorageProofSize}; -use codec::Encode; -use frame_support::traits::Get; -use pallet_bridge_parachains::{RelayBlockHash, RelayBlockHasher, RelayBlockNumber}; -use sp_std::prelude::*; -use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, TrieMut}; - -/// Prepare proof of messages for the `receive_messages_proof` call. -/// -/// In addition to returning valid messages proof, environment is prepared to verify this message -/// proof. 
-pub fn prepare_parachain_heads_proof( - parachains: &[ParaId], - parachain_head_size: u32, - size: StorageProofSize, -) -> (RelayBlockNumber, RelayBlockHash, ParaHeadsProof, Vec<(ParaId, ParaHash)>) -where - R: pallet_bridge_parachains::Config - + pallet_bridge_grandpa::Config, - PI: 'static, - >::BridgedChain: - bp_runtime::Chain, -{ - let parachain_head = ParaHead(vec![0u8; parachain_head_size as usize]); - - // insert all heads to the trie - let mut parachain_heads = Vec::with_capacity(parachains.len()); - let mut storage_keys = Vec::with_capacity(parachains.len()); - let mut state_root = Default::default(); - let mut mdb = MemoryDB::default(); - { - let mut trie = - TrieDBMutBuilderV1::::new(&mut mdb, &mut state_root).build(); - - // insert parachain heads - for (i, parachain) in parachains.into_iter().enumerate() { - let storage_key = - parachain_head_storage_key_at_source(R::ParasPalletName::get(), *parachain); - let leaf_data = if i == 0 { - grow_trie_leaf_value(parachain_head.encode(), size) - } else { - parachain_head.encode() - }; - trie.insert(&storage_key.0, &leaf_data) - .map_err(|_| "TrieMut::insert has failed") - .expect("TrieMut::insert should not fail in benchmarks"); - storage_keys.push(storage_key); - parachain_heads.push((*parachain, parachain_head.hash())) - } - } - - // generate heads storage proof - let proof = record_all_trie_keys::, _>(&mdb, &state_root) - .map_err(|_| "record_all_trie_keys has failed") - .expect("record_all_trie_keys should not fail in benchmarks"); - - let (relay_block_number, relay_block_hash) = - insert_header_to_grandpa_pallet::(state_root); - - (relay_block_number, relay_block_hash, ParaHeadsProof { storage_proof: proof }, parachain_heads) -} diff --git a/bin/runtime-common/src/priority_calculator.rs b/bin/runtime-common/src/priority_calculator.rs deleted file mode 100644 index 5035553f5..000000000 --- a/bin/runtime-common/src/priority_calculator.rs +++ /dev/null @@ -1,202 +0,0 @@ -// Copyright (C) Parity 
Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Bridge transaction priority calculator. -//! -//! We want to prioritize message delivery transactions with more messages over -//! transactions with less messages. That's because we reject delivery transactions -//! if it contains already delivered message. And if some transaction delivers -//! single message with nonce `N`, then the transaction with nonces `N..=N+100` will -//! be rejected. This can lower bridge throughput down to one message per block. - -use bp_messages::MessageNonce; -use frame_support::traits::Get; -use sp_runtime::transaction_validity::TransactionPriority; - -// reexport everything from `integrity_tests` module -#[allow(unused_imports)] -pub use integrity_tests::*; - -/// Compute priority boost for message delivery transaction that delivers -/// given number of messages. 
-pub fn compute_priority_boost( - messages: MessageNonce, -) -> TransactionPriority -where - PriorityBoostPerMessage: Get, -{ - // we don't want any boost for transaction with single message => minus one - PriorityBoostPerMessage::get().saturating_mul(messages.saturating_sub(1)) -} - -#[cfg(not(feature = "integrity-test"))] -mod integrity_tests {} - -#[cfg(feature = "integrity-test")] -mod integrity_tests { - use super::compute_priority_boost; - - use bp_messages::MessageNonce; - use bp_runtime::PreComputedSize; - use frame_support::{ - dispatch::{DispatchClass, DispatchInfo, Pays, PostDispatchInfo}, - traits::Get, - }; - use pallet_bridge_messages::WeightInfoExt; - use pallet_transaction_payment::OnChargeTransaction; - use sp_runtime::{ - traits::{Dispatchable, UniqueSaturatedInto, Zero}, - transaction_validity::TransactionPriority, - FixedPointOperand, SaturatedConversion, Saturating, - }; - - type BalanceOf = - <::OnChargeTransaction as OnChargeTransaction< - T, - >>::Balance; - - /// Ensures that the value of `PriorityBoostPerMessage` matches the value of - /// `tip_boost_per_message`. - /// - /// We want two transactions, `TX1` with `N` messages and `TX2` with `N+1` messages, have almost - /// the same priority if we'll add `tip_boost_per_message` tip to the `TX1`. We want to be sure - /// that if we add plain `PriorityBoostPerMessage` priority to `TX1`, the priority will be close - /// to `TX2` as well. 
- pub fn ensure_priority_boost_is_sane( - tip_boost_per_message: BalanceOf, - ) where - Runtime: - pallet_transaction_payment::Config + pallet_bridge_messages::Config, - MessagesInstance: 'static, - PriorityBoostPerMessage: Get, - Runtime::RuntimeCall: Dispatchable, - BalanceOf: Send + Sync + FixedPointOperand, - { - let priority_boost_per_message = PriorityBoostPerMessage::get(); - let maximal_messages_in_delivery_transaction = - Runtime::MaxUnconfirmedMessagesAtInboundLane::get(); - for messages in 1..=maximal_messages_in_delivery_transaction { - let base_priority = estimate_message_delivery_transaction_priority::< - Runtime, - MessagesInstance, - >(messages, Zero::zero()); - let priority_boost = compute_priority_boost::(messages); - let priority_with_boost = base_priority + priority_boost; - - let tip = tip_boost_per_message.saturating_mul((messages - 1).unique_saturated_into()); - let priority_with_tip = - estimate_message_delivery_transaction_priority::(1, tip); - - const ERROR_MARGIN: TransactionPriority = 5; // 5% - if priority_with_boost.abs_diff(priority_with_tip).saturating_mul(100) / - priority_with_tip > - ERROR_MARGIN - { - panic!( - "The PriorityBoostPerMessage value ({}) must be fixed to: {}", - priority_boost_per_message, - compute_priority_boost_per_message::( - tip_boost_per_message - ), - ); - } - } - } - - /// Compute priority boost that we give to message delivery transaction for additional message. 
- #[cfg(feature = "integrity-test")] - fn compute_priority_boost_per_message( - tip_boost_per_message: BalanceOf, - ) -> TransactionPriority - where - Runtime: - pallet_transaction_payment::Config + pallet_bridge_messages::Config, - MessagesInstance: 'static, - Runtime::RuntimeCall: Dispatchable, - BalanceOf: Send + Sync + FixedPointOperand, - { - // estimate priority of transaction that delivers one message and has large tip - let maximal_messages_in_delivery_transaction = - Runtime::MaxUnconfirmedMessagesAtInboundLane::get(); - let small_with_tip_priority = - estimate_message_delivery_transaction_priority::( - 1, - tip_boost_per_message - .saturating_mul(maximal_messages_in_delivery_transaction.saturated_into()), - ); - // estimate priority of transaction that delivers maximal number of messages, but has no tip - let large_without_tip_priority = estimate_message_delivery_transaction_priority::< - Runtime, - MessagesInstance, - >(maximal_messages_in_delivery_transaction, Zero::zero()); - - small_with_tip_priority - .saturating_sub(large_without_tip_priority) - .saturating_div(maximal_messages_in_delivery_transaction - 1) - } - - /// Estimate message delivery transaction priority. 
- #[cfg(feature = "integrity-test")] - fn estimate_message_delivery_transaction_priority( - messages: MessageNonce, - tip: BalanceOf, - ) -> TransactionPriority - where - Runtime: - pallet_transaction_payment::Config + pallet_bridge_messages::Config, - MessagesInstance: 'static, - Runtime::RuntimeCall: Dispatchable, - BalanceOf: Send + Sync + FixedPointOperand, - { - // just an estimation of extra transaction bytes that are added to every transaction - // (including signature, signed extensions extra and etc + in our case it includes - // all call arguments except the proof itself) - let base_tx_size = 512; - // let's say we are relaying similar small messages and for every message we add more trie - // nodes to the proof (x0.5 because we expect some nodes to be reused) - let estimated_message_size = 512; - // let's say all our messages have the same dispatch weight - let estimated_message_dispatch_weight = - Runtime::WeightInfo::message_dispatch_weight(estimated_message_size); - // messages proof argument size is (for every message) messages size + some additional - // trie nodes. 
Some of them are reused by different messages, so let's take 2/3 of default - // "overhead" constant - let messages_proof_size = Runtime::WeightInfo::expected_extra_storage_proof_size() - .saturating_mul(2) - .saturating_div(3) - .saturating_add(estimated_message_size) - .saturating_mul(messages as _); - - // finally we are able to estimate transaction size and weight - let transaction_size = base_tx_size.saturating_add(messages_proof_size); - let transaction_weight = Runtime::WeightInfo::receive_messages_proof_weight( - &PreComputedSize(transaction_size as _), - messages as _, - estimated_message_dispatch_weight.saturating_mul(messages), - ); - - pallet_transaction_payment::ChargeTransactionPayment::::get_priority( - &DispatchInfo { - weight: transaction_weight, - class: DispatchClass::Normal, - pays_fee: Pays::Yes, - }, - transaction_size as _, - tip, - Zero::zero(), - ) - } -} diff --git a/bin/runtime-common/src/refund_relayer_extension.rs b/bin/runtime-common/src/refund_relayer_extension.rs deleted file mode 100644 index 455392a0a..000000000 --- a/bin/runtime-common/src/refund_relayer_extension.rs +++ /dev/null @@ -1,2585 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Signed extension that refunds relayer if he has delivered some new messages. -//! 
It also refunds transaction cost if the transaction is an `utility.batchAll()` -//! with calls that are: delivering new message and all necessary underlying headers -//! (parachain or relay chain). - -use crate::messages_call_ext::{ - CallHelper as MessagesCallHelper, CallInfo as MessagesCallInfo, MessagesCallSubType, -}; -use bp_messages::{LaneId, MessageNonce}; -use bp_relayers::{RewardsAccountOwner, RewardsAccountParams}; -use bp_runtime::{Chain, Parachain, ParachainIdOf, RangeInclusiveExt, StaticStrProvider}; -use codec::{Codec, Decode, Encode}; -use frame_support::{ - dispatch::{CallableCallFor, DispatchInfo, PostDispatchInfo}, - traits::IsSubType, - weights::Weight, - CloneNoBound, DefaultNoBound, EqNoBound, PartialEqNoBound, RuntimeDebugNoBound, -}; -use pallet_bridge_grandpa::{ - CallSubType as GrandpaCallSubType, Config as GrandpaConfig, SubmitFinalityProofHelper, - SubmitFinalityProofInfo, -}; -use pallet_bridge_messages::Config as MessagesConfig; -use pallet_bridge_parachains::{ - BoundedBridgeGrandpaConfig, CallSubType as ParachainsCallSubType, Config as ParachainsConfig, - RelayBlockNumber, SubmitParachainHeadsHelper, SubmitParachainHeadsInfo, -}; -use pallet_bridge_relayers::{ - Config as RelayersConfig, Pallet as RelayersPallet, WeightInfoExt as _, -}; -use pallet_transaction_payment::{Config as TransactionPaymentConfig, OnChargeTransaction}; -use pallet_utility::{Call as UtilityCall, Config as UtilityConfig, Pallet as UtilityPallet}; -use scale_info::TypeInfo; -use sp_runtime::{ - traits::{DispatchInfoOf, Dispatchable, Get, PostDispatchInfoOf, SignedExtension, Zero}, - transaction_validity::{ - TransactionPriority, TransactionValidity, TransactionValidityError, ValidTransactionBuilder, - }, - DispatchResult, FixedPointOperand, RuntimeDebug, -}; -use sp_std::{marker::PhantomData, vec, vec::Vec}; - -type AccountIdOf = ::AccountId; -// without this typedef rustfmt fails with internal err -type BalanceOf = - <::OnChargeTransaction as 
OnChargeTransaction>::Balance; -type CallOf = ::RuntimeCall; - -/// Trait identifying a bridged parachain. A relayer might be refunded for delivering messages -/// coming from this parachain. -pub trait RefundableParachainId { - /// The instance of the bridge parachains pallet. - type Instance; - /// The parachain Id. - type Id: Get; -} - -/// Default implementation of `RefundableParachainId`. -pub struct DefaultRefundableParachainId(PhantomData<(Instance, Id)>); - -impl RefundableParachainId for DefaultRefundableParachainId -where - Id: Get, -{ - type Instance = Instance; - type Id = Id; -} - -/// Implementation of `RefundableParachainId` for `trait Parachain`. -pub struct RefundableParachain(PhantomData<(Instance, Para)>); - -impl RefundableParachainId for RefundableParachain -where - Para: Parachain, -{ - type Instance = Instance; - type Id = ParachainIdOf; -} - -/// Trait identifying a bridged messages lane. A relayer might be refunded for delivering messages -/// coming from this lane. -pub trait RefundableMessagesLaneId { - /// The instance of the bridge messages pallet. - type Instance: 'static; - /// The messages lane id. - type Id: Get; -} - -/// Default implementation of `RefundableMessagesLaneId`. -pub struct RefundableMessagesLane(PhantomData<(Instance, Id)>); - -impl RefundableMessagesLaneId for RefundableMessagesLane -where - Instance: 'static, - Id: Get, -{ - type Instance = Instance; - type Id = Id; -} - -/// Refund calculator. -pub trait RefundCalculator { - /// The underlying integer type in which the refund is calculated. - type Balance; - - /// Compute refund for given transaction. - fn compute_refund( - info: &DispatchInfo, - post_info: &PostDispatchInfo, - len: usize, - tip: Self::Balance, - ) -> Self::Balance; -} - -/// `RefundCalculator` implementation which refunds the actual transaction fee. 
-pub struct ActualFeeRefund(PhantomData); - -impl RefundCalculator for ActualFeeRefund -where - R: TransactionPaymentConfig, - CallOf: Dispatchable, - BalanceOf: FixedPointOperand, -{ - type Balance = BalanceOf; - - fn compute_refund( - info: &DispatchInfo, - post_info: &PostDispatchInfo, - len: usize, - tip: BalanceOf, - ) -> BalanceOf { - pallet_transaction_payment::Pallet::::compute_actual_fee(len as _, info, post_info, tip) - } -} - -/// Data that is crafted in `pre_dispatch` method and used at `post_dispatch`. -#[cfg_attr(test, derive(Debug, PartialEq))] -pub struct PreDispatchData { - /// Transaction submitter (relayer) account. - relayer: AccountId, - /// Type of the call. - call_info: CallInfo, -} - -/// Type of the call that the extension recognizes. -#[derive(RuntimeDebugNoBound, PartialEq)] -pub enum CallInfo { - /// Relay chain finality + parachain finality + message delivery/confirmation calls. - AllFinalityAndMsgs( - SubmitFinalityProofInfo, - SubmitParachainHeadsInfo, - MessagesCallInfo, - ), - /// Relay chain finality + message delivery/confirmation calls. - RelayFinalityAndMsgs(SubmitFinalityProofInfo, MessagesCallInfo), - /// Parachain finality + message delivery/confirmation calls. - /// - /// This variant is used only when bridging with parachain. - ParachainFinalityAndMsgs(SubmitParachainHeadsInfo, MessagesCallInfo), - /// Standalone message delivery/confirmation call. - Msgs(MessagesCallInfo), -} - -impl CallInfo { - /// Returns true if call is a message delivery call (with optional finality calls). - fn is_receive_messages_proof_call(&self) -> bool { - match self.messages_call_info() { - MessagesCallInfo::ReceiveMessagesProof(_) => true, - MessagesCallInfo::ReceiveMessagesDeliveryProof(_) => false, - } - } - - /// Returns the pre-dispatch `finality_target` sent to the `SubmitFinalityProof` call. 
- fn submit_finality_proof_info(&self) -> Option> { - match *self { - Self::AllFinalityAndMsgs(info, _, _) => Some(info), - Self::RelayFinalityAndMsgs(info, _) => Some(info), - _ => None, - } - } - - /// Returns mutable reference to pre-dispatch `finality_target` sent to the - /// `SubmitFinalityProof` call. - #[cfg(test)] - fn submit_finality_proof_info_mut( - &mut self, - ) -> Option<&mut SubmitFinalityProofInfo> { - match *self { - Self::AllFinalityAndMsgs(ref mut info, _, _) => Some(info), - Self::RelayFinalityAndMsgs(ref mut info, _) => Some(info), - _ => None, - } - } - - /// Returns the pre-dispatch `SubmitParachainHeadsInfo`. - fn submit_parachain_heads_info(&self) -> Option<&SubmitParachainHeadsInfo> { - match self { - Self::AllFinalityAndMsgs(_, info, _) => Some(info), - Self::ParachainFinalityAndMsgs(info, _) => Some(info), - _ => None, - } - } - - /// Returns the pre-dispatch `ReceiveMessagesProofInfo`. - fn messages_call_info(&self) -> &MessagesCallInfo { - match self { - Self::AllFinalityAndMsgs(_, _, info) => info, - Self::RelayFinalityAndMsgs(_, info) => info, - Self::ParachainFinalityAndMsgs(_, info) => info, - Self::Msgs(info) => info, - } - } -} - -/// The actions on relayer account that need to be performed because of his actions. -#[derive(RuntimeDebug, PartialEq)] -pub enum RelayerAccountAction { - /// Do nothing with relayer account. - None, - /// Reward the relayer. - Reward(AccountId, RewardsAccountParams, Reward), - /// Slash the relayer. - Slash(AccountId, RewardsAccountParams), -} - -/// Everything common among our refund signed extensions. -pub trait RefundSignedExtension: - 'static + Clone + Codec + sp_std::fmt::Debug + Default + Eq + PartialEq + Send + Sync + TypeInfo -where - >::BridgedChain: - Chain, -{ - /// This chain runtime. - type Runtime: UtilityConfig> - + GrandpaConfig - + MessagesConfig<::Instance> - + RelayersConfig; - /// Grandpa pallet reference. - type GrandpaInstance: 'static; - /// Messages pallet and lane reference. 
- type Msgs: RefundableMessagesLaneId; - /// Refund amount calculator. - type Refund: RefundCalculator::Reward>; - /// Priority boost calculator. - type Priority: Get; - /// Signed extension unique identifier. - type Id: StaticStrProvider; - - /// Unpack batch runtime call. - fn expand_call(call: &CallOf) -> Vec<&CallOf>; - - /// Given runtime call, check if it has supported format. Additionally, check if any of - /// (optionally batched) calls are obsolete and we shall reject the transaction. - fn parse_and_check_for_obsolete_call( - call: &CallOf, - ) -> Result, TransactionValidityError>; - - /// Check if parsed call is already obsolete. - fn check_obsolete_parsed_call( - call: &CallOf, - ) -> Result<&CallOf, TransactionValidityError>; - - /// Called from post-dispatch and shall perform additional checks (apart from relay - /// chain finality and messages transaction finality) of given call result. - fn additional_call_result_check( - relayer: &AccountIdOf, - call_info: &CallInfo, - ) -> bool; - - /// Given post-dispatch information, analyze the outcome of relayer call and return - /// actions that need to be performed on relayer account. - fn analyze_call_result( - pre: Option>>>, - info: &DispatchInfo, - post_info: &PostDispatchInfo, - len: usize, - result: &DispatchResult, - ) -> RelayerAccountAction, ::Reward> - { - let mut extra_weight = Weight::zero(); - let mut extra_size = 0; - - // We don't refund anything for transactions that we don't support. 
- let (relayer, call_info) = match pre { - Some(Some(pre)) => (pre.relayer, pre.call_info), - _ => return RelayerAccountAction::None, - }; - - // now we know that the relayer either needs to be rewarded, or slashed - // => let's prepare the correspondent account that pays reward/receives slashed amount - let reward_account_params = - RewardsAccountParams::new( - ::Id::get(), - ::Instance, - >>::BridgedChainId::get(), - if call_info.is_receive_messages_proof_call() { - RewardsAccountOwner::ThisChain - } else { - RewardsAccountOwner::BridgedChain - }, - ); - - // prepare return value for the case if the call has failed or it has not caused - // expected side effects (e.g. not all messages have been accepted) - // - // we are not checking if relayer is registered here - it happens during the slash attempt - // - // there are couple of edge cases here: - // - // - when the relayer becomes registered during message dispatch: this is unlikely + relayer - // should be ready for slashing after registration; - // - // - when relayer is registered after `validate` is called and priority is not boosted: - // relayer should be ready for slashing after registration. - let may_slash_relayer = - Self::bundled_messages_for_priority_boost(Some(&call_info)).is_some(); - let slash_relayer_if_delivery_result = may_slash_relayer - .then(|| RelayerAccountAction::Slash(relayer.clone(), reward_account_params)) - .unwrap_or(RelayerAccountAction::None); - - // We don't refund anything if the transaction has failed. 
- if let Err(e) = result { - log::trace!( - target: "runtime::bridge", - "{} via {:?}: relayer {:?} has submitted invalid messages transaction: {:?}", - Self::Id::STR, - ::Id::get(), - relayer, - e, - ); - return slash_relayer_if_delivery_result - } - - // check if relay chain state has been updated - if let Some(finality_proof_info) = call_info.submit_finality_proof_info() { - if !SubmitFinalityProofHelper::::was_successful( - finality_proof_info.block_number, - ) { - // we only refund relayer if all calls have updated chain state - log::trace!( - target: "runtime::bridge", - "{} via {:?}: relayer {:?} has submitted invalid relay chain finality proof", - Self::Id::STR, - ::Id::get(), - relayer, - ); - return slash_relayer_if_delivery_result - } - - // there's a conflict between how bridge GRANDPA pallet works and a `utility.batchAll` - // transaction. If relay chain header is mandatory, the GRANDPA pallet returns - // `Pays::No`, because such transaction is mandatory for operating the bridge. But - // `utility.batchAll` transaction always requires payment. But in both cases we'll - // refund relayer - either explicitly here, or using `Pays::No` if he's choosing - // to submit dedicated transaction. - - // submitter has means to include extra weight/bytes in the `submit_finality_proof` - // call, so let's subtract extra weight/size to avoid refunding for this extra stuff - extra_weight = finality_proof_info.extra_weight; - extra_size = finality_proof_info.extra_size; - } - - // Check if the `ReceiveMessagesProof` call delivered at least some of the messages that - // it contained. If this happens, we consider the transaction "helpful" and refund it. 
- let msgs_call_info = call_info.messages_call_info(); - if !MessagesCallHelper::::Instance>::was_successful(msgs_call_info) { - log::trace!( - target: "runtime::bridge", - "{} via {:?}: relayer {:?} has submitted invalid messages call", - Self::Id::STR, - ::Id::get(), - relayer, - ); - return slash_relayer_if_delivery_result - } - - // do additional check - if !Self::additional_call_result_check(&relayer, &call_info) { - return slash_relayer_if_delivery_result - } - - // regarding the tip - refund that happens here (at this side of the bridge) isn't the whole - // relayer compensation. He'll receive some amount at the other side of the bridge. It shall - // (in theory) cover the tip there. Otherwise, if we'll be compensating tip here, some - // malicious relayer may use huge tips, effectively depleting account that pay rewards. The - // cost of this attack is nothing. Hence we use zero as tip here. - let tip = Zero::zero(); - - // decrease post-dispatch weight/size using extra weight/size that we know now - let post_info_len = len.saturating_sub(extra_size as usize); - let mut post_info_weight = - post_info.actual_weight.unwrap_or(info.weight).saturating_sub(extra_weight); - - // let's also replace the weight of slashing relayer with the weight of rewarding relayer - if call_info.is_receive_messages_proof_call() { - post_info_weight = post_info_weight.saturating_sub( - ::WeightInfo::extra_weight_of_successful_receive_messages_proof_call(), - ); - } - - // compute the relayer refund - let mut post_info = *post_info; - post_info.actual_weight = Some(post_info_weight); - let refund = Self::Refund::compute_refund(info, &post_info, post_info_len, tip); - - // we can finally reward relayer - RelayerAccountAction::Reward(relayer, reward_account_params, refund) - } - - /// Returns number of bundled messages `Some(_)`, if the given call info is a: - /// - /// - message delivery transaction; - /// - /// - with reasonable bundled messages that may be accepted by the messages 
pallet. - /// - /// This function is used to check whether the transaction priority should be - /// virtually boosted. The relayer registration (we only boost priority for registered - /// relayer transactions) must be checked outside. - fn bundled_messages_for_priority_boost(call_info: Option<&CallInfo>) -> Option { - // we only boost priority of message delivery transactions - let parsed_call = match call_info { - Some(parsed_call) if parsed_call.is_receive_messages_proof_call() => parsed_call, - _ => return None, - }; - - // compute total number of messages in transaction - let bundled_messages = parsed_call.messages_call_info().bundled_messages().saturating_len(); - - // a quick check to avoid invalid high-priority transactions - let max_unconfirmed_messages_in_confirmation_tx = ::Instance, - >>::MaxUnconfirmedMessagesAtInboundLane::get( - ); - if bundled_messages > max_unconfirmed_messages_in_confirmation_tx { - return None - } - - Some(bundled_messages) - } -} - -/// Adapter that allow implementing `sp_runtime::traits::SignedExtension` for any -/// `RefundSignedExtension`. 
-#[derive( - DefaultNoBound, - CloneNoBound, - Decode, - Encode, - EqNoBound, - PartialEqNoBound, - RuntimeDebugNoBound, - TypeInfo, -)] -pub struct RefundSignedExtensionAdapter(T) -where - >::BridgedChain: - Chain; - -impl SignedExtension for RefundSignedExtensionAdapter -where - >::BridgedChain: - Chain, - CallOf: Dispatchable - + IsSubType, T::Runtime>> - + GrandpaCallSubType - + MessagesCallSubType::Instance>, -{ - const IDENTIFIER: &'static str = T::Id::STR; - type AccountId = AccountIdOf; - type Call = CallOf; - type AdditionalSigned = (); - type Pre = Option>>; - - fn additional_signed(&self) -> Result<(), TransactionValidityError> { - Ok(()) - } - - fn validate( - &self, - who: &Self::AccountId, - call: &Self::Call, - _info: &DispatchInfoOf, - _len: usize, - ) -> TransactionValidity { - // this is the only relevant line of code for the `pre_dispatch` - // - // we're not calling `validate` from `pre_dispatch` directly because of performance - // reasons, so if you're adding some code that may fail here, please check if it needs - // to be added to the `pre_dispatch` as well - let parsed_call = T::parse_and_check_for_obsolete_call(call)?; - - // the following code just plays with transaction priority and never returns an error - - // we only boost priority of presumably correct message delivery transactions - let bundled_messages = match T::bundled_messages_for_priority_boost(parsed_call.as_ref()) { - Some(bundled_messages) => bundled_messages, - None => return Ok(Default::default()), - }; - - // we only boost priority if relayer has staked required balance - if !RelayersPallet::::is_registration_active(who) { - return Ok(Default::default()) - } - - // compute priority boost - let priority_boost = - crate::priority_calculator::compute_priority_boost::(bundled_messages); - let valid_transaction = ValidTransactionBuilder::default().priority(priority_boost); - - log::trace!( - target: "runtime::bridge", - "{} via {:?} has boosted priority of message delivery 
transaction \ - of relayer {:?}: {} messages -> {} priority", - Self::IDENTIFIER, - ::Id::get(), - who, - bundled_messages, - priority_boost, - ); - - valid_transaction.build() - } - - fn pre_dispatch( - self, - who: &Self::AccountId, - call: &Self::Call, - _info: &DispatchInfoOf, - _len: usize, - ) -> Result { - // this is a relevant piece of `validate` that we need here (in `pre_dispatch`) - let parsed_call = T::parse_and_check_for_obsolete_call(call)?; - - Ok(parsed_call.map(|call_info| { - log::trace!( - target: "runtime::bridge", - "{} via {:?} parsed bridge transaction in pre-dispatch: {:?}", - Self::IDENTIFIER, - ::Id::get(), - call_info, - ); - PreDispatchData { relayer: who.clone(), call_info } - })) - } - - fn post_dispatch( - pre: Option, - info: &DispatchInfoOf, - post_info: &PostDispatchInfoOf, - len: usize, - result: &DispatchResult, - ) -> Result<(), TransactionValidityError> { - let call_result = T::analyze_call_result(pre, info, post_info, len, result); - - match call_result { - RelayerAccountAction::None => (), - RelayerAccountAction::Reward(relayer, reward_account, reward) => { - RelayersPallet::::register_relayer_reward( - reward_account, - &relayer, - reward, - ); - - log::trace!( - target: "runtime::bridge", - "{} via {:?} has registered reward: {:?} for {:?}", - Self::IDENTIFIER, - ::Id::get(), - reward, - relayer, - ); - }, - RelayerAccountAction::Slash(relayer, slash_account) => - RelayersPallet::::slash_and_deregister(&relayer, slash_account), - } - - Ok(()) - } -} - -/// Signed extension that refunds a relayer for new messages coming from a parachain. -/// -/// Also refunds relayer for successful finality delivery if it comes in batch (`utility.batchAll`) -/// with message delivery transaction. Batch may deliver either both relay chain header and -/// parachain head, or just parachain head. Corresponding headers must be used in messages -/// proof verification. -/// -/// Extension does not refund transaction tip due to security reasons. 
-#[derive( - DefaultNoBound, - CloneNoBound, - Decode, - Encode, - EqNoBound, - PartialEqNoBound, - RuntimeDebugNoBound, - TypeInfo, -)] -#[scale_info(skip_type_params(Runtime, Para, Msgs, Refund, Priority, Id))] -pub struct RefundBridgedParachainMessages( - PhantomData<( - // runtime with `frame-utility`, `pallet-bridge-grandpa`, `pallet-bridge-parachains`, - // `pallet-bridge-messages` and `pallet-bridge-relayers` pallets deployed - Runtime, - // implementation of `RefundableParachainId` trait, which specifies the instance of - // the used `pallet-bridge-parachains` pallet and the bridged parachain id - Para, - // implementation of `RefundableMessagesLaneId` trait, which specifies the instance of - // the used `pallet-bridge-messages` pallet and the lane within this pallet - Msgs, - // implementation of the `RefundCalculator` trait, that is used to compute refund that - // we give to relayer for his transaction - Refund, - // getter for per-message `TransactionPriority` boost that we give to message - // delivery transactions - Priority, - // the runtime-unique identifier of this signed extension - Id, - )>, -); - -impl RefundSignedExtension - for RefundBridgedParachainMessages -where - Self: 'static + Send + Sync, - Runtime: UtilityConfig> - + BoundedBridgeGrandpaConfig - + ParachainsConfig - + MessagesConfig - + RelayersConfig, - Para: RefundableParachainId, - Msgs: RefundableMessagesLaneId, - Refund: RefundCalculator, - Priority: Get, - Id: StaticStrProvider, - CallOf: Dispatchable - + IsSubType, Runtime>> - + GrandpaCallSubType - + ParachainsCallSubType - + MessagesCallSubType, -{ - type Runtime = Runtime; - type GrandpaInstance = Runtime::BridgesGrandpaPalletInstance; - type Msgs = Msgs; - type Refund = Refund; - type Priority = Priority; - type Id = Id; - - fn expand_call(call: &CallOf) -> Vec<&CallOf> { - match call.is_sub_type() { - Some(UtilityCall::::batch_all { ref calls }) if calls.len() <= 3 => - calls.iter().collect(), - Some(_) => vec![], - None => 
vec![call], - } - } - - fn parse_and_check_for_obsolete_call( - call: &CallOf, - ) -> Result, TransactionValidityError> { - let calls = Self::expand_call(call); - let total_calls = calls.len(); - let mut calls = calls.into_iter().map(Self::check_obsolete_parsed_call).rev(); - - let msgs_call = calls.next().transpose()?.and_then(|c| c.call_info_for(Msgs::Id::get())); - let para_finality_call = calls - .next() - .transpose()? - .and_then(|c| c.submit_parachain_heads_info_for(Para::Id::get())); - let relay_finality_call = - calls.next().transpose()?.and_then(|c| c.submit_finality_proof_info()); - - Ok(match (total_calls, relay_finality_call, para_finality_call, msgs_call) { - (3, Some(relay_finality_call), Some(para_finality_call), Some(msgs_call)) => Some( - CallInfo::AllFinalityAndMsgs(relay_finality_call, para_finality_call, msgs_call), - ), - (2, None, Some(para_finality_call), Some(msgs_call)) => - Some(CallInfo::ParachainFinalityAndMsgs(para_finality_call, msgs_call)), - (1, None, None, Some(msgs_call)) => Some(CallInfo::Msgs(msgs_call)), - _ => None, - }) - } - - fn check_obsolete_parsed_call( - call: &CallOf, - ) -> Result<&CallOf, TransactionValidityError> { - call.check_obsolete_submit_finality_proof()?; - call.check_obsolete_submit_parachain_heads()?; - call.check_obsolete_call()?; - Ok(call) - } - - fn additional_call_result_check(relayer: &Runtime::AccountId, call_info: &CallInfo) -> bool { - // check if parachain state has been updated - if let Some(para_proof_info) = call_info.submit_parachain_heads_info() { - if !SubmitParachainHeadsHelper::::was_successful( - para_proof_info, - ) { - // we only refund relayer if all calls have updated chain state - log::trace!( - target: "runtime::bridge", - "{} from parachain {} via {:?}: relayer {:?} has submitted invalid parachain finality proof", - Id::STR, - Para::Id::get(), - Msgs::Id::get(), - relayer, - ); - return false - } - } - - true - } -} - -/// Signed extension that refunds a relayer for new messages 
coming from a standalone (GRANDPA) -/// chain. -/// -/// Also refunds relayer for successful finality delivery if it comes in batch (`utility.batchAll`) -/// with message delivery transaction. Batch may deliver either both relay chain header and -/// parachain head, or just parachain head. Corresponding headers must be used in messages -/// proof verification. -/// -/// Extension does not refund transaction tip due to security reasons. -#[derive( - DefaultNoBound, - CloneNoBound, - Decode, - Encode, - EqNoBound, - PartialEqNoBound, - RuntimeDebugNoBound, - TypeInfo, -)] -#[scale_info(skip_type_params(Runtime, GrandpaInstance, Msgs, Refund, Priority, Id))] -pub struct RefundBridgedGrandpaMessages( - PhantomData<( - // runtime with `frame-utility`, `pallet-bridge-grandpa`, - // `pallet-bridge-messages` and `pallet-bridge-relayers` pallets deployed - Runtime, - // bridge GRANDPA pallet instance, used to track bridged chain state - GrandpaInstance, - // implementation of `RefundableMessagesLaneId` trait, which specifies the instance of - // the used `pallet-bridge-messages` pallet and the lane within this pallet - Msgs, - // implementation of the `RefundCalculator` trait, that is used to compute refund that - // we give to relayer for his transaction - Refund, - // getter for per-message `TransactionPriority` boost that we give to message - // delivery transactions - Priority, - // the runtime-unique identifier of this signed extension - Id, - )>, -); - -impl RefundSignedExtension - for RefundBridgedGrandpaMessages -where - Self: 'static + Send + Sync, - Runtime: UtilityConfig> - + BoundedBridgeGrandpaConfig - + MessagesConfig - + RelayersConfig, - GrandpaInstance: 'static, - Msgs: RefundableMessagesLaneId, - Refund: RefundCalculator, - Priority: Get, - Id: StaticStrProvider, - CallOf: Dispatchable - + IsSubType, Runtime>> - + GrandpaCallSubType - + MessagesCallSubType, -{ - type Runtime = Runtime; - type GrandpaInstance = GrandpaInstance; - type Msgs = Msgs; - type 
Refund = Refund; - type Priority = Priority; - type Id = Id; - - fn expand_call(call: &CallOf) -> Vec<&CallOf> { - match call.is_sub_type() { - Some(UtilityCall::::batch_all { ref calls }) if calls.len() <= 2 => - calls.iter().collect(), - Some(_) => vec![], - None => vec![call], - } - } - - fn parse_and_check_for_obsolete_call( - call: &CallOf, - ) -> Result, TransactionValidityError> { - let calls = Self::expand_call(call); - let total_calls = calls.len(); - let mut calls = calls.into_iter().map(Self::check_obsolete_parsed_call).rev(); - - let msgs_call = calls.next().transpose()?.and_then(|c| c.call_info_for(Msgs::Id::get())); - let relay_finality_call = - calls.next().transpose()?.and_then(|c| c.submit_finality_proof_info()); - - Ok(match (total_calls, relay_finality_call, msgs_call) { - (2, Some(relay_finality_call), Some(msgs_call)) => - Some(CallInfo::RelayFinalityAndMsgs(relay_finality_call, msgs_call)), - (1, None, Some(msgs_call)) => Some(CallInfo::Msgs(msgs_call)), - _ => None, - }) - } - - fn check_obsolete_parsed_call( - call: &CallOf, - ) -> Result<&CallOf, TransactionValidityError> { - call.check_obsolete_submit_finality_proof()?; - call.check_obsolete_call()?; - Ok(call) - } - - fn additional_call_result_check(_relayer: &Runtime::AccountId, _call_info: &CallInfo) -> bool { - true - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{ - messages::{ - source::FromBridgedChainMessagesDeliveryProof, target::FromBridgedChainMessagesProof, - }, - messages_call_ext::{ - BaseMessagesProofInfo, ReceiveMessagesDeliveryProofInfo, ReceiveMessagesProofInfo, - UnrewardedRelayerOccupation, - }, - mock::*, - }; - use bp_messages::{ - DeliveredMessages, InboundLaneData, MessageNonce, MessagesOperatingMode, OutboundLaneData, - UnrewardedRelayer, UnrewardedRelayersState, - }; - use bp_parachains::{BestParaHeadHash, ParaInfo}; - use bp_polkadot_core::parachains::{ParaHeadsProof, ParaId}; - use bp_runtime::{BasicOperatingMode, HeaderId}; - use 
bp_test_utils::{make_default_justification, test_keyring, TEST_GRANDPA_SET_ID}; - use frame_support::{ - assert_storage_noop, parameter_types, - traits::{fungible::Mutate, ReservableCurrency}, - weights::Weight, - }; - use pallet_bridge_grandpa::{Call as GrandpaCall, Pallet as GrandpaPallet, StoredAuthoritySet}; - use pallet_bridge_messages::{Call as MessagesCall, Pallet as MessagesPallet}; - use pallet_bridge_parachains::{ - Call as ParachainsCall, Pallet as ParachainsPallet, RelayBlockHash, - }; - use sp_runtime::{ - traits::{ConstU64, Header as HeaderT}, - transaction_validity::{InvalidTransaction, ValidTransaction}, - DispatchError, - }; - - parameter_types! { - TestParachain: u32 = 1000; - pub TestLaneId: LaneId = TEST_LANE_ID; - pub MsgProofsRewardsAccount: RewardsAccountParams = RewardsAccountParams::new( - TEST_LANE_ID, - TEST_BRIDGED_CHAIN_ID, - RewardsAccountOwner::ThisChain, - ); - pub MsgDeliveryProofsRewardsAccount: RewardsAccountParams = RewardsAccountParams::new( - TEST_LANE_ID, - TEST_BRIDGED_CHAIN_ID, - RewardsAccountOwner::BridgedChain, - ); - } - - bp_runtime::generate_static_str_provider!(TestExtension); - - type TestGrandpaExtensionProvider = RefundBridgedGrandpaMessages< - TestRuntime, - (), - RefundableMessagesLane<(), TestLaneId>, - ActualFeeRefund, - ConstU64<1>, - StrTestExtension, - >; - type TestGrandpaExtension = RefundSignedExtensionAdapter; - type TestExtensionProvider = RefundBridgedParachainMessages< - TestRuntime, - DefaultRefundableParachainId<(), TestParachain>, - RefundableMessagesLane<(), TestLaneId>, - ActualFeeRefund, - ConstU64<1>, - StrTestExtension, - >; - type TestExtension = RefundSignedExtensionAdapter; - - fn initial_balance_of_relayer_account_at_this_chain() -> ThisChainBalance { - let test_stake: ThisChainBalance = TestStake::get(); - ExistentialDeposit::get().saturating_add(test_stake * 100) - } - - // in tests, the following accounts are equal (because of how `into_sub_account_truncating` - // works) - - fn 
delivery_rewards_account() -> ThisChainAccountId { - TestPaymentProcedure::rewards_account(MsgProofsRewardsAccount::get()) - } - - fn confirmation_rewards_account() -> ThisChainAccountId { - TestPaymentProcedure::rewards_account(MsgDeliveryProofsRewardsAccount::get()) - } - - fn relayer_account_at_this_chain() -> ThisChainAccountId { - 0 - } - - fn relayer_account_at_bridged_chain() -> BridgedChainAccountId { - 0 - } - - fn initialize_environment( - best_relay_header_number: RelayBlockNumber, - parachain_head_at_relay_header_number: RelayBlockNumber, - best_message: MessageNonce, - ) { - let authorities = test_keyring().into_iter().map(|(a, w)| (a.into(), w)).collect(); - let best_relay_header = HeaderId(best_relay_header_number, RelayBlockHash::default()); - pallet_bridge_grandpa::CurrentAuthoritySet::::put( - StoredAuthoritySet::try_new(authorities, TEST_GRANDPA_SET_ID).unwrap(), - ); - pallet_bridge_grandpa::BestFinalized::::put(best_relay_header); - - let para_id = ParaId(TestParachain::get()); - let para_info = ParaInfo { - best_head_hash: BestParaHeadHash { - at_relay_block_number: parachain_head_at_relay_header_number, - head_hash: [parachain_head_at_relay_header_number as u8; 32].into(), - }, - next_imported_hash_position: 0, - }; - pallet_bridge_parachains::ParasInfo::::insert(para_id, para_info); - - let lane_id = TestLaneId::get(); - let in_lane_data = - InboundLaneData { last_confirmed_nonce: best_message, ..Default::default() }; - pallet_bridge_messages::InboundLanes::::insert(lane_id, in_lane_data); - - let out_lane_data = - OutboundLaneData { latest_received_nonce: best_message, ..Default::default() }; - pallet_bridge_messages::OutboundLanes::::insert(lane_id, out_lane_data); - - Balances::mint_into(&delivery_rewards_account(), ExistentialDeposit::get()).unwrap(); - Balances::mint_into(&confirmation_rewards_account(), ExistentialDeposit::get()).unwrap(); - Balances::mint_into( - &relayer_account_at_this_chain(), - 
initial_balance_of_relayer_account_at_this_chain(), - ) - .unwrap(); - } - - fn submit_relay_header_call(relay_header_number: RelayBlockNumber) -> RuntimeCall { - let relay_header = BridgedChainHeader::new( - relay_header_number, - Default::default(), - Default::default(), - Default::default(), - Default::default(), - ); - let relay_justification = make_default_justification(&relay_header); - - RuntimeCall::BridgeGrandpa(GrandpaCall::submit_finality_proof { - finality_target: Box::new(relay_header), - justification: relay_justification, - }) - } - - fn submit_relay_header_call_ex(relay_header_number: RelayBlockNumber) -> RuntimeCall { - let relay_header = BridgedChainHeader::new( - relay_header_number, - Default::default(), - Default::default(), - Default::default(), - Default::default(), - ); - let relay_justification = make_default_justification(&relay_header); - - RuntimeCall::BridgeGrandpa(GrandpaCall::submit_finality_proof_ex { - finality_target: Box::new(relay_header), - justification: relay_justification, - current_set_id: TEST_GRANDPA_SET_ID, - }) - } - - fn submit_parachain_head_call( - parachain_head_at_relay_header_number: RelayBlockNumber, - ) -> RuntimeCall { - RuntimeCall::BridgeParachains(ParachainsCall::submit_parachain_heads { - at_relay_block: (parachain_head_at_relay_header_number, RelayBlockHash::default()), - parachains: vec![( - ParaId(TestParachain::get()), - [parachain_head_at_relay_header_number as u8; 32].into(), - )], - parachain_heads_proof: ParaHeadsProof { storage_proof: vec![] }, - }) - } - - fn message_delivery_call(best_message: MessageNonce) -> RuntimeCall { - RuntimeCall::BridgeMessages(MessagesCall::receive_messages_proof { - relayer_id_at_bridged_chain: relayer_account_at_bridged_chain(), - proof: FromBridgedChainMessagesProof { - bridged_header_hash: Default::default(), - storage_proof: vec![], - lane: TestLaneId::get(), - nonces_start: pallet_bridge_messages::InboundLanes::::get( - TEST_LANE_ID, - ) - .last_delivered_nonce() + 
- 1, - nonces_end: best_message, - }, - messages_count: 1, - dispatch_weight: Weight::zero(), - }) - } - - fn message_confirmation_call(best_message: MessageNonce) -> RuntimeCall { - RuntimeCall::BridgeMessages(MessagesCall::receive_messages_delivery_proof { - proof: FromBridgedChainMessagesDeliveryProof { - bridged_header_hash: Default::default(), - storage_proof: vec![], - lane: TestLaneId::get(), - }, - relayers_state: UnrewardedRelayersState { - last_delivered_nonce: best_message, - ..Default::default() - }, - }) - } - - fn parachain_finality_and_delivery_batch_call( - parachain_head_at_relay_header_number: RelayBlockNumber, - best_message: MessageNonce, - ) -> RuntimeCall { - RuntimeCall::Utility(UtilityCall::batch_all { - calls: vec![ - submit_parachain_head_call(parachain_head_at_relay_header_number), - message_delivery_call(best_message), - ], - }) - } - - fn parachain_finality_and_confirmation_batch_call( - parachain_head_at_relay_header_number: RelayBlockNumber, - best_message: MessageNonce, - ) -> RuntimeCall { - RuntimeCall::Utility(UtilityCall::batch_all { - calls: vec![ - submit_parachain_head_call(parachain_head_at_relay_header_number), - message_confirmation_call(best_message), - ], - }) - } - - fn relay_finality_and_delivery_batch_call( - relay_header_number: RelayBlockNumber, - best_message: MessageNonce, - ) -> RuntimeCall { - RuntimeCall::Utility(UtilityCall::batch_all { - calls: vec![ - submit_relay_header_call(relay_header_number), - message_delivery_call(best_message), - ], - }) - } - - fn relay_finality_and_delivery_batch_call_ex( - relay_header_number: RelayBlockNumber, - best_message: MessageNonce, - ) -> RuntimeCall { - RuntimeCall::Utility(UtilityCall::batch_all { - calls: vec![ - submit_relay_header_call_ex(relay_header_number), - message_delivery_call(best_message), - ], - }) - } - - fn relay_finality_and_confirmation_batch_call( - relay_header_number: RelayBlockNumber, - best_message: MessageNonce, - ) -> RuntimeCall { - 
RuntimeCall::Utility(UtilityCall::batch_all { - calls: vec![ - submit_relay_header_call(relay_header_number), - message_confirmation_call(best_message), - ], - }) - } - - fn relay_finality_and_confirmation_batch_call_ex( - relay_header_number: RelayBlockNumber, - best_message: MessageNonce, - ) -> RuntimeCall { - RuntimeCall::Utility(UtilityCall::batch_all { - calls: vec![ - submit_relay_header_call_ex(relay_header_number), - message_confirmation_call(best_message), - ], - }) - } - - fn all_finality_and_delivery_batch_call( - relay_header_number: RelayBlockNumber, - parachain_head_at_relay_header_number: RelayBlockNumber, - best_message: MessageNonce, - ) -> RuntimeCall { - RuntimeCall::Utility(UtilityCall::batch_all { - calls: vec![ - submit_relay_header_call(relay_header_number), - submit_parachain_head_call(parachain_head_at_relay_header_number), - message_delivery_call(best_message), - ], - }) - } - - fn all_finality_and_delivery_batch_call_ex( - relay_header_number: RelayBlockNumber, - parachain_head_at_relay_header_number: RelayBlockNumber, - best_message: MessageNonce, - ) -> RuntimeCall { - RuntimeCall::Utility(UtilityCall::batch_all { - calls: vec![ - submit_relay_header_call_ex(relay_header_number), - submit_parachain_head_call(parachain_head_at_relay_header_number), - message_delivery_call(best_message), - ], - }) - } - - fn all_finality_and_confirmation_batch_call( - relay_header_number: RelayBlockNumber, - parachain_head_at_relay_header_number: RelayBlockNumber, - best_message: MessageNonce, - ) -> RuntimeCall { - RuntimeCall::Utility(UtilityCall::batch_all { - calls: vec![ - submit_relay_header_call(relay_header_number), - submit_parachain_head_call(parachain_head_at_relay_header_number), - message_confirmation_call(best_message), - ], - }) - } - - fn all_finality_and_confirmation_batch_call_ex( - relay_header_number: RelayBlockNumber, - parachain_head_at_relay_header_number: RelayBlockNumber, - best_message: MessageNonce, - ) -> RuntimeCall { - 
RuntimeCall::Utility(UtilityCall::batch_all { - calls: vec![ - submit_relay_header_call_ex(relay_header_number), - submit_parachain_head_call(parachain_head_at_relay_header_number), - message_confirmation_call(best_message), - ], - }) - } - - fn all_finality_pre_dispatch_data() -> PreDispatchData { - PreDispatchData { - relayer: relayer_account_at_this_chain(), - call_info: CallInfo::AllFinalityAndMsgs( - SubmitFinalityProofInfo { - block_number: 200, - current_set_id: None, - extra_weight: Weight::zero(), - extra_size: 0, - }, - SubmitParachainHeadsInfo { - at_relay_block_number: 200, - para_id: ParaId(TestParachain::get()), - para_head_hash: [200u8; 32].into(), - }, - MessagesCallInfo::ReceiveMessagesProof(ReceiveMessagesProofInfo { - base: BaseMessagesProofInfo { - lane_id: TEST_LANE_ID, - bundled_range: 101..=200, - best_stored_nonce: 100, - }, - unrewarded_relayers: UnrewardedRelayerOccupation { - free_relayer_slots: MaxUnrewardedRelayerEntriesAtInboundLane::get(), - free_message_slots: MaxUnconfirmedMessagesAtInboundLane::get(), - }, - }), - ), - } - } - - fn all_finality_pre_dispatch_data_ex() -> PreDispatchData { - let mut data = all_finality_pre_dispatch_data(); - data.call_info.submit_finality_proof_info_mut().unwrap().current_set_id = - Some(TEST_GRANDPA_SET_ID); - data - } - - fn all_finality_confirmation_pre_dispatch_data() -> PreDispatchData { - PreDispatchData { - relayer: relayer_account_at_this_chain(), - call_info: CallInfo::AllFinalityAndMsgs( - SubmitFinalityProofInfo { - block_number: 200, - current_set_id: None, - extra_weight: Weight::zero(), - extra_size: 0, - }, - SubmitParachainHeadsInfo { - at_relay_block_number: 200, - para_id: ParaId(TestParachain::get()), - para_head_hash: [200u8; 32].into(), - }, - MessagesCallInfo::ReceiveMessagesDeliveryProof(ReceiveMessagesDeliveryProofInfo( - BaseMessagesProofInfo { - lane_id: TEST_LANE_ID, - bundled_range: 101..=200, - best_stored_nonce: 100, - }, - )), - ), - } - } - - fn 
all_finality_confirmation_pre_dispatch_data_ex() -> PreDispatchData { - let mut data = all_finality_confirmation_pre_dispatch_data(); - data.call_info.submit_finality_proof_info_mut().unwrap().current_set_id = - Some(TEST_GRANDPA_SET_ID); - data - } - - fn relay_finality_pre_dispatch_data() -> PreDispatchData { - PreDispatchData { - relayer: relayer_account_at_this_chain(), - call_info: CallInfo::RelayFinalityAndMsgs( - SubmitFinalityProofInfo { - block_number: 200, - current_set_id: None, - extra_weight: Weight::zero(), - extra_size: 0, - }, - MessagesCallInfo::ReceiveMessagesProof(ReceiveMessagesProofInfo { - base: BaseMessagesProofInfo { - lane_id: TEST_LANE_ID, - bundled_range: 101..=200, - best_stored_nonce: 100, - }, - unrewarded_relayers: UnrewardedRelayerOccupation { - free_relayer_slots: MaxUnrewardedRelayerEntriesAtInboundLane::get(), - free_message_slots: MaxUnconfirmedMessagesAtInboundLane::get(), - }, - }), - ), - } - } - - fn relay_finality_pre_dispatch_data_ex() -> PreDispatchData { - let mut data = relay_finality_pre_dispatch_data(); - data.call_info.submit_finality_proof_info_mut().unwrap().current_set_id = - Some(TEST_GRANDPA_SET_ID); - data - } - - fn relay_finality_confirmation_pre_dispatch_data() -> PreDispatchData { - PreDispatchData { - relayer: relayer_account_at_this_chain(), - call_info: CallInfo::RelayFinalityAndMsgs( - SubmitFinalityProofInfo { - block_number: 200, - current_set_id: None, - extra_weight: Weight::zero(), - extra_size: 0, - }, - MessagesCallInfo::ReceiveMessagesDeliveryProof(ReceiveMessagesDeliveryProofInfo( - BaseMessagesProofInfo { - lane_id: TEST_LANE_ID, - bundled_range: 101..=200, - best_stored_nonce: 100, - }, - )), - ), - } - } - - fn relay_finality_confirmation_pre_dispatch_data_ex() -> PreDispatchData { - let mut data = relay_finality_confirmation_pre_dispatch_data(); - data.call_info.submit_finality_proof_info_mut().unwrap().current_set_id = - Some(TEST_GRANDPA_SET_ID); - data - } - - fn 
parachain_finality_pre_dispatch_data() -> PreDispatchData { - PreDispatchData { - relayer: relayer_account_at_this_chain(), - call_info: CallInfo::ParachainFinalityAndMsgs( - SubmitParachainHeadsInfo { - at_relay_block_number: 200, - para_id: ParaId(TestParachain::get()), - para_head_hash: [200u8; 32].into(), - }, - MessagesCallInfo::ReceiveMessagesProof(ReceiveMessagesProofInfo { - base: BaseMessagesProofInfo { - lane_id: TEST_LANE_ID, - bundled_range: 101..=200, - best_stored_nonce: 100, - }, - unrewarded_relayers: UnrewardedRelayerOccupation { - free_relayer_slots: MaxUnrewardedRelayerEntriesAtInboundLane::get(), - free_message_slots: MaxUnconfirmedMessagesAtInboundLane::get(), - }, - }), - ), - } - } - - fn parachain_finality_confirmation_pre_dispatch_data() -> PreDispatchData { - PreDispatchData { - relayer: relayer_account_at_this_chain(), - call_info: CallInfo::ParachainFinalityAndMsgs( - SubmitParachainHeadsInfo { - at_relay_block_number: 200, - para_id: ParaId(TestParachain::get()), - para_head_hash: [200u8; 32].into(), - }, - MessagesCallInfo::ReceiveMessagesDeliveryProof(ReceiveMessagesDeliveryProofInfo( - BaseMessagesProofInfo { - lane_id: TEST_LANE_ID, - bundled_range: 101..=200, - best_stored_nonce: 100, - }, - )), - ), - } - } - - fn delivery_pre_dispatch_data() -> PreDispatchData { - PreDispatchData { - relayer: relayer_account_at_this_chain(), - call_info: CallInfo::Msgs(MessagesCallInfo::ReceiveMessagesProof( - ReceiveMessagesProofInfo { - base: BaseMessagesProofInfo { - lane_id: TEST_LANE_ID, - bundled_range: 101..=200, - best_stored_nonce: 100, - }, - unrewarded_relayers: UnrewardedRelayerOccupation { - free_relayer_slots: MaxUnrewardedRelayerEntriesAtInboundLane::get(), - free_message_slots: MaxUnconfirmedMessagesAtInboundLane::get(), - }, - }, - )), - } - } - - fn confirmation_pre_dispatch_data() -> PreDispatchData { - PreDispatchData { - relayer: relayer_account_at_this_chain(), - call_info: 
CallInfo::Msgs(MessagesCallInfo::ReceiveMessagesDeliveryProof( - ReceiveMessagesDeliveryProofInfo(BaseMessagesProofInfo { - lane_id: TEST_LANE_ID, - bundled_range: 101..=200, - best_stored_nonce: 100, - }), - )), - } - } - - fn set_bundled_range_end( - mut pre_dispatch_data: PreDispatchData, - end: MessageNonce, - ) -> PreDispatchData { - let msg_info = match pre_dispatch_data.call_info { - CallInfo::AllFinalityAndMsgs(_, _, ref mut info) => info, - CallInfo::RelayFinalityAndMsgs(_, ref mut info) => info, - CallInfo::ParachainFinalityAndMsgs(_, ref mut info) => info, - CallInfo::Msgs(ref mut info) => info, - }; - - if let MessagesCallInfo::ReceiveMessagesProof(ref mut msg_info) = msg_info { - msg_info.base.bundled_range = *msg_info.base.bundled_range.start()..=end - } - - pre_dispatch_data - } - - fn run_validate(call: RuntimeCall) -> TransactionValidity { - let extension: TestExtension = - RefundSignedExtensionAdapter(RefundBridgedParachainMessages(PhantomData)); - extension.validate(&relayer_account_at_this_chain(), &call, &DispatchInfo::default(), 0) - } - - fn run_grandpa_validate(call: RuntimeCall) -> TransactionValidity { - let extension: TestGrandpaExtension = - RefundSignedExtensionAdapter(RefundBridgedGrandpaMessages(PhantomData)); - extension.validate(&relayer_account_at_this_chain(), &call, &DispatchInfo::default(), 0) - } - - fn run_validate_ignore_priority(call: RuntimeCall) -> TransactionValidity { - run_validate(call).map(|mut tx| { - tx.priority = 0; - tx - }) - } - - fn run_pre_dispatch( - call: RuntimeCall, - ) -> Result>, TransactionValidityError> { - let extension: TestExtension = - RefundSignedExtensionAdapter(RefundBridgedParachainMessages(PhantomData)); - extension.pre_dispatch(&relayer_account_at_this_chain(), &call, &DispatchInfo::default(), 0) - } - - fn run_grandpa_pre_dispatch( - call: RuntimeCall, - ) -> Result>, TransactionValidityError> { - let extension: TestGrandpaExtension = - 
RefundSignedExtensionAdapter(RefundBridgedGrandpaMessages(PhantomData)); - extension.pre_dispatch(&relayer_account_at_this_chain(), &call, &DispatchInfo::default(), 0) - } - - fn dispatch_info() -> DispatchInfo { - DispatchInfo { - weight: Weight::from_parts( - frame_support::weights::constants::WEIGHT_REF_TIME_PER_SECOND, - 0, - ), - class: frame_support::dispatch::DispatchClass::Normal, - pays_fee: frame_support::dispatch::Pays::Yes, - } - } - - fn post_dispatch_info() -> PostDispatchInfo { - PostDispatchInfo { actual_weight: None, pays_fee: frame_support::dispatch::Pays::Yes } - } - - fn run_post_dispatch( - pre_dispatch_data: Option>, - dispatch_result: DispatchResult, - ) { - let post_dispatch_result = TestExtension::post_dispatch( - Some(pre_dispatch_data), - &dispatch_info(), - &post_dispatch_info(), - 1024, - &dispatch_result, - ); - assert_eq!(post_dispatch_result, Ok(())); - } - - fn expected_delivery_reward() -> ThisChainBalance { - let mut post_dispatch_info = post_dispatch_info(); - let extra_weight = ::WeightInfo::extra_weight_of_successful_receive_messages_proof_call(); - post_dispatch_info.actual_weight = - Some(dispatch_info().weight.saturating_sub(extra_weight)); - pallet_transaction_payment::Pallet::::compute_actual_fee( - 1024, - &dispatch_info(), - &post_dispatch_info, - Zero::zero(), - ) - } - - fn expected_confirmation_reward() -> ThisChainBalance { - pallet_transaction_payment::Pallet::::compute_actual_fee( - 1024, - &dispatch_info(), - &post_dispatch_info(), - Zero::zero(), - ) - } - - #[test] - fn validate_doesnt_boost_transaction_priority_if_relayer_is_not_registered() { - run_test(|| { - initialize_environment(100, 100, 100); - Balances::set_balance(&relayer_account_at_this_chain(), ExistentialDeposit::get()); - - // message delivery is failing - assert_eq!(run_validate(message_delivery_call(200)), Ok(Default::default()),); - assert_eq!( - run_validate(parachain_finality_and_delivery_batch_call(200, 200)), - Ok(Default::default()), - ); 
- assert_eq!( - run_validate(all_finality_and_delivery_batch_call(200, 200, 200)), - Ok(Default::default()), - ); - assert_eq!( - run_validate(all_finality_and_delivery_batch_call_ex(200, 200, 200)), - Ok(Default::default()), - ); - // message confirmation validation is passing - assert_eq!( - run_validate_ignore_priority(message_confirmation_call(200)), - Ok(Default::default()), - ); - assert_eq!( - run_validate_ignore_priority(parachain_finality_and_confirmation_batch_call( - 200, 200 - )), - Ok(Default::default()), - ); - assert_eq!( - run_validate_ignore_priority(all_finality_and_confirmation_batch_call( - 200, 200, 200 - )), - Ok(Default::default()), - ); - assert_eq!( - run_validate_ignore_priority(all_finality_and_confirmation_batch_call_ex( - 200, 200, 200 - )), - Ok(Default::default()), - ); - }); - } - - #[test] - fn validate_boosts_priority_of_message_delivery_transactions() { - run_test(|| { - initialize_environment(100, 100, 100); - - BridgeRelayers::register(RuntimeOrigin::signed(relayer_account_at_this_chain()), 1000) - .unwrap(); - - let priority_of_100_messages_delivery = - run_validate(message_delivery_call(200)).unwrap().priority; - let priority_of_200_messages_delivery = - run_validate(message_delivery_call(300)).unwrap().priority; - assert!( - priority_of_200_messages_delivery > priority_of_100_messages_delivery, - "Invalid priorities: {} for 200 messages vs {} for 100 messages", - priority_of_200_messages_delivery, - priority_of_100_messages_delivery, - ); - - let priority_of_100_messages_confirmation = - run_validate(message_confirmation_call(200)).unwrap().priority; - let priority_of_200_messages_confirmation = - run_validate(message_confirmation_call(300)).unwrap().priority; - assert_eq!( - priority_of_100_messages_confirmation, - priority_of_200_messages_confirmation - ); - }); - } - - #[test] - fn validate_does_not_boost_priority_of_message_delivery_transactions_with_too_many_messages() { - run_test(|| { - initialize_environment(100, 100, 
100); - - BridgeRelayers::register(RuntimeOrigin::signed(relayer_account_at_this_chain()), 1000) - .unwrap(); - - let priority_of_max_messages_delivery = run_validate(message_delivery_call( - 100 + MaxUnconfirmedMessagesAtInboundLane::get(), - )) - .unwrap() - .priority; - let priority_of_more_than_max_messages_delivery = run_validate(message_delivery_call( - 100 + MaxUnconfirmedMessagesAtInboundLane::get() + 1, - )) - .unwrap() - .priority; - - assert!( - priority_of_max_messages_delivery > priority_of_more_than_max_messages_delivery, - "Invalid priorities: {} for MAX messages vs {} for MAX+1 messages", - priority_of_max_messages_delivery, - priority_of_more_than_max_messages_delivery, - ); - }); - } - - #[test] - fn validate_allows_non_obsolete_transactions() { - run_test(|| { - initialize_environment(100, 100, 100); - - assert_eq!( - run_validate_ignore_priority(message_delivery_call(200)), - Ok(ValidTransaction::default()), - ); - assert_eq!( - run_validate_ignore_priority(message_confirmation_call(200)), - Ok(ValidTransaction::default()), - ); - - assert_eq!( - run_validate_ignore_priority(parachain_finality_and_delivery_batch_call(200, 200)), - Ok(ValidTransaction::default()), - ); - assert_eq!( - run_validate_ignore_priority(parachain_finality_and_confirmation_batch_call( - 200, 200 - )), - Ok(ValidTransaction::default()), - ); - - assert_eq!( - run_validate_ignore_priority(all_finality_and_delivery_batch_call(200, 200, 200)), - Ok(ValidTransaction::default()), - ); - assert_eq!( - run_validate_ignore_priority(all_finality_and_delivery_batch_call_ex( - 200, 200, 200 - )), - Ok(ValidTransaction::default()), - ); - assert_eq!( - run_validate_ignore_priority(all_finality_and_confirmation_batch_call( - 200, 200, 200 - )), - Ok(ValidTransaction::default()), - ); - assert_eq!( - run_validate_ignore_priority(all_finality_and_confirmation_batch_call_ex( - 200, 200, 200 - )), - Ok(ValidTransaction::default()), - ); - }); - } - - #[test] - fn 
ext_rejects_batch_with_obsolete_relay_chain_header() { - run_test(|| { - initialize_environment(100, 100, 100); - - assert_eq!( - run_pre_dispatch(all_finality_and_delivery_batch_call(100, 200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_delivery_batch_call_ex(100, 200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - - assert_eq!( - run_validate(all_finality_and_delivery_batch_call(100, 200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_validate(all_finality_and_delivery_batch_call_ex(100, 200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - }); - } - - #[test] - fn ext_rejects_batch_with_obsolete_parachain_head() { - run_test(|| { - initialize_environment(100, 100, 100); - - assert_eq!( - run_pre_dispatch(all_finality_and_delivery_batch_call(101, 100, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_delivery_batch_call_ex(101, 100, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_validate(all_finality_and_delivery_batch_call(101, 100, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_validate(all_finality_and_delivery_batch_call_ex(101, 100, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - - assert_eq!( - run_pre_dispatch(parachain_finality_and_delivery_batch_call(100, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_validate(parachain_finality_and_delivery_batch_call(100, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - }); - } - - #[test] - fn ext_rejects_batch_with_obsolete_messages() { - run_test(|| { - initialize_environment(100, 100, 100); - - 
assert_eq!( - run_pre_dispatch(all_finality_and_delivery_batch_call(200, 200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_delivery_batch_call_ex(200, 200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_confirmation_batch_call(200, 200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_confirmation_batch_call_ex(200, 200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - - assert_eq!( - run_validate(all_finality_and_delivery_batch_call(200, 200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_validate(all_finality_and_delivery_batch_call_ex(200, 200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_validate(all_finality_and_confirmation_batch_call(200, 200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_validate(all_finality_and_confirmation_batch_call_ex(200, 200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - - assert_eq!( - run_pre_dispatch(parachain_finality_and_delivery_batch_call(200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_pre_dispatch(parachain_finality_and_confirmation_batch_call(200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - - assert_eq!( - run_validate(parachain_finality_and_delivery_batch_call(200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_validate(parachain_finality_and_confirmation_batch_call(200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - }); - } - - #[test] - fn 
ext_rejects_batch_with_grandpa_finality_proof_when_grandpa_pallet_is_halted() { - run_test(|| { - initialize_environment(100, 100, 100); - - GrandpaPallet::::set_operating_mode( - RuntimeOrigin::root(), - BasicOperatingMode::Halted, - ) - .unwrap(); - - assert_eq!( - run_pre_dispatch(all_finality_and_delivery_batch_call(200, 200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_delivery_batch_call_ex(200, 200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_confirmation_batch_call(200, 200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_confirmation_batch_call_ex(200, 200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - }); - } - - #[test] - fn ext_rejects_batch_with_parachain_finality_proof_when_parachains_pallet_is_halted() { - run_test(|| { - initialize_environment(100, 100, 100); - - ParachainsPallet::::set_operating_mode( - RuntimeOrigin::root(), - BasicOperatingMode::Halted, - ) - .unwrap(); - - assert_eq!( - run_pre_dispatch(all_finality_and_delivery_batch_call(200, 200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_delivery_batch_call_ex(200, 200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_confirmation_batch_call(200, 200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_confirmation_batch_call_ex(200, 200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - - assert_eq!( - run_pre_dispatch(parachain_finality_and_delivery_batch_call(200, 200)), - 
Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - assert_eq!( - run_pre_dispatch(parachain_finality_and_confirmation_batch_call(200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - }); - } - - #[test] - fn ext_rejects_transaction_when_messages_pallet_is_halted() { - run_test(|| { - initialize_environment(100, 100, 100); - - MessagesPallet::::set_operating_mode( - RuntimeOrigin::root(), - MessagesOperatingMode::Basic(BasicOperatingMode::Halted), - ) - .unwrap(); - - assert_eq!( - run_pre_dispatch(all_finality_and_delivery_batch_call(200, 200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_delivery_batch_call_ex(200, 200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_confirmation_batch_call(200, 200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_confirmation_batch_call_ex(200, 200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - - assert_eq!( - run_pre_dispatch(parachain_finality_and_delivery_batch_call(200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - assert_eq!( - run_pre_dispatch(parachain_finality_and_confirmation_batch_call(200, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - - assert_eq!( - run_pre_dispatch(message_delivery_call(200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - assert_eq!( - run_pre_dispatch(message_confirmation_call(200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Call)), - ); - }); - } - - #[test] - fn pre_dispatch_parses_batch_with_relay_chain_and_parachain_headers() { - run_test(|| { - initialize_environment(100, 100, 100); - - assert_eq!( - 
run_pre_dispatch(all_finality_and_delivery_batch_call(200, 200, 200)), - Ok(Some(all_finality_pre_dispatch_data())), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_delivery_batch_call_ex(200, 200, 200)), - Ok(Some(all_finality_pre_dispatch_data_ex())), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_confirmation_batch_call(200, 200, 200)), - Ok(Some(all_finality_confirmation_pre_dispatch_data())), - ); - assert_eq!( - run_pre_dispatch(all_finality_and_confirmation_batch_call_ex(200, 200, 200)), - Ok(Some(all_finality_confirmation_pre_dispatch_data_ex())), - ); - }); - } - - #[test] - fn pre_dispatch_parses_batch_with_parachain_header() { - run_test(|| { - initialize_environment(100, 100, 100); - - assert_eq!( - run_pre_dispatch(parachain_finality_and_delivery_batch_call(200, 200)), - Ok(Some(parachain_finality_pre_dispatch_data())), - ); - assert_eq!( - run_pre_dispatch(parachain_finality_and_confirmation_batch_call(200, 200)), - Ok(Some(parachain_finality_confirmation_pre_dispatch_data())), - ); - }); - } - - #[test] - fn pre_dispatch_fails_to_parse_batch_with_multiple_parachain_headers() { - run_test(|| { - initialize_environment(100, 100, 100); - - let call = RuntimeCall::Utility(UtilityCall::batch_all { - calls: vec![ - RuntimeCall::BridgeParachains(ParachainsCall::submit_parachain_heads { - at_relay_block: (100, RelayBlockHash::default()), - parachains: vec![ - (ParaId(TestParachain::get()), [1u8; 32].into()), - (ParaId(TestParachain::get() + 1), [1u8; 32].into()), - ], - parachain_heads_proof: ParaHeadsProof { storage_proof: vec![] }, - }), - message_delivery_call(200), - ], - }); - - assert_eq!(run_pre_dispatch(call), Ok(None),); - }); - } - - #[test] - fn pre_dispatch_parses_message_transaction() { - run_test(|| { - initialize_environment(100, 100, 100); - - assert_eq!( - run_pre_dispatch(message_delivery_call(200)), - Ok(Some(delivery_pre_dispatch_data())), - ); - assert_eq!( - run_pre_dispatch(message_confirmation_call(200)), - 
Ok(Some(confirmation_pre_dispatch_data())), - ); - }); - } - - #[test] - fn post_dispatch_ignores_unknown_transaction() { - run_test(|| { - assert_storage_noop!(run_post_dispatch(None, Ok(()))); - }); - } - - #[test] - fn post_dispatch_ignores_failed_transaction() { - run_test(|| { - assert_storage_noop!(run_post_dispatch( - Some(all_finality_pre_dispatch_data()), - Err(DispatchError::BadOrigin) - )); - }); - } - - #[test] - fn post_dispatch_ignores_transaction_that_has_not_updated_relay_chain_state() { - run_test(|| { - initialize_environment(100, 200, 200); - - assert_storage_noop!(run_post_dispatch(Some(all_finality_pre_dispatch_data()), Ok(()))); - }); - } - - #[test] - fn post_dispatch_ignores_transaction_that_has_not_updated_parachain_state() { - run_test(|| { - initialize_environment(200, 100, 200); - - assert_storage_noop!(run_post_dispatch(Some(all_finality_pre_dispatch_data()), Ok(()))); - assert_storage_noop!(run_post_dispatch( - Some(parachain_finality_pre_dispatch_data()), - Ok(()) - )); - }); - } - - #[test] - fn post_dispatch_ignores_transaction_that_has_not_delivered_any_messages() { - run_test(|| { - initialize_environment(200, 200, 100); - - assert_storage_noop!(run_post_dispatch(Some(all_finality_pre_dispatch_data()), Ok(()))); - assert_storage_noop!(run_post_dispatch( - Some(parachain_finality_pre_dispatch_data()), - Ok(()) - )); - assert_storage_noop!(run_post_dispatch(Some(delivery_pre_dispatch_data()), Ok(()))); - - assert_storage_noop!(run_post_dispatch( - Some(all_finality_confirmation_pre_dispatch_data()), - Ok(()) - )); - assert_storage_noop!(run_post_dispatch( - Some(parachain_finality_confirmation_pre_dispatch_data()), - Ok(()) - )); - assert_storage_noop!(run_post_dispatch(Some(confirmation_pre_dispatch_data()), Ok(()))); - }); - } - - #[test] - fn post_dispatch_ignores_transaction_that_has_not_delivered_all_messages() { - run_test(|| { - initialize_environment(200, 200, 150); - - 
assert_storage_noop!(run_post_dispatch(Some(all_finality_pre_dispatch_data()), Ok(()))); - assert_storage_noop!(run_post_dispatch( - Some(parachain_finality_pre_dispatch_data()), - Ok(()) - )); - assert_storage_noop!(run_post_dispatch(Some(delivery_pre_dispatch_data()), Ok(()))); - - assert_storage_noop!(run_post_dispatch( - Some(all_finality_confirmation_pre_dispatch_data()), - Ok(()) - )); - assert_storage_noop!(run_post_dispatch( - Some(parachain_finality_confirmation_pre_dispatch_data()), - Ok(()) - )); - assert_storage_noop!(run_post_dispatch(Some(confirmation_pre_dispatch_data()), Ok(()))); - }); - } - - #[test] - fn post_dispatch_refunds_relayer_in_all_finality_batch_with_extra_weight() { - run_test(|| { - initialize_environment(200, 200, 200); - - let mut dispatch_info = dispatch_info(); - dispatch_info.weight = Weight::from_parts( - frame_support::weights::constants::WEIGHT_REF_TIME_PER_SECOND * 2, - 0, - ); - - // without any size/weight refund: we expect regular reward - let pre_dispatch_data = all_finality_pre_dispatch_data(); - let regular_reward = expected_delivery_reward(); - run_post_dispatch(Some(pre_dispatch_data), Ok(())); - assert_eq!( - RelayersPallet::::relayer_reward( - relayer_account_at_this_chain(), - MsgProofsRewardsAccount::get() - ), - Some(regular_reward), - ); - - // now repeat the same with size+weight refund: we expect smaller reward - let mut pre_dispatch_data = all_finality_pre_dispatch_data(); - match pre_dispatch_data.call_info { - CallInfo::AllFinalityAndMsgs(ref mut info, ..) 
=> { - info.extra_weight.set_ref_time( - frame_support::weights::constants::WEIGHT_REF_TIME_PER_SECOND, - ); - info.extra_size = 32; - }, - _ => unreachable!(), - } - run_post_dispatch(Some(pre_dispatch_data), Ok(())); - let reward_after_two_calls = RelayersPallet::::relayer_reward( - relayer_account_at_this_chain(), - MsgProofsRewardsAccount::get(), - ) - .unwrap(); - assert!( - reward_after_two_calls < 2 * regular_reward, - "{} must be < 2 * {}", - reward_after_two_calls, - 2 * regular_reward, - ); - }); - } - - #[test] - fn post_dispatch_refunds_relayer_in_all_finality_batch() { - run_test(|| { - initialize_environment(200, 200, 200); - - run_post_dispatch(Some(all_finality_pre_dispatch_data()), Ok(())); - assert_eq!( - RelayersPallet::::relayer_reward( - relayer_account_at_this_chain(), - MsgProofsRewardsAccount::get() - ), - Some(expected_delivery_reward()), - ); - - run_post_dispatch(Some(all_finality_confirmation_pre_dispatch_data()), Ok(())); - assert_eq!( - RelayersPallet::::relayer_reward( - relayer_account_at_this_chain(), - MsgDeliveryProofsRewardsAccount::get() - ), - Some(expected_confirmation_reward()), - ); - }); - } - - #[test] - fn post_dispatch_refunds_relayer_in_parachain_finality_batch() { - run_test(|| { - initialize_environment(200, 200, 200); - - run_post_dispatch(Some(parachain_finality_pre_dispatch_data()), Ok(())); - assert_eq!( - RelayersPallet::::relayer_reward( - relayer_account_at_this_chain(), - MsgProofsRewardsAccount::get() - ), - Some(expected_delivery_reward()), - ); - - run_post_dispatch(Some(parachain_finality_confirmation_pre_dispatch_data()), Ok(())); - assert_eq!( - RelayersPallet::::relayer_reward( - relayer_account_at_this_chain(), - MsgDeliveryProofsRewardsAccount::get() - ), - Some(expected_confirmation_reward()), - ); - }); - } - - #[test] - fn post_dispatch_refunds_relayer_in_message_transaction() { - run_test(|| { - initialize_environment(200, 200, 200); - - run_post_dispatch(Some(delivery_pre_dispatch_data()), 
Ok(())); - assert_eq!( - RelayersPallet::::relayer_reward( - relayer_account_at_this_chain(), - MsgProofsRewardsAccount::get() - ), - Some(expected_delivery_reward()), - ); - - run_post_dispatch(Some(confirmation_pre_dispatch_data()), Ok(())); - assert_eq!( - RelayersPallet::::relayer_reward( - relayer_account_at_this_chain(), - MsgDeliveryProofsRewardsAccount::get() - ), - Some(expected_confirmation_reward()), - ); - }); - } - - #[test] - fn post_dispatch_slashing_relayer_stake() { - run_test(|| { - initialize_environment(200, 200, 100); - - let delivery_rewards_account_balance = - Balances::free_balance(delivery_rewards_account()); - - let test_stake: ThisChainBalance = TestStake::get(); - Balances::set_balance( - &relayer_account_at_this_chain(), - ExistentialDeposit::get() + test_stake * 10, - ); - - // slashing works for message delivery calls - BridgeRelayers::register(RuntimeOrigin::signed(relayer_account_at_this_chain()), 1000) - .unwrap(); - assert_eq!(Balances::reserved_balance(relayer_account_at_this_chain()), test_stake); - run_post_dispatch(Some(delivery_pre_dispatch_data()), Ok(())); - assert_eq!(Balances::reserved_balance(relayer_account_at_this_chain()), 0); - assert_eq!( - delivery_rewards_account_balance + test_stake, - Balances::free_balance(delivery_rewards_account()) - ); - - BridgeRelayers::register(RuntimeOrigin::signed(relayer_account_at_this_chain()), 1000) - .unwrap(); - assert_eq!(Balances::reserved_balance(relayer_account_at_this_chain()), test_stake); - run_post_dispatch(Some(parachain_finality_pre_dispatch_data()), Ok(())); - assert_eq!(Balances::reserved_balance(relayer_account_at_this_chain()), 0); - assert_eq!( - delivery_rewards_account_balance + test_stake * 2, - Balances::free_balance(delivery_rewards_account()) - ); - - BridgeRelayers::register(RuntimeOrigin::signed(relayer_account_at_this_chain()), 1000) - .unwrap(); - assert_eq!(Balances::reserved_balance(relayer_account_at_this_chain()), test_stake); - 
run_post_dispatch(Some(all_finality_pre_dispatch_data()), Ok(())); - assert_eq!(Balances::reserved_balance(relayer_account_at_this_chain()), 0); - assert_eq!( - delivery_rewards_account_balance + test_stake * 3, - Balances::free_balance(delivery_rewards_account()) - ); - - // reserve doesn't work for message confirmation calls - let confirmation_rewards_account_balance = - Balances::free_balance(confirmation_rewards_account()); - - Balances::reserve(&relayer_account_at_this_chain(), test_stake).unwrap(); - assert_eq!(Balances::reserved_balance(relayer_account_at_this_chain()), test_stake); - - assert_eq!( - confirmation_rewards_account_balance, - Balances::free_balance(confirmation_rewards_account()) - ); - run_post_dispatch(Some(confirmation_pre_dispatch_data()), Ok(())); - assert_eq!(Balances::reserved_balance(relayer_account_at_this_chain()), test_stake); - - run_post_dispatch(Some(parachain_finality_confirmation_pre_dispatch_data()), Ok(())); - assert_eq!(Balances::reserved_balance(relayer_account_at_this_chain()), test_stake); - - run_post_dispatch(Some(all_finality_confirmation_pre_dispatch_data()), Ok(())); - assert_eq!(Balances::reserved_balance(relayer_account_at_this_chain()), test_stake); - - // check that unreserve has happened, not slashing - assert_eq!( - delivery_rewards_account_balance + test_stake * 3, - Balances::free_balance(delivery_rewards_account()) - ); - assert_eq!( - confirmation_rewards_account_balance, - Balances::free_balance(confirmation_rewards_account()) - ); - }); - } - - fn run_analyze_call_result( - pre_dispatch_data: PreDispatchData, - dispatch_result: DispatchResult, - ) -> RelayerAccountAction { - TestExtensionProvider::analyze_call_result( - Some(Some(pre_dispatch_data)), - &dispatch_info(), - &post_dispatch_info(), - 1024, - &dispatch_result, - ) - } - - #[test] - fn analyze_call_result_shall_not_slash_for_transactions_with_too_many_messages() { - run_test(|| { - initialize_environment(100, 100, 100); - - // the 
`analyze_call_result` should return slash if number of bundled messages is - // within reasonable limits - assert_eq!( - run_analyze_call_result(all_finality_pre_dispatch_data(), Ok(())), - RelayerAccountAction::Slash( - relayer_account_at_this_chain(), - MsgProofsRewardsAccount::get() - ), - ); - assert_eq!( - run_analyze_call_result(parachain_finality_pre_dispatch_data(), Ok(())), - RelayerAccountAction::Slash( - relayer_account_at_this_chain(), - MsgProofsRewardsAccount::get() - ), - ); - assert_eq!( - run_analyze_call_result(delivery_pre_dispatch_data(), Ok(())), - RelayerAccountAction::Slash( - relayer_account_at_this_chain(), - MsgProofsRewardsAccount::get() - ), - ); - - // the `analyze_call_result` should not return slash if number of bundled messages is - // larger than the - assert_eq!( - run_analyze_call_result( - set_bundled_range_end(all_finality_pre_dispatch_data(), 1_000_000), - Ok(()) - ), - RelayerAccountAction::None, - ); - assert_eq!( - run_analyze_call_result( - set_bundled_range_end(parachain_finality_pre_dispatch_data(), 1_000_000), - Ok(()) - ), - RelayerAccountAction::None, - ); - assert_eq!( - run_analyze_call_result( - set_bundled_range_end(delivery_pre_dispatch_data(), 1_000_000), - Ok(()) - ), - RelayerAccountAction::None, - ); - }); - } - - #[test] - fn grandpa_ext_only_parses_valid_batches() { - run_test(|| { - initialize_environment(100, 100, 100); - - // relay + parachain + message delivery calls batch is ignored - assert_eq!( - TestGrandpaExtensionProvider::parse_and_check_for_obsolete_call( - &all_finality_and_delivery_batch_call(200, 200, 200) - ), - Ok(None), - ); - assert_eq!( - TestGrandpaExtensionProvider::parse_and_check_for_obsolete_call( - &all_finality_and_delivery_batch_call_ex(200, 200, 200) - ), - Ok(None), - ); - - // relay + parachain + message confirmation calls batch is ignored - assert_eq!( - TestGrandpaExtensionProvider::parse_and_check_for_obsolete_call( - &all_finality_and_confirmation_batch_call(200, 200, 200) 
- ), - Ok(None), - ); - assert_eq!( - TestGrandpaExtensionProvider::parse_and_check_for_obsolete_call( - &all_finality_and_confirmation_batch_call_ex(200, 200, 200) - ), - Ok(None), - ); - - // parachain + message delivery call batch is ignored - assert_eq!( - TestGrandpaExtensionProvider::parse_and_check_for_obsolete_call( - ¶chain_finality_and_delivery_batch_call(200, 200) - ), - Ok(None), - ); - - // parachain + message confirmation call batch is ignored - assert_eq!( - TestGrandpaExtensionProvider::parse_and_check_for_obsolete_call( - ¶chain_finality_and_confirmation_batch_call(200, 200) - ), - Ok(None), - ); - - // relay + message delivery call batch is accepted - assert_eq!( - TestGrandpaExtensionProvider::parse_and_check_for_obsolete_call( - &relay_finality_and_delivery_batch_call(200, 200) - ), - Ok(Some(relay_finality_pre_dispatch_data().call_info)), - ); - assert_eq!( - TestGrandpaExtensionProvider::parse_and_check_for_obsolete_call( - &relay_finality_and_delivery_batch_call_ex(200, 200) - ), - Ok(Some(relay_finality_pre_dispatch_data_ex().call_info)), - ); - - // relay + message confirmation call batch is accepted - assert_eq!( - TestGrandpaExtensionProvider::parse_and_check_for_obsolete_call( - &relay_finality_and_confirmation_batch_call(200, 200) - ), - Ok(Some(relay_finality_confirmation_pre_dispatch_data().call_info)), - ); - assert_eq!( - TestGrandpaExtensionProvider::parse_and_check_for_obsolete_call( - &relay_finality_and_confirmation_batch_call_ex(200, 200) - ), - Ok(Some(relay_finality_confirmation_pre_dispatch_data_ex().call_info)), - ); - - // message delivery call batch is accepted - assert_eq!( - TestGrandpaExtensionProvider::parse_and_check_for_obsolete_call( - &message_delivery_call(200) - ), - Ok(Some(delivery_pre_dispatch_data().call_info)), - ); - - // message confirmation call batch is accepted - assert_eq!( - TestGrandpaExtensionProvider::parse_and_check_for_obsolete_call( - &message_confirmation_call(200) - ), - 
Ok(Some(confirmation_pre_dispatch_data().call_info)), - ); - }); - } - - #[test] - fn grandpa_ext_rejects_batch_with_obsolete_relay_chain_header() { - run_test(|| { - initialize_environment(100, 100, 100); - - assert_eq!( - run_grandpa_pre_dispatch(relay_finality_and_delivery_batch_call(100, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_grandpa_pre_dispatch(relay_finality_and_delivery_batch_call_ex(100, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - - assert_eq!( - run_grandpa_validate(relay_finality_and_delivery_batch_call(100, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_grandpa_validate(relay_finality_and_delivery_batch_call_ex(100, 200)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - }); - } - - #[test] - fn grandpa_ext_rejects_calls_with_obsolete_messages() { - run_test(|| { - initialize_environment(100, 100, 100); - - assert_eq!( - run_grandpa_pre_dispatch(relay_finality_and_delivery_batch_call(200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_grandpa_pre_dispatch(relay_finality_and_delivery_batch_call_ex(200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_grandpa_pre_dispatch(relay_finality_and_confirmation_batch_call(200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_grandpa_pre_dispatch(relay_finality_and_confirmation_batch_call_ex(200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - - assert_eq!( - run_grandpa_validate(relay_finality_and_delivery_batch_call(200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_grandpa_validate(relay_finality_and_delivery_batch_call_ex(200, 100)), - 
Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_grandpa_validate(relay_finality_and_confirmation_batch_call(200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_grandpa_validate(relay_finality_and_confirmation_batch_call_ex(200, 100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - - assert_eq!( - run_grandpa_pre_dispatch(message_delivery_call(100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_grandpa_pre_dispatch(message_confirmation_call(100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - - assert_eq!( - run_grandpa_validate(message_delivery_call(100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - assert_eq!( - run_grandpa_validate(message_confirmation_call(100)), - Err(TransactionValidityError::Invalid(InvalidTransaction::Stale)), - ); - }); - } - - #[test] - fn grandpa_ext_accepts_calls_with_new_messages() { - run_test(|| { - initialize_environment(100, 100, 100); - - assert_eq!( - run_grandpa_pre_dispatch(relay_finality_and_delivery_batch_call(200, 200)), - Ok(Some(relay_finality_pre_dispatch_data()),) - ); - assert_eq!( - run_grandpa_pre_dispatch(relay_finality_and_delivery_batch_call_ex(200, 200)), - Ok(Some(relay_finality_pre_dispatch_data_ex()),) - ); - assert_eq!( - run_grandpa_pre_dispatch(relay_finality_and_confirmation_batch_call(200, 200)), - Ok(Some(relay_finality_confirmation_pre_dispatch_data())), - ); - assert_eq!( - run_grandpa_pre_dispatch(relay_finality_and_confirmation_batch_call_ex(200, 200)), - Ok(Some(relay_finality_confirmation_pre_dispatch_data_ex())), - ); - - assert_eq!( - run_grandpa_validate(relay_finality_and_delivery_batch_call(200, 200)), - Ok(Default::default()), - ); - assert_eq!( - run_grandpa_validate(relay_finality_and_delivery_batch_call_ex(200, 200)), - Ok(Default::default()), - ); - 
assert_eq!( - run_grandpa_validate(relay_finality_and_confirmation_batch_call(200, 200)), - Ok(Default::default()), - ); - assert_eq!( - run_grandpa_validate(relay_finality_and_confirmation_batch_call_ex(200, 200)), - Ok(Default::default()), - ); - - assert_eq!( - run_grandpa_pre_dispatch(message_delivery_call(200)), - Ok(Some(delivery_pre_dispatch_data())), - ); - assert_eq!( - run_grandpa_pre_dispatch(message_confirmation_call(200)), - Ok(Some(confirmation_pre_dispatch_data())), - ); - - assert_eq!(run_grandpa_validate(message_delivery_call(200)), Ok(Default::default()),); - assert_eq!( - run_grandpa_validate(message_confirmation_call(200)), - Ok(Default::default()), - ); - }); - } - - #[test] - fn does_not_panic_on_boosting_priority_of_empty_message_delivery_transaction() { - run_test(|| { - let best_delivered_message = MaxUnconfirmedMessagesAtInboundLane::get(); - initialize_environment(100, 100, best_delivered_message); - - // register relayer so it gets priority boost - BridgeRelayers::register(RuntimeOrigin::signed(relayer_account_at_this_chain()), 1000) - .unwrap(); - - // allow empty message delivery transactions - let lane_id = TestLaneId::get(); - let in_lane_data = InboundLaneData { - last_confirmed_nonce: 0, - relayers: vec![UnrewardedRelayer { - relayer: relayer_account_at_bridged_chain(), - messages: DeliveredMessages { begin: 1, end: best_delivered_message }, - }] - .into(), - }; - pallet_bridge_messages::InboundLanes::::insert(lane_id, in_lane_data); - - // now check that the priority of empty tx is the same as priority of 1-message tx - let priority_of_zero_messages_delivery = - run_validate(message_delivery_call(best_delivered_message)).unwrap().priority; - let priority_of_one_messages_delivery = - run_validate(message_delivery_call(best_delivered_message + 1)) - .unwrap() - .priority; - - assert_eq!(priority_of_zero_messages_delivery, priority_of_one_messages_delivery); - }); - } -} diff --git a/chains/chain-asset-hub-rococo/Cargo.toml 
b/chains/chain-asset-hub-rococo/Cargo.toml deleted file mode 100644 index 660f0f4db..000000000 --- a/chains/chain-asset-hub-rococo/Cargo.toml +++ /dev/null @@ -1,30 +0,0 @@ -[package] -name = "bp-asset-hub-rococo" -description = "Primitives of AssetHubRococo parachain runtime." -version = "0.4.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } -scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } - -# Substrate Dependencies -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -# Bridge Dependencies -bp-xcm-bridge-hub-router = { path = "../../primitives/xcm-bridge-hub-router", default-features = false } - -[features] -default = ["std"] -std = [ - "bp-xcm-bridge-hub-router/std", - "codec/std", - "frame-support/std", - "scale-info/std", -] diff --git a/chains/chain-asset-hub-rococo/src/lib.rs b/chains/chain-asset-hub-rococo/src/lib.rs deleted file mode 100644 index de2e9ae85..000000000 --- a/chains/chain-asset-hub-rococo/src/lib.rs +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Module with configuration which reflects AssetHubRococo runtime setup. - -#![cfg_attr(not(feature = "std"), no_std)] - -use codec::{Decode, Encode}; -use scale_info::TypeInfo; - -pub use bp_xcm_bridge_hub_router::XcmBridgeHubRouterCall; - -/// `AssetHubRococo` Runtime `Call` enum. -/// -/// The enum represents a subset of possible `Call`s we can send to `AssetHubRococo` chain. -/// Ideally this code would be auto-generated from metadata, because we want to -/// avoid depending directly on the ENTIRE runtime just to get the encoding of `Dispatchable`s. -/// -/// All entries here (like pretty much in the entire file) must be kept in sync with -/// `AssetHubRococo` `construct_runtime`, so that we maintain SCALE-compatibility. -#[allow(clippy::large_enum_variant)] -#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, TypeInfo)] -pub enum Call { - /// `ToWestendXcmRouter` bridge pallet. - #[codec(index = 45)] - ToWestendXcmRouter(XcmBridgeHubRouterCall), -} - -frame_support::parameter_types! { - /// Some sane weight to execute `xcm::Transact(pallet-xcm-bridge-hub-router::Call::report_bridge_status)`. - pub const XcmBridgeHubRouterTransactCallMaxWeight: frame_support::weights::Weight = frame_support::weights::Weight::from_parts(200_000_000, 6144); -} - -/// Identifier of AssetHubRococo in the Rococo relay chain. -pub const ASSET_HUB_ROCOCO_PARACHAIN_ID: u32 = 1000; diff --git a/chains/chain-asset-hub-westend/Cargo.toml b/chains/chain-asset-hub-westend/Cargo.toml deleted file mode 100644 index 4022258ac..000000000 --- a/chains/chain-asset-hub-westend/Cargo.toml +++ /dev/null @@ -1,30 +0,0 @@ -[package] -name = "bp-asset-hub-westend" -description = "Primitives of AssetHubWestend parachain runtime." 
-version = "0.3.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } -scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } - -# Substrate Dependencies -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -# Bridge Dependencies -bp-xcm-bridge-hub-router = { path = "../../primitives/xcm-bridge-hub-router", default-features = false } - -[features] -default = ["std"] -std = [ - "bp-xcm-bridge-hub-router/std", - "codec/std", - "frame-support/std", - "scale-info/std", -] diff --git a/chains/chain-asset-hub-westend/src/lib.rs b/chains/chain-asset-hub-westend/src/lib.rs deleted file mode 100644 index 9de1c8809..000000000 --- a/chains/chain-asset-hub-westend/src/lib.rs +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Module with configuration which reflects AssetHubWestend runtime setup. 
- -#![cfg_attr(not(feature = "std"), no_std)] - -use codec::{Decode, Encode}; -use scale_info::TypeInfo; - -pub use bp_xcm_bridge_hub_router::XcmBridgeHubRouterCall; - -/// `AssetHubWestend` Runtime `Call` enum. -/// -/// The enum represents a subset of possible `Call`s we can send to `AssetHubWestend` chain. -/// Ideally this code would be auto-generated from metadata, because we want to -/// avoid depending directly on the ENTIRE runtime just to get the encoding of `Dispatchable`s. -/// -/// All entries here (like pretty much in the entire file) must be kept in sync with -/// `AssetHubWestend` `construct_runtime`, so that we maintain SCALE-compatibility. -#[allow(clippy::large_enum_variant)] -#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, TypeInfo)] -pub enum Call { - /// `ToRococoXcmRouter` bridge pallet. - #[codec(index = 34)] - ToRococoXcmRouter(XcmBridgeHubRouterCall), -} - -frame_support::parameter_types! { - /// Some sane weight to execute `xcm::Transact(pallet-xcm-bridge-hub-router::Call::report_bridge_status)`. - pub const XcmBridgeHubRouterTransactCallMaxWeight: frame_support::weights::Weight = frame_support::weights::Weight::from_parts(200_000_000, 6144); -} - -/// Identifier of AssetHubWestend in the Westend relay chain. -pub const ASSET_HUB_WESTEND_PARACHAIN_ID: u32 = 1000; diff --git a/chains/chain-bridge-hub-cumulus/Cargo.toml b/chains/chain-bridge-hub-cumulus/Cargo.toml deleted file mode 100644 index b87b5fefd..000000000 --- a/chains/chain-bridge-hub-cumulus/Cargo.toml +++ /dev/null @@ -1,41 +0,0 @@ -[package] -name = "bp-bridge-hub-cumulus" -description = "Primitives for BridgeHub parachain runtimes." 
-version = "0.7.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -# Bridge Dependencies - -bp-polkadot-core = { path = "../../primitives/polkadot-core", default-features = false } -bp-messages = { path = "../../primitives/messages", default-features = false } -bp-runtime = { path = "../../primitives/runtime", default-features = false } - -# Substrate Based Dependencies - -frame-system = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -# Polkadot Dependencies -polkadot-primitives = { git = "https://github.com/paritytech/polkadot-sdk", default-features = false , branch = "master" } - -[features] -default = ["std"] -std = [ - "bp-messages/std", - "bp-polkadot-core/std", - "bp-runtime/std", - "frame-support/std", - "frame-system/std", - "polkadot-primitives/std", - "sp-api/std", - "sp-std/std", -] diff --git a/chains/chain-bridge-hub-cumulus/src/lib.rs b/chains/chain-bridge-hub-cumulus/src/lib.rs deleted file mode 100644 index c49aa4b85..000000000 --- a/chains/chain-bridge-hub-cumulus/src/lib.rs +++ /dev/null @@ -1,170 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives of all Cumulus-based bridge hubs. - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -pub use bp_polkadot_core::{ - AccountId, AccountInfoStorageMapKeyProvider, AccountPublic, Balance, BlockNumber, Hash, Hasher, - Hashing, Header, Nonce, Perbill, Signature, SignedBlock, UncheckedExtrinsic, - EXTRA_STORAGE_PROOF_SIZE, TX_EXTRA_BYTES, -}; - -use bp_messages::*; -use bp_polkadot_core::SuffixedCommonSignedExtension; -use bp_runtime::extensions::{ - BridgeRejectObsoleteHeadersAndMessages, RefundBridgedParachainMessagesSchema, -}; -use frame_support::{ - dispatch::DispatchClass, - parameter_types, - sp_runtime::{MultiAddress, MultiSigner}, - weights::constants, -}; -use frame_system::limits; -use sp_std::time::Duration; - -/// Average block interval in Cumulus-based parachains. -/// -/// Corresponds to the `MILLISECS_PER_BLOCK` from `parachains_common` crate. -pub const AVERAGE_BLOCK_INTERVAL: Duration = Duration::from_secs(12); - -/// All cumulus bridge hubs allow normal extrinsics to fill block up to 75 percent. -/// -/// This is a copy-paste from the cumulus repo's `parachains-common` crate. -pub const NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(75); - -/// All cumulus bridge hubs chains allow for 0.5 seconds of compute with a 6-second average block -/// time. -/// -/// This is a copy-paste from the cumulus repo's `parachains-common` crate. 
-const MAXIMUM_BLOCK_WEIGHT: Weight = Weight::from_parts(constants::WEIGHT_REF_TIME_PER_SECOND, 0) - .saturating_div(2) - .set_proof_size(polkadot_primitives::MAX_POV_SIZE as u64); - -/// We allow for 2 seconds of compute with a 6 second average block. -const MAXIMUM_BLOCK_WEIGHT_FOR_ASYNC_BACKING: Weight = Weight::from_parts( - constants::WEIGHT_REF_TIME_PER_SECOND.saturating_mul(2), - polkadot_primitives::MAX_POV_SIZE as u64, -); - -/// All cumulus bridge hubs assume that about 5 percent of the block weight is consumed by -/// `on_initialize` handlers. This is used to limit the maximal weight of a single extrinsic. -/// -/// This is a copy-paste from the cumulus repo's `parachains-common` crate. -pub const AVERAGE_ON_INITIALIZE_RATIO: Perbill = Perbill::from_percent(5); - -parameter_types! { - /// Size limit of the Cumulus-based bridge hub blocks. - pub BlockLength: limits::BlockLength = limits::BlockLength::max_with_normal_ratio( - 5 * 1024 * 1024, - NORMAL_DISPATCH_RATIO, - ); - - /// Importing a block with 0 Extrinsics. - pub const BlockExecutionWeight: Weight = Weight::from_parts(constants::WEIGHT_REF_TIME_PER_NANOS, 0) - .saturating_mul(5_000_000); - /// Executing a NO-OP `System::remarks` Extrinsic. - pub const ExtrinsicBaseWeight: Weight = Weight::from_parts(constants::WEIGHT_REF_TIME_PER_NANOS, 0) - .saturating_mul(125_000); - - /// Weight limit of the Cumulus-based bridge hub blocks. 
- pub BlockWeights: limits::BlockWeights = limits::BlockWeights::builder() - .base_block(BlockExecutionWeight::get()) - .for_class(DispatchClass::all(), |weights| { - weights.base_extrinsic = ExtrinsicBaseWeight::get(); - }) - .for_class(DispatchClass::Normal, |weights| { - weights.max_total = Some(NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT); - }) - .for_class(DispatchClass::Operational, |weights| { - weights.max_total = Some(MAXIMUM_BLOCK_WEIGHT); - // Operational transactions have an extra reserved space, so that they - // are included even if block reached `MAXIMUM_BLOCK_WEIGHT`. - weights.reserved = Some( - MAXIMUM_BLOCK_WEIGHT - NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT, - ); - }) - .avg_block_initialization(AVERAGE_ON_INITIALIZE_RATIO) - .build_or_panic(); - - /// Weight limit of the Cumulus-based bridge hub blocks when async backing is enabled. - pub BlockWeightsForAsyncBacking: limits::BlockWeights = limits::BlockWeights::builder() - .base_block(BlockExecutionWeight::get()) - .for_class(DispatchClass::all(), |weights| { - weights.base_extrinsic = ExtrinsicBaseWeight::get(); - }) - .for_class(DispatchClass::Normal, |weights| { - weights.max_total = Some(NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT_FOR_ASYNC_BACKING); - }) - .for_class(DispatchClass::Operational, |weights| { - weights.max_total = Some(MAXIMUM_BLOCK_WEIGHT_FOR_ASYNC_BACKING); - // Operational transactions have an extra reserved space, so that they - // are included even if block reached `MAXIMUM_BLOCK_WEIGHT_FOR_ASYNC_BACKING`. - weights.reserved = Some( - MAXIMUM_BLOCK_WEIGHT_FOR_ASYNC_BACKING - NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT_FOR_ASYNC_BACKING, - ); - }) - .avg_block_initialization(AVERAGE_ON_INITIALIZE_RATIO) - .build_or_panic(); -} - -/// Public key of the chain account that may be used to verify signatures. -pub type AccountSigner = MultiSigner; - -/// The address format for describing accounts. 
-pub type Address = MultiAddress; - -// Note about selecting values of two following constants: -// -// Normal transactions have limit of 75% of 1/2 second weight for Cumulus parachains. Let's keep -// some reserve for the rest of stuff there => let's select values that fit in 50% of maximal limit. -// -// Using current constants, the limit would be: -// -// `75% * WEIGHT_REF_TIME_PER_SECOND * 1 / 2 * 50% = 0.75 * 1_000_000_000_000 / 2 * 0.5 = -// 187_500_000_000` -// -// According to (preliminary) weights of messages pallet, cost of additional message is zero and the -// cost of additional relayer is `8_000_000 + db read + db write`. Let's say we want no more than -// 4096 unconfirmed messages (no any scientific justification for that - it just looks large -// enough). And then we can't have more than 4096 relayers. E.g. for 1024 relayers is (using -// `RocksDbWeight`): -// -// `1024 * (8_000_000 + db read + db write) = 1024 * (8_000_000 + 25_000_000 + 100_000_000) = -// 136_192_000_000` -// -// So 1024 looks like good approximation for the number of relayers. If something is wrong in those -// assumptions, or something will change, it shall be caught by the -// `ensure_able_to_receive_confirmation` test. - -/// Maximal number of unrewarded relayer entries at inbound lane for Cumulus-based parachains. -/// Note: this value is security-relevant, decreasing it should not be done without careful -/// analysis (like the one above). -pub const MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX: MessageNonce = 1024; - -/// Maximal number of unconfirmed messages at inbound lane for Cumulus-based parachains. -/// Note: this value is security-relevant, decreasing it should not be done without careful -/// analysis (like the one above). -pub const MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX: MessageNonce = 4096; - -/// Signed extension that is used by all bridge hubs. 
-pub type SignedExtension = SuffixedCommonSignedExtension<( - BridgeRejectObsoleteHeadersAndMessages, - RefundBridgedParachainMessagesSchema, -)>; diff --git a/chains/chain-bridge-hub-kusama/Cargo.toml b/chains/chain-bridge-hub-kusama/Cargo.toml deleted file mode 100644 index 71ee785d4..000000000 --- a/chains/chain-bridge-hub-kusama/Cargo.toml +++ /dev/null @@ -1,37 +0,0 @@ -[package] -name = "bp-bridge-hub-kusama" -description = "Primitives of BridgeHubKusama parachain runtime." -version = "0.6.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -# Bridge Dependencies - -bp-bridge-hub-cumulus = { path = "../chain-bridge-hub-cumulus", default-features = false } -bp-runtime = { path = "../../primitives/runtime", default-features = false } -bp-messages = { path = "../../primitives/messages", default-features = false } - -# Substrate Based Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[features] -default = ["std"] -std = [ - "bp-bridge-hub-cumulus/std", - "bp-messages/std", - "bp-runtime/std", - "frame-support/std", - "sp-api/std", - "sp-runtime/std", - "sp-std/std", -] diff --git a/chains/chain-bridge-hub-kusama/src/lib.rs b/chains/chain-bridge-hub-kusama/src/lib.rs deleted file mode 100644 index 576e3dbee..000000000 --- a/chains/chain-bridge-hub-kusama/src/lib.rs +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Module with configuration which reflects BridgeHubKusama runtime setup (AccountId, Headers, -//! Hashes...) - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -pub use bp_bridge_hub_cumulus::*; -use bp_messages::*; -use bp_runtime::{ - decl_bridge_finality_runtime_apis, decl_bridge_messages_runtime_apis, Chain, ChainId, Parachain, -}; -use frame_support::{ - dispatch::DispatchClass, - sp_runtime::{MultiAddress, MultiSigner}, -}; -use sp_runtime::RuntimeDebug; - -/// BridgeHubKusama parachain. 
-#[derive(RuntimeDebug)] -pub struct BridgeHubKusama; - -impl Chain for BridgeHubKusama { - const ID: ChainId = *b"bhks"; - - type BlockNumber = BlockNumber; - type Hash = Hash; - type Hasher = Hasher; - type Header = Header; - - type AccountId = AccountId; - type Balance = Balance; - type Nonce = Nonce; - type Signature = Signature; - - fn max_extrinsic_size() -> u32 { - *BlockLength::get().max.get(DispatchClass::Normal) - } - - fn max_extrinsic_weight() -> Weight { - BlockWeights::get() - .get(DispatchClass::Normal) - .max_extrinsic - .unwrap_or(Weight::MAX) - } -} - -impl Parachain for BridgeHubKusama { - const PARACHAIN_ID: u32 = BRIDGE_HUB_KUSAMA_PARACHAIN_ID; -} - -impl ChainWithMessages for BridgeHubKusama { - const WITH_CHAIN_MESSAGES_PALLET_NAME: &'static str = - WITH_BRIDGE_HUB_KUSAMA_MESSAGES_PALLET_NAME; - const MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX: MessageNonce = - MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX; - const MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX: MessageNonce = - MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX; -} - -/// Public key of the chain account that may be used to verify signatures. -pub type AccountSigner = MultiSigner; - -/// The address format for describing accounts. -pub type Address = MultiAddress; - -/// Identifier of BridgeHubKusama in the Kusama relay chain. -pub const BRIDGE_HUB_KUSAMA_PARACHAIN_ID: u32 = 1002; - -/// Name of the With-BridgeHubKusama messages pallet instance that is deployed at bridged chains. -pub const WITH_BRIDGE_HUB_KUSAMA_MESSAGES_PALLET_NAME: &str = "BridgeKusamaMessages"; - -/// Name of the With-BridgeHubKusama bridge-relayers pallet instance that is deployed at bridged -/// chains. 
-pub const WITH_BRIDGE_HUB_KUSAMA_RELAYERS_PALLET_NAME: &str = "BridgeRelayers"; - -decl_bridge_finality_runtime_apis!(bridge_hub_kusama); -decl_bridge_messages_runtime_apis!(bridge_hub_kusama); diff --git a/chains/chain-bridge-hub-polkadot/Cargo.toml b/chains/chain-bridge-hub-polkadot/Cargo.toml deleted file mode 100644 index dd4729673..000000000 --- a/chains/chain-bridge-hub-polkadot/Cargo.toml +++ /dev/null @@ -1,38 +0,0 @@ -[package] -name = "bp-bridge-hub-polkadot" -description = "Primitives of BridgeHubPolkadot parachain runtime." -version = "0.6.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] - -# Bridge Dependencies - -bp-bridge-hub-cumulus = { path = "../chain-bridge-hub-cumulus", default-features = false } -bp-runtime = { path = "../../primitives/runtime", default-features = false } -bp-messages = { path = "../../primitives/messages", default-features = false } - -# Substrate Based Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[features] -default = ["std"] -std = [ - "bp-bridge-hub-cumulus/std", - "bp-messages/std", - "bp-runtime/std", - "frame-support/std", - "sp-api/std", - "sp-runtime/std", - "sp-std/std", -] diff --git a/chains/chain-bridge-hub-polkadot/src/lib.rs b/chains/chain-bridge-hub-polkadot/src/lib.rs deleted file mode 100644 index 6db389c92..000000000 --- a/chains/chain-bridge-hub-polkadot/src/lib.rs +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. 
-// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Module with configuration which reflects BridgeHubPolkadot runtime setup -//! (AccountId, Headers, Hashes...) - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -pub use bp_bridge_hub_cumulus::*; -use bp_messages::*; -use bp_runtime::{ - decl_bridge_finality_runtime_apis, decl_bridge_messages_runtime_apis, Chain, ChainId, Parachain, -}; -use frame_support::dispatch::DispatchClass; -use sp_runtime::RuntimeDebug; - -/// BridgeHubPolkadot parachain. 
-#[derive(RuntimeDebug)] -pub struct BridgeHubPolkadot; - -impl Chain for BridgeHubPolkadot { - const ID: ChainId = *b"bhpd"; - - type BlockNumber = BlockNumber; - type Hash = Hash; - type Hasher = Hasher; - type Header = Header; - - type AccountId = AccountId; - type Balance = Balance; - type Nonce = Nonce; - type Signature = Signature; - - fn max_extrinsic_size() -> u32 { - *BlockLength::get().max.get(DispatchClass::Normal) - } - - fn max_extrinsic_weight() -> Weight { - BlockWeights::get() - .get(DispatchClass::Normal) - .max_extrinsic - .unwrap_or(Weight::MAX) - } -} - -impl Parachain for BridgeHubPolkadot { - const PARACHAIN_ID: u32 = BRIDGE_HUB_POLKADOT_PARACHAIN_ID; -} - -impl ChainWithMessages for BridgeHubPolkadot { - const WITH_CHAIN_MESSAGES_PALLET_NAME: &'static str = - WITH_BRIDGE_HUB_POLKADOT_MESSAGES_PALLET_NAME; - - const MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX: MessageNonce = - MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX; - const MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX: MessageNonce = - MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX; -} - -/// Identifier of BridgeHubPolkadot in the Polkadot relay chain. -pub const BRIDGE_HUB_POLKADOT_PARACHAIN_ID: u32 = 1002; - -/// Name of the With-BridgeHubPolkadot messages pallet instance that is deployed at bridged chains. -pub const WITH_BRIDGE_HUB_POLKADOT_MESSAGES_PALLET_NAME: &str = "BridgePolkadotMessages"; - -/// Name of the With-BridgeHubPolkadot bridge-relayers pallet instance that is deployed at bridged -/// chains. 
-pub const WITH_BRIDGE_HUB_POLKADOT_RELAYERS_PALLET_NAME: &str = "BridgeRelayers"; - -decl_bridge_finality_runtime_apis!(bridge_hub_polkadot); -decl_bridge_messages_runtime_apis!(bridge_hub_polkadot); diff --git a/chains/chain-bridge-hub-rococo/Cargo.toml b/chains/chain-bridge-hub-rococo/Cargo.toml deleted file mode 100644 index a8e0003ee..000000000 --- a/chains/chain-bridge-hub-rococo/Cargo.toml +++ /dev/null @@ -1,37 +0,0 @@ -[package] -name = "bp-bridge-hub-rococo" -description = "Primitives of BridgeHubRococo parachain runtime." -version = "0.7.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -# Bridge Dependencies - -bp-bridge-hub-cumulus = { path = "../chain-bridge-hub-cumulus", default-features = false } -bp-runtime = { path = "../../primitives/runtime", default-features = false } -bp-messages = { path = "../../primitives/messages", default-features = false } - -# Substrate Based Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[features] -default = ["std"] -std = [ - "bp-bridge-hub-cumulus/std", - "bp-messages/std", - "bp-runtime/std", - "frame-support/std", - "sp-api/std", - "sp-runtime/std", - "sp-std/std", -] diff --git a/chains/chain-bridge-hub-rococo/src/lib.rs b/chains/chain-bridge-hub-rococo/src/lib.rs deleted file mode 100644 index c4e697fbe..000000000 --- a/chains/chain-bridge-hub-rococo/src/lib.rs +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. 
-// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Module with configuration which reflects BridgeHubRococo runtime setup (AccountId, Headers, -//! Hashes...) - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -pub use bp_bridge_hub_cumulus::*; -use bp_messages::*; -use bp_runtime::{ - decl_bridge_finality_runtime_apis, decl_bridge_messages_runtime_apis, Chain, ChainId, Parachain, -}; -use frame_support::dispatch::DispatchClass; -use sp_runtime::{MultiAddress, MultiSigner, RuntimeDebug}; - -/// BridgeHubRococo parachain. 
-#[derive(RuntimeDebug)] -pub struct BridgeHubRococo; - -impl Chain for BridgeHubRococo { - const ID: ChainId = *b"bhro"; - - type BlockNumber = BlockNumber; - type Hash = Hash; - type Hasher = Hasher; - type Header = Header; - - type AccountId = AccountId; - type Balance = Balance; - type Nonce = Nonce; - type Signature = Signature; - - fn max_extrinsic_size() -> u32 { - *BlockLength::get().max.get(DispatchClass::Normal) - } - - fn max_extrinsic_weight() -> Weight { - BlockWeightsForAsyncBacking::get() - .get(DispatchClass::Normal) - .max_extrinsic - .unwrap_or(Weight::MAX) - } -} - -impl Parachain for BridgeHubRococo { - const PARACHAIN_ID: u32 = BRIDGE_HUB_ROCOCO_PARACHAIN_ID; -} - -impl ChainWithMessages for BridgeHubRococo { - const WITH_CHAIN_MESSAGES_PALLET_NAME: &'static str = - WITH_BRIDGE_HUB_ROCOCO_MESSAGES_PALLET_NAME; - - const MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX: MessageNonce = - MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX; - const MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX: MessageNonce = - MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX; -} - -/// Public key of the chain account that may be used to verify signatures. -pub type AccountSigner = MultiSigner; - -/// The address format for describing accounts. -pub type Address = MultiAddress; - -/// Identifier of BridgeHubRococo in the Rococo relay chain. -pub const BRIDGE_HUB_ROCOCO_PARACHAIN_ID: u32 = 1013; - -/// Name of the With-BridgeHubRococo messages pallet instance that is deployed at bridged chains. -pub const WITH_BRIDGE_HUB_ROCOCO_MESSAGES_PALLET_NAME: &str = "BridgeRococoMessages"; - -/// Name of the With-BridgeHubRococo bridge-relayers pallet instance that is deployed at bridged -/// chains. -pub const WITH_BRIDGE_HUB_ROCOCO_RELAYERS_PALLET_NAME: &str = "BridgeRelayers"; - -/// Pallet index of `BridgeWestendMessages: pallet_bridge_messages::`. 
-pub const WITH_BRIDGE_ROCOCO_TO_WESTEND_MESSAGES_PALLET_INDEX: u8 = 51; -/// Pallet index of `BridgePolkadotBulletinMessages: pallet_bridge_messages::`. -pub const WITH_BRIDGE_ROCOCO_TO_BULLETIN_MESSAGES_PALLET_INDEX: u8 = 61; - -decl_bridge_finality_runtime_apis!(bridge_hub_rococo); -decl_bridge_messages_runtime_apis!(bridge_hub_rococo); - -frame_support::parameter_types! { - /// The XCM fee that is paid for executing XCM program (with `ExportMessage` instruction) at the Rococo - /// BridgeHub. - /// (initially was calculated by test `BridgeHubRococo::can_calculate_weight_for_paid_export_message_with_reserve_transfer` + `33%`) - pub const BridgeHubRococoBaseXcmFeeInRocs: u128 = 59_034_266; - - /// Transaction fee that is paid at the Rococo BridgeHub for delivering single inbound message. - /// (initially was calculated by test `BridgeHubRococo::can_calculate_fee_for_complex_message_delivery_transaction` + `33%`) - pub const BridgeHubRococoBaseDeliveryFeeInRocs: u128 = 5_651_581_649; - - /// Transaction fee that is paid at the Rococo BridgeHub for delivering single outbound message confirmation. - /// (initially was calculated by test `BridgeHubRococo::can_calculate_fee_for_complex_message_confirmation_transaction` + `33%`) - pub const BridgeHubRococoBaseConfirmationFeeInRocs: u128 = 5_380_829_647; -} diff --git a/chains/chain-bridge-hub-westend/Cargo.toml b/chains/chain-bridge-hub-westend/Cargo.toml deleted file mode 100644 index 09bf743c6..000000000 --- a/chains/chain-bridge-hub-westend/Cargo.toml +++ /dev/null @@ -1,38 +0,0 @@ -[package] -name = "bp-bridge-hub-westend" -description = "Primitives of BridgeHubWestend parachain runtime." 
-version = "0.3.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] - -# Bridge Dependencies - -bp-bridge-hub-cumulus = { path = "../chain-bridge-hub-cumulus", default-features = false } -bp-runtime = { path = "../../primitives/runtime", default-features = false } -bp-messages = { path = "../../primitives/messages", default-features = false } - -# Substrate Based Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[features] -default = ["std"] -std = [ - "bp-bridge-hub-cumulus/std", - "bp-messages/std", - "bp-runtime/std", - "frame-support/std", - "sp-api/std", - "sp-runtime/std", - "sp-std/std", -] diff --git a/chains/chain-bridge-hub-westend/src/lib.rs b/chains/chain-bridge-hub-westend/src/lib.rs deleted file mode 100644 index 4af895cc6..000000000 --- a/chains/chain-bridge-hub-westend/src/lib.rs +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Module with configuration which reflects BridgeHubWestend runtime setup -//! (AccountId, Headers, Hashes...) - -#![cfg_attr(not(feature = "std"), no_std)] - -pub use bp_bridge_hub_cumulus::*; -use bp_messages::*; -use bp_runtime::{ - decl_bridge_finality_runtime_apis, decl_bridge_messages_runtime_apis, Chain, ChainId, Parachain, -}; -use frame_support::dispatch::DispatchClass; -use sp_runtime::RuntimeDebug; - -/// BridgeHubWestend parachain. -#[derive(RuntimeDebug)] -pub struct BridgeHubWestend; - -impl Chain for BridgeHubWestend { - const ID: ChainId = *b"bhwd"; - - type BlockNumber = BlockNumber; - type Hash = Hash; - type Hasher = Hasher; - type Header = Header; - - type AccountId = AccountId; - type Balance = Balance; - type Nonce = Nonce; - type Signature = Signature; - - fn max_extrinsic_size() -> u32 { - *BlockLength::get().max.get(DispatchClass::Normal) - } - - fn max_extrinsic_weight() -> Weight { - BlockWeightsForAsyncBacking::get() - .get(DispatchClass::Normal) - .max_extrinsic - .unwrap_or(Weight::MAX) - } -} - -impl Parachain for BridgeHubWestend { - const PARACHAIN_ID: u32 = BRIDGE_HUB_WESTEND_PARACHAIN_ID; -} - -impl ChainWithMessages for BridgeHubWestend { - const WITH_CHAIN_MESSAGES_PALLET_NAME: &'static str = - WITH_BRIDGE_HUB_WESTEND_MESSAGES_PALLET_NAME; - - const MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX: MessageNonce = - MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX; - const MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX: MessageNonce = - MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX; -} - -/// Identifier of BridgeHubWestend in the Westend relay chain. -pub const BRIDGE_HUB_WESTEND_PARACHAIN_ID: u32 = 1002; - -/// Name of the With-BridgeHubWestend messages pallet instance that is deployed at bridged chains. 
-pub const WITH_BRIDGE_HUB_WESTEND_MESSAGES_PALLET_NAME: &str = "BridgeWestendMessages"; - -/// Name of the With-BridgeHubWestend bridge-relayers pallet instance that is deployed at bridged -/// chains. -pub const WITH_BRIDGE_HUB_WESTEND_RELAYERS_PALLET_NAME: &str = "BridgeRelayers"; - -/// Pallet index of `BridgeRococoMessages: pallet_bridge_messages::`. -pub const WITH_BRIDGE_WESTEND_TO_ROCOCO_MESSAGES_PALLET_INDEX: u8 = 44; - -decl_bridge_finality_runtime_apis!(bridge_hub_westend); -decl_bridge_messages_runtime_apis!(bridge_hub_westend); - -frame_support::parameter_types! { - /// The XCM fee that is paid for executing XCM program (with `ExportMessage` instruction) at the Westend - /// BridgeHub. - /// (initially was calculated by test `BridgeHubWestend::can_calculate_weight_for_paid_export_message_with_reserve_transfer` + `33%`) - pub const BridgeHubWestendBaseXcmFeeInWnds: u128 = 17_756_830_000; - - /// Transaction fee that is paid at the Westend BridgeHub for delivering single inbound message. - /// (initially was calculated by test `BridgeHubWestend::can_calculate_fee_for_complex_message_delivery_transaction` + `33%`) - pub const BridgeHubWestendBaseDeliveryFeeInWnds: u128 = 1_695_489_961_344; - - /// Transaction fee that is paid at the Westend BridgeHub for delivering single outbound message confirmation. - /// (initially was calculated by test `BridgeHubWestend::can_calculate_fee_for_complex_message_confirmation_transaction` + `33%`) - pub const BridgeHubWestendBaseConfirmationFeeInWnds: u128 = 1_618_309_961_344; -} diff --git a/chains/chain-kusama/Cargo.toml b/chains/chain-kusama/Cargo.toml deleted file mode 100644 index 2a59937da..000000000 --- a/chains/chain-kusama/Cargo.toml +++ /dev/null @@ -1,36 +0,0 @@ -[package] -name = "bp-kusama" -description = "Primitives of Kusama runtime." 
-version = "0.5.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] - -# Bridge Dependencies - -bp-header-chain = { path = "../../primitives/header-chain", default-features = false } -bp-polkadot-core = { path = "../../primitives/polkadot-core", default-features = false } -bp-runtime = { path = "../../primitives/runtime", default-features = false } - -# Substrate Based Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[features] -default = ["std"] -std = [ - "bp-header-chain/std", - "bp-polkadot-core/std", - "bp-runtime/std", - "frame-support/std", - "sp-api/std", - "sp-std/std", -] diff --git a/chains/chain-kusama/src/lib.rs b/chains/chain-kusama/src/lib.rs deleted file mode 100644 index a81004afe..000000000 --- a/chains/chain-kusama/src/lib.rs +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! 
Primitives of the Kusama chain. - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -pub use bp_polkadot_core::*; - -use bp_header_chain::ChainWithGrandpa; -use bp_runtime::{decl_bridge_finality_runtime_apis, Chain, ChainId}; -use frame_support::weights::Weight; - -/// Kusama Chain -pub struct Kusama; - -impl Chain for Kusama { - const ID: ChainId = *b"ksma"; - - type BlockNumber = BlockNumber; - type Hash = Hash; - type Hasher = Hasher; - type Header = Header; - - type AccountId = AccountId; - type Balance = Balance; - type Nonce = Nonce; - type Signature = Signature; - - fn max_extrinsic_size() -> u32 { - max_extrinsic_size() - } - - fn max_extrinsic_weight() -> Weight { - max_extrinsic_weight() - } -} - -impl ChainWithGrandpa for Kusama { - const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = WITH_KUSAMA_GRANDPA_PALLET_NAME; - const MAX_AUTHORITIES_COUNT: u32 = MAX_AUTHORITIES_COUNT; - const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = - REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY; - const MAX_MANDATORY_HEADER_SIZE: u32 = MAX_MANDATORY_HEADER_SIZE; - const AVERAGE_HEADER_SIZE: u32 = AVERAGE_HEADER_SIZE; -} - -// The SignedExtension used by Kusama. -pub use bp_polkadot_core::CommonSignedExtension as SignedExtension; - -/// Name of the parachains pallet in the Kusama runtime. -pub const PARAS_PALLET_NAME: &str = "Paras"; - -/// Name of the With-Kusama GRANDPA pallet instance that is deployed at bridged chains. -pub const WITH_KUSAMA_GRANDPA_PALLET_NAME: &str = "BridgeKusamaGrandpa"; - -/// Maximal size of encoded `bp_parachains::ParaStoredHeaderData` structure among all Polkadot -/// parachains. -/// -/// It includes the block number and state root, so it shall be near 40 bytes, but let's have some -/// reserve. 
-pub const MAX_NESTED_PARACHAIN_HEAD_DATA_SIZE: u32 = 128; - -decl_bridge_finality_runtime_apis!(kusama, grandpa); diff --git a/chains/chain-polkadot-bulletin/Cargo.toml b/chains/chain-polkadot-bulletin/Cargo.toml deleted file mode 100644 index c20a94cfd..000000000 --- a/chains/chain-polkadot-bulletin/Cargo.toml +++ /dev/null @@ -1,46 +0,0 @@ -[package] -name = "bp-polkadot-bulletin" -description = "Primitives of Polkadot Bulletin chain runtime." -version = "0.4.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } -scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } - -# Bridge Dependencies - -bp-header-chain = { path = "../../primitives/header-chain", default-features = false } -bp-messages = { path = "../../primitives/messages", default-features = false } -bp-polkadot-core = { path = "../../primitives/polkadot-core", default-features = false } -bp-runtime = { path = "../../primitives/runtime", default-features = false } - -# Substrate Based Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -frame-system = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[features] -default = ["std"] -std = [ - "bp-header-chain/std", - "bp-messages/std", - "bp-polkadot-core/std", - "bp-runtime/std", - "codec/std", - "frame-support/std", - 
"frame-system/std", - "scale-info/std", - "sp-api/std", - "sp-runtime/std", - "sp-std/std", -] diff --git a/chains/chain-polkadot-bulletin/src/lib.rs b/chains/chain-polkadot-bulletin/src/lib.rs deleted file mode 100644 index f3d300567..000000000 --- a/chains/chain-polkadot-bulletin/src/lib.rs +++ /dev/null @@ -1,227 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Polkadot Bulletin Chain primitives. - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -use bp_header_chain::ChainWithGrandpa; -use bp_messages::{ChainWithMessages, MessageNonce}; -use bp_runtime::{ - decl_bridge_finality_runtime_apis, decl_bridge_messages_runtime_apis, - extensions::{ - CheckEra, CheckGenesis, CheckNonZeroSender, CheckNonce, CheckSpecVersion, CheckTxVersion, - CheckWeight, GenericSignedExtension, GenericSignedExtensionSchema, - }, - Chain, ChainId, TransactionEra, -}; -use codec::{Decode, Encode}; -use frame_support::{ - dispatch::DispatchClass, - parameter_types, - weights::{constants::WEIGHT_REF_TIME_PER_SECOND, Weight}, -}; -use frame_system::limits; -use scale_info::TypeInfo; -use sp_runtime::{traits::DispatchInfoOf, transaction_validity::TransactionValidityError, Perbill}; - -// This chain reuses most of Polkadot primitives. 
-pub use bp_polkadot_core::{ - AccountAddress, AccountId, Balance, Block, BlockNumber, Hash, Hasher, Header, Nonce, Signature, - SignedBlock, UncheckedExtrinsic, AVERAGE_HEADER_SIZE, EXTRA_STORAGE_PROOF_SIZE, - MAX_MANDATORY_HEADER_SIZE, REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY, -}; - -/// Maximal number of GRANDPA authorities at Polkadot Bulletin chain. -pub const MAX_AUTHORITIES_COUNT: u32 = 100; - -/// Name of the With-Polkadot Bulletin chain GRANDPA pallet instance that is deployed at bridged -/// chains. -pub const WITH_POLKADOT_BULLETIN_GRANDPA_PALLET_NAME: &str = "BridgePolkadotBulletinGrandpa"; -/// Name of the With-Polkadot Bulletin chain messages pallet instance that is deployed at bridged -/// chains. -pub const WITH_POLKADOT_BULLETIN_MESSAGES_PALLET_NAME: &str = "BridgePolkadotBulletinMessages"; - -// There are fewer system operations on this chain (e.g. staking, governance, etc.). Use a higher -// percentage of the block for data storage. -const NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(90); - -// Re following constants - we are using the same values at Cumulus parachains. They are limited -// by the maximal transaction weight/size. Since block limits at Bulletin Chain are larger than -// at the Cumulus Bridge Hubs, we could reuse the same values. - -/// Maximal number of unrewarded relayer entries at inbound lane for Cumulus-based parachains. -pub const MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX: MessageNonce = 1024; - -/// Maximal number of unconfirmed messages at inbound lane for Cumulus-based parachains. -pub const MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX: MessageNonce = 4096; - -/// This signed extension is used to ensure that the chain transactions are signed by proper -pub type ValidateSigned = GenericSignedExtensionSchema<(), ()>; - -/// Signed extension schema, used by Polkadot Bulletin. 
-pub type SignedExtensionSchema = GenericSignedExtension<( - ( - CheckNonZeroSender, - CheckSpecVersion, - CheckTxVersion, - CheckGenesis, - CheckEra, - CheckNonce, - CheckWeight, - ), - ValidateSigned, -)>; - -/// Signed extension, used by Polkadot Bulletin. -#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, TypeInfo)] -pub struct SignedExtension(SignedExtensionSchema); - -impl sp_runtime::traits::SignedExtension for SignedExtension { - const IDENTIFIER: &'static str = "Not needed."; - type AccountId = (); - type Call = (); - type AdditionalSigned = - ::AdditionalSigned; - type Pre = (); - - fn additional_signed(&self) -> Result { - self.0.additional_signed() - } - - fn pre_dispatch( - self, - _who: &Self::AccountId, - _call: &Self::Call, - _info: &DispatchInfoOf, - _len: usize, - ) -> Result { - Ok(()) - } -} - -impl SignedExtension { - /// Create signed extension from its components. - pub fn from_params( - spec_version: u32, - transaction_version: u32, - era: TransactionEra, - genesis_hash: Hash, - nonce: Nonce, - ) -> Self { - Self(GenericSignedExtension::new( - ( - ( - (), // non-zero sender - (), // spec version - (), // tx version - (), // genesis - era.frame_era(), // era - nonce.into(), // nonce (compact encoding) - (), // Check weight - ), - (), - ), - Some(( - ( - (), - spec_version, - transaction_version, - genesis_hash, - era.signed_payload(genesis_hash), - (), - (), - ), - (), - )), - )) - } - - /// Return transaction nonce. - pub fn nonce(&self) -> Nonce { - let common_payload = self.0.payload.0; - common_payload.5 .0 - } -} - -parameter_types! { - /// We allow for 2 seconds of compute with a 6 second average block time. - pub BlockWeights: limits::BlockWeights = limits::BlockWeights::with_sensible_defaults( - Weight::from_parts(2u64 * WEIGHT_REF_TIME_PER_SECOND, u64::MAX), - NORMAL_DISPATCH_RATIO, - ); - // Note: Max transaction size is 8 MB. Set max block size to 10 MB to facilitate data storage. 
- // This is double the "normal" Relay Chain block length limit. - /// Maximal block length at Polkadot Bulletin chain. - pub BlockLength: limits::BlockLength = limits::BlockLength::max_with_normal_ratio( - 10 * 1024 * 1024, - NORMAL_DISPATCH_RATIO, - ); -} - -/// Polkadot Bulletin Chain declaration. -pub struct PolkadotBulletin; - -impl Chain for PolkadotBulletin { - const ID: ChainId = *b"pdbc"; - - type BlockNumber = BlockNumber; - type Hash = Hash; - type Hasher = Hasher; - type Header = Header; - - type AccountId = AccountId; - // The Bulletin Chain is a permissioned blockchain without any balances. Our `Chain` trait - // requires balance type, which is then used by various bridge infrastructure code. However - // this code is optional and we are not planning to use it in our bridge. - type Balance = Balance; - type Nonce = Nonce; - type Signature = Signature; - - fn max_extrinsic_size() -> u32 { - *BlockLength::get().max.get(DispatchClass::Normal) - } - - fn max_extrinsic_weight() -> Weight { - BlockWeights::get() - .get(DispatchClass::Normal) - .max_extrinsic - .unwrap_or(Weight::MAX) - } -} - -impl ChainWithGrandpa for PolkadotBulletin { - const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = WITH_POLKADOT_BULLETIN_GRANDPA_PALLET_NAME; - const MAX_AUTHORITIES_COUNT: u32 = MAX_AUTHORITIES_COUNT; - const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = - REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY; - const MAX_MANDATORY_HEADER_SIZE: u32 = MAX_MANDATORY_HEADER_SIZE; - const AVERAGE_HEADER_SIZE: u32 = AVERAGE_HEADER_SIZE; -} - -impl ChainWithMessages for PolkadotBulletin { - const WITH_CHAIN_MESSAGES_PALLET_NAME: &'static str = - WITH_POLKADOT_BULLETIN_MESSAGES_PALLET_NAME; - - const MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX: MessageNonce = - MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX; - const MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX: MessageNonce = - MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX; -} - -decl_bridge_finality_runtime_apis!(polkadot_bulletin, 
grandpa); -decl_bridge_messages_runtime_apis!(polkadot_bulletin); diff --git a/chains/chain-polkadot/Cargo.toml b/chains/chain-polkadot/Cargo.toml deleted file mode 100644 index f942e4fe8..000000000 --- a/chains/chain-polkadot/Cargo.toml +++ /dev/null @@ -1,36 +0,0 @@ -[package] -name = "bp-polkadot" -description = "Primitives of Polkadot runtime." -version = "0.5.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] - -# Bridge Dependencies - -bp-header-chain = { path = "../../primitives/header-chain", default-features = false } -bp-polkadot-core = { path = "../../primitives/polkadot-core", default-features = false } -bp-runtime = { path = "../../primitives/runtime", default-features = false } - -# Substrate Based Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[features] -default = ["std"] -std = [ - "bp-header-chain/std", - "bp-polkadot-core/std", - "bp-runtime/std", - "frame-support/std", - "sp-api/std", - "sp-std/std", -] diff --git a/chains/chain-polkadot/src/lib.rs b/chains/chain-polkadot/src/lib.rs deleted file mode 100644 index 00d35783a..000000000 --- a/chains/chain-polkadot/src/lib.rs +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives of the Polkadot chain. - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -pub use bp_polkadot_core::*; - -use bp_header_chain::ChainWithGrandpa; -use bp_runtime::{ - decl_bridge_finality_runtime_apis, extensions::PrevalidateAttests, Chain, ChainId, -}; -use frame_support::weights::Weight; - -/// Polkadot Chain -pub struct Polkadot; - -impl Chain for Polkadot { - const ID: ChainId = *b"pdot"; - - type BlockNumber = BlockNumber; - type Hash = Hash; - type Hasher = Hasher; - type Header = Header; - - type AccountId = AccountId; - type Balance = Balance; - type Nonce = Nonce; - type Signature = Signature; - - fn max_extrinsic_size() -> u32 { - max_extrinsic_size() - } - - fn max_extrinsic_weight() -> Weight { - max_extrinsic_weight() - } -} - -impl ChainWithGrandpa for Polkadot { - const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = WITH_POLKADOT_GRANDPA_PALLET_NAME; - const MAX_AUTHORITIES_COUNT: u32 = MAX_AUTHORITIES_COUNT; - const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = - REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY; - const MAX_MANDATORY_HEADER_SIZE: u32 = MAX_MANDATORY_HEADER_SIZE; - const AVERAGE_HEADER_SIZE: u32 = AVERAGE_HEADER_SIZE; -} - -/// The SignedExtension used by Polkadot. -pub type SignedExtension = SuffixedCommonSignedExtension; - -/// Name of the parachains pallet in the Polkadot runtime. -pub const PARAS_PALLET_NAME: &str = "Paras"; - -/// Name of the With-Polkadot GRANDPA pallet instance that is deployed at bridged chains. 
-pub const WITH_POLKADOT_GRANDPA_PALLET_NAME: &str = "BridgePolkadotGrandpa"; - -/// Maximal size of encoded `bp_parachains::ParaStoredHeaderData` structure among all Polkadot -/// parachains. -/// -/// It includes the block number and state root, so it shall be near 40 bytes, but let's have some -/// reserve. -pub const MAX_NESTED_PARACHAIN_HEAD_DATA_SIZE: u32 = 128; - -decl_bridge_finality_runtime_apis!(polkadot, grandpa); diff --git a/chains/chain-rococo/Cargo.toml b/chains/chain-rococo/Cargo.toml deleted file mode 100644 index a86e87551..000000000 --- a/chains/chain-rococo/Cargo.toml +++ /dev/null @@ -1,36 +0,0 @@ -[package] -name = "bp-rococo" -description = "Primitives of Rococo runtime." -version = "0.6.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] - -# Bridge Dependencies - -bp-header-chain = { path = "../../primitives/header-chain", default-features = false } -bp-polkadot-core = { path = "../../primitives/polkadot-core", default-features = false } -bp-runtime = { path = "../../primitives/runtime", default-features = false } - -# Substrate Based Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[features] -default = ["std"] -std = [ - "bp-header-chain/std", - "bp-polkadot-core/std", - "bp-runtime/std", - "frame-support/std", - "sp-api/std", - "sp-std/std", -] diff --git a/chains/chain-rococo/src/lib.rs b/chains/chain-rococo/src/lib.rs deleted file mode 100644 index 2385dd2cb..000000000 --- a/chains/chain-rococo/src/lib.rs +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. 
-// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives of the Rococo chain. - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -pub use bp_polkadot_core::*; - -use bp_header_chain::ChainWithGrandpa; -use bp_runtime::{decl_bridge_finality_runtime_apis, Chain, ChainId}; -use frame_support::weights::Weight; - -/// Rococo Chain -pub struct Rococo; - -impl Chain for Rococo { - const ID: ChainId = *b"roco"; - - type BlockNumber = BlockNumber; - type Hash = Hash; - type Hasher = Hasher; - type Header = Header; - - type AccountId = AccountId; - type Balance = Balance; - type Nonce = Nonce; - type Signature = Signature; - - fn max_extrinsic_size() -> u32 { - max_extrinsic_size() - } - - fn max_extrinsic_weight() -> Weight { - max_extrinsic_weight() - } -} - -impl ChainWithGrandpa for Rococo { - const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = WITH_ROCOCO_GRANDPA_PALLET_NAME; - const MAX_AUTHORITIES_COUNT: u32 = MAX_AUTHORITIES_COUNT; - const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = - REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY; - const MAX_MANDATORY_HEADER_SIZE: u32 = MAX_MANDATORY_HEADER_SIZE; - const AVERAGE_HEADER_SIZE: u32 = AVERAGE_HEADER_SIZE; -} - -// The SignedExtension used by Rococo. 
-pub use bp_polkadot_core::CommonSignedExtension as SignedExtension; - -/// Name of the parachains pallet in the Rococo runtime. -pub const PARAS_PALLET_NAME: &str = "Paras"; - -/// Name of the With-Rococo GRANDPA pallet instance that is deployed at bridged chains. -pub const WITH_ROCOCO_GRANDPA_PALLET_NAME: &str = "BridgeRococoGrandpa"; - -/// Maximal size of encoded `bp_parachains::ParaStoredHeaderData` structure among all Rococo -/// parachains. -/// -/// It includes the block number and state root, so it shall be near 40 bytes, but let's have some -/// reserve. -pub const MAX_NESTED_PARACHAIN_HEAD_DATA_SIZE: u32 = 128; - -decl_bridge_finality_runtime_apis!(rococo, grandpa); diff --git a/chains/chain-westend/Cargo.toml b/chains/chain-westend/Cargo.toml deleted file mode 100644 index 6f5c48139..000000000 --- a/chains/chain-westend/Cargo.toml +++ /dev/null @@ -1,36 +0,0 @@ -[package] -name = "bp-westend" -description = "Primitives of Westend runtime." -version = "0.3.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] - -# Bridge Dependencies - -bp-header-chain = { path = "../../primitives/header-chain", default-features = false } -bp-polkadot-core = { path = "../../primitives/polkadot-core", default-features = false } -bp-runtime = { path = "../../primitives/runtime", default-features = false } - -# Substrate Based Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[features] -default = ["std"] -std = [ - "bp-header-chain/std", - "bp-polkadot-core/std", - "bp-runtime/std", - "frame-support/std", - "sp-api/std", - "sp-std/std", -] 
diff --git a/chains/chain-westend/src/lib.rs b/chains/chain-westend/src/lib.rs deleted file mode 100644 index b344b7f4b..000000000 --- a/chains/chain-westend/src/lib.rs +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives of the Westend chain. - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -pub use bp_polkadot_core::*; - -use bp_header_chain::ChainWithGrandpa; -use bp_runtime::{decl_bridge_finality_runtime_apis, Chain, ChainId}; -use frame_support::weights::Weight; - -/// Westend Chain -pub struct Westend; - -impl Chain for Westend { - const ID: ChainId = *b"wend"; - - type BlockNumber = BlockNumber; - type Hash = Hash; - type Hasher = Hasher; - type Header = Header; - - type AccountId = AccountId; - type Balance = Balance; - type Nonce = Nonce; - type Signature = Signature; - - fn max_extrinsic_size() -> u32 { - max_extrinsic_size() - } - - fn max_extrinsic_weight() -> Weight { - max_extrinsic_weight() - } -} - -impl ChainWithGrandpa for Westend { - const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = WITH_WESTEND_GRANDPA_PALLET_NAME; - const MAX_AUTHORITIES_COUNT: u32 = MAX_AUTHORITIES_COUNT; - const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = - REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY; - const 
MAX_MANDATORY_HEADER_SIZE: u32 = MAX_MANDATORY_HEADER_SIZE; - const AVERAGE_HEADER_SIZE: u32 = AVERAGE_HEADER_SIZE; -} - -// The SignedExtension used by Westend. -pub use bp_polkadot_core::CommonSignedExtension as SignedExtension; - -/// Name of the parachains pallet in the Rococo runtime. -pub const PARAS_PALLET_NAME: &str = "Paras"; - -/// Name of the With-Westend GRANDPA pallet instance that is deployed at bridged chains. -pub const WITH_WESTEND_GRANDPA_PALLET_NAME: &str = "BridgeWestendGrandpa"; - -/// Maximal size of encoded `bp_parachains::ParaStoredHeaderData` structure among all Westend -/// parachains. -/// -/// It includes the block number and state root, so it shall be near 40 bytes, but let's have some -/// reserve. -pub const MAX_NESTED_PARACHAIN_HEAD_DATA_SIZE: u32 = 128; - -decl_bridge_finality_runtime_apis!(westend, grandpa); diff --git a/docs/bridge-relayers-claim-rewards.png b/docs/bridge-relayers-claim-rewards.png deleted file mode 100644 index d56b8dd871e8445e7cab49517123b0092ce09137..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 35621 zcmb@tbzD^6+CB`TAd-U8ogzp|mjcqEfP}=*4MR7KG>Ej)r4l0D-JK)S-QC^HZ*jio zd5-7(=lSQ|A4F%|Ywxw!y5qX9``RII6{WBqkvu{|Lc)@fmQY4QLd`@%LdJcF3clgr z_1*%1-FFa^QGEyw&xc0A;4`t~8x2PlsEMPCzP&M$sSVWH_=SU^y|J;4gBjFu52aBA zoWyFbs^KVZZ>;ZV4z;0DHMcefUm_v>&K~roV&|h8^#5J{yM&69o%1CX2e$zGO96H+ zPqo{xNJvyjG7_&$Dn zWI4CLRG2cFT;{s@nwPNQIH@Ga+%MlSB!4BtS&ToVq<|lj9!N*`PAWuVyA2K2r^up;6KN zbwIpjWkdctr7KuPSytk&Lozv9`LFY7I2XTs!F&9lBkFN>#+Sb?5d4|XuY}Ou=P1Ej!?v~{($RhaYuOjiAtk>{@}>Ux8fY)T<-Y&k4W+iPDOu@ zxX}7DG$bx3hoNXn+q}f0o)S-@w!z~fsAkFTkV+JbqkVoFJcdtNqSW4r8ef zrNHK9;mPLuHVxJMCJ)Qs&jArZMn-mYbk;d>Zu}Y{oNG)yGDZ(z&CBDBwT0$5!l5V7ogznVFf!s*AXP-K9F3l$B}4zWO{4 zc0o8=2tia=3{6e-a>0dKRg{EJs_|r5<3b^b%dxui#n3cnijVnPm4~^In@^0XuD)Zs z9@brl;It==7yVEqGYgBwgHFn|LfFmLTKH0(rPF80)+uOXzv#`7=sOjaqlD4Ms4dq2 ztbx67%|f&?Ma0_o+1F|Bp-{H+^73SUhrodW#YY$z7duc`Z>q2dstzMFb13BYFm8Bc zMCS>7qSRE>MFXn&MIL81_V)H>R#t6BoE#pfRzegwZ{ECd_3$WlJupyKRz^Bs 
zqC`kLIBkd%w{>aG4cy4LU)mTj{iCoYj&7F7f#`9Q{AOS-Vx;CTIQMuZR|N6C^G0oE#xQ4^p0KKpKQ3IC5lctfafU02pXwVxp|2 zC8wc5Bp1g@&%@)a(Rl_YYGh>e_QQuyX=#*9OiV1=RgonnoVVwlqS5j3U6bXO6SXdE zg$oys0VUo#n6!NHQuW>m9B3CkuPThKFOfw6thc+taILD?Uj7 z_3M+%tXmNH_u$CL8)@l>p@eKE1IhexZKn%ClqC*pieQI>LqhaddcJC~5TtsYYRbyV zUDP&S32r+J!iA17fmK~^n~IVilNpJtf_&-*Q}tQ-K7JsLNRcuYdl zb9u6rlbhSR_9a-d?QnyB|M0I5+^0`xX2@glE*(_uj1Q5*B7F6$3+{co6nM4|Cz2ssDPP z%Z}+c27#sM)H;JlZJu$NQ;?SKN?%)V@<9QU29Mdf+#LfJsKE?IPEP(MHg*w=`%KKW zzP)WUlm(tf_4mw7e;^hqpT{W+?~Eh9N|rn|JNtfgyoZN}+d=^0cq2@7VPOG05DP(X z4{r9(RX`ug*M~-;R6X6Ww6?p01R{Cz*VEclYj1N z_N?k`)?))irO{;B&Iro2l-#n9PU# z%FWFU#`!`-Yy5+uo<+Z9%5mEFpx4i`GJ#*D?z~NIR*hsl@TIMM2aCMq}+1s(2w$ZK00IAjVCGW zYOk>oLH0E^_RrLmoRSibVQ08;$7kYc7~&?5{ID=1!~fT>=ZlMr=U0sg4+{5nYMbhn z*wSgp3@tlH!j->;=@(*m9&LRm18yS*N=pMZ>oKXdA5$ensh#x^LT(*>JF_Z+M z0o<0OdYmUt@`4D&+kvqh5IleWeD*6h7x~iT9x#W&nHdEoB_$YnZA}fUCgCNX=FZN} zjW~8kXQv@&!d)m55fNe0EXjbm3MBPAPb^)SqeMU*9bbYSpQ&{j`tzq{V<;OV19&^XQ#r>#O^qxn+NhuMBiHU$AYK$EQfkt_=!YYs zw>6q?WNcjQyfYcvld8m%Dw_J&a@adbaxB06gISf0F8Tr8JZ@j#O zLGYNF}kTFlCd1#vTn=+9C}(g5pNG%z?~0fnxCsDu*3 zsI;r(97fEw8f{-(Tznr5O+`ba3)nwQuUO+DI(qdlYQ{JJiF3EHVVlXy!otE|;^InO z_H==ZwjQqZ5{r5ZQBzZU9wp7S0qgcd!&y9VEt=W^cw}j5NgU(}Fysj#AwCgNSI<}G zYLFR3)mgy3SHlH&o2I6cT7z)TaK?CkgAM5c2<~oafiabpm0?!TKs}d8r=6#)%PK2X zR8=KZRECUWDB|!zx;<+|crnPwgE+;nudgq(--L1_ehBi{XAp!z_9YT@W?@z@prfMt z2*TFgRPP4h;ot`vYDOy{<0KjFPl0f-CODqw%Ix;!! 
zxS`>krmVF`_q<1BvAHX3r<*aY&7$0Z2vsVa>|1trP}5Vw*r~ec#U2)=?n*0m; z;z|!*{{D$Q#|WXIhzR=l5vixQ7X*K5A)#b?x!7ma^nW?b$7p5yFrvU_U<$2+*y4Z9 zqCn+`K=yx?Mc3AE|F62?|0=Ai0I221figNMjd0J|>@@Ef?bM%6-Q@i`wK8fwJ^a@U z3M3skG+`4GuR)<_?{%n37+GG9yT;xtC_14yAafSn;rK!adWL8JAfMi7?^%Xh+0s9l zsk);AIyV4JysN7VDZRG#D$%MEB;@R)7RYf}SeUC&s%6Q}H(*lp&U1WlG)ZLu9(8nd z)LFRpljCAzqobl;OG=^|8ynM-&ju5ZWT@`krYt`hvwt7wA%&jgeH3xM7C!m7gVo4{}iEmka^D4w3K0Fib{;u@TB~oqH1FdR8Mza01OSetG_?9 zrsmQCTG`N$8XD7Bd{p?x~i`!!yJp{~$UO?nAX=v=duIj9Z4iv6e8yzn3^cYZDMg3(sxMb=L(L!2Qu@cCt_HDgWq!o`j9eG}lqDQ&ZpmpZ=#)EVfvQPo&$rIn560ovnk|sbJN=ZopE{zJl$dXSG z)7B=_)zuyIM4(}lr32^TcUaXvvW885VNwAgm$o5oOViBkS#xtUnUHHFz{X+Dd}DoK zXBoXlv?t9$jZNe==FWag13}fj$T}|+6$y+=Pn6)l_h-_IEH2~Dk(0pw!`G~n1SV}= zCZxUe=tGcos#4xR*`4f=gZ(t@hmUcdNqVDw{}G(5G~HGxz1r)+ypd(APGM6@JSH{q zWxC-}XP8US1INMH%?^*%M-(+)c#c)Ju>_Gf0}et%`|L`2w%V1v;R}@+^WzC`bj<~r z4I~9g{$(*2T6Wjxc3@fVlB7v#LRJ>T_05fx6$}sypo+p3ZET1$sI9Ht*xp_S>9)g& zbHTp43l7iz@xxF8PioC$+w(C%;*CXKI&gp*6f7;b%!2F`!Y)f6D80wtO6^PEmM5J3 zhE3q<#Jz#;r8FnRSUM4zAf@Sq(yybJa)zWdZOO3JZOQ~W$DmNyU>@`23ss-%$y~l0 z_0Hcz2$x%rv7g>*?o0PB?faK5#PRtH)EICOWqVS|g`x z8jYn;>1UYPjVm_D6= zFZ|TMW4jjLGs}dte>8bC=lRtdtv5$_3$@hmxX~kC^+%*ap>Z^1H^cICKxD>OA$KT^ zY-UJs^4d3NC__@kn#$^Si{P_QZ_Y&J6VI}H`Mb3cED7z3)50csQ6Vh1m(_F4!YfAD zW=TSwm8CYl6wgVut7@CbHuajt(E0=1o}^ecS^LkQ_dFlXm|(jW;kkHBVcoflBf%dW z%3*t(CVz-sn;8 zuMbxh)vi0O8H)FnG&$NS1exKDZG#`gMsxR+-|B`8VXYo82^|j^kE2;HrJt`rN(QRq zAve#;3#|(V7F`4BVN>5(3 z;fh7aaGfu9++Oooez_8(^+fs*CDiVQ-ftSE*Om^;${ek_gMn+FB9nmEM9gHSl{RMN58KwvC_?!ch<@_5$Z97kd95YDbj}9}PTZ$~;7V z^MYInruwSe9xZ$+LFh;pyS$%*J!naz$d}s%Fri&uQ@o2m>sv0W_!PNDaogpkMi_F7+E>}wp;Ag+Q$D0>lZXt z+N^nM%mBe55z=P>_T?@Tm6YJMvP(-#H`!F;R9iiZlXDLv7mNjn6_3v!(_9zOn+hc3COck)ta1WJ)XJAfOH);Bj7 zN420UOA(^G2v2VW;CdV#9dRir6lG<5-dq7XY31f@F1x(^jzj=Cv!l#>=wrje6@yZ0 z-*Aq~1SoU?r2vM^Z*9o|_@1ql*0JHZxV9!`Z+{qB{|lQ^L>{0n0R91Dbhz571jt_O z^=o7-EG)pGL`O$I#KvY|V~YSYJ!L$-L*D>ez^pViHTx2|F<>wlAT9vkMg^c5h{f#Q z*RDv;d$Tj5>T#_LdZ2s-t980FC8eT*4{93#{LVUEK$Ht7;}-*2TP^qN@USXUU0ofh 
z7Kjd$XQU;Cncl?lRt$g52qYO^ZIl~LgXnD|Zqd#0zvWi=f8TtitX@bpJLflLiie6t zSO`gJ$AArVa51NF_qHA+Gf_jex&u~(-y7YG36yx?$w2BclO`0p!^4c?1UBwrD)DO~ z%2Vso>GQ5_FjpsqcwxF=n#{oV{2wrD)=rBZ-MVzU zx}o^sQ_wqIXXa`d5*AtY=-hX&&FSx@TN=S+cq7_2ZI0bi>!MrHdafR7HtYw$% zqRj<5G23@jPYe9N1bb3f*v#}Sv<9)&ZVS#`V1qjJ%9pmLZdHkAf7{YnV!Gion!&vd z3uk<@2Z^FOoVZsndNpE5f~$LF7HWx&>L|jOq;lepkv8pP-;ENrM04i`AKPRZNuf^7 z{iL&z5y*w&w0Q|7eLmwU>OW9o2_>Urhlpavpt(pvv>e)bs02Rk?+SlWZ=HTN-zW22 zOSr0sSw9uU*1n`gL0bC>K@(9P`5~IA3^jcg-KeK2gIWx!*o)uq#8xp#yDBqm$10Z| zZscX!j#Xe3t63R}tI?l6#VMZLRnV%(Xq<0&gO&Y!<55Duj_1-+!S{1Blm(Xp{r6vp zm4YZ2#LMll1v`?yesQB3{J44;^U~FVsc`?*Pvra+)f5iHhzewNGgGwOIovb60Ugrr zXFR1w{Xg0sI1kPrGZ;1D+LcqTz2mV%&dus?=VlBwJvua3l-+<;8~?W4{_gR`(JGLX zk%y&zTwL3BnBYPq*GfAaDTM0A1U>bnqKMp*TklZ~xGQ8nugjN!aK8WG0kRi*RdO;}OKWQaFO=fN zix+@HL;gK60V*#~F$;_5uV25enxW+~Jv=$gvl)(24iru}{!&f!YPt+}@>ipi4d_-N|t6}=q^1vS;UmgiVutZ^FF=-EJKyl81a?1 z+Vl1qhbszS`O~b!{n95C_R;QFnZzHCmL>4uUKJ58@7_kfal`5h3#VmuWPZ^3OD_>i zXyuD~b4Jq6s~2v9p%1Tn4D%A_&VKiM7shyHr}8xeR|FTpVs@>X|j)G<$`ON5~|)(b;w_I!j-*S z(4*oi@OQJGf2!QcxKAp#$bvb8+BW}iQnowfL-3>Q>N5Wilw8TrzY(A<Y&1X$fgDewmdHHdcL)mC#0oDkh7+DBvDG z+i?4aqgc_`$WV**AZvG;@LT(Gt46V#*f)X?_j0q3Wz62F^A3g4gTqI;tk4(JzN6mHon z(xL0QQ+i|ZYx|1!TMSXRN8D@fo+(4gQm49vc4s0TB@S#|dIWJFCzQ^&@{{Z@%El_I@WX6HcIXt$t+tTgx;KKp>; z;>C^B>>HitAu_%Zq#=0!kCTn7iiPBebeT3K2fcZ!43V_&I6HkQalte9!$mX>DAR2(HT!ZMnvEMK9P?M8?^I?d2PsUZ*M_}CV06ygQ>0Ou;|;6p&Aj? zaw59gxfXZfALF_vo|gzkPRtS7X;Y~Pf!sO{1)O*^O>XO8x(60SudGtUHIWFi()C@uaD2CO`PRZ8BnD$DQ6m zne{yQQglElYO++#hGj{XGpDIAdG27Rn}Zc^#mL$rKX725I3)IrnO|YpI#kaEon8S~ zSyQZffvO!_-6{xm@U0fcYdZ<7(HlC3fHxIRMz@`N&l2KfEiCZD?YyMQa(`f8F7BZGmw9W3)isKvBj)f!5P2S^re{0?6;SszCeJbz! 
zAd#}0*L-=LO~{o3!Nn$aMc)ZbyaOky#q#}sOM3LaeIWc2c1|XW*q@GL;95Z6$h(o83+3JY zM8*k9-k({>ye4&mLiam#7Z$5~jvn)|;!d+(!Z=w7uGd?3Q2RnM`rsxv%!q-ZH{C3< z^^xo5%Lu#P8p40l|x= zbE*m09uDVi4bWI)l_dgD-R_^33p*wXC1o0Q<%)AH)AQ%wfBpoN(FaWKm$Q0P#1=oB zKx-7h>ajGs16LvO0WDBpw8wK8UV0ea)g7s+6g9QAfL7@%TC%jZ&IIK}e-iH*tPn6S zr>D*x5tK@=ULiR-IlbiOeoRc<-5tYdZej7gqM|}{(0R3w82Bjoym^)MrFm@z@XMW@ zoogi=B!SH!nsk74{*09s(E2{05!h?Ba#RicNCzUr^l{g+HhgWGn_a_Z5QW`_)v}@S zd@<;q2c@-mu6y>IxwnlLaiN4@uIyVQL=|P;>fb~#e~A`)=(Z?MN4UNd(5ZSAJmHB z4RM-cDM`c@nVvb9KyJxvkJR?ZT~t^RYK_5N+|CWfQzgb2=;-Ku$^3n+6p%9g=~6xy zsGc}$MxKUv^oz)X9|?ILrtwo}Jo~F+N%v=o(r$&VN}^8dNBSO_{aD_K>k`73R$Xq_ zOlK}ferJbW2YHc|F(TXD{faDf&46r1=HS4jrB{~fl42WGyh2=^bG_A@QqWxVl5r7x zwZuV-0ls1qb2%fW$u2Ya%!TgYHTLeZZBFo=?LmG-VRU{+WnM zz}nBypIS>-lPmBKjm$H!&q@p5V771T7?Z38M~{|wB}RoR0wqF&qU$`9^KEo5J1HS` ziVhU?8_c%J2>0`|_Ey7tO7O4IuhUrPTo{MBKl7WvC{;UX+KJge9y0N&`zw$0*R@vmx^JdUS^8d zYXefQbp3&^=P21+J#gINjQ=9_IHT{OpoD~m{zy+pfqkt?^luOKQK#kE|@zxZZZal>$yN zqoyVi9JkfQz(s;)Z0>qApa9+nWi(ceC@5Jcr>210+@RCbwJt`A-vS_VwgQl&$ zMscs&@|W4ZU1Sa2m3RuOEwjeoCjCyAUvRLqLU3ZFc~+u^B0?V+oaiX9{(O1})bqfBB^p8m-$ zOM06vF7NCoTAs*5aX}`&)#fOopalIg@wmqZr3+pa{!69>Z@Y{-c$mYuMQ_o28F(X) z-l0=8kvw_Gx<*y{G==N)}AGhVZeo4DNW+c4zV#7+>LVD0Cq+Zt{#^Ar? 
zRgsLhmQCP46`UR&9fRWjC*{RL)libH7M~u&?>SNzuhz^T^4KwHmAsB+4jXA`s2VNB zN)`H~qEyrmY%5|X)tEzaa@yWZK1?HPKc|+AJ!cu4ddm{koguo9Pf;rzc{nZ9GcaRV zJy6qRK)-)kQ~UIe@`++A{`BdQVwwmbRGC53H#08}Gqn9q9CO#rN8H}L+=&p7qN1WeiDi3vY(8zndv}@AQv7rnQ7w>9 zkdTsY?Cxe|WMqg=aPJ>o?5Mtf52OKCfoIv!s@klqtlM!U5YIv9r`pQiECa|+^mvk( zo;?HDDgngoTSFvvfam~mY+E}6svrH*iMy_Tp#Cuq|2PngQD#rCZ-Wc zMK@PlySpCZ6(BJH@Bn((mxXCZou1E7A}(yhPD^Xv?J=$JYsn09V63t-(pPIPmkDST zH;HVY>|m(aEkg}jLs3Hs`HBDuBDCe>a3qzmq#}I?>8xe5Zn+9ixL=doCAu5fu!U-P zc~3bHUVM^?MHCuSq}k-v5#XZDX&1NSV?@6~>SDLQAdTSLa zGUwAi6;_J9&E(|}5r5O~8vZm|eTRx)D7h)z)4qbxpet=D#jL0H>zIce;^H~@l}wSZ z!sZxh>=nU?j;O1S+&m0vMyJH5KUzZ=SrYhQkb60qKhyoI&9U@T!aujNqpX0PuvXJP%o+7Ds0BdPn65^*vrb?J^^XnR~mn%WsFD zD!N8e>Nu^ubacZv;ISoKdFSspBI9g@;=(>C|HpsFs+Y}R<+!c6uy`YfPAiRms~yW$ z3Xg~DWo4pWpA6kemUUjQXNbIV$dQws?H12-@fbI{Z+oNtBDB~?kcxv%5AO^Kt@%Fl z>DKiJPO0rY=nhB7C8dUXczl9l(y9Dy*5ByKawB3|wynr8$nTS|CaZgI>~N(-ygpHV zS5!+-DxCaE`^#{d&qQ3q<3BU1sTNH_6W-lT-T&?iW#;FH0iDV$c#*ZvCGG^`!K0w{RRpupsg{twr(FAi_?PEe+B%(UFvai;s(j39rS$zFbM5W zw>3eduA`RdoS&038o(?7hU{u*02fNYrYkQmj|p1BcUJ@rX3(>91%#sfKD2kSFa>4c8-3~Y$2LgeBWOXMJvbMJV44@s5A&H5xay^- z6nOm_G9?$m79AN%;8CB+UT)?T6(a2ep)5s@+p02NGLv&v)f3Ofct83*yMf$}K6 z{Jqn50j?;SJ=H}ohd%)lZKgSQ#AU&zpVd@CNa={*0;S@x?|YV{m2dk>^tknjRdT20#Y`cfV|sm7NfJ0>YD-CjVu+FZbf6+>s}(*=ikXX z@8dCa6{vT=uT-t1!kIS79%4MBh7&j;CcVr;Qy`>$_JAW)(@Tqlv6`FZqv8p(Vn3ES`KylD}h`sexSw3yW6Emya=g^mT;z*5)0yD2dA~yfS zaNI}h7g_rm3FlLcUV&Jl|0;God4S*~j(ipS(w#O%$yU3+Bo~OF+Qu4hPjbWIy}hBp zu})zrk&&+e@92AoO#cS_p0 z7ip=fB4%XjhZ~08wEGs;HYE1@?UU||@7|q0##s$r__hykH2P5-IOol51KQmFG25a- z3J1w)`?jBE&z?SxB25U2=1n!y(Rr>U6#ZS7g_Fj@<(g4xul#5n9;YKD_4A)156fz* z!Wz&LNNNSz{5^x(b0AHlr3`JHPC+T7E)p8dKyl}J*v)u+ddlgts|~udQi60nJw50+ zwKdtoKSVs|x^gqwv&lMw@C|mqf1``*Yelu2tB7#ci?dD3Y1!}z^u?^;W-LB65w5`p zvr6$+cE<4{u<_dzUsAC(5+qtbE(ebEs5nwu@Bigcs6Fp*P`l{x*;E>Otz%Qip-u zazS~ottUsTDPWyHvsgZ2-nDz@dBa_Qn5k#-$Mw%&lOoOdTm;MhpBVq|-?WM{O#k%K{`pO8_y4=>ze=Az6U^#PzP+LGmlQ@~ zSzv5v$S*E#?0q3`Ud9)=1{A1^3f!0ep5WoipFe+m;Tn6nx=;-nCeczhp!H{slM~MR 
z`%1T{C&$NtOtKXq1uqZio?QOx4Tn~yaq#v)|BRxPDPetm>xaJtR}vBpbYM>V#9Lu$ z6NFIc(&iYY&40^qmP<)S_r3m}3r$+k|M!)Lz^`APi@I54{97#Z&l*Otah&{LuR;`# zu4c^+VvGI#6b;kED*ye8#dJ}$@(QC;m?If4S%hgBZ6v9f2gXi~T0yvX)qh4dd>K~R zv`D%AxP~}UzqXVdG?StktHw`gT%z>8x;OPpQZSlSLWxPU5%B_XeRak+!Zq3aAH(P> zH~H+0#G4#$k~k%VS<5Ch(_(_WFN~K8kd_$X#}VHv6+9 z8AO>P+&f-LhwZmktCYucNx}49i=4u&;)$D_J$GB~@}+}EdcKAvQTnrdN>58pR!)pk z>fQyaAN^jn$jF@0tE2VtA7{GnlDfE$sB7t{4{QA}H=>~=CwPDSdjg4mhmObR2u{dy z=JY}y%nQ6Ylu_J|(yS6Mz>n!L#e}`*owCOD$eeg*UL>@`)0xs7Ej^Pj?q*D%ep=yV zINAC_6>myhOAaGd*KeL_C|-b`h(vbcY!{+|czyHi&GY}YRL;YK%J2t-WnW92hCIml z-Yfl>^5b9)c`vH1Xk|&Q#Z@{18I-Pt5}{)>b_gFa&TC!rAb{3r=K!*nwkOwCy2p0C zl@yk7J^wCN{@F@C3<8%*LHFEZM1*#X-L{0sgq67v-}i5~-rF2htcG@X^QxZo~TIkGkZL9Avcdco(a-tgX#EBfWdC+_s2IE{3v=~3zo!S;s zRWOWT!#7X?g3<@_2LT<^PS;bl+3_{ob`;5kO$%X-i!0&;$_;0RfBZsSEtj0bhk1s! zZO{gLDOd4D{j}=#vjS++(roTolT$h7qeWP+8ZcPC>q}|!eQfG3$!O0 zXlkVg@G1W3-@cv8y{;a3F8Osa!*akJ>0 z{0_3T@IEB&^yR@LlhZ#V8G*jp>rO!2959?}bKm#ii_IEbGeDIeu7k5sJpIKPO%C-G z^$46=f+k)0+G8PC=wwn~k(^($*VpKtz}+HwEI&zUnK0BhFd$+6gHH@gNXt8=g_g1& zA)WN^%;Wd9wY^lENs%7gHNCj0AYr&Jl)4!F&TiJVaxvE^b+RmYadAd4KgHg~t%MO& zs%t9s4m{V6t7nUJ_1?SKbDf36`wR?oy?z+J&0jl@5T`uZ;RJSnVx)Zs2Xb_vQRRC? 
zBMN%Oj8P5Oig?$bTrcKy7mA|V{6rxGs%vQ13kf@}a~FJ~8uPsi>fXJ*L0;7>f)QL> zZ2~;2?}x@0nv3MAs81~Ol6jF0BQ=KnLc1mpDXw}*56;o_9I=E9UXab43ha4%eyYKK z_%PhK`zE(~V>%~iA`YIji)3ILzSh>PEYefBmR0Dl!#B(5;Ks!q)S5ljOxDrg+t{j} zeT9AK#rO4!0J=)>ZpZ@HN=|R^u(sN~4*C7w!PT0BT?Z@%0^>?O_1d~PiB)D`x!@4} zKr{`z;i)JanzhCiGA59Vo$Jqw#Ng(Sx)8jj#{Zl!w>*}cbqrBQmupn0!4i_vUCY4= z7=A+eYJ#z_!ui^O~bX>OjWPv&7(!8to4sqnd?w86$0 zynRyl^*~ntw!jCaULuGuV|K~v?F0GWRE7WR+xB6YTXu#Q-72)?ote|zWWpcBX`9T6LOt#{}hIPjGUk^wu ze_Eg#miXnEpgXvb#G!hS_#Nxq{VHH4v!(SB%}&Y+bwEq7ah7aqa;apH5@oU4H;9UdvF z6Qlja`7MXgtEG_E@6*0_OoX@dgyr~9SYWS4JJhA@cZFyO zZ1UH5an&lK>;xEDcQ2{d2IR<;#&ehpH7|J{Bj{`PyPXwn_Nfhn159vf36E&+0XKc8+5O4h@OLv9 z`1p(H>s2k>b3h zGPeca;=D#@`H(1)$2PpY28j>-#TplPdrg7%OW1$nM}kGU9{M5q@l}`yWFgZ9BWDBM zaPtYquF8d#`-XKmcHb+U#p@Hx@L+mp-`5VB6vSGGte4f8@n4!09Ac{6Hdlfgj$Nzv zJ=z_=HvaG|eB1CN>hP2M2YLOrjzImFv4?}Zo)%#t5`n~Js;f~4-qbs<6n0HYK5aI~ zb}57m5m6Gz7vcTq-Bf!lokrozfuy33@6YHLe1*q1%K{53*>44|f-(FO-6HH5~*TN^gDz6G^tFCBlM22&I@o;u-G;QnpYzW4*YHtIodQG}~4yLLGEiGz?9@F!^G$w}M=PZblvKl}i zPV&#D`g>ECNX6MNaW4_O|H9a$DzrAEa?|8cVK9y!69}m5;kJS~(Mn~-Msy@<4<9bF>3vM3+(CGIl`k(n)&C2P^1J;O zs;(ekShsLZoH5Jw^-DY*o%ahVS9+df5z9AX+WcEwrrS&-Cv$CTsaJ!_YUR7ebGJO} zyk8b&+#;hotzDGvQMqLVrrq=oS0HXFJTaEu^IAUR2t_;~4PatR~xQ!CQwJmUa|`|ZkZ&HOO3Q5{C=q%9mQWn$rzQ{ zYpa#uSS=!}$12h-+S zG%AV~lOjqFjJ}hn1SB>!2{C93#xT(;5spPehxa$Bi7uneKlhUjynjP%HCq-HCTPOp zEcP+$WaK3|2D|fVT=klglIU92r&97csg)Qo(_X{V6o|=cFrAjVcU+p)cvsCBbm7xb zzxvwXNy8e{aHa9~%Spe&OXn{fzbs$6Z07fgmS?M~S8ukx)@6ps4ju4$(4v{sHjT@) zV;ZkNxCy)0wRL*UNx+)ea{6>`!8A#4u``xkv@tNvbIrVB*Da==JTrq4`5q=2PpYMf zg1(i>9AeIoP9_@sWSqsFjTDAk7t^N5tyy~}L#>6pgnmy-Mq?H{qfpY5LkWY8%U;_u zxLp)^n4)YAk;08KyP$yzCIdTZtCvy4pPwbR1y@40GQ8dTZhPVq_*uH6 zb}U_wi_5J%Z{HNuY{opP+Zq0L;pT5fMIk75IQ*20h{Nt4aV^h~BVCjKM4F-VYot?0 z;h6Og-q-o%rrIkHi%T<#&&snkt1W01iiggGeio)>NxZ;ObhGYpKteWvRKyW4)HPMU`H3H7m3E`5f#YNZk&em1bcs1TI0? 
zrV=E3*Vu|xK1%ZDiwTqEdFO~f?v0@>rSCIP+mM3Zf~!s+@~2!%aZ( zks$4M@FE`QT9bCbm%22KSBo%h1&G(;G@=?=}1QJDUYR$E+tmpS{;$uegd~_4(XKg-pe+Pk;d> zauB%adzaG5NTBi?Mmja7HWaJd*FyT|Vq}TbZ3D+T(t|w4UT+TrUb1=OwXAbRY=V$M zokDD7tJb?U#HTz+_Uo@TH{^pt9=`Z7k9xZe?HSjp4P@-i)kJKE%HPH)MVH&r)+!<8 z*G|+O^sP)Mx|Tara@lNO9@H39_Fw5Ac3(=F1mhN+HQ6LawD`y9uxg5f`fDy9{{NaNulX;gv{vokmijeoIIy2Q}^K zHvP(-FeT#jg7z})`XTk(}3%_lY7?{Ak1q=-eixwyPvlp@JYd)j1 zUHj%Yx}`Q9AQ=|28sCo;+7oQt%S$^<-ZL6I5B?Kz5WpmOG2z#!cNEK=H!Bh{#gObt z8RXFKtCe7XXyj{zgMuo**fY(YH>uoy`GdM3RZDU^BwMrcl*;<1Q&~G184G^c*Nasa zmNL*1LC~6Is#$zHUri}x^7ANVwlb#U$57ScPK1c{GZhgB${6TmN6lJl=HdyrYMpYA z@YwEh`Q^Z`vF4rfJW<1*s28LYmj?#m9mtO$bV{YqFP~A{o-UbFmvH%Xb?+HOuk!wQ z^)auRnkZ5cM&-6lWyfd9wC^|o^+Nm^5=6{QM$ePa^?J7^*ulN}`e9oO^&^C*yqjVJ zqHdw^58AyfCcDCCN7-TA*PC(h(&9}`-OHFpZz=EpF;<)ULV?VCaY`${^7`}Ubh}kv z7NluOy;c-IH&@(=gjivq*)*}rF$#+X3Iq{8>i^*q@9 zyiA$fl3%RGLJs|4b6vq@UOIAT$YdE76H8V*qRHP-!X2HNv{8M+XXnDlA-OL{uc9 z^CRPRbM)zO`fV}f>tAJAc%Sau91fk9c!?Ux$M-CMi{x;|q`l-WI&+CQTJri~HaFtO z`g1_3Yd@-0|H#H0G5n)V67`{C-Pa5C6quSji7id@EIF2ysAur`!P)6`sYSkVJ8U?z zk6d=&Y)nPkZ#foWv;ns??@b*LhB@^L#_bE*MUHuwC$Anmf3GDO_rm1-^huHM`lDAP zEHUjR8sGTuSJCnXi9!^vTY+N4Z%X5JN)n!G@sw`z3MP1N8D(h~2ksO?SF24IpJ2j2 zN+kDf?HaX!w>ux*Lvz~^g*E%Xi9!E=t(|pTRNMFWK`;=Iln@DJC_!3ULK+DP>5^8E zZjcb^?gr_SMmhxP6p(HhKstubXV1MC^nQQwyq?$V;U5@g=A3=jS$nOu*E#3&PDk|h zU}a-9UspD~J)Ofa`%S#N@*TC>2r(p9wtHVAQEI9lwq+hwM6JpFv2o?g@U5Z}7ox(UiemFlT(1k4rxULGW@p+k z>JqMbJ;~F@sJ#}>2VWN)a%xjxH5_!}R)bjWbc=EGXq6^to{vVS6gp%`raFE^!4!O^ zFR-kfTuO?HAPokxOjQqtpr{x#e2HM`1ZQS4C z7qYJ$TwaX9&Sv8}kP#x=UCYh)eILiGss2`ZeZNpy9kKL%4(6)e&SK;{uNWvGMX`-x z!nY6}N+`N#5Sx)%+VQ^XaKpHeju8_JFQ-3B4SUHk0%LAW2dAMrtt&H*H^OQ8_BaWF z#drMU0a2=Oe2(@5_VRvkot`JI;~8p0`+NCT>~|kZVJzhk@iIfk<8Uo#d5$hhYuu;{ zHUBhJO}W-wb=megxk1Txx!f-Eyh-6iXMBgG2sGHR`P-)1n$MbYfqSMZFBe~``Q8FoSsbYiG3-*7lLb=~_1ZMchlh*md8N%w2vH59 z!(yNNVId)*^$-aks7?PuKq~bL>u(6G+m9bpANeNW#WFXprc!G@u6C*zX7Qkc4O5D! 
z7s{DzXxcxv`ZZQE_L)cRg}M3n2QH*t&^Vcyj`t%0RMzWKO_^NF1pM60s=#iS zZe}lKWU|R-EX1T>-t+1EfTO{_os6-JfF2*%64AsciEnt4ZFL^yRwoT~-y^`HMe>dw zV{!Ug4V%8@avfbvM{O`J%e*p9gVT<%%SZ&oO33ih#yWE>^JQih<9yo#LnhWDZZnc6nsYsGw zk&PF{ZpadFnb3KD@10;7%F(7oKYEeigMYvDlbdVJR9JrW&j!QNF&McsuD%@>VTt$W zi*wa^Z<#$??CTAlXqU=MT)(0jNlR6zET$Q9*z>(As377J=>PW(eh_zZ3L6$85)J-p zHUFtXBpS2oX&?@N?-5b#c29fZ2E}IOU-M&^(`Bs!M>$PKg86u+~pB&o$9>a$h z%nk|V<;dRmh##{W&ECY%bS!h$sw`g7ps-_dBsVZl4NMXA4vUR_D{%d#6*PB#SCGUB zxvZ)4ealgkwvyxwIUK(v1_T65OAB5NCa15ny?ypq@d0X*q3kLec#QqCt1I`lsl$3R zTrV%cGa2(Am;Wvz4?Z$D7dw(?UYk?yyEM&}p&=RS!T%V)pGxO?EHuy7BdQg`Q09H7 zrEiE)I&mXqg!L)?GlZX_DiPJl*oC~UBbS?NJSA)cPU6--fcy z>;L(5Ixqc?@bJ5+t(jcGyjU#y;}hcPwXZOt+gL`1lG+zLO+|* zms+7?TtIKywqQfTG4FV_6S8>5!j@#&a&H&D6ph(6;}cuFz|^<9#K4p0c}{h<&hD_y zE?Z)D@?@s++2sp9?U|DIH!ruhh9Hy-^J=baUk$a={dY;8Yh4|&qi>>|V_q}jJs}*( z?fY_$HZ;QIgPcYAe)nH?Wg_IKFf%^9^4U8GM?zUm zvJcUZzae>PruD|Okz32-+fgiCeHyM$(UPmHx8IV6km_gZ{HRF24?%%Fx4B?M%yD5D zeRDYM_pxpm`C_g4g_|gMnc4MfZxZ+@DivoK%jE0vh}^h?xHCd)zdH7A^^o!oT70u8 zZ&ip&ivXFlMhkPNYlTO<>>Yg>oy{qKnl9e9okv(tT+1ScxKknHNS{O`=gDFC%xk;7 z+~G!FQqPnke|n`s?qGNJ>JPNx6>qPakO>^I%gELj1ZjAa89Nigc)5cIOvtb@EsyVN zX8U7qkwR)_4WcHWhrWRs2rFOi+_GT zE_og=8Im_%J`~EL2~izF$Id2hzG9T=Zs=0v9D&38Z{9!m zC8Ey1gi-%Eo4?!1F09?H&{;tL$WDlDZ0dute_*S<75<%G0=Gx}?Y!FVah-F1D@tz~_uBV$YtPK4N{3XxGQ^xCq8se=2tW zYvE9-`TVbnR*qUvDRglHwr~oqSkXsc=}u3;Wn8l8Si9*qpQ#nx$b_%AZyyZHem&gd z6BNZTCqo>g!hrT^_lGfqCG~dVR{kS(4MC_^6|}s(e;l!~n4S5#6(=qG>ta)9yl1CK z5zdacz{8y%mxKa@ZhsodPva!~taqV77l#cwneaZ^gzf({dC6#l331;OIL7^OWRMxO z+F82ru&pC=J?NAgNlYaBtYYg`Jd)j<6{ z-XgI2=tz7`r&Sy`{@fyrszxnHkh}JMY&luVD@?H;F1Xm<3lM~NBnzbPq?7L(iPu^M zXGRe)xvi^P4%&<2OOe$SSvgc&-BGWLxxGp48FlBi3{P{Xl(GG0`)5Qj~kcSEqV-f02k^C=dQNkei+T0*EGS1;z1=rz+)hF%HnKB;-9GPh2 zTlf0BSYu8+PifATiC_2e2p=!q?Q~_2JT>j)UEA^Oh*U5M3b~vU?yh?xcJ4}_QBt+0 z7{EZro}bh{F6wY(j@4MlEj^Q8M)$(QElRzUmpwVhLq=bq_l`C{3aoxNc(PKLxbF+K zq2Qu?E#o3lH}^978qswYOs~%r70xl&9i-S&Uk!}O!S3#SOzd-J%ekmqKRPrPMpjan zbRsCL)8=HG$ZtV7cxyzi?*09)-~e4~Jc#N?$vRyo;pQ1fOQXB`6n(M{IdS?kfpReu 
zodxm}qQco{!MAhlS@=6PF55Lm>a9>3^aLq_&#jR*cJ9o1-AUJ%mkUFs>Y@$Ib6D@^ z8jr8Q+;p29pn+xyoGBYB$pF1SWWSh;(8uLpJP1kYkyDWe+=C$bZQ85enV~fWVN20T zI{Je~C4*tjTz8etWw1I%ly=XDZnqOB2{R8z+Pn5TBxIsZl$%CcriZdm$j zP&VpP>FIvqFy;emgPT;*Z%1(Dj!j}#BWZnc^>8F9eFAG^3)Pckj*h))%oXc}Qmaftkzt1E9dySxT(gBuzDNJD# z|3qaFi;R&v{;YTmZGDrg5gTu^#%C`wZ*26H%v2IW3YKuK)q8d~OnI|Q=KRL2?8!2Z zYA&t?TqurYmrnDj$&K3{(B=C47RnewX=?s6^O$iZcSM1bwg~3)Oowh_e7AFr<&3XZ zvw=^AxIaR*xTA$jVTeZWH@*aLKkJnbd~h+&1;5yNfX6xRXs906YqxfV!iooOg(uS}tmzsp7F8tlT!-$<;l04q*w4}((>W>@eAX<{M7g@+fa%-%E4=%N8zESqkhse+O}%G}9ve*ZB#-^B z@~ib6f70x?HT|wG(>LqlrE^F&cyUZk9FJ@Gg#&EDOiuR^LHq0;rCH?A3>#iBV9k~-5sZqocU_cJ~W=tz2WJ|kYQdkQN=H-M87pl}*xnG^t4Hsmr|ReBD$P4JvXf2;MbVcrc4{b% ze+_1un3c3=Zg(dudXToZHe@!1k$c{nn*_t#_a=DubQfyG<|(XF#=b!|(0|V1ky&y? zbXaPyh@vulz2-ULFO`2XG>+38^nwugj<{FyXfIpw%9Audv)anG@&FAV+2SLM4FWQ z0l6)1Nr|)_Bk?E+WfX}U`&#zY#P=$EZeD63Mb(eP4#!)d?P~dmtXO+`y`G=qo?j;+E$8++9C02tauq}6mhb_GyrQ0 zjUPU*yxPynLjUEO*6HbG)?Zx#G(zSh?DWAJCN)(YG-593^ZIvOoQ>2Bg*lXyu6I=O z_s>RHBG-x8qx+=$bP+6aT~A!IiR;`b*rh)gX(LP|IR$y8j93n33ruxyY>rO_+_r3g z_aRWL;po(2QS*z8VvSH=k?Ihi|nQ(lFkLW2Ki3K z3kW0+7Yu!V7G#z$$Q=C|a!{1GuG<#w5pxMGebzptN=gA)Seb#My>TEH$56u+Z-OoDS@n4xd%TSBOM7|ZIZ$IJqB=?@|(Hyp= zXp)nu-N>d3PvD~k|8`D=u9wD7a;EWge+Kkfwe{!{@?*4~>dPrJ$2G4p?8MKU1&w=f zs&ilZch{t@QLLMuq5Y0#j+gfA!Z|KacfM0qbJI+YT6Y}UgMuOSgKTRcZEZT!)j@_gBLV`XCsDjyqGsfomY3A#Bfdut=XK;`Dp_QCbXKMVtP%$1SCnVWs3)HYs- z(HVoo!^0DAuW}M(Uj;k_0lFsVpb=s0OUfJi(Rn(0d?LJAO_h*Y?@r?^|H)y6!Br0_ z3^_!SBrE@EW249Ij?RZdj*hb*m((I0;|~rX`MzvHB*x3lC}x3I7QnVP^gMr`89(h3wZ+YAf3>t~xHgmYMUPEX!Y2HatKzaBCzEqwLi9GTp=T+Ih{7N&_D zM<18=v+q++j_wtNRTI=LOw+}9ib~Y z;qUa>Mbz8*+JeT?H(dpatewTeo~)wyFn!CxyY+RoUbBJE&($>!luGw*%V5!{G%J}g zP}pP9h>kp6Jf&SDMcx(z1cKrCbNfeVsl z6cinA&_GIbpp!oLzyIZU3eQ$(0dnfy{xqh!TZTTJ6^lHsGNf-PgW?4w2~y21f!3=P zv6{VD+~5>2j;KGmAB;-lp!O)|Tr9wzxuc!LtCv1WU}f}0HEl|ZR-v}TvFhq!c{r~g zYep%4$?W)~mCKvQ3=E4g`18zH_X0Fht%>1wFA#zIru%JJ-WU?UCL({{eSIJp@G_)LdhjWM|JOo}Go zqlV#L-H2sD-1l0TwF07frJp{Zir@!T-VCK>^VxmX++_QJAHPmxPY(*7kyWK7v?Udg 
zTda!tgvBq(?mB*a=P!44>Je=vG~_~O=oi=Wbjog9#aMrTfj6xn7Q*qhk4|?#Q$y$; zh1k7z{iU!|n%oQem&PBvkGjklT^~Zq^WThh;%CpyNHa#bh#(^W@{v1r=FD?#@LaMF z<7bSYJX=#XKcu@mNLJ;FbYG?*Gjj*QH$Yv2&ygF|0BUDTEo@BVnwqfcMZMwiB0YJ` z<*V+q#b>ev3QcDX%1r^GiG?qjNwS_JTfaZOf4uK;wgKBnHw;c&FBk5vVMZqYZ~IxIzJt;j*=;};jlNzADX^!mr+8bS*w*lmLy^a-t#eZ7XOMWs_Mke=6FD)rS-OF~>J zvxF#HInX(8S`PE$4d=z6oKL;SkT8>@oe^cl=oh5ahf=h^EbhB}{hO>&dtW83iTmr6YLC_y0((o`?L zZckzqk!<9GiukWmY#(C%kgT>UX-N>#@}&d~VYtj@NJWqF z*f+mq#a9*hmVQ)Bj<4LHIgOAxzYL!8%qXKGpOQ6+jt!rI$s!LUZu7FFUC}7nV-Z@F zmT-a*z9z58VCz;&iJ@J61yg_QcG@>`oBY2xh|8-AlBh#8*bWs0?vi2Nt+VN<`(fng zXN4lPmZN@w`xS-kzxKp%HfM=-YMNNLWH28-^9?&gC}z)9z7QHxLFMUqzsOTZK5MqHurm7*`LOb+DvjO#%RO;R?La|1T4@qEY z9@AY&&gGE2LfdZaepPHhc{A-lfo^~k%80$5i%@hBLSiFzWUPkU- zIpy)4agkJH>f@hERdyX3bO|nwh~K_NqE>EgwoE!5s^4W;>glZy;w0_#ih_5A1oo34 zDf2nFo}afD>@2pTH|Oh*DT=J_(~bytBsy@HwxQMfxLr-!58GoC{iq+mTXv)dDZl&( znOZkPhN$X5+`mk&m+}2=QU!(meQEGTtS+V_qj9-zg$Et!DLP1Wj%Q-ce*~-17rwxVlwS{ zcOzsc3OPi$ce`||f~*SXC4*6NuC5_xl*t$EmM+;B@_3eq-PZ3qlT&wx^ywPrUCl?n zR^du@+{FHQVem9Gh|KeLF10JAL^_hsu-uO2qYBr`9}L&!v*EA)t);7+=8z*~$A*LS z(L?aLBSPaVJof?z6w4XaRtch>MNffTwqe;Da>dP8h>@6qSO;m?4I7uE#R!n!u93wMOLD)RJMKjunXAG6)WlPFTcwqJB&WyrCPZ90B05#v}Y}br#YX>SNThShAlx#F# za6?Vam)qxNS1x^SvC`km{nuqiPy%O4INI_GlMAMcuH{VOxnogut6Jh)we4-+0BxqT zRrs~%Z){#z6MCML|Ew=@ zC*G)v=+)?M%JYB(Q@p)hQqtSd7j;z?JLdhlHi;fw3yirm8zQNu_`I~-@ZCXk(4^`_kTPQRLsQt`IKXZuqBQ8qX z@nsCy69ML9*G~1No(eJ>w*L3871)}oYxSR0;q!2dJCVhobrtG~bU4&=5#P?%QW9Uh z@E23K+9pN}iH*d0v3-N>b&m$3A+ePyzp6o}O?h4AYQ%sN30(Ki=}nVqj<_<2+`G|# zEexptSS5g7M{bd}NiOUc4ilfx#Yy31b%p#tm(n5PCGx9sKBk(2$JI+#$wh`p_wN(= z)Os_8o0)#rdHuBGa69hG8^yO2|G4g&DFOY8pPlQUP|6=+l$%mgouu66gwN9dN_g>v zachp^dH~bU{^zUM?nwy>SIvi<2$KSxm5OAYwhP+<@B2*WOg4LJ!PI;nQu=TEJ9bF|45R)=)T$g5RXg__V7NR!>6u`)x7QpL(iu9ya&eE<(ZKm z7ruT~uS2M!Ti^Kw>qouW?fp}$nB(J{xEU2V$AT1n&o40h%NpM73_apdl{qHw%0}tY z?j@#xKJojPTRg)qOvl#?Tf(tvmeQnDj(9v`QZ<}L@GZ&@T=k7z5DWP>KdKboh^ z|LMdFP`|OQ z)ljmop+|Q$$i3;+{-o=5h4?@VXdvgZNA;h6rklEMBS50JoV4thcU_(z)&Xi$oz(SR 
z94i{XWV8Oe8$%ASlP2gGtNi)m?;9xo?&@9}|3B^|`J)j-D7MyCtk*^nd{thC?YUUc zjR}zD!f-hw9)yd7i zihsqh{@o8(dGNXcPxEZ@f9xGC|6z3r&VNOqKaT$TT&MrMvuAtbgVdil)6f!K?+Fp! zOakp97DmucJ@Jf=0SBH^_guU-Bl3};7b6e37}<}83i)e$ui}JtER=nv-~fP+6agfL zO2(l=3YSsa&-C89qU{fOVgx*H?b|-N{`?0yph{T$-J38W(m?>`SeEsZ)IY6lyraGS z*9Pmgg-*T%1fYSgQp2pnKc0WogrCQ*lLXXEFU~beWUjWiqJDC?)#r-?Z;&MZ_C5*D z-%DI^iK;)A+P_Dh^Cm0eBmyu+%%Sa})6xqcsA44#Z0}xeIZ^446p(92%()0Qv_hXfk3bpv)OeLFBNyqQsG5ET{eM@i_&}qfPtzW-p zGom$hrmAg((qx&LnPXVrVp}bBzuBH`tc%G1$J9Al>h&^PMTUJOL8N@WLrPje`(**> z)y<3YonQg2y%pO{a0tF4X=ntDZlI-T1@xwe^VQlgT(*&bm)~pJ@1iV&Up)lfe@>7)w)A8iM`iXnW52Y=LyykG5bI zaGEj!45HzDqCp>=&9gzia~wbrd(#m{0SCeH97Dr_3BADqz`6kPPT}_8_S|=sBJIYU zphO^>2?t~471m2MxSYoTAQK$ahIyyR<=hb0*ryG@_mLI5WMQgcm z0>Ts-fc*i>Cpq5p75W$!hP_z}lU6L$>;VippyES8Mdj_ZS+%Ti*yDzX1m|KQ!Xa6O zwYzr##s#i42jDd&mWvVqCL&FZJD)~+P7N@HPXShI{un$jaB+4F$T1#p`U;>4?(~?s zgo{C%?CN2}fYMh5&_PFY7_MR*4=H0Mncw}S<^{AmAgCB)wKP6D3P<0-!Bi=)emy;; z1%BO(BLhdR0XF1uyC`o{8}9)V0)R4Q=j6OOJKBLW+-5E>4#BytLN+IpxK!RwSnKd8OWi1IUFLEEn+>yJoOOb29HDF0g>)7r zQ`WPFcMrtV-_~KJU$irQo=3+uE2{D_aYwJLG8uLHe(X^q{YpXdW6Tol1U2dFFpS5z z@FvT4>=ukCXpCpu_}juxk=YEdgoFeT$9Tla8Ef`oO^yA%U6uM7fGjQm#Ugti5^&ET z95x6v<$$wZY9DS+@$CPUhm(;irXBr(^X&j*`VSdISj3F)l_$;m$ z&I`O;TwF&x!LDfE;O7^pv$h2hlLLKtIP(yw&;pcY7m-#j=S1Q+9Njc1 zjRogD7U-dAIxOC=-;FE;zJ3GF4YZtZMoDtLa9CYiqoJo?0;eC05w33!NO6EncC&JY zdmIRB0@bAK{QQixG>^}p$y^Rfljjx|th^hr{!p#3SD>-M2jG}(FlCbfstJfp0V1Rh zAW^ATS$e{y=z%aYP#;VKVpM<%l!i|B>=_`L;QNP^8 z{m$ci-X7iElHCR@;GWL;M7!y#l;HY$UO?OfEE@#CTN(t2Eih+p19l-$vT8FZg(Jb{ zthu}sy@8r8;5h>KMe_dM2#(vx%#4bK1;-8Wun>Ba`7q!?8tC!X0$H4x9s@Wx1xWn6 zUci#w-Ji$MYg7vXNFR`s1A4tck_zs~!1&-74j!JZ;OOAUh$sk-KsNguztQrJ{b;of zT=z~NNcI4`+pdqg550Sqv^Kk}255guO88PzQi4fq+FC%I3rN_#Qc^Np&v`bf?&Rob zf4)Bg@=k+@ni z!!8V_JCH68=!=6te+oG}^H^9|ByigW0SY1F?Nv^hg2dz#9IQn48^6T- zf;=~(WccUwerHB8fmIn$`kS?_3CsloYzACKBNsz~%vI z<)6dD*tobt>gox_*3%$T!ecWW*9pR^dYxk=m}3$zUjlEM9WzYMC@c)tI$u?S>!X9Y zMg+5;_oWK(O9y@nP*?%lP)DHNERuZ_lYl@>O^pbMjv)ZGv|`gKE+9e+kBh+CfF3l! 
z(Ew5yzNUTS(FFjkQE_p_0ifH0WfeSp0BYp)ZW<%E*%5|uY|Re2fqS@HXzE*#Kg2dT8!kjzczyN@L_lI#U2$%MS>^~BqK=Sh%W4sr7J^LxzA9S^(~NFc)V4ZveU0C)mUKm!*CVbE|H zY;Y{VVnEFquo|_=5+0yw-8%erGCopoFu4%*R1HK}!jg*Nwq;e2VF3vcUM=**JpRAr{psz72Jw%2f^IsXap!+o9|W+r2&WyL=Bc)_z!S~+qZHDL27!Rz+Gvk zXm5WM!h5S>YkeJA`($rLK~WJz1YI7bydeoNLI4;1@`Y|;8EqCsJ}Mxi8%V@1_xkls zehZ78_cQ%RNA{ptVNX59{Vf-RNPag1I!K5H0RmEp0TLL%Z2`#xh~g_KDZyte@YQGr zttEi_7&T!t0EZ7v{^fCtRvRb`GsXAp?eF(bPImh<)Egz}GRK;7Q2|vs01&E=C&55L znFHkx03%1IranH{oazE%`YNEL2!Vmg2Nb1KN=gPQW!!)sA0XHP%~mp2)`+ZRmj`Y@ zMi>E90i5Z%nfdt}UX0ld4auM?l2uoiR8oqz-~+ndAPNH^CGe%$_E2(AJP;rb&l1D~ z=Inr$n%YFsVO&Cjq^T)A1qH>~K}7=vIk|^nl0)kqZ4eR1#>Rl=IIbI<9}FS~!2&>h z%BiYy={;m%U;rEC04z}uDZT;CJ^(cUryx5oj~omQsN(@VE4STdI$$1DOuXk=i{EH? zR%r*IY|TY&$@cRo)6Os{pxZYJ6!r3y3MpAw29cn2>UH zIe-E>h{s3A$5VSe01N|<55@%}Gc#t$H}U*ivXy-U1NF~O z_Te)Pz!HJRp#wBjG{8v{$R>j5EW!tz5BLq@C0_`@;OYXEQ^_QrSOA?f--U{U@N1$D z&-6fY)PAe(kTF&g3WXvdJ@WoOHw%c+xlGfu1OO-d-6aanFa_$nfN^LAlurTmMQ7bY zk1ZZ7EYJaE@F9n}8=xRYqZ6?aB%cEJRVmT)?Vr)+2MD)DP{V`r00K%BAhigm)PU>& zib8~0PzERPIE39Mv3CQ)z}rA&Zb*6BjKf3s9{rAeop?B9F(56&!4q)cbZ;VeM|U^t zi&!w!?j6xW*-5`Tz^bYib4!y=wt- zd7zj^(Df|>Lk7MMtXq3|ae8!gMDVh9$`2^nAI`Y)!QroPx+XwO0>mzyCoCs7P&BeW z(*Q4f!$7V9#gQq;^w3ZX5J3i~bZ)-IA3^X2bHzjth%a!x*_@oWqvwG@s~N~BFN0-N zMp01+*fTs4gL($6@c?R<(R7jn%&0!TOP;#AI#5dhfGAL&W&G#`aEPGNkg>9YgPYe_88(L ztpL;#kmIV%=Wc+c2WqT9ko4f21|(yEF)c4=gM*y4wH;YBWdnGE#QiMEc+Twp|SrX@(l(pfnEEzEkc zognfbY|X&c*8!R7s5JY~knoaYhz2BUy`l4KX12FyI#PD8ATxeO7rw- z#@DYyclru|IAjml#@%l~%@yQq=fj#AIEow;;8j-3zqYp6pVWbU1{lVI;_tEP%t}a`c__o&!}$X(?iWo82}c5UPij>mpS+|B)Y{;Vd>><7TJHipEkK z8inc){F&ZM*@9rSwHlKw+HkhWV8QY5>|~^Yq?B`gq5@8^2e?qsg1Ny^yWR9On)Y`= zffd>(ZSys<9p#Okbs%_ft}g?S)Ip6R2;^-6COzQBANq`;P6;n)s&t6}vtx}0&VLWw z$NlZ2LeP5w*|ktphBS6X-9HGXKJCAL#2H>|NP$0?S4D6oRE*z=I*}YU$NcLZE(Mz3 zr8IaaO*-XYxYB@XEKfP#aBPvkzN*{eQtftjX-s_XUY#FNtMx7Cpr7J9oq9q??xdWk=y&*V_PZZy z8OJ0hf%s%ZEZzC8j@+jCreGEpzq0kif*)=&gMZ7U{w;v$ED%V&93n%{43Hw3)d=hd zB^v8IL~SKmq-4XW+vS+cH;2^@v?&*h(aBLG9%hZ!J8)<5h(qa>8wbQtIq0o+UND%l 
z{D9GzHq%CVUfO)m`o;gp=ixPRh@Uyn;FPm zWG85Dg7-2>_50b8dz;lkKK|Hn%}1^LzSi|}+1+003zq@E?+Y^Kdq`g{8+9R$Hdaqu z@%0M;AY-Rg&l~L^K}2zruVEeeM3veBM!{=hHM<)Kp)n*hCVfdhiuz5uquiD1@=M|- zuWJ*fezD%-UCY$&ABGgJgAKYL%*khxNdr!G5Y=W*eOjZ8In-zQc!OeJDwUcTq-#F8 zu6VJqsv3QEJ&Kh@wzS@?)N^AeG=*i1eR7!KbRtWxl{)}qWnoo92ri77`U?dWw<8p${R>%YzU)AX5*<977 zT=U~&LsxIk^P189Xfe4YEwNFXsmk>sg$l3I?YunkosCw-a__UmiQ?wVPc1ln6Tu>X z7@D^VF5fIG%d}n$#~Oe7H2FmNXgxc7k>pKMNK=f|{9&!UWpGxWX4U2Qo!EMTe%We3GMs#=dY^@8*TNL71_4d>8 zjU(repZb&A@3;J#q91^dj3CW zYO5Gb#!oWc#x5cRr5VOH%HHL8zRRsIu4DlxP47)k zdTgaR-9i}Yrl&>=MmJ5qSWLG@A6!oM9$*xP?rIJ1PLVn%?_*HSyNZ55tlTnj{=-=a zZ9EzkKpKWaC)V6?sUuFDflqFv3^`vm=K*MhwdtK<>EF`@&nw{DuGCMME8tnR-WP`+ zA*d_mmsQrH9I_)_=()b7p}3?Ceji`RblDi2E_Af8YLap>@N7Pcw{?TLuc)1oL%GRa z_l=(L?QcWIxlY)>2Orw^>Fyg|!-8iCOWzuu*Aq><-9sVIi{kVT&#gl1d|}5+7)>J?kumW>Y-S-0l-r!Of?8Ht zSlcpzs~umm$PXL$ae;B&#-*oQ) zuGbQvV|4w~o-I%Pzvovjn3&9-Jr4l?1%Wk3oZ7mG(|kHI0?2{y1)U;%o=lR+1^3iw;>0SfqOlh@w^ ze%*5vl~H{FF0TiWQ1EYJCkYKF6S@!>{sv1!>w%okkOU!Kw3=V2h?zJ|#+R?`xm7&b>c1{M zFZ2}gzb?oW9)DQqKiAJv2B*h=y&}dy z9^&e4Io)5+vLJWkNfBIp+Db- z4Q^x=6-DIcnsi2l?GOrLF*t46rFB@4OR1`k;WH@z{qbUJ$t?D0{HmHN_D-^4SHx?Q z&7FfsI5_tE(rJNfdNTTvacut*x;xcTN%JmX@TS`2QY30StqXzpA?W0Df}~FVtyp z(SV&@`J{TEXr}sHY#Q8`EGjCx?DD-t^}Rk!3sJ*vUGGmihuQk}u11LrklHrd4_~GE zT$rbN9mpUJB)@1c@i}I^2Jf<+ay}x-cjO{j@IhE#AM4$GQpr=)nb_A`J7xK2iX0@( zb{Vmg+c3`h4F@p)WJP)`EG#Ey=QmnfJ#e2>0T=j!&=ULk`T6BKZ_D!0=BW3Uxw$#9 zix4U{#oD~<;{7L2pWaXEcNsR~7Z5-eadmZ#a^pKVcjTH0c8Ksu;dlNJ8Tk;5%Td!? 
z#}cmd&&=1fOi5sOC85J+Yh_&&?c2wX2?!(|9XTT+BF?;;??mMn6tw?x(24O7zB*Dt z+HFSIkUi|e-&|Gpz9x&P&UlxlyNFmg{aR8&FD!greo;TM(C`mhepcD2iscm-v*`L> zIrsGT?jx>f>F5sHDX*ikh0osCEObeJcBTn`Y({7UY_Uw=%DMbcw zl?46T^z~16rVHQIJ&x`E5lJrajEO0`u8ufs_~*`a#lA;V+^0_lJV~1MP7Iownn;4D zbB?v|SZ+i1Qek_t#PsUo7y^OV4GNvUE8WdY^+10w0B?*nAI-h`($`^v15sORp37Vifr3?p$4=M%mrie0aWy=Lr+ipva9Nl3YBKLB|JtRdw}`pFf9; zZbB~4_UBw?t>x2%y`?v5a&m&ONO{1EWi&L1z;Ga@rYdS`9gc%SQXU@sb#--M$RU%u zo)J=%-f=Rqbo;h9C$=ES`)jdECoiO=q9}|AKt0O>gKgPy_`>8lMK8jo}V}l_c#HL*a z1_s`{#W28Al+el4`^jgltWoVD*rOvO{@sw3X2fpR*RP>b!WW@Z6z+iu2}BdcI=!2t zdG-zSfgvFkknWGI@U{%G5H|#@#@xaJ0M1|&-p`aHQCdbOqoEheeh0>(?(|y=r@JotKjnlb9&u zL2+ctHCQ2I!V zir%H7qDtnozxx}l4+-G#c8>ryp?&)F7^9Pfl=OxB0qvVNZ~7A0k@H5ods$n;yH+!@ zvH}Qo-Tah<&jZKCRC5(mWOa1r?^`mnu$TZ~Cdku)puKSV{ivk*;yFry-*~}WBaXzg zZM~ZwF!~qvEB6-9I(b7QBPsSsKE=fiO-)J5%A)V@?}wA}7AyItq@;i_8?q7A?$&Sr z-r6!;?Ti2;^xxgJnJChhl9R*GL(EJj2)c8Cfn8agAg&tER-j~jc1TJ6l3+qHF)@Tq z5C0AqeijM8v#TreBT~{{=9;#D$1m|IB^>r7Llt>CBM|AWv`lf7g7*15hRy} z6vS>rRAC|0rH=?VH#Z1SOibI>ctLkxADLYmSi4VPO0fV|Zq^5D-D~UX4&b#&uhrCo zW??3SsX{+~{D@9U>h(UI59u&mZuLV(u5N;KKUOc%?e9zEWO)Ak`}T3U#rSidvvx{{ zE^^nU!}H+E+~K@5AFz<*6{dsh86gxarXmPpuzG;A4Y)`Wg}wRv`}+YnBPoQ?UphNG zKVgznP#`}d11#yCos%=XurL4+{=#AH0SpFXdRH6W0B`mjI-De}pZA0}%{fo%fbE=e z8#A!03=Iu6$_(!-=EVYRfdKY=55hnk-<(@QxJbJD`+s$bT-&z{<$s>H*+4Ab_$-mA zsjIJ?BKFU{?xCVO&O1-;U%`EE{8bLrR8?PVYs-Q7iQJqIx-a>mYLx1?y@$W~i;4?O zRP!daYwf7gybjPBT;V40g(fE5#ssjC_yKKfYdoFd!~xxq`Xw#7CK`3HG49-@QSrzt zZ+Ps1*!lfCy{Za7ZBR-~47ug{xC9x2d3*(ea)-@+MLeB8S>WcljfQdUlmfrTZvtSknQ1z0p$X|M+o9pS{BDDO{BTTAMp zSaVN>#wR8|CM5jqj->~S<&KGhJU>4V5?ib1k(=OEWKe4Q7T_8`hC3=MD(oZ?jW^c_ zz?r31)8(y4_AU)q{u_sf10pRlGBQAexa>^5($*#gm_nxDb=lE)$3h4&<(l?JhMk=q z$!ew&4J^O7_&wW}D;|)Y1A~IFHc50oEG!0`A8y?8mGIdTGLP+oypNACSQGCP7#6PQ zVBSCZGW3Z1TgE6rp_GCG7TC0Y!%~1-{g8|hC$O=}NynY3GM}!Av9VGY*i7q~9q*i7 zd#Od)EDY&32h7Z9bw-l+)-)d%`5^EJ2@iW1OZ01IWdQh{V0qEek3b09udfj9s}a1R z5fM3nobF=~93_6SeR8Y%95KY9FFJI1X!w3j~E;j7j zx98P1^F1pa&_p(ayHB4!y-m1WJUjsM$SU7K5Tn6b4`)f$0Y;KmRHWnLimR^XDK9T) 
zIlt`q@x$(P-t};|0bXvqDEz&>-47#btEaDT?ME1H8+SCI|WU%!6ca+Rx;Hc)H7`izCGc2kAI>ikC3u`1ts3j!92{qF$uc5r~EhSa!W(@ruR3 zz<^B9?NQ039AL=h%X>1Pa0WmT0<<@~MO0EmtOWTmXG-kHd0%0guEPFh|bQ{-YeDuK_Q z8Vn_zQX~~botMpcq307LNLx8TU@tA{gO34lq&GFCfPwlPWl_e&$9DoKqoJd(RL|OM zR|t>H)j8g|<$tzB`Rwd$Bq2*Wok$=J_E>KYSOu?+Mu7%FPE#;Mx~P_H@>j&@TI*yJ3c=CEhEm_ zG!!lR01%*3dPNg+nk+f-zwkIR)1Q(2XMJ(*h&61>-_)%OcWEw)8ybGcXO(Q8O7+-c zC{$+zk4A}H6QvDy8AS;ZB#bEiLw(}SWZAABn)08~0< z#RCW-xAm{OW{puaBR2d2(1c397o)>0Vt>6$viocC=6}^hdJq4vMHSDt43*L4qBU4C z`MygEYR$(M)3w>AcU28~rt=e9|N4IL4~cVxJD$3`R73Qe;Ywe|psu+k%SFGWY5ix;TN?(#^%xAHoeylgp5LEc%)aee5 z45G=}kM)jAt#{eN!HIcww)N~PJ^H|7@b|bKb((4F8S6o$4n*WF^DoLt4#T>kL~5SG z#kM?VB#PvaL$|Ax4{{Ak(0>!tPwsyB>FOy$v&sU0dV1PTkm8oYq@=n^XIVgo-rR9_ z(bLl#1S*Lyd$qoy0f_w1e$K z)C@H}eaO_*RIl*cw*lsr7`2dfwnE(1y>N^M{;N7U&C@ad4?03obMo=hnXr7wT&4e0 zPU;dxENbe5al^1qp0?m|jx(Eq%t?&~-XN*#=}7M>J(_*xt0-pGA1v$pngp$)6Fuj6 z&9$HAd0+f2e^}%F2dl)-Yi9ca8RnOibiq$?ad6ri-48=Qd?49x?la*8#XW!=;ng2NEz~ zx59Z_U0GQ<6bfx96b2%Kh}VV;lxh&bU!TW2&CXVW6eR_qyPkW`U4D1|Lg_j{tsB3S zT1^*;Pgr^$H8|pS!>TE?O09Gca4L(MJe{Rayt^}EhbhahC24GybF=@md>BTe$TG3Z zuKmZVITz>cKNCAgp9NZ*A}XWq0t{(xYfI@O@OJcTh~mPGsIG?v zvKkgeANxZBpFN6HW|k;4=(yzfNp%M}htwb%pNiu3h1UnhluD8; zd!~2YI=w10*8C$Yzc1jX&(6O3ClhZbAf4uEPwx%b^jK;eSWzd|6q)=g z#PDTqblE;EoL84nq1Px3gZMadGn#7I!#q-NE|Y>9u7y6=ZQPxoalL_>%9$Sg%wjAh zogdrnp|SO3P*ZqhtYo^mMaE&I@E$r+qifXLW|7QnQ_82YAB(C9d2YN)>b_H8^BosY z#!2!a_F_zACI?Q8uFdo6X2pd_4vTq{!hN1T9lLA#GySyv0ilwLvh?sTD1^J|(jzTb z&*~1TSv_p+)1J}7`|rvsaV>ZHJByNS))X52>XSBmwv?C$l|IkYD>HmtD?G4K#p|j( zyo3Uc7_$|Ax!ag}-IP~z(3hosCp7uyNlVk{9bNhkchYHPWeKCt_h-X9*5>L$?_mt- zzPh&b&8w{srsLJ1gY36vF@_a9&znnnshGy9GL?{MVsyA1!1sCf_0Au9ba1Y$uKoy0 zI-+T(>j!xtSAT#)&8@6xK^YkyjtOL<>##K;*dD_hi$G$7l$1^%$M3R3l&6#iDx||< zDato0Dgnc0kd+@{pwNDVhsR}x0NnlY<6{d8i`%3GiY}l%rcytDMr{R}5{Szs%9}G( zz$BoGi9B8KjLKKZTe0KYa}(T|t-;P44eN$bKYR8d309FuAQefLk(qg0(d5b}ih?2< z37d0nm zEWrBh+-z;>LApz_sYiq6Y@Ex&kNnjOK7ozyKD1q7)b;R7QW7u~_{bib3?zrO>Hl`s zDR$1H;9q7p(u-@)B*Go=_NBjJWqo|9)B@?I-Q#0?CP3Vg*L|gdN~@l{(*YzO^YQ1 
zNke!tBDRbVI(0U|uUjEJfP9FOAUtSB)pyq^Mf=x^(;e0;b5f>0DpxhyVq{;_cE;_{ zFK28}x+9S}es3y9`bF8e>4=JFQCKC`PgpkZtSdU|Z!f&+U{4UhYE@8hril*}-;uO~ zd8UY57R4Od7A>nMg(xk~wr=}Y>w9`VMdMQTt|fC_fz2fmhvbinjq&kHY)_Px2tdcN zO&%#QDn=>z#fHAaPvW&*-&<}coI44iyj;o@i!O=J{r=hA+>s_-D za}Lz2lUFTH*`QCqAKS!`yPk^fF^h|>`LnfOga>BwT9O0UxsRrGdIyfrRrv(;*Ee>B z790-rn9~_xEEKg}!FBUto;#*72@G_x+;z}WV!u7JR)zD<&u*~Md+KxNkI+wR%L%e^h&672q^Tm{xa{#utx3~9u39>+iX@(=57IlR4pcxf=jxALn40|M+2xIZ_@33k7SsDN+LJuuK|ShlwuLOC)qF#-1`=d*hc^kHmr zGDy&<85tRV)))j#tu{4AY(y*`-dkH+KYMx%K^f6rtA*U4HMk`yd=Czw6 zQk;3>uMWQq-c29D!s9x!VTEiYsyg^o2RdOy6Ls}u_;BQMrxcOww32#;_>#T$K{ zE+_hNMdAug6gJtQLgoGpDhQnOA(XCr2=st%v%Fn$R zvXLGIdrR_1%A|3t%#Yn$Efn`{z9}0Pcb*(_LaA*fWcVW&A!|8#dAP*HGVbm#K)H2m zFNL(f0@e=jo|rVt?_~{3TUb1&^u1&U{xpy%*tIZ+34aS48xvp#op}iZcmsga($gb9 zBA!0PPha(phd}_w!y5nZQ+e>4sO+4OT^vJ4h_0aJ6r13^`QbNV< zYv7FSR9RYaW<6(2P&dJz`fJA-sN**0rQx=F|GB(|_U0*OdB~^m9Vc`>AVZFc=MtJi zgDSX2WW!RUcMx)6Z#~$fWB;m@}}QxzgdA`*jcnwP@E5 z`_Rdw6|&_^Ay3ETh8*Atmi6{p>)|BA1cqY_@Z*5}1q>p(XU{^x@&nNgEK(JHy_Utn z=?WlXwn*$|e+4Q62LU()Sip$_$`yGT?I?UVKR>^1bzh&2D1pPcC*AsZ;sj{%BX=P> zllQ2n$RG#%tYo;F3r7^cxn_lY<^bBk!r@YZrjcvrgw z4F?)^h!rLqJH@#{aS;|ycrla`rBXTnb#;bX_|N>~u3GMH{JO!f8|H2IE#Mn$txxV+DRzq3&G z$^Y#&lw6|b@aL7L2I_-NadAmyc>hj6?;RRQc-Aev`(o}>Q|QizJAAnxKzNGROMrPxd(a3 zF4jQEnkIm&%Ss`CPtVhr{g0pH_4G$)=VMCBD9|UEsWbnqEuJ#X`##-4xNS32?}}*xd1<&V`& zYR*^T3%M0Ah^*7~P;_-1ioKU!G0d`~EJ*iwZ>z7}rJa!z|Lu1g zquwqbVNHqO$mT-IR~GaRek#0Cg)?RLpc>$V-QuRE^yK8^pp#0cl-lQqjA~|M(+P7y&2{HY z>c4e@LF)$Odoye6oQjIr@83oF`8`rSypD#Y#>U4D*oeSZ0`5&%vN8QJ&Of|M`|trBG~ZY^6Bw@wZN^$^cC7O=cBE+ulyLC{4jHUm?GWDs zsVLK;n;vvEM-(C|!g4UkHTrEVv>N@K>6!91A%QJF7WEcF&*Kxt>nx{F^GOGaI*h-R zMKf(5TKTw>=jISk(X`DEeR%OSHVz~5N%=?TjUqBHx+tt!jJ(m!RpP&B?XyG6w*JB_@`?Uz*D5Js+>YxO+I}xBrBO4aOuiZ zn#1{}PpxUS$f|G9rPT2XMVDg3WSNVRAPPFeLJD2StW=Io+7d#Tk&{YQ-X~#9vLnq3 z9l7+NrrwzP+4v%byrgkJ?u|`+t_J%X8J>A?7Gd!AEP%86(Aw1wihJ4M8Ajby(^KPD zDAi5(8!sjC7+Xx*Ro=?GA0jQL?u{!qwue1!lf)}hP2NJ1duH%5Tj(LQAkg0*5*m;@ 
zV8&r;QffCrpnE;5#p6N9&N<;f`77Jm3MDZNc@Zgd6_z0KHDdy+`q0I3Vr(gq4WnU~ z{EzefRsp2(+X4q50+9HQivS>JfFR(OK4W27zdBvKbtWh&l_VuQ(`Ny9eJ$uu;ss49 z1!?IApihYWlt`xr969df!G%T#APjgK1w}GG> z1r5ntRDf%YAe zvWg05gQ2)<@pK)0HjQ>Z!oBba>K?OKO5P$RUmbD7UJPV&#U7NUv zi*x=Rnp_ba;^l5N_pxh+d^Z&_94Nl7J$Qv<9|Is*KhsYn7 zM=*AFcH|aZ7;+Vo2p)U{)hE|Ojm2!4*aR+f<)y`qNkfiA6if}^xraqX!4GG?sTJH6 zVbyCE+;e>W`cZ{Rzag-(v+P8DLFLzVphFMII6!0#uiF-CKKcZH!H2zEs4fP5;dp>oR#yae_ZR@VTV1~tT zpj>v%pv&JB+C|y(9o>#8ieB$4-S%hfir*s>$A20FF_x-;J#Cwg2|(Kew5Ow_rH_rB z_Lg2fXJiCs0BD4B8g*gR z?-mLAG&3t1Wrpsft&ui0dJ+z_0MSFLQ_gK|e6eL5|7Sg)$#lTc`b3bxEDWDBKe zJHB0cKn(i;1my>_{G3{SRBnah)Xj?>2BwBnZAj$FQQicT&5b(auML|R^)3zv8~Jne z%d-ERQleRAF>MrUV-9#VnQACz|blJPiAe*TvPV3bEu-a@ z-x&9gyT(w5Yp?aLH=cLSXFhYjp>i^x(O$fM0RsbrCN3td00V>A1Oo%h_#6?q!+%p~ z1^k2HASkZ%9JoB68-)P>CvX%|b5yi3adgqQH-<5_wy`p1a4@ttHnw*7V&ixM-^LF- z#Av3Z<|t%utnX-MV@;-HW@QZAgn{|>ecYFfnUich;8)GBDl%4P*7syA>^$$ec$m3z zT(sL@V8~#^g+D2|rXMbQIVx?pfFE3_u{&j0;icfP(_xUA+Yyj43K8B+=yklSEeuwx zpYC+c!X0fm0 z(SAl^Cb6gkhf>}?2!fr8uFw5`@7Aa!rf7NS&eniQ(n}x%;01YxjyxIRrx)Uzp@dA| zkSAAvkHQ=k`IE~(v6B(&$wlz91fE@v%W~p1kEo_c@~8|SwbYX)w}vR+y$hRr!_rM4 z6dJnM7GnOfv&?pG>x&|0upz=|00KQOPJHN-4|=8ulVhc(rJdeIu^J?ouELeWA~$IW z_1SWX>eVeG8E`%MHde!+8hzq%NkMKyiarZ6WYVVXVj{TqiWwOh!lwW9L4xL&FJFGK zzyNFdz`t~Ma&|V~6MXu6he749`T2Pt$fc(2lfjac!%OKWJ?UKts>X|^C&Lpq`gJ=I zq`hwYz;|6Dkeufx(u;-wx4i1&>^!lw^&0bOs>{jw z`GHAUfsr^_emC(?%D`^|&`p8sP0|Nf@pD+UHgCSR`)ww-t(ecQd$~!sOV9xt(AH@m zDN%jSZ zr6H0{%Ox%hNfc84H~XyRS3;-0XrT7ZxNG6(>c$In@w9 z{W4y{rG%_rMU@);{h_Hq*L&%WjSVjMi??WKXu{+%)yEFMf4%wIv1Xhy*6MW!Q-4s_ zWZSfyWE_Fw^uJq=dbo}v;54F6|jp7Ln>6~!$r88sHO3hL_OrlvGz zW@bwsCoRo)$4$*w3l>Y>cSpk+ACmBNJ!7h>SiNs2K`%*3G1^T*UcK2-{1Qq^uh>k6 z$v8Rjil+zjq|@Hg(ysp=&sL+yH5pF!GfO>OX-P{?#>ZvQ5O#LvfN6VxoFF6m^WJR5 zEjzg1Y$v$^e+Gt!x0+TV#{|2OZfsc?+dg6!X0r)On5&c3u`UdO;a}f{)GPF0xw*NQ z`_z<_y8B3dq=4^Mn~WHoZuF>?wlFPM8ur1sUd@}rs@2=R3CCkHd4NJ*;o@$b-{5oG zQ`W7x)5X!q@|T}4&drH^`UHCiKA~Y`j66KF+!+JODl6kW_G_TOUvIw3*z->ZQi*zX zP%w#mh(%dLkk4J6Pc|?`z*n1Rxo| 
zqvLyB$i8aBUS8VhfyO^D2J(uTi{ZBj^sV@lAbi`3_$Mg z?Nv}v=v=gEL;GlD#qc?j=nXr20v3fRFj=G(4!bhcQpq1RH3`d`&t3pOa$B?kI5ILe zX0hAouC`|Lx^=9os&Z#w|5BrI>dNcr=qRVA)(>C^vAwvwbr8VYXM1~A^kDyu4TG@o z@UaCj(D#ACL4)CBmiO=9|5N*GZx4&0t~yZ{DFqhRr?!D9bM# z*9*sk(z-K$(t8+~q@f-@@RAU)6EiY0reYN|Cl(gJI@ zEI|0dnf|er1KkH67-ePU0Kqr>*K6>xv9W5kmIUA>2aF*73aiE1>Z=ypbwQww!JlT6 zZy6YR-b-6I9dR7DfOV|fAkO45{ce2K=Buq~@2roT&t<4(Glw(zJib_2QPI+d_JreYp5MHsr+-B6a2n^y&r{hJ zQQYuA3_c<`R_y0#L5g55fG>Qnak5m%H7ueo0-SESj2{$Ov$NcGf8a9EYWo7e!{P=oyW)GnG(Ru)` zkcxw&wyA}wD+mof3z#GTV7|wYvE0H!WUdCHE#zk)8xO=%HQ(+R->+l%ka2UT08_4~;lQgt0suamFD4`{=PLYwCDy}7{DtFvvAx#epPU!kwa!&~BrIlct~N10@3v4& zBbYifGgBSOZqy&mU$@8I8!FdhQXK;VMfFT5hq!FCd2!oG0T{^1&3)YTzQ=>P091H* zctV89m;3P1gJl=-8sVx8`y!o~T9Pv}BYOZHk(j z;mg)1Wwl|Ek;mvVD}Yf23}UArJd(^}U2ScmaF5~&S?*xU4vFmem>}YUZ=9MayXqkE=mBJoTTF(y0vAbp{dyby*=+E1$Z2iV@%356 zYxc0C#k%vm)rZWWAVdKAO-U<~Vq#pfdx=QsZ8FK z2k6Oz$#SF9*6uE7mc3D~FOqoErR|Q`us324ef&puw(O!cUJ|rvhBg}knv~NJLBJLS*zSYw?(PbU*|KBf1NYPq zE{trZqd7G-2`wIAz+)_#wz(|XA#pC*!UH`v1RpQIXJ^k~k>ZX6e6aU*7b&az#h0Su zVgi6)2hhb0IG^$H@g?_z z(w0mgc8eK$n6j2DjS>|)fF=HRr=IBnSOfLL8Ph{2KyC-_Z_fSVBtKs{0=_*C@an++ zpKtNt1o=R@fmR%yozDj8$}tHEhekM8hk@qpnwH~}k`$Gdopfe@3)cMn2_vVY6FGZe z49xH2H}kDbYc$j=4W23HNp%2(1K=IbOi@oK8IP%mYR>UC)e|!8(k-i@vA<0sir7Gt z=?WLz%=7HKh4E_GLaLgmVC0ugj<(hIX?|~`m=H? 
z{`CMNL!kB(xRA4cp*?*0Pm@?NpDg5pwV~RCv`ZSZCy-lb%bef-&=au1w3YVI{g^~FW=gZOvS558BO|@{>aw%p+g4X0w*VPV-sxGgxHi~x^9G-+$^!KB*|TR#%F5au z>e?j!Jcnmi9>K8^q<9yJR_9g6r@DU@>%2_o+jEYyFD>WJs1PZfP&(iZ0Ug)fJxAsc@rkMD6S-VX1oWw_4v0E_;8q^7I zx(LO@IZ2drqx95S2J>*{M@}C&;jfkn&a|$#RIFnEJ;Pg7dxwWc$4d=c+uN`eZs#WG zB)qNz5E3AE08BjT?-#Bd)=OfVnu~5n9#n#r06mr0&~P8M0;~rUEow=5`R=mID%HzS z7-sK#*R!)TF&UY>>gw|!cP$#~)~6Ux zklFbbuFloN!R&wutPxf34yAECF~!c(;*G>?#XbcF#{E*@LF|KPPtNUwb$eQ~@8>+i zq=k8LQSwoF8KT|EZJOenp7U)}^b>j+dlJ{=XS;6J6tvJBUn46n_v9Wm!1Rm{Oe0=;6>k42M=9=Bu9-RbmA7CT^<(2mE_yBCRNAC%c zGhPx5d3kvVKlJKo5g-hJVXj`i<4=;$76tr#g@bjq-KHon$(RV}4zLlA4m*W>63#<;OwoS)+dyrHUsgMPA z%j*$NZY~LrE@!Y?kvNF*_3U7H8m%5S{}%dtYu!}M)!C2&Y;`z=4XKOyi@7;DKR>kU zn`zx*ihvKlot<_MEkH?s*43SVv+;xv?-3-!%ql5~sI9FXdMTDs;Qsmx5E&dgq(PGx zf!X?EX4bIfjp+Z0)H%h8s>Jh$Z_zC?i@5cU*SQR?>`_rNE5@tp#)|kNe>8-ywPY{O zs3mK;i*7chF`xLm>ZV$h<97xls~nr97}#;~L6h&_l=hK*Vj`TysoIO<`KnJB)Fr;J zYTs~2?J|pOS(Nj8n|u&__gw9z-Pq9fXd2%|$s((0lQ`c>zK8jXqlcSUJ*4gfX|aAd z&yNmN>p(TeU7(yrwT+DePQD45Ii}V#7$~lIp*fXnHffih6tx zHXaV+Ydz9zTi**a?2l2h#1}~l7+fzbK1f!+4nDm1P^~OhfEf4 z?=7EC$ID%tv*56M3*vK$=0=}hdfMmDKliGL?OUm+zOT%!XTZGA31Kg{?$soP>k_}^ zOsckYo;3|{bES~w`-ug!n2vLQhR&JHKZr70sqcY?zs|LqWM>`wjePgef^J!N+i!UlqhMO?OVq#w zZ$T{hzIs$#ues8{ukQX^zGF z`oiZwMf8x_H6wuw;+8LYIK6a;X(sb{)Y~kOTGKM#9{+8Bx$8cX`F5?7(b6Jkss1uf zrCzMF$*--&1#|xbyP0#z*1_vl;2Exh@rNf{Xyh2bAlDaEefR~Ol z>CZTa-Io)a(t?$=YG%J%9y-V~9~4=3E_$|Bce+c!3XL5eReTDc#~;+i?I`_wTy{2C z1zlHlBu~8s^xMr<6fb;KN@nXH}|uPoEE&f{arH!#}0N{v_2!xv*uS zd$=P`PU@>#v_zLv0T$_E_B(0<=Xb*{v3)G}IAU^+4oK2Fl9=K! ze`5sh1$4g~5|_?j+<8yMRAKK0zlU;SbgD+@2g$YMT0C#Q^>eU%!E&M`KZ`gzH^dqF zAA-=2*8$J~KwCmdNJx-MX7+m{6y#WEwc45iI2b@BdX4~Rxzd~h_zOU59i4Zkh4X^} z4D_6(Hdqqi)8aBR)BuVev_Xj36|^8Da@(Kwbg@ zFc?f691#Np1F%E0yZ7qURDeGIFh~Go%Oz!LNpEayY|3{Gv}C{4j|J#oQTq6gs>yp! 
zP7p^M5YRt5Ick}7F$)Vipn0%30Adkg5s`5~b4_H>wB^YFq@73Ncm};Ysdhaz zfB_<_M|}@)n{7D`-W;lTEscOfP*<#0OXowOTT6)-vshNNT-Y#PKk9o}@d$wjzvnEZ z$uxesN9^oY7q5^M-{rqLzzuE2=5&=T;9Id)Y3%rx%Msy~(Jokv<@vcOTu{u%C8s33;s>;=nn-b4dMb+ANVK!LaQqUh z^WPxF7!{6@#TKiJkVEgPfUrg_1#K9W!_Bu&EX4BinaE-jN5?L<2Ga9nfXZGHoFK~^=zig4N^ z!QXx;J`iO~a(yHN4C_}yFg{`Wt&c(Yw{9a-)wySw^x4PSjSp(3FZ{noK(28a>>U35}@DM7(b?nTC4fKG{0 zz63G@kHVB{>lu#k@isavAYsi{o63T`lvP#P0A29-MZwRX?*ZgY6`z&tp11c#k^1zW zz#14DvY=)+o$YJ&wE(hASsT=yD6-7so`uEK8>V5}{0O~c)N$;fd+9YHp_#R{$$DoX z5aI)p9iS=80y}B$$QcIMMvc2Ot7TUnPX7Fd`#l@vRwbWK^+`W1?b_LX@(dQjH=K+_ z($2RiD5g04!_2aGc=SCwYB~zaR_wu1e~*wwcNmteRB~$yy%E+diHk@}s&Qo zAEjOs*+-(`V*l`*>nkkRk5)%*bS};j>w-tHMhdMeg?vr(=X3j(C+D&utf4CK=ILcTMHal9YMvDB}oscA0G&-!%yv z>7f$y#M}7XE#6<2`{cH^w%&EIQBY)`WRzC`5JD&{MbV2mC&FmF)ia9b%THGJxslCW z$msc+PMFNTL!ZzE)5?RM@~L_3A$>vz%^L1|^$D&uivRQiWZq0_pj{dk$3xmg8l&Cwo1I-F`dLF=|!1BS@Ay7Lns_Rlkp*4}iw#`?q6hC>nXX@|h z%HCQgA3J|ioD5SBheIDTm z12WO0z6z)pfSlUhd({#l`Sz6o5LN?e0>a-fzQ`cRC2eT+Ew8LZnx!t|nC-Fxa#gL( z>L{U2^Cu0B*ML9mGT??FN=izi0KU5!2z{3Y4gjHauF`N>e|FokX0`avAW$2I7}6Bw zc28hyI;z!yfX@+rPs+GYoUDFC141*X+|cnPZBR6LZb_%7Y?myLt>7LgI#Vd=x|Y@g z-FLu~2YKNIEu-0vKl%$Ar@;-2W;eec0v@p>O2)5mw%E&S#( z?@pa3d$*sZJA=ZpW@LCb#!jc!dO_?oyiH$m?SH7Bkru_QzNqC;V7!u-p6UcxfBF>Ycrj_$5zLFaxh-sTBec$J}(VR=Gn~ zaD(|sxFpwbunFNAsHyvWxS4s4WWIh4TTb4DKP;$>I~`GixldE6c4sqU8lagprfJTP zPOwQoIfWtCM?6%;oTQ9Mc%$3A>iT!Y$4J+-9ND+|y}3CZ$kEW_qM+c?b1}T#KRo2M zSnLUlilX6Z1dqhYy%h!oA0YigNlguGVj;kRfV|KEfZA>LvbddXO)k*mN`hGf@%BAO z#oxZ#KTk0L)S<_$5#Zh)wS7-dUThkfZi7n9P+@oCU_e5sU9>*HfkDqHzR_96qpM;r zF671RJeK%=()_AjRH7>57cA4XshhwDQJ$>I$fC8B#fHGlqlT$)jNzE+}^15iXv zVSSn1x_Y|ov8zCj5ABb}QVaA^c@8NMs3O~iH1e9nWhb&NUkEfij(I@6n}@1UZnyCo zBv?Mo;fAhWhBzotq~~JHFj_d-D9BkHCojHs{;UF=zWWxbmcP@cxJ&w~PUTNOxH@FM zDWx=ctfeJH%;zOx za!fpIc&olIN9@;cV;Rt<*n2NYJmQbi6B;7dou6NO(C2=MKE=T}WXR8PL{g?OiDAzx z?omDP2U;0)+>~&Rl^Qav@+DPY78Jqovc3rv2)uxO3_cFFl z0c?QF@i!&|1H*d`4h3c9z!8&2-U(#Is;m|{B1!lpfMwAcJ=U^<6joPP-xZraN6?mp 
zt*xmUUswq1F?dYk&doh%58NgCQPtr$vHJnds^aCKSoOtpR8zuwIQ|BuJPTuIp zRZb<)>;#eXB`$;a^Hli%tpyOA;z(T{yJViI9*EE#NMkCY*g2T22;P6Yi1&4K%(Yr3 z<#X+(`b=BJ>w_OsS{%=(kwQ+5*0~LP;{6XlSnKyG2EJ;J8}R3BvNc`xJ@CIQfyO2D zI4sQ>2wr0dnLE#(%SE`tJJ>wSMOrrE&X`*qd^@k^HytOI?v{*sQSY}$#x^89PWa67 zizMYiE%v3c%+9b&_02Y%4nCH_3MG;#mzZ`_TzZPB$4k|aeT>29sIqUyeS;w0g18@? zOAX(W+sgn{*R^b{`$nFHso97IOefPBJXpxGSGe&0>?dw>FS+1^|BQQ@UDx%MI*FZ095w(lG ztU={ysZUI}!VKp{FR{|O-w}Zd+V88?D(d=|*Czfc*sH&o|GTZkWA+3frDO55ir2rhqwx9M-vdg2E|6;lsOk8`L?@tsrT{vvP!OvAQoS7! zuWR(>!7MhQe*!|OnWg0+db+!}5A`n3F~Ec6R8)FzFAoSSAq1;QsYOeNI$KwhSAo+eIf`Xg3 zCBN&z2l^~YaIJ_-pwd`u7%Xo=C+$lP!d!QA6A-#oEXA#Hb4@zt6OkN^s*OI;%0gDj zCCYVWRL%sCn`R|C2&rD(n^C+$B`Q4c2gSjv*qSnt8?L6kYEhKlalwmeo0ILxo94Ys zM6N{S`26CSBq6#sW!?ztMU@*#*e`X=a+q$sgh!iGQ6D|s^CrY8IyU9=jMS_;bU`4X z+@4LO$rC}EPE_FqK^>FvXoAm7dPx6tY57=?T?73?*}wuCp_(#Dr(HApp45^C~ zADwP2EFqeh*mUTshz*i>jto&;c33Vea*`cEc&@cw?c8)x7iNsCi7TI>gn@u6mKuwT zT|6Sv%Q-Pd;H*w#vQ}GW3|_&k&I5@xEg6|E(0Vb&lpM$;Zho+-k88e#<0$b++`v>r zBfJ_CV?t8RynxHmlHG3j?lWXy7$FBXL zTD7bf@eli?sBXuxf!simzcs%SAU){WyA91McHg(`7;IQz>;~#IZm#OuygJUq!@>-p z_czPs`7#-gVnma!Z0h?^@i^*?T3snAsd5NRS<`VNn>Pr^NiIMEXhlY%n~bK512To* zM_&fb+UVqDR}I}q@3{$m1W{g^_5Niw5cW^*cyu<5F3w-@wstTAs``{$1JWt+X2nip{Sj_71O z+x|yhUI8sTo$|H*>*h4tCF7p+s6thDgM8%a*>%qE-kr|lEq_~xrYOmem%C|H;RF*h*qOYb9!<>y`1+1dW@9<6)wbbC)PFgF%Z2lJL z8!Uk`5y#woW<8*g#95hd)*R@Nsz|$2!`s7s$$r_(KEk&jUXjfK+FfA8Qqw@o3mdo@ zT!FD_O`$n0f~6{YJeR|S%$Sgnba_MK?VhxPOQfd6GAFnCyxJC^tE}t)KSu#n zdI13>kUIZ}`mYi))k-l+Ag8Rl4ZSMdh%2Z(|4dDGR*c90f}^$pR2du%#k^qt~70SG11e021Sm~kmOZ6 zd<=T^^qpd1py;Mj$RLC*=q0w{QJXi?yMN_Yu=Z@=t0MlJ6ZtE8?BCA%Kk_rBKuuZ^ zh9?JR*Wcmgf1XQ<0HU5Jm(j2Pvv^KLNwT8wTtZgx$y35x25LZNfid59E{EMQ0w`b0 z$QZnM`aoGcFpmbo-I%7RRMdG7e&CM+c;$m4bg`7}=rN1(iqLT8b#|WX7%>0HW z)_wZ24C@GbBr(ck5e@@?*?;QkczDR+S)`aszZ*X0DzC4vVM#TgzT;l4ku44+QUUh$ zQ4zb#;N@+uu4LPVJ&w5r2AkvWUvzwEPv#VBPyhF??>}pT?@HT%o*}v;3;B+Y%KXT# zaeO+D{nL~@JgK9jquvKL05pF8X0n7$c{+bupWtH!iFsYWkOe($J%%X-Xmz3M2%aV4 
z>EoUQU%WbCjSz*B@v6MG$dO(=Lr&Rs^~%-$WtS45!RU2n5>D##=N&iljxuW$Tf-bU z{hzn{aahUpXW6skkPMQGRdXVL(D;mwJ*dJqQD~QQ@l9ehM(1&Ry&@OR$f_8zK_U?bZW@C)}}taJL!Db`w}W4@P;^=qv2?ovGPGCiE; zaUhT#1+Sx|UJHP+VTL)d$yt^c({FDLl7k0n-8YE`ucomMnQbG-HWC*d2E^c`NO2BU zjfD2B_chuRx2O+lKdAHeI0t3k$2T`-?(30bA0a{+=-hUsnf#(dLvbuT$;~q9t#3@w z3r|0G=dgQ#nC2Z}v5*WnM_b#(zqKo=kj2OI*0vM}l z{^$16oLZ9GP|Q#>0#{MZ(<26xOG&D0zh7mX>P?k!Z81Tn&BW)nRD=)ruW?M@_3+3E zo_b>+z>vlAPDHa^{otZu&ehh`>log&6AG6qT9Sffvb@PJUm z#kB`s&sY{@0ojWYF7?r^-Yv#B_dYifq=B(TZ4j|-Ir$Lxj$s)6XDZLaACM@#(CB~}&n26SKH3@7 zN{g^h+gC5%ncIsKyX1lYt3Lv!r+?b`ek^4`+XNhH~bLUjs!<@j88@U=l)cIQ>QJ86+u>k zC7N}S(?0Y+-Wf#mc<_|5W%&COxQ0K-&mTNV^=|KVnBaa4$oq=53QbXSQP5=0Ex~%5C&Qa;C+SJBd%qtd> zr7o#`e#Sgkn~<%auMVyF->qs4eL(UUh2Mx>s{c%Q4Nj=o=`rx+i!A>^8#z^Iy%DJ* zcac9GkK`vuoRHvjE#A#GEtEPWz7cBtf&xtm;L-_W-9-21iWgOb&~$`b=2>P`A=XaOVA#b zPqOf9@iSRV+39U8eraP&+^wphjxi%n;EJj@VW1)fM~+#fHMIGl9d>fgCSX&XUu3m- zYS?p*fl=jI>u0m6c%N5r%sRX(J_sko{h1LFsSY1n5sIPneIk{f@>2kkV#w6-J*30DN&Dz`Y%IpyJi zk1#3?%D&0XOOW_f#!i4+wjv$m8XIDxd&EbhNGM%~2awZ42q7osCen(}`wl&iTL-4^ zMo#+4AE-abmcPqg+kREW=t>~)VQ*D!H1@izsJzjlYM_lXIB1q+x9)^PPL=R0_82tm z(sroj#z^k-dYr7hmg1*ef#6@Ko9FcEGEhhOhdfi)eFi3D520ZmdE@QDHxI`po5}{C zieQ*eN&jpJw1!%_q9a($H&Z;1v`pmtiBOjdcv6sop)sv7lwM{EhNrD6z!*gqdiV2N zg+7d5FCy2`i+@%jM9T}2@{l!Ru9fpOWkg|0y&+xUQcK3*_q&2{wNoQ!hM`?L&7piG^29m<*1xL97yvOWavw-PX6gA*@!0_n>$TT>8&6^-t@6Ok^o3x4{q5 zM-*)iE}=yvqR-V+WXIn#$|d}z(vilLq(E#}eSM5mq84lCt}PWD>bCI0O~sM}{<+*; z%EMG|_$z86{&R~-UFn`kp0D{zj0BY|FbJ>VcMs>qB&eKnaL7v$;91Rs6RE#;o!$$#<6HMt z5b0JiA7)?uNx5?wRTt~6z?>E-etW$o703L?lP#)5pSq=*)<0P-E7wnNaehoXDY=z{ ztFSb2zYE6suuHZXqom$it2gXU#nESbq&~#aUX9!tMNap#6(Q!S`~h2sL7#Wz*HSq$ zVSMHdCVFLYx$PT=1bt6 z&BIG~ET{l9@rS<(uM0GGyIL0Uwdg)qIxbVIbg}D9lHhf9oLSVvvZ-6+D}{7K1oKms z6wH%eHbgFfQ}AO)2$v`msyV=r1#Jy$;mYh0By-t*tL^X;QkwlVlM}}fyrMg_&xgsY zATs@_-MQK3P+M66iKD{Y>jiiu#oxjLYen@TF29!QWl>r0Pp;&+ZNw6zQG0nUrM}(= z2qAie-N(0xV=jqKO!KBrMpYLd`U<>S114&oxsnXhY))Z+aqS-&$=VphSso%auw%80 z_%v`Oq2pmqg%d>j@Qle}PH1i@NSSWlGv{?VX@r@&RGr^Zqg1+Pc%etRuM;SDts)tL 
ze6#;9hLG`s0Sna8Y$oC=z_=dxP(;07ERACHsR>_#m^D~`c^(d@E@2J2EzGkc?K@694bOz{}N5SO#&RDtnIly zQ?FrzL?4?snUsHyO4|>RG^^6=B2fxn@{BqOF>+%QcfWsT_Ko|pCIbkN)zD}dcWJ$7 zR)wLl>h(d0e~uTVOU=5td%v-fbh2=~9MNe0@UEZ)IfjcRoC>@XqwhQan+68+O#a4> zd)s(BJ}IGU(ZzjRDYft0r|9Ly=49WPwRU|q!IQe6x%)fd35bj;@f`_IcbJJXXOI?o~S`Nt1ko z_sW}m<~{6uN!t&}J?hnrlS7f(pF+TE;!XYfPhx*WwZB48dkmnSRM6A&ew!o@Zxu_x z%&ZWeC-0&IpkM>53QHUMdlW+hJbm`=tbe{5)46T*NJJdoxfzM?x5s<^Q#Rh81jzL4 zqcM1=_#ig?=)p2S^v11s#?r>)rPUQR%8K#?%@frzI$sZ0RHXY8KOK`lds5fP>WV66 zJUgEKnSjNh`>`9!&f2AbV4cDht32a09g1*zQ_2+=(OQS|Pc+zo;9pi%Oq=zNgu6rT zms3sE+$+a_xORUQ8`-~)YxQq419uF4lbHV{0x$&w5#^yXgXWWfi<}(t7?%2G{#OzH z50H~Od;_$h*`JXN@c-!s7S>H{rOqBQs>bDZiMKTxH2rgfK+`VkH)nzD^(puQ1Pk7=AI#$QA-#p60#OEv5yh z<+BGn}QlcSrj+A5J&6&iI!4*dMP)%UcTLr+w6-Y%Umk8?I0mK=L?hq_^mSJCJFixej!}GBt$pQOxjqz7M3>*k{rfg!`nBl=wL}zUYel=AA zRqk0MlHz9Yv?xhcu6amf>A71Ym5MD50gYk;KS|%_=1|9^=SudcZs^Z&o~gfY^czW| zW7%t32J0iQyh+*erXd{5LB;iUVaZHnm8cC~<6U?HZDGU(V6A}k(PMWiWYh=SphNb! zvMw(j%nfr=_g~vax;Pk23m&*~(oQL88+NY0+h2H@!Uf%`@+-tvWO-d&_`-hRsAJz8 zPV}AG0F4H5PUBI4z++;pcsb|T7eisGUe?I<)Ou+^PE#A@Vw&+EQ%BDF2cR-M3aiYR zU0QDIn0_40FC*dv!fOnp0zZ-=Fnof;@~^)pxtlwhL&o~D9=izN0h6b%cQS8TEhHJO za%+x(`Gq4il+&HJeYs4s0m|V3saWyk0LtX&9m1in z-;pbQ5edV6l^gF~xAIftV75xT>2BF#S-Wi!>I^w9TeNPQaW1Y^-bbr?`@_UfR7nnV zJPSc8K+ix-j5_6wR4f5&+?LVV9mS_EX=-Gtpr+Z)M)$quF>VlecX-X|^!P4)%ye$^ z>F8tdVoX#uG|VX{(!IyylH;E-)ZB4R9J87qaJ36l8>f+Zk*lAUdg^%m9+KQ|E$9OA zhN6_DpPj~w$BCWIo{F5uWCzcwPJXhSd1fSx;j$#GVvR^N*N6AHQ9T%3Sx!JLllmOh zMeB(KMtNocg|4^V2IzvLbeD&pL%9x$ccMeYxn1m3L$qYWE-Z_Sk9AiVa1$F38!ik5bVW> zQ9OmhZc51erpBfwiWt!+%OyO@hGpi=&Yku@-Tucx0Hiu15O{yXV3YiRc;DDWCPeu4 z(_V0o3FpjDFA_%lPq%H?K2lEn!_$EX-;J7lq-^pHAe*kNtW5q}BqhkHt4k^3Rht8C~jfd`>=m-Cc3w&1$m)P`lpfqX2 z%&h+DfW!p4ep6xo**tX>tBRoDn0b|GH#TpAcfr}r1A%R9^!R=7!PMsI5udFHJfN)3 zQ;R+nd8dNCkxX!;tPyf4H8?_W@c!vU+h>k1c>n9hjKaUqF^p>LUSvLT=;Y+~){|-f zzVUy*^KSt3s}H>@6mDs00S>SLrx>~B+eHij6khwHa1LXNUuNRgolPw zdu(*G@tfec5PjjLo>?67^E;Qy!>2Ye{>H_@)fZ~Ol80Z_M`nVLi1a!wC?dr`T55Qh 
zH`{=ZkMBJPcW^{cZ%^#DZMqgcM`Fove>Z?XGkcDxcer?jpG|fG*!X9rrvs}SjQiC* zwTRIBQb=r(}!tMhK*COup4x(9x>g|9E;mp&@4j&&9vte_B&*u9nIOpyyKS|Ca-X$8< zM90S3FBlLL>!7=d;t_(3fMaXlxMqNJ;4Utl;^N{IMa81;_ABaIm@Ko=z84pxBrT3~ zm(c(C@uQa~<1e)LC;#5t`#!K|Bq%D{-zmlZAC0R&6c{6VEeYl1IvF)JHNBx|&iuA> zQ>9u7C1r9SPr~1?p54J;d+GpZL!);K5dTwDO`xHxtE)ze8iBqj_NKe1uecxkaSz5g z2nv2piI1Op`XrFNIU*+3Cu9OP+Nrqse~rUSEEbmUlC;ww{{z_XZoF3dgml0&JANZf z|8bV1&z$ZKgh*7;DF1rqB{yI&o;5c9*A8q&WW5&q%c4-e*F3wf_=Xc>@~@Ns{aBlQ zAmfp;h>Huc9B!+KHE|vM_nlm!Fs&a$#=5k=4^$hbY*0v%I6& z#qUBIw=AS2cx&7coLjoH+q?q#PLr4oC+au(qel4tGoNy!;jh1ZO_P@89|f(P7;WU@ z#j;Y@5v?rjeL-YD$XpAZZu*_3)tQrdbiZDiufC{fIAffWp4OK2_xLxK7?CAPjvlm@UKRHJdy`a5^ZfK1;xL(G%;qKWl8|m!@I^!MZ;N7@!BtUR8%}u^ zy9+8x?1Xfo_?GB`L%I-&G@hMvZYFcG$ybT@zhO-ft+oO#zZJ)A_iQ_EvTc9+%36G6E2u%PxQRwI9mX@$ZewQNieJx*)W+{w-R_NH&>cd zcxW`cLSXTCv-Ebz6Nd+SQT6Hz-_mVoWL(xaO3j_^*zIPpiJjOhw6?~(@LP3azu$bD zJo1fmlsx}|Je{<8a44T^rkZS43y1Q0k8~6}gIv z2=bg+HC*JcZmFF)T%RW&z0&r==G57{Hjjt|;1_d*xvdA)Jd)3D6}K>g4)!eMqQlzg1x~H#vwcoC! z%lct$tZuZ>p#18J>g-hxj$HJN5G1(R?UaEAW^{y3T~SAtj3xN1IBhq2O-ayrd!qn< zU*0>6x%_BQ!q*XVzc1J()d@=|`SPCM(Ylny=NF`>qU>qAe}Q2tPfZl6P~I{Qce&4r z_Yjt!b*#}4#ht%xZ4TH=+S2<*(sA>bp)$-GyQFQ&9oq9c!Ruph44Q6?S^;~1ve~o0 z)dFQILB$)m-$a2(*?)~kaL}-ikf2JUi{LG=)TmT%Z!5fmhq12ZeIPP!N+T;yPMKt7 zsc9&jKiQcRnHHLn(XJ`ctxRR8@h-u`jV3(G`*9&#vdH-*9{Ez*M|&qVO)~74jz1f@A8>tCv-D*U9<$I`P&s5j8lH#O)!&ydZRvmU_1@8NesA=s z79xU#=skkyy$>RVB!o{$^j=2v-bZgCdhZf~h;FpedpDvRox$jgUIur<=kvYyuKQcp z9~O&u)_LFeoafokv-jDj^*GFyF>{l#AIE@=c@77!(dq1AQ3htq`}dppVd>1q^wWI| z^m7t!|48h^w{VPhrh+vOp8gq@!>}_!&^GGGn{lU2#2@TRi~2FA$;eaj{T8i%2>P*$ z$&j$62Hu5h0raKUzdR_XI{Gqn+vpG%Zp|1XMV-O+BOyEZ>j#g$Hg=es@@#y(dTQ#? 
zDew6+LGo`@`=g_TM??+orUH*?4(k3?seW&l$M|l<5~T(r6LrSr6GPJKN={bf75QI8 zmd;-bMEnHyX0=Cj9ye55au#MzDZy4CT-qXL2JZ5AIVlc}_Ju{#1k-TC@-ZSyc~>ub z^*_^wnv1N9pKQx=sTKJaj=wXeSm`G2h_lkj!Rs`h6RLaowIbF;Ja$L{j!C>iuj)=m zC0OwFji|1y$G{7~#QQ}2bn@?=?{Y}PDuSCh!MV+h$3hT%gPRH+M{C|(t0eLc{Wxxm~tZLxOUf3sveL8$YNR26jxt^u| z>+lxYv~iumrprX@nrJWKW=!)W)uIi5L4jC651hKGIi0gDk1^ZANKC%3n%n+ zVKD+5Eq)`E@Q2O1xzY!2LNvmjH`y@@Usi*cMU@UzKb%Eal{9;bsFU0L1fII+o$<11 z4)a(L`HSB;gjpGIvNK)f42~s9b(YEuu?)k-JqG+E#U7MX#}aHfe2WCvQ~U(<}?#@xkej z7V}`!3yhA|o(ayHr*>E6S^3_daS$POA(UV5f#{SrEkFE`c;0)axQ$%=J=Zfjw8tDO z1RNr=Z~D8I-clYzqzU^+f_~uCNXdTM40@fb56E*@>ZJON9TChPWtqhD9bUz~J*O=j zG7g&6)4ke8ujtXBBu8@k_d|ML4VsPD29S?{9m*K$ae^VyET>$+wR>ve zYg#80b@$J(HOzR7ynkS)x|W$9s#!>I|1^^<>%uq(*CHM862Ifw{&7MfT3^n++SRc+ zyLd8EzXhvy7o6E7u(NcZf0bSoFZs` z_o+=?8UN(?Y{MEAXrSG`pX76u|N2AIu1OxpD}i4+S7Npn6vK_HXSEMInYDPqTq-oxeLaAb^)+SE|9w`bkgh4t&uLEBk(R5PNIQ0cJZa0 z^M_CAtAWV%6{`XD8>h2i^)2ZOqFp8q>&;&WM0wQf8suT*GB1XgN2Q4GE>xY09jcJ} zR|%&ihsN@Tbi5McG^0NYpJuAxj`nq=h&SjgRrj$w*m6L`jQsC~F^*bU^%hf(Mu*L^ zs3JVfb?btko`~Q4Lz1&~2fzL9u6t=|YfG%%&iT~Vh2>fDf88D^znArKd!}*SKFtfC z9L`wxS0Dc?^HTGR|H2V;7iV^sB=oMs;Sd>mP!n;cbCbo;dV`RE8e3!IvkpwQV7wiF zOi$=rutJ*Pt;1MP@A}q`H0R;(e`{6z!9-Bv4vfW&8hf%kx5&Ld_TmI%ZA^2v3WSNS zDDSwDaGg(zv*zY*Z#p~P0P=5PQP24^ol9rgz*Jr18ga1SVY|A!yDHgkJ8)ao%n|+p z+(XrRa8{x$TIQ{KPuPZ`W4b3pBy@q9 zp^b{T>ZU~MO7r`B6De+9YoeS14qKI_-rEy9Mef$T<&L(?mlRq|Us$E6K@5QQoZPAa zTs~D0VgYP*(6c7AeSABcbHX${BC;jC7Glr~-v#n{Sr4_8O)+_ZH zOyVr7+4nitxT4TXV~gU&!3vd3hNRvF5T~aJ#EUS#xzVp8Y7=RgzyE6+_t>^%B|*7( zKo3L!nfn}HtlfJU+7IzbB(wvsb|!%SsJ9W;C^J>N2$mLmN4m{8teHiur19H*wq6~q z`0$Bdh>wikv9jDR?+nW(P4(^d_Zo=i$XsDP1K75}KojZF^EhDRRLxeU)#n7wva?$j86VHWNi<=g8I6Y4;y8d4gJJiY5S zAS9G$G$;OV%tgI8xN-;cPlsU0!-=Yer<1Rm$pyTy z_+XJWC-&$T+n}gz=EzNs^C#7l3v?@J)-A_+Wk{nX))IIA;a1V|pflNimA1xMZR~M3 z$T4_0U|6#^Oy1=P?||a4XO86v7c05TELNt~9yK|~hf05W_v{=<@=NnnTW>0* zW&Cuue-^(z>NMAYdy8Jb7f+5~#_fxtNunmP1ngkMV!rs$DEh^2cqdmqE3u%og$Np? 
zcoJsXyzNWtuV4XRs9&r*R9FvM%oiYET0CC3-ZctSje1D={W=5T*5pFn-z;N16~cF$ zN1p-E2$?z#&A=aziN3r@2Sfq{%~Rv$$#b`Dg_5PL$-k|sAzy2j*-$2pgyI|n3=?nesM_Jfz zv-DUQdRo&DllfBX7=3G)ePbvmHiF5m5hJ#9?Fj|sikUk_XQegIZ?x#aalZC(=g@$K z_Q&v_+fhD0iz)&oc}3wKH=nEdl2(Z+Mj)$H-e|S;c}#al zivbsqmjP!}5W@#aq@!zQMs5RqU}>5LZ~iy6>~6G#mlF=@TUr1`VZyU?H_6tmL!)Hy zJ8ICBQIHZthVREKOa`YXc5pAp=K|OeqLRz(Sc=~k_&U-NkGYdcth*9z*sl8XGW{M~ z2c@c?k2jZe#mBejF6aypj_xE=dMmZ--o^w6f$WjImzySE=0sP-8}X!BS#!Py+Fqdo zy92BVa}-h~ATp)tIR9%#L4)771hP~$dkcDCQQG%TEn{%4DDzT@bt|cbg$CPT@Bh;{ zsc>C2TJ}0%HLSmPx*{sIgd_~U^Tc~gy5zVNk)*9Wx^tUR(%-aI1u~&~SWif!)YSD5BpCyw7m5;Nn62|u*YhN@>W7Fg z2yLg!IW_(rL(jXf>J^ZjnKEqb*Mq>sL^CW%1qA*cM;&zPb|^ImFz`yd2uB&>WLSA@Mm2b~Fdn8}J{Mav z%5mr16E+bzoFTt2vWV1t7g}gE=wsCZI>ahs<<(<|wqTQ?pseHNyiI)pl{L2$wRdfx zeC=AafQZ;5?)S{M8iJyU(gV8RmMt`v{%aN-#89J?#o&bAmmY~hM@3c+Idg!EX_pgd z)=Ol|YLk@Z`BkcxgH2OZT0T)-$m2D7mF%FO>mCw#rB=(w%WRmIPm{UT6f-a-e3|R@ z?}7B*Hu%2ARbx?1?W(9>Tc(`#4J#)}jFlLc9j+p<-}6d8<=kSDv7HhpbiI2qy! zx{scdqln+s2kE;Rg~0%k-#QyXfAM8}H&q<<7p~gsgY7WFvD0MxmDQLr>dtUn-Q`wq3!M5qG%E>~-^P%=?;X>F! 
zkROZXgL5FUFkO@EmVXfIGct{3mcWh`H2O*l@0z zK;Hr1m50(ahk$Xqb@uWm6INXBX4?6F0>NcA9D{d4w7VFD;Ox( zK5b2G_sWLUwz-^cC>nlz1MkPfmy*<|0`gYha|LfN8_(aTdAIw|h=6^E=da>R3z6eX zn)~5Hz}S0M;xLV?<845!UdPs!t4F&MzTuucOat9hpUzy`p-?wY z)edjR@_VJHsQO;6vZx2&{r6O5f0Wc{FckxIPTOtL8eS2Ey=RY3fd9RodmlF_NTm*C z{bw>M!L-AI49_^OmdnD0MTGyUrE;5Y@J%SvnHiR+t~u;0O#5YpG&~~2@r=^+xO%41 zt<8_wIEU-xahYaT?bjF>AfH9EzJDyEqfs6!&)F1~CV@@q+Y1OvNt$B!86vEHnTN=F zo!`H|v>{}yAmCS;_`8mN1OdYcNR6Pe({zoiFfx-67Tg_Lj3upF*Hv;4Zg0(JP zF5b=dq{0+KEuLkmJK7>k2Y<`F-i*3vPm1`5T<_O&%6kKrru)Kfs4KyCpNJ-9oOr?3 zKOBjNUPFaz%6?jMn_6ZWD^w<+QfkUfCSLeaS(}{{*Ze`gXWzqSx8_RRK#3XrD>>q@&6vzSVceUCxhIR#D!*v zC9T#BF<$*nOTOx}@ID_ZHL9RE+}R@>#@&lU2 z{>Cb-?__z`Vp&#v4kRbWlI?U@<{lGw} zmsxa98Lr3HgcPj3`4P0@qz+y~BA)N@8Ky+PN+d&N;6L#>^&F^#zz)W2H4%M=!`KAt zT31g@2VK^#*D==$A-6@0gW4Bcnf{Y)37Rm#-MvY^)~`%X4r+eN(YPPq75bj{8E~nz zYbBxtoQo^4%Cm^q5B!L39Zg$Uq+?>u4eNN!VTG}=E*4Dp<++|ZJZ}v)&W?E9!taO) z<}EYa+#ja$L?L3p|9}pqxat5Qfl!Z=U1w5Cv-X&s7FxpOe7kljg_IOqY*%hi8MJdU zx^3fci@=?iHi(us#uOZ-9k^D*uhwJJzGmx?9=uJ~4suBJwlwP^bSOiY|7Z_gD&veh z*$7n7>vCdL`&goKm*)IUL0l*r$o%S^%y63jR}6t>aQ|%w{0S~Blw*EM!;wMvUsC|a zYH{G4%&ITH&?GDS)mNU&9DO`|lYRt}IZSgu)-X>_5&N zzxXfBLRh+97B29R%>C|erT>4s9#0>CtZ9~BvHx!*F{EgQ`@i1HVd(#E*Q$4E2F7Dk?bLeA|K@3^xOC=PBDnw^5!y_LIY#C7 zGoNjT&+E>1;yv(E!+haHpyzk^MZXd{E_Q!X9I3Yin7lZup#Yf3LP`;n-05)By)^YBJt7Ii5pB!OK#PNA^J%wagE9HyF9J4bpd3`Nz17f=o($8XP5wh z`%C9gTkUi5`C1f%fz5O%uF(Ihesy9qRR(nCRll^k49R^`#Smv3KYF|aYdOr7v~y;Q zmb7H=Uo$z48m{{$eAAeztj)yk!&AePA$w6q=Hz?fnh8IjHs`vze#j-?Cd;;WtxQ(^ z#;5m`Esb9S`)aF5$Y!xT-{nu-9~ry8@aDy9Td`m$T@i4wxnyN@-H&yl1H#qp^$-2u zXB-25TYIvF^%dQv+#{IzayS3{7Z}8RPIoNk`bvGOFwN#)92OX2M@d1*lqayf4{|hr zGB`W<9++JR30D!V-K=5{L`=65@LHKP8$TxI<)#(umOl^EF9T+UF3;Q~nyRK3U5=|- zAwO){y0}O7il#zv(fH$x)e61A0Hn=r>0N_)II^mfwPQRZ*!*KN+w<(2jI~c66n2pA zS$kt~`N!D#6S7NzW(tnIcczKj%SqM!+_1k+#B-e{`8<-r>4PWfx@pvr3#++W{9TU(klCXj$HL@9_!i?+l1y^ElD4 z-Nq51uXZZ!9#_HU-JdrBAio-%GqLte{XCPl??jbcL=?6* z-x146+f9*DtnI!g8Zh4=FdLs#l53?d)%2wwKWR~rL;YZ12BY^jE1WHyadUEzB)jju 
z;_4$>;J%OmQId1cyJoE+FZv;uSezLj^w#%>4XX{zXIn}qaB5u&B3_qQvqnfOmynpq@YM5>J>-6_ft!W9=b(j}RW&kBA-IPyTIAc_&7S5XJ5`9iaG{#^ zIr7xZKr$umwyiBbcpFd^{)?yLne0*7?Bsni^F#9<0cb>*dcl}qR@Mx0IHm*LSTKJ1 z=VlF#+La=Xuwm?|R>R3abJwrp1I}F+`nSiJJzePHBrMS-0uSl{%!zWv^X&w-SI!W) zgKMRUHz&`_^E8-x6;xV6hg5n{wtnSi!Wk3ST{KgW%@VxiH1a0*WGi=&qZ|(dXzaB? zBFpPtOrYc&M}@OR^<$xXW(K3zw`s|N{J|tiFvbOE`R!DEn{4`%xL$Hxv*q0B5 z4GUjhm%zTkmgDfU4XIs}yYim6ir-zVttL zQfA^(q)>#%uFzRsI$@jY;^w^@tsTStKX9t@`m@G%IyBFx--q4%Vn4DTm|bKX*?p4q zORhy39wUUcccWPf&z{yH`_Fqql5v@opu#*0GzsZguI_NS4De-gte4RnJlqpq+QW_WdazJy&VzWV{rUE8l{2eP${Oc# z|COh`O%@id;WK>MwUOCXAh^okI0da9v;ifvbFg<|JsNEei7#z`=T(fag}3ut82VmC zNQ_1ViIPk~8B8x??mW>;Y@gZMf>kctQ?tU>oroj=(xje%S_{#Gqr=O$Pa|M8P)QG^ zVe(iKzgM2@p5~H~x!o(5awoqY==P7@o0V~HMdFR**&has)}Bwf8PPl1dig@SyK)X| zz0VgMu0ms9Q#i~p2X*6ED>j>ok19J|KZ4MyFAOf|NOeTq1-kCa#k;+kZ+Fa!E;Lpp~aQN#B%io*PqbJWCCUf{pHkE4*!4bs2?iFi;LDE%$b9 zo1>l5mZjaI+e0LkdDf(RmY_Mtc*NDqlAu~2sChv+jVO;*dF9v>wB;K5Oc*yWV^W0= z<%U&S;%E~uI=uzWAxO;lNxQBR6C0o5*eY;k?G=4 z-qC?Gvg0Cg<*mbW`pXDTLRvPnLwj;*&mhj*s_5Y%k>Q6?LO!|A?}L$K=~RvbP6-cp zLS18U8-}Bm*{OJIW|2{5XyHs*=Q4cRPK@=>t@M}0s(g3XCN+H!qocDcCWWd=1MN|& znL^~@;qk@FA=874Cy}drm|wLhG&CiJGPu>RD^E4DGufyI+Lw*ig$H9t3KV;^!{#c{-~_Eg|O(#20eiO9iWjK zIoc1(IODD)H5ovk1Y=aL@ii(+W={1EQ;S;$Ilc-1x(yp29`=*s{t$78@Wr=S!fE8u zUuXo5H@|(Wlzbdh{#T{00|EfS;v@yP(ED}Qr)(H%Rbcg%ctv>S?TYxCp3E%{P@sH? 
z<~8AQvHxjlpFI^2sARh%m3s>APkQS<`TAm9!myvNDmw;e+VT*2<{5CHx9X4s#9Pbh;b-F@cmrOz@C z5N<~|sBkid5WUhHH?#;B6)W$3@+6HY^8$cXueXelxunR1?Mg2pTEl-=0ZxWw;FEiF zKHDaNp0@>Y47e3tVdi^2K->>_f#{XO`QqfUf7W(}v2@>ASag588_zt#ZB0-?_ZR`V zgzVwSqqe6$4)#OmzYc}AFwIYC^G&`+d-oo>866pL#beXs`YqG64;yCrK^mHYR z)2AM>vT$EAaro7D-VQ`-2$mkeZ=Wo1iC+2dJ6f??JPq+)+K=ng!Q@U`u$!y+e&9<-he{J{R#47-vdnt zvVH0L$d^WAoBl@W=8c$)TSAiP=cS{xvS+ladd}kP`ptgapO0xDX!W8+zGhx_zeqUgD@ z1=-WSHd7!ha~s;*X1PfJ^q1rjsj%s|Ipq-S@7VrGe{wA?>7QH4otEXrb{QY?uCXkd zjpt!IXx+3MVFNnRDUo37Ys%T{K7B(*Y%SiwqW+`;Q1xxHW^DgYmzbWGnEh1VKY&MR ziS7FOB1D!5CcW$Li6Npa2YuP0KbOtgPb5yXz#87>B=+=d$~HIQ%?f4Surn$VS2G%0 zkj7wKf_0_9ZtdmNmF&H!i4KSUStFhed*sT*gKOoshnckl0{-xJALS2K`ukg_r{LaE z1r+~a9uI=)_XijH#$}iXxOsPYb8Zbr&20G*g*1 zWcU}PxSf1fsjk4AQQrMt+y6>8gGSju!2cV`{@8@d`P#QzywcuaZjV}|642;2^)i!=kG|V0NxHv##c=;&IqHr6BdndHd(#_^KDb$p zL~F>`6ri+Z^MxE<5oE_CVLL6uMMUBYb+L&s$)QS{)szLRmXw;?-Vyj%TyK6pu$zMM zk7Zk;>Hw;ffL*#8{%5?UF8`({kzBA-;fA6=FCrWJ@LXxF#MORfavEtx)qHla7&36I z*Oq51i7QPLzbA>KNx^^BGqmT7Tv?;;BXSy{HU%O=7V@SG9miSu{VW&>z?`Z|C}C$SSm3s^sd_yRZe_9Tu;Q_{vyiA<0r2E0b%wA z|BRzw^Gx<>b8UTe?3b2(u=%AKKy&p6)c2D%tBkh8W0sG4^KJ;$%wpuonAuZ7=chML zhfjFK4{qowcdVvu?U1=t*n?R-E+Q|ctlw|xgZ)ln7WK;SePb+ zcY)XYvSn@ej#N%zde`tAp!D&{V@YVAH9^T@3BKuo9>W8STwo=3N1o!E)=~##$(7b@HSUuBvc!z4Jfq7#ta_t zdd{-{^q8|MJQ{ftIJS<{1YX|)A$pE@%4ENNYqqq>Nib2_SkCwd(*5d~Edk)k7sQ|! 
zH_*RF3$}Gp=H&GuH|RU+KVKTdVI;Kl(l>m!d0$;0GKgQnr1VE+kK#{PT0_8-~UJJ}J+u)jxt6mIqs9NS= ztHuI$?(VzHOxz@nmcLJsptQ5r%8!r7gli37N@;U|qCd4+6LaM)zE$n22_XIrLhneK zp-0`o>Q8Hbl>&xtc*9;kD8f?sHG(CJg-`jB-zZy7!lHYMwK$Z&cY19EqR8fzqEs!K zk}@}lsy6d*nVExa9OcxEj!N|cW1SF2W)Y#G)zrpMtV`+My$*Q%5J&cTG1mKn*x&xy zXIGdGAAe!JiCDV=0Z;*lSKdRbo=RT1O$<=+Z$12Cu%oR76`gDROQ|3+qQs@vxAWNy z$bBKw66@wwbq1HKr;{oZ?mO!+E-yQtjim?S&3bP(5T;_bK>?;VU+v2lhr8L|AsKM+{T$x(ljfWVVv1W%l*U#xwlH`!74thNbODTc8=bf#5 z3KXl+o_Pnj1W_PQk_Bq~a7w@SKT5s)qAbDXsO^(-vHP;`0Y|XYUm3WQ?}$=qQA65R z8g1S#yOM2=G#ovbyT^(|qWf(jRYEH($XRQkT;v7Gs;n&!m%d=8_N{DNh$u%n# z^%X9y{XaLoW&m~4Ze{|T<+TpJc*UCjFXybL?tz_d;WP9b{fo~WAir?wV6oFgA9FJ>gn zb_10T+Aeb&tnK!%?z#UwpKE|@F)HIOOJFPDu+Jai578+v$Ji>GI)FII^O0eonlYDIy%-_S2;Rgxs2^~yZPI9O4|)0 z+Yvf04qOnh8JN1|@SbhfTvDPQ%WV6)F|tY(f3{oUn@H}z3rc(=cahST~{PeukC|;CEakT`j>90jW50) z2sf`}O&rl8vre)ct65JX0-TtABlumIh1|?fg32m! zFbHx~FH$O;Scmd5P?upDje@yUGEL-+(BN^8-Q31|smdf6v{WfxKAJWK*=HHfRx-=1 zc56v}$$vEULBV~2;=MR$=9UpU!Nw;q!U)I@Jok)5j(W6We5^BckI=|9O&QL#JWL!wV8nu%zWsTWT93A_I^97~Z)6OZS zs%EmcjD-dyQMBg-&1dV3n%!wB*AcLRovY{f#m_^Mh$;#QmsJJaY=9Qcm-XX5dR>Jd z$mtyT@{gJ;|-eM!49cLD$QYv!{3S~xm1Ka?8Zjm}3Z0+|)tjH@o?j3^+vDvM= z-A|5Jc)OoO~d? 
zL-0J2U`Ft7Rox^IEa>9&&R@43(yKwt3quyURt5hSrBbl!pd8yiYlnyq4K0Lxr)ZyT zW;haN)8VP;G9z*Ws4yx*16uvXAMfMcKfWlT_;P~D%2l8Sy@|Xm5F3}BsC|avPtTy_ zNTTW0^tu{cUlCe;q+_$o(F3!i*54Pp*&kuO{&8BsUMz$SG-6Lr=uV$J1sAX+WX-wn zZf62lUug5W*n(^$>&jzNqe>7XOeRhNq91qQ# z7z0tX`!Dob*Bwh->r%~W#8cb!VjdmV4(pJXEPp2;r_uUF#J2Bi>^n)##Jig5@|(t0 zaBEa=V@ZRSd*{VAWQXcuPSC33$k}3smNXb~uoeimu&F#8^AfesM?7QLE?iC#wr_CT ztCAAD@a^IyIvA)P9k`tMFiW#aY{Z+e4nIfzTv0Z4mVWd=#r|h(maSxupqwD5E&V`6 zVEI7C4dEIVPQ#NV5OnC943oFX;~|qyIfxp1MwQ^SQ%TlkBYY9F|M*_PGsIkrO?S$L zih>^vEP=yK|6dQ1JbzCDwEJPZ64)w#?hT$7&X7lL(ODbQI;)PqY?tAhNfx^=t zl9Jl?nDhrzhxy!CT7Zix#V$)zk<;ueY5mW+wL5uzlHPe3-S_FE-9;P8f1Zbb{=}MW zcbeJ&tZ>5Z(WzfB^|~;gwLcjm#l5VuT@)B1w{@Qiz*PL#pMfC(x&F3soa>eJI~L?Smyt zH%NP0zHAbu9*QUq(^G)W| zLknUp4@<8#1l@M))m-R`md2H3maeghu95S-1H#yBN@auSpeJrNC=gb${1&9fQ%atD z@{Zbd-s_uPx*m;s-zgi>{ zDkwrut-F0-l0jA$#=>Iygq=O-d1X+6cVlWzlx{y*Gee%HkO&DMWJ^H}ZU7uVlp^_h zD)8dbagEK~#&5SkgTE3@gd7xC}_${~slO6A>S((^P@o6KL*vd4qu1Ng`eyrF^R z%@s@hmoFD!dKk%E0~uaYZ`Lcl2er-5b4IX%o+4Cg#%7tP(p}3NFb%D(0#tTb9q{Yc#p;~2r#SIK zVpRM3kAuSpvxb5vM~YTvJ`{o74FhrmTVLbR@)~wmXMCzo zE;kb^>%bBRLo8FV9j5|BtpKM8*$QF}xv$lB_ie4kFQvs>hRXs_=ih2gt&RY<+Wp{H zXFb77nst0CR9Vf+=$~aXEW@H`EAQh3w13PkoOw&|4)=R?@?_0~9I@Lm^MqE5)agJ= zScunNNp6wpS1Oa0gXK1%itrrw&3USZ^Ng4D<=wH`d=a91%_$2$%=df^cJN)Q*aS~g zGxyXZ$6}JPr8gsh6-ABLnsre8jFs`IO)CD}XbM{auqOiXBcDe>`=2#CV;{8U4Q z7Jhb}or2mLb0ebvg#d#CKIV}23RK#C_x81?Hd|XWCv~?!e+{gXaIdd8YzUC2{-Q|L zo-U}5j37TK`p3HnU%#2E({XU!%|F7sA+hO{otvLL9C#4>12y#yS`ykW9a}rf#=b#@ zj~o|9=C&%@a70@$4gG0rO2wT1li=#5%2_bcWSZ~PC?b|Gg-Z~(Qx4K}t*Do3^&nnd zdYva$hh>t7G#O+Uoez>B@LoYL=ginS*T?VY6VY-~e{X z*x{U>2+LIiFJ+AK?A>S5--6gfhoj`_hT5|;=3hw<8ZzZprFiWMvU|VZyy}Rc>O508 zsQ$ZInx|$doK813MYp{<(2#g5@EpKrcn!f*0UoaSWX&BO{@mMFzR8)iSH%VIdZue4 zbk+?fM!Hj~oO};hp@^l>t9U#E2R_T*oB^$o1zACH$ENl;M_h|{#5s-8oj#v?mcb*4 z`I{W*#wDWcW7(1X6E3=Sgx=@$lSiC+$Eb8igb%PY!_2#nvNn8%9${;{#U^ERXLv)w zlUE$H-_5HVThYeQ{Mu@QR$`fiIWu2VyS+sy=3Wf~&d$^Ad4Xz9Pjy&xG&8){yx{74 ztPPBh?Fm zkg=>$SM?cu=AB~NYBjhPknkCB0|~fs_X3QEG#j{jl0e9raZ=fNir2pPb}{STqva~d 
z>)193Y77V?)%^%~_0XIhI1v2BRQ_>NTO;WQ0A!Dm_nCFz$mG-6Xhs;}PJAHfVcO^k;k z;m+j;c8@}=pOO22qAge^RK_B(g}Ch{qv!m}2sLzSm<8X35W#AEi_;E{N4yuK9*GH` z5g5roa@`RPeYbv2G%~!F-?*#V#%9T^Pb2o`(oe?dIf!f7+}nq7xk<-(`=oX9)UyAp z@tW&d?|`6;+0l7GXT#-nCfRAmd2g40{o=SFgJ#6R%4IZu3RDCCi zfyp4RO_FT0BOJ%N@}sscpe@DaA~0uJ?r>~QHFUuxP&7>DN4XRl#BxT34}Rx6q2vW^ znn7o2Qmk;w_FK3lEkzR#U1T|;rf5MoG=!*0U9@oL9{LO=q}47TPQ8}4xQ1Tuw63gw zM=TKCEI5a)P>WfvCB#5#$Pd7}A?~ZZc<%LN1Bp*E--F{Oo%}^lGqNS7lp5Im+EAf@ z6ib|>Z*%jno1Co|=F(JH71u=R$N>5dfo85wvao^0#PbeuW~xvQ<$gY}&;89s zrEO+>vB3Tn2DX{j`{r@^2c@6(w_=$A^)fhlF9eO0r3v&2c<>!K?bnU~-1)D6_)LiGY=$Bw4k2+YQC7GgvNV$^n zzC@j;hN3FgAIU7ZMl;lOMW?wFHa9ravcK5x=XGl}UyH%3+l)?cRgJT7H?n2HEA3~V zUl^Deq1F?l^Ph6-%9lKYi8?~W>06W`Gs0h=ompig(Jz)(urf1%^s~sf@biWM-5nv3^u--){eKq7zqy+{6E(Wo zzD>?6oQ3f9$RC57SVnTmmmfsK`&yj(1u5lBUqcz~euHNT>`-T+1a*2%9MkLtR&Am# z4@G?T@&@Y`mQjD%XPs1oNjn>XJFSL31Kn}fLrd@Ak5rUko2EsF z4@l(WC_MjGrg4R)?3eUmQNhXa84^9YDf?CF?2m%oK#Pqw2-W$ zcI~{oTGF`>nV$Y@SLYCPyUtT!&YNR@66yZ7_8vdRT;HFNm+E%g0elzy<6O{pe2M)y zDyn=Yr1)PaN%frDjA)RA`04-eDCqs?WwSPK7W2a<=$C`4(f1AZYLB*S~Yzewr^CQ6b|H6JHHkiU$MdL6u5S!b8yj@1WQ>%>ac2Rp`wc`U#F zGL_d2EX#(fAW#R~&%jR1^3Dd(VVvr31a+qoNApj|WNqh3uI}|`k9J2{tw_Uaw{~GB zM5dk5UDyrBef4f(nM-(rgLA!p(~Y27cit?udi+k!cWGj2^-WUP@+*6lbA4*p;i7%e zj5&j>LuPM>G-iuAY`l5XV>YJ$tGvJ9ifie*Kw%`fyE}p4?v?<7Bv>E>cXx-zA-EIV zAvnR^T^fhr?(R1h2VGl?Y30H(zn&lstgIvw|8*h#v2j9-uY9 zFxq=aBwH=W>e}(4&?OVG5-b*C9f={fi&Rt^Xn(K3#eu^Biwu{Cds|;$OdI=N2se&@ z38;5Nuks$1y8Z5f_;!9EttzBt*LX@>Jbvp(b6Y8RN?x?&NYHzW`n0+3$Thy5LUsvO zx}oZ!jVzuoh+P0#tLi0^?PfuW7dJ`i;Pyj$!o|1Un!6|lCo=NuM#LucD#In6 zTOW0W8f#GnF~QuD7n?70W%^Su?yXWx0(`97nM)2Wi>JAsNAUHD%3pmL12_aCTQ%|R z$K_1n275#;Zt<@66c~%Fd`V7yQu`M6xCN?#*xq2Dxvv7=0^nLt1As%G2Tqyw#OX>O zcHOes7DpJ$J+suwRAn4X39k~Di73W$%x7^#=G=|$%nmB8N4^?;4~B$DXhL%u+g*dx zrnj&-s2lf7S?|V!Dx)u>SB-o6zc>0xl^)!WXii$91}-0KHUyPr&1|8iRcebtq;{Ne z8;qj3*cW)45TeCmz-;cM6{M#c@s?%t0#b2MD<}0;7PK<{js8HYYBj%uuISuwZmq5l zyS@k6_Y-1x%-ZhG0GNfn!14QRA`+sEARE2sSy+D9p9_1xf))7A^2P-HTTB+%c@r-; 
z3Y=R-IU_rsRe-+^6KmFyyuVIg`L&NW#WlPrE6CS7e|4@XDW{Kj7D6d70Xa=xU{-CJ z*v+x<$`)EKRGD+xwpwnTk+U9K*doqlXLvULPOc8Z1crJY;a15?(0yF<^7E_L0c3A$ zt5DR(Z4vg~Z|Y86S2HKktaG-ShKUPsqu51ETGn z3E;Qw3*qE{)?Z)-Zbwy^7g9~rUMLK6q=%dBsAX%yIj#@S6!Ej`FF%mZQV16S3xj!( z_HW)K_`T$q-7>or}^mvlaB+2>lWIRLv>zrI|R2zY@?q)+-ufMHKnxl)-- z9USUg6M(_jHY}7&`m~Nu;!j4KIVCXX?J&`r zdx+eT>ZSYUw&gfVF=n7G}we(TTc_f}j-bLB>HTjr+j>EHMtFFFbS2g`WXqo9_s&ngu4UA8NCvt(=P1tNX$|)08 zlSXfzj&$(j6_qxQO=;eR&>nwAKI{%LOncpO4i~wjS31#9BYs2a zB-;5M0XGgmZD`RY%`f{8E$Y0c)O32nH&15?9t&&yfz&RSXJCy8`w5@z4RaI3mW7MS z@MHP5X!@jGgC_^BvtzWcc93%Jd}X<3x05O+B2U~k%>eG=!H1yB zN8Pd=GebnfFT7j?wXJ zavRjVCWPDV(y|3+PFR{OXMaBiR|X7EIC{gg3Koh5s`V9eDo-m^jQyVSYo4FX}Vn9s#`#a0)ZmZ(M~J%j7K zrTf)MEp+KpRo_%bzJkvfReCwFc6wR5o&9XHN?SiJZDb6cmSkqIkmfov+G(uDIPYmSE%a!(k;* z-x`0x$t910(f@0oQFFFdv8?&nxGY_s6I>q=Lq6<&f?#on0R{us2#KP$GCBK;W7@P%V@ zwIfB23EUY%@e#v-4cDbqpddx?BE+}ht1H%!uDs&yAG1HWT$?wWfK&|I%De! zB2>jgielrb5~hVD;iMMwA>cNv{>j7l(s~y-%^$l{-Vs5Vth$IT_odM~J@PtUUI-&< zBd}|{TS9kOXJzhbc~#Nh;uk^FjzAo}2=2_>+YCO3B^c<%Pn1=2s8}AUXrlHT=>8&$ z7qle!Uw#*zvswL5H(nm;I~})sUkU+qM4d37e!V-H{DaaY6%^5 z{{19>+ML=}5o`?NV@>w}D3Qa1DY6Vb97EoCuzn0D;9^>NgT}+}uR>>X69C|>;bK22 z%U?u@T9t_i^5h4R{hIta*lIaes_Gsh=Ck=D-oU$IQ~L=XWy>~sJflIb*Fx{>*3ebb z_ooA8s_2(sEsfb58<}eloGR<<(UK02y@i_CHQS4yd>|YA2(HaCR((tBieX~VCGd=I zGF}8SSRK6&{f@--(#j zE(uen&-ZT9+`P&nJ6hNXXcR@mwRa50OuO2ys@@dOSDBMl)T+C>FZ9;DVC)wvK%=S@ ztU>bSEm){X5d}dYw(WZquYU8Aq+Jz5Fqh1u$4d}|RpExmC4jzK@4%B32t`hsjBugV zpdt6Bn5hWKB#sCpQ-fY799EhW6MY>+-0yec36R63bb8Y&Ky+fs8>}0$TE6e~mRc8q zV=KHfUbx7s#&B2!>S5TU>_hwFZS2b<+0e1jGrPEo1TJ zLqwwSXwxIr@526vH-z1gO+#|x2Z7sJ#`lQd_Axd4qL(-bBlhnzpO5AbN4D!*93Ge1 z4Na&}Ok7Sw6Os(``|dSep7X<+0Jq*tl*^qVU8gGh;2TK8JxlCa}VNKemg`k2h zU;k3Hopf3`cXNn>Sbr=i9;v*=Acqs@;#8PqIE{=;$=cx8`!S9ys~dri!fL2|4$YNXbUfn`N@)czg+ z)g5UO#7`;9k08sc=eTJBe2X`H!`WDvABIPQp-43Lb{zA*2GO@`s8_%-Srnf#=>iz{ zbT;F0Kg}1TzWdG#*%!;P*#C5<#E;4fI}JuospI&~DIFkQ&-SI3@hkpY6ovN^Ns zp=g-<9M^t?Mn(EPz;UaD$?d?6{(dXFfKze;_NoP@h$sTknHZZfy*)-Xh1)&1oiDO1 
zwWPjueDeqvpLRjZsgT^Z?D+|@?#{PGcWr&1r|*p#q~Fj4PeYEf4K8|5(IgN=GKgMk zRQ=Fn-?aZ5sfej)`5TDQ9;9aM_}1*_+w9z`hkI!(}Ei`pQTw!3ua@mmaYy%5l=<=~pxFxv3CL^?blu=007zyR{^Ul1?6#M5}^8vXGp@5@a zTetc~RdO<^QXNVThiAuhc^GIu2scm1h02*=o|_rGEa9N;CGNe9FFw7o5MT8}+g#@Z zu8bqhbjCSmv3h?Pc-GQF9zo4Z^nn?|7`TN5KfxEc+?1C%H`mr3rVS+(0ii?+%osMM zlM;~kE!ZOe#HJ@zADU`Hv&ts(&QUI}2sG&id;MHlE8@ty_V@vs+>}U@uVNE$#=n*MkG7TBex9O_HGeZnwyX#UNrK?n(zhsiHye=^8;nWHk-}XmGqS^H>luIp zAl7Z*iW%*5;t2FZk95XVID<$_)-O54xtB<*dBv`YI2}TLYFu>v6A=>UUl2EkFDVY^ zM3hnYndtE$J|&V5idO1@`F0Kan>)OV%b)ceAy%HRSvo}n<7-d}N4mRZVce$&pr$qO zRm1PG&+i11(y!)^7`Gt+xT;t_L+XQ7WD5;T{%dnQK2zD=h=ddw0!7fS;EQ#i2~-44 zL_j6y1a_Ow#1hYSJ9bQo>T&!TCM$T#x28p7c=r8rXIJ4L0!?_`WQLS^hP7;(A@kI_ ze@}qu|MkmP@&AdWuWZU@_;`6Y&*}CxxL?3`863EMo>$fm=Nm)pXL@Iu>@^l^7juVR zv5^lb9?*JtBSVi$eLm%F6MMBzm8wL?SqGX7@-OuOMvG~$cKgwIpi7>M9;7hJiLDSjko)z=bN&C@GJxDe0TLgS|ky?t_85@|9@Bjq0F4nS%CjO$x-}U zVF7cj_HR0$LblE|$1l9C=nk~4Bb|huj35LNCdOS;;mMhZH6~+5Ye(Q<5MMm!%|!PY zFqqiqXZ_`;{6w|}_8Q6Mx&$-q<(ayu!mTPH|<78OXcT3vuVtpplN8`y5?s80bVakZumyNn<5%UNCJ3Qpm3N!U!`BnhWN z@AgjBY+)MTH$thsGlRy~U$@6A*YBso$egOUn2IgGM)ys4x&~a`UtpL@xA9>uxtNrq zI-QLzFWV`cK8Kyqmz5{;sV3>Cv?NPkTKCq)U#^~kZbj(Dvp$kWlf93K%sFE>!} z6;Sh>(1(W}Hn|JJDf_hB7iqqrhH<9 z#M%!5F?k!i0z_V3&s1DSu?J9d6O1t0pR*4&4nG-sy3c8L2W7NgnX>VTp3|I?>W(MY zd0e12Prhxvkr$W^jIOqtquqHY7+R(3G<0>{Zm9K6ER4Sv_tMyb$DN6!M?}04R0Ny#H33z>y0D@$$3= z>1?4fJOSuYH81Rd+MXX5%6Q-9{LPMt(7r2Ya&Fo=wTVPd7fO-TCu} zQPDa+ON2sF6(rHwpgP{;S;UI;9$zYScEg4gq$XhqU7^#cZ`G?hWyFao{c;^mqP9JH*@Y$wm1J7g{#tgV zAV2I2L7r(rMjG0+r%iKB6gw-b3NVlOf~@4g&E;;n@ky$bf$o!B57t?Bar54wl4EV+ zdk3ii^$;m>Z68LP+ColZg0W_<1PuF4SDc7yjzvskEh0#;`R?1(w%F~HW~yy>a?MW) zA5-{SGZ3A|QqGD{ZlsfDNHdMdRxulwOIqlAIlM8uHJTqd@~N9O45@rEBUQYl>gH+> zqq$iQEmQD>(Yz#vsO0H|*bl2U%2%DxzfQ*uz@EA)>%b_(P7ED|tXo|$isuW2rO49F z%a*i3xYn~%9SAGpBOP63DEwV!X8{Z?RiZjCrwyy70WkN>*5A}a@dUS(^|A?F6T`V@sWRdgT!a%3`l zHoo`c_k_PHk5GgCv{onV0d#tQ#)`4ViHfW?n~p@`AwvbXp~cnLCIDB9X0^I7O)h<$ z9*eWhr^g_}5a+Y3qfTD*Kr5pNPAm)iZ&kU;#4@Z`)j?a{&h{?t`{{hA$Jrc>M=N#m 
z;tV*0!jIAVXbZ9(UBLF}xhhdFDn8og`SvE#shit9945(q8fl#k`#l<5Gb1&DD?R-{QqdBcGOG%y=fD`@h+>ya^=G>;S@V+)_PT)N z1+-W_Hxr#FKgOwE%hC{9xM083*hmWhmoT@ZXeoTzaUdx{=T^{23XX@L5NuQ-%E{w9 zFW5K48I7sS)A31_k6Xl>Cd_-~I$Xb~$r^OxM)xsf?#^>%ZPJc?3nZ+b=R+9)RJ;<` z?a{L?j}V&S`zyiO_M`P%bNuuczHgaoojvz-oznNW*r`%NmGs0&+}|rrW#E9vZ_|H! zZ`ymeS*SU|1ZpvbQt?6ma{NB9?}cwKOozwrsKj9l(LIZQ ztmqcy0z7S62jOHXY{MSZkGK1^)jt@A$M{}BX^IwiE8V?|8}Z+tx_Ur$Av(W5G8s>l z=-pY7(~I-<41 zkfkLqe4y&?_rf<>0rO>Vx=(YPz=PZMV$t5ymA8#j<9jihTn|qU91szkSmY^!m?IS= zJ*9<|`){@CobRs2GUrmj>;m@PeaRfDE3ZZfCxkF>WB4fc&EI6%sw@uv#eT!u&IwD& z9iRj25dvv(0O5`}v1Zi&=8beyaA8S{=k#APIo`=1?NgT5m>$PgY`ZTi0bx3vY=b(M z7UD?b2FVn}<%0ZVQ&ODUk_5A* zW={bRTTSk0fZ?P#xk!!eWT=LGE&O?zN6h7_vAjn}7V9Wnu0LF~u;gZzr~&A)qKd(+ zmwWPyi48G6tZ&M7sVpNS?X*gokFr^tow#K$7YoZ5ZNcp2OCz^D6&sdZ2J(X@Iwtyi*pb%W^6|EYkN6nSvdPvd zvby!@aHb}L^LyuaJyNSnwMifq8ya6E%U#q&xvcpshDi3m#t+t#l%b!qzEzZ! zjaGd~PH4RjM)bRWu3bm47uBqd`moGW%jr?xTVZ#PxmC@H2S@-l4q4r$BYNhD_X=1- zC2I}`IqHGsb^+zxiiUT*q8Kqu(ak>zyn|TJp89i_q5gyf1(5en9IdLYv3m`^Jh?wT z$kW%F`thYWZMDZlx_GQKu84(D@f}Q9_A(c=^`rHm6h?LGPo1_P4_I9g12~fKd`#rx zGPc59=}1om@KwL;V2fYyUhXpQWtwY?;8<&Y#JO0NwA8HiK-gsfUACP|&Enf`N3Q%B zGtFj+Bayy|IFcsky<81yD}CuP5w|xXbXzK9DFXn&5B8;B!*lzN`GUN$K(3!7S!uci>0Y7oe$Kzu~Y@|AY`^-uU!K(}(@z zIPD}$lTr?3l5o^oV#;jW1{Nuk`;ER`Q=Fc@bd1&5qIEF2=vNtYZjeM@u9qH{w>^x7 ze627$37b`=zN|A&egt9KQa#;2_9X4y0MG^g7>7}~MzcftQw6=J^c~D2(s>P@J`)L< z+}(;tL0}4a)@R25`lj*cF=zo|-bgS6mB)b$L_LvfB-fLH5?HA4_Dw5B5L$v`x4%rc zAzFhlKcJ*?v2yM2UW)EtrIh3EMX-F6>+u&co?`6ovx4xiA-INtN8YM$?Gn;jVlu?2 z5E>-H>vk>HhO|U{F(~t>st7_3zlVpd+WIE z86g#2g(4R2yjuGMbHYgAaH5K)sK-HO~EG#iL(VmRra!d@p; z&5#yA;sHXwQ>H9@t@;dG|ItgGQS*qh983q7S} zLlBCD6hX`r*4VhQl=cjnr{d^YCotzinivu>WNj4o>VKWu`@=s#f>&@vUpSF2H}ePP*FQ8}66Rz%Y?S8K24w%W zkF-Q&FD{Z=kY5+uk0NI_jJ1^R=_{t8pu!HFa+iG-$H#MTBwk0oH1E~Cqvl3{h|Gz+rQnd1u6IM zln1=pZm+Kc_l>k%Evc!gtRFdEJC!6EzUK2kT_|a5BZG8{A?$B)0Ia4WjVEaNN6(}d z;{X4Hy=jS!jkV)zWc#T2`W=DXyXmN<6KV#A0|(W=$Q((@W-SP7zo4LCNN8x+)`ea1 z0*dd74GjksbkdTXOt_DkdA%}(`NW=&+wQ?>_&&dhgntE+7|hPbT05A6npgTpDB!SN 
znV(+}0kq~|qOdQ{6+bx=vB;?V`WO=vzs+~}YW%imXomHfz ze`|la{aHTKqJ6z~0`fV-YqiKIHZD%U`4BfVu8@`ATG;_RS1NK-*^!HUd@wvVG2N)k zqHc2eIDc@9sGhrGPR8(?sYYzy$o@nw+q==dtBF7JP0GLfgkvELA^I|0(WIX(ERYKo zvVbdpToos!=;#b;@*(w){f1ZA?G^_lt$yz%B_=2Pu9)QZo%QcHU@%x48Nl$SMNWB( zopSBHO@=Up)V_gflyA%_yoYVnl8}*MPa)(2My*k-_p%p2XhEU?Gvl}v!WdpfQY^MHp+j3uZ$kHX{SyVoyzbuC5B66Bv1v zw#Hsagvw!LVublu?p?afIXE~X#z%hT=KAOI)b%97fAy*j;({Fd(7CR?NFjohS+x7B zQ=%Zln8JbX-}PS3nq}iWu+TU{hMt!$0 zmc%^&YGu8yScoEE9pnu9_rt%d*#BRg)W6?4^eI)!yvpC}Yq=Wo{|Mm#p3J|}`YM80 zO$H4Adlyw^xPM94Uaznnmaq3ebpW-haF1UF?e!8dOo{(r>4b6XGTtlW%0G|ZgzY?? z@JA8EK~_F~U{>z0=eYOdj@BS{GPFupLG~fCH;+JPH7xAc1*2+1^3>`#vZNO5|C>G4 zu=9<&fZ^4^Z#@1jlma*sFDry}%K*s|Cts}L^3pK#It)evhwPyW>H{f zwEk{%=;t<@WJ@{+)=1TRLB;<}J8hWVca9Bfre8Fk`b2$%>?3aY61U46MTws}xOH}p zq&)t!A|x@(86gLy#DeH+qU-=V$dm(pkpAjQ2MudO|GPpUrzR?jqk?EMxQv3-U6+s{ z+YqWp`{hjmq@C)2R~IT}>mDCLJFb?GV0VUFI4TXKW6l3vk9iauJ(dS{3EH(!LR!WF z7!_@1_pq+$wuaB5dmmZ0jz%FKU*xeUaq}`>6C;obn*LFASxWt?k436n^uIT{rqQ}R zb<;LwUm|o{lS%GqA-)2rav29Q~dwEYGah>eO-~`<3ua1q#s+9l(-^m2%9YP!z=fV%+j4a)G4Ax6N6?*X0Zmo|FR`K^Z7xz|0e?hq_Ta zt1(44wOyOy7%Siv!*wD%K5{T}(8O)Mm?>XKZ$V;-R;9!K1!+~O6_ZK_yVQUZD_rs` zzzIiv2xcri5-Q*X7Fc<& zYRH3yl+spfHBnmMY$B@hUh4S0sq-_nHaWJiO%Cd>n#}6Cuqu?7 z1UM5mnb8b6E7$vm=@l^PTGm0+uB$bW>uE}&y$fT&d^7UOKnkrrl+7<2r{euNl(DT% zesO!~tP5-*kpkrv)vf|)i}X*ajqkv_5sr)6H8S*{yN5H(YjNE*qT}fSzqXn4tMp>y9_?iz=Q-r1a>l&fLax%YiFxbQqPLFt~iLLC0Cv#34D8`Px4V=t3jmhsDvAlSEoZ@E{Q#=h%m1CQk6qVR$ z(<+NJE-B(uP~3iZQiwVkkW{o*M^Bq(ncgO!Q$4D^9oAvp82C&=GpiuKDr^$%7&ty- zYq$_(L97s1x!6T?L~k0FHJfUIPL5_kMEB$O)w|Tp5S+@@XIL=?$C`ZvxD}K=UP$4Q zevBrQeg_I$gcxkj;O6d@!EQSM`dBqIOBQ=j>yKx<2ZzO>feSkgD7v=2{>@B z?oV^g`4w#T5_FYyd3Dov+w$YBR{CS&Rub%1l}*MlRJ*-Q#C*SZMAjM4kXj&j7136k z5U`uJx?QBqS!?zLWdxg9aIxdGN@gO|6nDFg2<>f zd^tx7u1ErbH=7XgyZZwS2I!EqF;G@kU&?pz*yZh$%y?5>V%gEq!o>%fJCQ8@aOtZ_ zb}|uR(_j=^3-o)qDWnL}uZ_#xU;d-cwepW-4J-E2AyrudhL^UQx@a*U4w>l2dik}7 z+ZVNBn{kw3O-Sw>t_CA{%oa4|7sA%KAnGld1O8Y6GUTCy{tEa%2^YSsEWNeT21HD! 
z)Fg|IMt<+esN3nYy6xyp@69CULmcrAda{St!xWjPa*3t^g~GtlFsFhNYV4gkPCYUW zrP4pn01a*-2^jEiR0G5bD-ZWlT#FSje21oUz(kW9bK_jVx;JlmSz&g9U3JO0w=d3; z`hy0X(O%6u7D3sWwSn~oiDa}}>M}p#sYX}z=m#a!5yM*%`0VK71&5Mbl$MdNp{hY1 zq4qtQpki}kF}$^3Lrqq8`CN~o>WyXYj6<_Ec`KtRAIVX^5sF0nb4os>weB(Ge(v4zNATz5MGVJ6HQwIaUYAj05W>3fowQ zGfIlherp-J10WP7R*^jq6t$oF;zgNZlMX8F2U`e%3=*7G+_?R(1(p8l4a*FnsUrp& z_L^}`g!K3OxooN`&{yDgzq(;?8VGxGU}#W(C_qw6jp+L80 zE5qGHcu{-8Aepw{4vC+V{fY2Vw3yz_fbMryXHu@&{1qb_1w2*NUa#+!4TjDcnwkZ# zEX-DOe~i^1J3Bb%o3fD*%YF;Eo+J97$33CZnSgs_rMhR#+dFZ5&$le;>}J;4IeG0_ z&y8mD-v_HWYVNvJ=>l-xBE@i%dnSM|-VA1#)I4Umr>BH7q#j_fl_R8cE6?FHzbxox zQDIl`W4krl;g9z1v|j4Y2yO_i-8a}cbw34^Nx(1iy2C}`4>`Wd; z|41L3Is1Ip8*H`!wp@Mmzy#_PVUx&Ok+}93V99@@!}dAF{BBFSlZ;mIUE-s2$aH@u z4fsbUXIM-!ImW6^G4qK(-myif>FpJqY!uvi+_Bk4VM#-csmf6P9%s8`={lSwJGdOzN zBVD<+_D>ji5zZiqgb&|z9Hz>*Cv*vIR^X_)kdrcKaNo=fxel6~PX4puUNPR&cqnRt zY|59hJ95UBV>)87plvH3Fetb=hr&#lyw^r;T*15XWs*l*ufE=oEN3V0Lw7ALODU); zML5H@)os5e=VFBywceovEBvu_r~lU}hj=07 zywt>{3hu%)-ghjd6BnwaB)o#pn;qk4K3;l{l#zGGr3v~EgYNSRv)fK8FD7u7!&9HU zgpo+~0Jqget7%c9V;_knVaPHFJb)qlEluNxE9{+uJ~!X3o#uIaB1995g0(GHc(Q-` z$d{Ue#;lcb!G2^jj&3gckn;q_&Y%s#^=gpuGZf+0?xj_xm}jHr_9q5g%I?KaB=Vc{j%A8?S%>H#4pvuJ+SV z?LH3FEntIt4{8P3lBDgfri_lJ&9e=ha8^1D_;mjL(~>nUZy0ia zuP#b_hDK>(HG02V@#se!?Hl!TA4puuWeAK%&j5mhevFde{gA4~KGBG(Wmuf*|tP@JrD*TYVj}mPk&FWxhln=zA+KKXxNHxf>QbZ%%{P(^tTOG-d-YODdzud>R&8FhSUb0`PK#2kQ*Mz{uYS&>!A`k zP-D||J=$I$Ena!3PR`sSodmx)T&Qh(IffG26JWj=X+3Tkt1&A6q{V+`Yg;BM}Dm-$-2`esnY(HiLKL-c)F6z{9XYynoo!&%A z@IL%1nRKb*w--D?9Qk1G%ZH@lY^aPqwIsx{@VB$BDFFkEWq z15U9Ib>b^hSJ_{N(Chy!>aRzHnm!rA$%etp>$v6ffh?K%8&a!>3=*w*Lq4-}`077D zr)K~J71%h{G*t!{@s=v>4Csu%#V5eSH_{1r0R&|j7Bq$~GgR(U++&lSKxNt%6vP+E z^Y9Z<_{F?uhK>W>QBE6jvh+TR3-zB zyR}jOFe*UOb3)^MDZVSr?dvuFu$k5MG$>cP)a(a7vfCUTFx5(}@cACoMikRc6F8+_ zSLL_U!PMtM$iG4dT_Z+wDpDVr?k4Q+Afh?Tmfa20bI>h92Rp|OMSpOWnNwsXqB#`4 zumCU5l^_smerGV1%*E$F2eG^ADYSBHQnI{_t5JITk8QosW)qa^bFi5vY26H{!Y`GA zl>0W{OL`IuuI$Z`{oh7#DtTL6xbD>Bd*&z^t#8y-!TPdt0a{64YKtHH(?6#p%!p+8 
zQZmOSdm!T|zy>Z(EZxhZ0R_?C1+n&(EDtLG$LE@am3|Nw1Uz|mf}At&lWX3xVR8s- zu}#{yg2smhCil?XJsnt%KJj?Sw_K#!P?s6DcI3x?%hXbO=-`>RmbqXAaK8OdoXmZl zFiBw!VFz{<1?d-;>o!+K=y|~wVUk#K_Gh*+QZd;KzZNSJFWG4}P;>FX`29hM$^LR-^hJ_F}6A=?eq=a2G|<>GpN{|_~{ BGcy1H diff --git a/docs/complex-relay.html b/docs/complex-relay.html deleted file mode 100644 index 21524bfd0..000000000 --- a/docs/complex-relay.html +++ /dev/null @@ -1,85 +0,0 @@ - - - - - - Complex Relay - - -

Complex Relay

-

- Both Source Chain and Target Chains have Bridge Messages pallets deployed. They also have required - finality pallets deployed - we don't care about finality type here - they can be either Bridge GRANDPA, - or Bridge Parachains finality pallets, or any combination of those.
-

-

- There are 4-6 relayer subprocesses inside the Complex Relayer. They include two message relayers, - serving the lane in both directions and 2-4 Complex Relayers (depending on the finality type of Source - and Target Chains).
-

-

- The following diagram shows the way the complex relayer serves the lane in single direction. Everything - below may be applied to the opposite direction if you'll swap the Source and Target Chains. -

-
- sequenceDiagram - participant Source Chain - participant Complex Relayer - participant Target Chain - - Note right of Source Chain: Finalized: 480, Target Finalized: 50, Sent Messages: 42, Confirmed Messages: 42 - Note left of Target Chain: Finalized: 60, Source Finalized: 420, Received Messages: 42 - - Source Chain ->> Source Chain: someone Sends Message 43 - Source Chain ->> Source Chain: Import and Finalize Block 481 - - Source Chain ->> Complex Relayer: notes new outbound message 43 at Source Chain Block 481 - Note right of Complex Relayer: can't deliver message 43, Source Chain Block 481 is not relayed - Complex Relayer ->> Complex Relayer: asks on-demand Finality Relayer to relay Source Chain Block 481 - - Source Chain ->> Complex Relayer: Read Finality Proof of Block 481 - Complex Relayer ->> Target Chain: Submit Finality Proof of Block 481 - Target Chain ->> Target Chain: Import and Finalize Block 61 - Note left of Target Chain: Finalized: 61, Source Finalized: 481, Received Messages: 42 - - Source Chain ->> Complex Relayer: Read Proof of Message 43 at Block 481 - Complex Relayer ->> Target Chain: Submit Proof of Message 43 at Block 481 - Target Chain ->> Target Chain: Import and Finalize Block 62 - Note left of Target Chain: Finalized: 62, Source Finalized: 481, Received Messages: { rewarded: 42, messages-relayer-account: [43] } - - Target Chain ->> Complex Relayer: notes new unrewarded relayer at Target Chain Block 62 - Note right of Complex Relayer: can't relay delivery confirmations because Target Chain Block 62 is not relayed - Complex Relayer ->> Complex Relayer: asks on-demand Finality Relayer to relay Target Chain Block 62 - - Target Chain ->> Complex Relayer: Read Finality Proof of Block 62 - Complex Relayer ->> Source Chain: Submit Finality Proof of Block 62 - Source Chain ->> Source Chain: Import and Finalize Block 482 - Note right of Source Chain: Finalized: 482, Target Finalized: 62, Confirmed Messages: 42 - - Target Chain ->> Complex 
Relayer: Read Proof of Message 43 Delivery at Block 62 - Complex Relayer ->> Source Chain: Submit Proof of Message 43 Delivery at Block 612 - Source Chain ->> Source Chain: rewards messages-relayer-account for delivering message [43] - Source Chain ->> Source Chain: prune delivered message 43 from runtime storage - Note right of Source Chain: Finalized: 482, Target Finalized: 61, Confirmed Messages: 43 - - Source Chain ->> Source Chain: someone Sends Message 44 - Source Chain ->> Source Chain: Import and Finalize Block 483 - - Source Chain ->> Complex Relayer: notes new outbound message 44 at Source Chain Block 483 and new confirmed message 43 - Note right of Complex Relayer: can't deliver message 44, Source Chain Block 483 is not relayed - Complex Relayer ->> Complex Relayer: asks on-demand Finality Relayer to relay Source Chain Block 483 - - Source Chain ->> Complex Relayer: Read Finality Proof of Block 483 - Complex Relayer ->> Target Chain: Submit Finality Proof of Block 483 - Target Chain ->> Target Chain: Import and Finalize Block 63 - Note left of Target Chain: Finalized: 63, Source Finalized: 483, Received Messages: { rewarded: 42, messages-relayer-account: [43] } - - Source Chain ->> Complex Relayer: Read Proof of Message 44 and Proof of Message 43 reward at Block 483 - Complex Relayer ->> Target Chain: Submit Proof of Message 44 and Proof of Message 43 reward at Block 483 - Target Chain ->> Target Chain: Import and Finalize Block 64 - Note left of Target Chain: Finalized: 64, Source Finalized: 483, Received Messages: { rewarded: 43, messages-relayer-account: [44] }--> -
- - - - diff --git a/docs/grandpa-finality-relay.html b/docs/grandpa-finality-relay.html deleted file mode 100644 index 4136621b1..000000000 --- a/docs/grandpa-finality-relay.html +++ /dev/null @@ -1,47 +0,0 @@ - - - - - - GRANDPA Finality Relay - - -

GRANDPA Finality Relay

-

- Source Chain is running GRANDPA Finality Gadget. Bridge GRANDPA finality pallet is deployed at - Target Chain runtime. Relayer is configured to relay Source Chain finality to Target Chain. -

-
- sequenceDiagram - participant Source Chain - participant Relayer - participant Target Chain - Note left of Source Chain: Best: 500, Finalized: 480, Authorities Set Index: 42 - Note right of Target Chain: Uninitialized - - Source Chain ->> Relayer: Read Initialization Data - Relayer ->> Target Chain: Initialize Bridge GRANDPA Finality Pallet - Note right of Target Chain: Finalized: 480, Authorities Set Index: 42 - - Source Chain ->> Source Chain: Import Block 501 - Source Chain ->> Source Chain: Import Block 502 - Source Chain ->> Source Chain: Finalize Block 495 - Source Chain ->> Relayer: Read Finality Proof of Block 495 - Relayer ->> Target Chain: Finality Proof of Block 495 - Note right of Target Chain: Finalized: 495, Authorities Set Index: 42 - - Source Chain ->> Source Chain: Import Block 503 that changes Authorities Set to 43 - Source Chain ->> Source Chain: Finalize Block 500 - Note left of Relayer: Relayer Misses Finality Notification for Block 500 - - Source Chain ->> Source Chain: Import Block 504 - Source Chain ->> Source Chain: Finalize Mandatory Block 503 - Source Chain ->> Source Chain: Finalize Block 504 - Source Chain ->> Relayer: Read Finality Proof of Mandatory Block 503 - Relayer ->> Target Chain: Finality Proof of Block 503 - Note right of Target Chain: Finalized: 503, Authorities Set Index: 43 -
- - - - diff --git a/docs/high-level-overview.md b/docs/high-level-overview.md deleted file mode 100644 index d6d6fb3f0..000000000 --- a/docs/high-level-overview.md +++ /dev/null @@ -1,184 +0,0 @@ -# High-Level Bridge Documentation - -This document gives a brief, abstract description of main components that may be found in this repository. If you want -to see how we're using them to build Rococo <> Westend (Kusama <> Polkadot) bridge, please refer to the [Polkadot <> -Kusama Bridge](./polkadot-kusama-bridge-overview.md). - -## Purpose - -This repo contains all components required to build a trustless connection between standalone Substrate chains, that are -using GRANDPA finality, their parachains or any combination of those. On top of this connection, we offer a messaging -pallet that provides means to organize messages exchange. - -On top of that layered infrastructure, anyone may build their own bridge applications - e.g. [XCM -messaging](./polkadot-kusama-bridge-overview.md), [encoded calls -messaging](https://github.com/paritytech/parity-bridges-common/releases/tag/encoded-calls-messaging) and so on. - -## Terminology - -Even though we support (and require) two-way bridging, the documentation will generally talk about a one-sided -interaction. That's to say, we will only talk about syncing finality proofs and messages from a _source_ chain to a -_target_ chain. This is because the two-sided interaction is really just the one-sided interaction with the source and -target chains switched. - -The bridge has both on-chain (pallets) and offchain (relayers) components. - -## On-chain components - -On-chain bridge components are pallets that are deployed at the chain runtime. Finality pallets require deployment at -the target chain, while messages pallet needs to be deployed at both, source and target chains. - -### Bridge GRANDPA Finality Pallet - -A GRANDPA light client of the source chain built into the target chain's runtime. 
It provides a "source of truth" about -the source chain headers which have been finalized. This is useful for higher level applications. - -The pallet tracks current GRANDPA authorities set and only accepts finality proofs (GRANDPA justifications), generated -by the current authorities set. The GRANDPA protocol itself requires current authorities set to generate explicit -justification for the header that enacts next authorities set. Such headers and their finality proofs are called -mandatory in the pallet and relayer pays no fee for such headers submission. - -The pallet does not require all headers to be imported or provided. The relayer itself chooses which headers he wants to -submit (with the exception of mandatory headers). - -More: [pallet level documentation and code](../modules/grandpa/). - -### Bridge Parachains Finality Pallet - -Parachains are not supposed to have their own finality, so we can't use bridge GRANDPA pallet to verify their finality -proofs. Instead, they rely on their relay chain finality. The parachain header is considered final, when it is accepted -by the [`paras` -pallet](https://github.com/paritytech/polkadot/tree/1a034bd6de0e76721d19aed02a538bcef0787260/runtime/parachains/src/paras) -at its relay chain. Obviously, the relay chain block, where it is accepted, must also be finalized by the relay chain -GRANDPA gadget. - -That said, the bridge parachains pallet accepts storage proof of one or several parachain heads, inserted to the -[`Heads`](https://github.com/paritytech/polkadot/blob/1a034bd6de0e76721d19aed02a538bcef0787260/runtime/parachains/src/paras/mod.rs#L642) -map of the [`paras` -pallet](https://github.com/paritytech/polkadot/tree/1a034bd6de0e76721d19aed02a538bcef0787260/runtime/parachains/src/paras). -To verify this storage proof, the pallet uses relay chain header, imported earlier by the bridge GRANDPA pallet. - -The pallet may track multiple parachains at once and those parachains may use different primitives. 
So the parachain -header decoding never happens at the pallet level. For maintaining the headers order, the pallet uses relay chain header -number. - -More: [pallet level documentation and code](../modules/parachains/). - -### Bridge Messages Pallet - -The pallet is responsible for queuing messages at the source chain and receiving the messages proofs at the target -chain. The messages are sent to the particular _lane_, where they are guaranteed to be received in the same order they -are sent. The pallet supports many lanes. - -The lane has two ends. Outbound lane end is storing number of messages that have been sent and the number of messages -that have been received. Inbound lane end stores the number of messages that have been received and also a map that maps -messages to relayers that have delivered those messages to the target chain. - -The pallet has three main entrypoints: -- the `send_message` may be used by the other runtime pallets to send the messages; -- the `receive_messages_proof` is responsible for parsing the messages proof and handing messages over to the dispatch -code; -- the `receive_messages_delivery_proof` is responsible for parsing the messages delivery proof and rewarding relayers -that have delivered the message. - -Many things are abstracted by the pallet: -- the message itself may mean anything, the pallet doesn't care about its content; -- the message dispatch happens during delivery, but it is decoupled from the pallet code; -- the messages proof and messages delivery proof are verified outside of the pallet; -- the relayers incentivization scheme is defined outside of the pallet. - -Outside of the messaging pallet, we have a set of adapters, where messages and delivery proofs are regular storage -proofs. The proofs are generated at the bridged chain and require bridged chain finality. So messages pallet, in this -case, depends on one of the finality pallets. 
The messages are XCM messages and we are using XCM executor to dispatch -them on receival. You may find more info in [Polkadot <> Kusama Bridge](./polkadot-kusama-bridge-overview.md) document. - -More: [pallet level documentation and code](../modules/messages/). - -### Bridge Relayers Pallet - -The pallet is quite simple. It just registers relayer rewards and has an entrypoint to collect them. When the rewards -are registered and the reward amount is configured outside of the pallet. - -More: [pallet level documentation and code](../modules/relayers/). - -## Offchain Components - -Offchain bridge components are separate processes, called relayers. Relayers are connected both to the source chain and -target chain nodes. Relayers are reading state of the source chain, compare it to the state of the target chain and, if -state at target chain needs to be updated, submits target chain transaction. - -### GRANDPA Finality Relay - -The task of relay is to submit source chain GRANDPA justifications and their corresponding headers to the Bridge GRANDPA -Finality Pallet, deployed at the target chain. For that, the relay subscribes to the source chain GRANDPA justifications -stream and submits every new justification it sees to the target chain GRANDPA light client. In addition, relay is -searching for mandatory headers and submits their justifications - without that the pallet will be unable to move -forward. - -More: [GRANDPA Finality Relay Sequence Diagram](./grandpa-finality-relay.html), [pallet level documentation and -code](../relays/finality/). - -### Parachains Finality Relay - -The relay connects to the source _relay_ chain and the target chain nodes. It doesn't need to connect to the tracked -parachain nodes. 
The relay looks at the -[`Heads`](https://github.com/paritytech/polkadot/blob/1a034bd6de0e76721d19aed02a538bcef0787260/runtime/parachains/src/paras/mod.rs#L642) -map of the [`paras` -pallet](https://github.com/paritytech/polkadot/tree/1a034bd6de0e76721d19aed02a538bcef0787260/runtime/parachains/src/paras) -in source chain, and compares the value with the best parachain head, stored in the bridge parachains pallet at the -target chain. If new parachain head appears at the relay chain block `B`, the relay process **waits** until header `B` -or one of its ancestors appears at the target chain. Once it is available, the storage proof of the map entry is -generated and is submitted to the target chain. - -As its on-chain component (which requires bridge GRANDPA pallet to be deployed nearby), the parachains finality relay -requires GRANDPA finality relay to be running in parallel. Without it, the header `B` or any of its children's finality -at source won't be relayed at target, and target chain won't be able to verify generated storage proof. - -More: [Parachains Finality Relay Sequence Diagram](./parachains-finality-relay.html), [code](../relays/parachains/). - -### Messages Relay - -Messages relay is actually two relays that are running in a single process: messages delivery relay and delivery -confirmation relay. Even though they are more complex and have many caveats, the overall algorithm is the same as in -other relays. - -Message delivery relay connects to the source chain and looks at the outbound lane end, waiting until new messages are -queued there. Once they appear at the source block `B`, the relay start waiting for the block `B` or its descendant -appear at the target chain. Then the messages storage proof is generated and submitted to the bridge messages pallet at -the target chain. 
In addition, the transaction may include the storage proof of the outbound lane state - that proves -that relayer rewards have been paid and this data (map of relay accounts to the delivered messages) may be pruned from -the inbound lane state at the target chain. - -Delivery confirmation relay connects to the target chain and starts watching the inbound lane end. When new messages are -delivered to the target chain, the corresponding _source chain account_ is inserted to the map in the inbound lane data. -Relay detects that, say, at the target chain block `B` and waits until that block or its descendant appears at the -source chain. Once that happens, the relay crafts a storage proof of that data and sends it to the messages pallet, -deployed at the source chain. - -As you can see, the messages relay also requires finality relay to be operating in parallel. Since messages relay -submits transactions to both source and target chains, it requires both _source-to-target_ and _target-to-source_ -finality relays. They can be GRANDPA finality relays or GRANDPA+parachains finality relays, depending on the type of -connected chain. - -More: [Messages Relay Sequence Diagram](./messages-relay.html), [pallet level documentation and -code](../relays/messages/). - -### Complex Relay - -Every relay transaction has its cost. The only transaction, that is "free" to relayer is when the mandatory GRANDPA -header is submitted. The relay that feeds the bridge with every relay chain and/or parachain head it sees, will have to -pay a (quite large) cost. And if no messages are sent through the bridge, that is just waste of money. - -We have a special relay mode, called _complex relay_, where relay mostly sleeps and only submits transactions that are -required for the messages/confirmations delivery. This mode starts two message relays (in both directions). All required -finality relays are also started in a special _on-demand_ mode. 
In this mode they do not submit any headers without -special request. As always, the only exception is when GRANDPA finality relay sees the mandatory header - it is -submitted without such request. - -The message relays are watching their lanes and when, at some block `B`, they see new messages/confirmations to be -delivered, they are asking on-demand relays to relay this block `B`. On-demand relays does that and then message relay -may perform its job. If on-demand relay is a parachain finality relay, it also runs its own on-demand GRANDPA relay, -which is used to relay required relay chain headers. - -More: [Complex Relay Sequence Diagram](./complex-relay.html), -[code](../relays/bin-substrate/src/cli/relay_headers_and_messages/). diff --git a/docs/messages-relay.html b/docs/messages-relay.html deleted file mode 100644 index c4dab9901..000000000 --- a/docs/messages-relay.html +++ /dev/null @@ -1,78 +0,0 @@ - - - - - - Messages Relay - - -

Messages Relay

-

- Both Source Chain and Target Chains have Bridge Messages pallets deployed. They also have required - finality pallets deployed - we don't care about finality type here - they can be either Bridge GRANDPA, - or Bridge Parachains finality pallets, or any combination of those. -

-

- Finality Relayer represents two actual relayers - one relays Source Chain Finality to Target Chain. - And another one relays Target Chain Finality to Source Chain. -

-
- sequenceDiagram - participant Source Chain - participant Finality Relayer - participant Messages Relayer - participant Target Chain - - Note right of Source Chain: Finalized: 480, Target Finalized: 50, Sent Messages: 42, Confirmed Messages: 42 - Note left of Target Chain: Finalized: 60, Source Finalized: 420, Received Messages: 42 - - Source Chain ->> Source Chain: someone Sends Message 43 - Source Chain ->> Source Chain: Import and Finalize Block 481 - - Source Chain ->> Messages Relayer: notes new outbound message 43 at Source Chain Block 481 - Note right of Messages Relayer: can't deliver message 43, Source Chain Block 481 is not relayed - - Source Chain ->> Finality Relayer: Read Finality Proof of Block 481 - Finality Relayer ->> Target Chain: Submit Finality Proof of Block 481 - Target Chain ->> Target Chain: Import and Finalize Block 61 - Note left of Target Chain: Finalized: 61, Source Finalized: 481, Received Messages: 42 - - Source Chain ->> Messages Relayer: Read Proof of Message 43 at Block 481 - Messages Relayer ->> Target Chain: Submit Proof of Message 43 at Block 481 - Target Chain ->> Target Chain: Import and Finalize Block 62 - Note left of Target Chain: Finalized: 62, Source Finalized: 481, Received Messages: { rewarded: 42, messages-relayer-account: [43] } - - Target Chain ->> Messages Relayer: notes new unrewarded relayer at Target Chain Block 62 - Note right of Messages Relayer: can't relay delivery confirmations because Target Chain Block 62 is not relayed - - Target Chain ->> Finality Relayer: Read Finality Proof of Block 62 - Finality Relayer ->> Source Chain: Submit Finality Proof of Block 62 - Source Chain ->> Source Chain: Import and Finalize Block 482 - Note right of Source Chain: Finalized: 482, Target Finalized: 62, Confirmed Messages: 42 - - Target Chain ->> Messages Relayer: Read Proof of Message 43 Delivery at Block 62 - Messages Relayer ->> Source Chain: Submit Proof of Message 43 Delivery at Block 612 - Source Chain ->> Source 
Chain: rewards messages-relayer-account for delivering message [43] - Source Chain ->> Source Chain: prune delivered message 43 from runtime storage - Note right of Source Chain: Finalized: 482, Target Finalized: 61, Confirmed Messages: 43 - - Source Chain ->> Source Chain: someone Sends Message 44 - Source Chain ->> Source Chain: Import and Finalize Block 483 - - Source Chain ->> Messages Relayer: notes new outbound message 44 at Source Chain Block 483 and new confirmed message 43 - Note right of Messages Relayer: can't deliver message 44, Source Chain Block 483 is not relayed - - Source Chain ->> Finality Relayer: Read Finality Proof of Block 483 - Finality Relayer ->> Target Chain: Submit Finality Proof of Block 483 - Target Chain ->> Target Chain: Import and Finalize Block 63 - Note left of Target Chain: Finalized: 63, Source Finalized: 483, Received Messages: { rewarded: 42, messages-relayer-account: [43] } - - Source Chain ->> Messages Relayer: Read Proof of Message 44 and Proof of Message 43 reward at Block 483 - Messages Relayer ->> Target Chain: Submit Proof of Message 44 and Proof of Message 43 reward at Block 483 - Target Chain ->> Target Chain: Import and Finalize Block 64 - Note left of Target Chain: Finalized: 64, Source Finalized: 483, Received Messages: { rewarded: 43, messages-relayer-account: [44] } -
- - - - diff --git a/docs/parachains-finality-relay.html b/docs/parachains-finality-relay.html deleted file mode 100644 index 4fc1392b8..000000000 --- a/docs/parachains-finality-relay.html +++ /dev/null @@ -1,55 +0,0 @@ - - - - - - Parachains Finality Relay - - -

Parachains Finality Relay

-

- Source Relay Chain is running GRANDPA Finality Gadget. Source Parachain is a parachain of the Source - Relay Chain. Bridge GRANDPA finality pallet is deployed at Target Chain runtime and is "connected" - to the Source Relay Chain. Bridge Parachains finality pallet is deployed at Target Chain and is - configured to track the Source Parachain. GRANDPA Relayer is configured to relay Source Relay Chain - finality to Target Chain. Parachains Relayer is configured to relay Source Parachain headers finality - to Target Chain. -

-
- sequenceDiagram - participant Source Parachain - participant Source Relay Chain - participant GRANDPA Relayer - participant Parachains Relayer - participant Target Chain - - Note left of Source Parachain: Best: 125 - Note left of Source Relay Chain: Finalized: 500, Best Parachain at Finalized: 120 - Note right of Target Chain: Best Relay: 480, Best Parachain: 110 - - Source Parachain ->> Source Parachain: Import Block 126 - Source Parachain ->> Source Relay Chain: Receives the Parachain block 126 - - Source Relay Chain ->> Source Relay Chain: Import block 501 - Source Relay Chain ->> Source Relay Chain: Finalize block 501 - Note left of Source Relay Chain: Finalized: 501, Best Parachain at Finalized: 126 - - Source Relay Chain ->> Parachains Relayer: notes new Source Parachain Block 126 - Note left of Parachains Relayer: can't relay Source Parachain Block 126, because it requires at least Source Relay Block 501 at Target Chain - - Source Relay Chain ->> Source Relay Chain: Import block 502 - Source Relay Chain ->> Source Relay Chain: Finalize block 502 - - Source Relay Chain ->> GRANDPA Relayer: read GRANDPA Finality Proof of Block 502 - GRANDPA Relayer ->> Target Chain: submit GRANDPA Finality Proof of Block 502 - Note right of Target Chain: Best Relay: 502, Best Parachain: 110 - - Target Chain ->> Parachains Relayer: notes finalized Source Relay Block 502 at Target Chain - Source Relay Chain ->> Parachains Relayer: read Parachain Finality Proof at Relay Block 502 - Parachains Relayer ->> Target Chain: submit Parachain Finality Proof at Relay Block 502 - Note right of Target Chain: Best Relay: 502, Best Parachain: 126 -
- - - - diff --git a/docs/polkadot-kusama-bridge-overview.md b/docs/polkadot-kusama-bridge-overview.md deleted file mode 100644 index 08036f0b0..000000000 --- a/docs/polkadot-kusama-bridge-overview.md +++ /dev/null @@ -1,129 +0,0 @@ -# Polkadot <> Kusama Bridge Overview - -This document describes how we use all components, described in the [High-Level Bridge -Documentation](./high-level-overview.md), to build the XCM bridge between Kusama and Polkadot. In this case, our -components merely work as a XCM transport (like XCMP/UMP/HRMP), between chains that are not a part of the same consensus -system. - -The overall architecture may be seen in [this diagram](./polkadot-kusama-bridge.html). - -## Bridge Hubs - -All operations at relay chain are expensive. Ideally all non-mandatory transactions must happen on parachains. That's -why we are planning to have two parachains - Polkadot Bridge Hub under Polkadot consensus and Kusama Bridge Hub under -Kusama consensus. - -The Bridge Hub will have all required bridge pallets in its runtime. We hope that later, other teams will be able to use -our bridge hubs too and have their pallets there. - -The Bridge Hub will use the base token of the ecosystem - KSM at Kusama Bridge Hub and DOT at Polkadot Bridge Hub. The -runtime will have minimal set of non-bridge pallets, so there's not much you can do directly on bridge hubs. - -## Connecting Parachains - -You won't be able to directly use bridge hub transactions to send XCM messages over the bridge. Instead, you'll need to -use other parachains transactions, which will use HRMP to deliver messages to the Bridge Hub. The Bridge Hub will just -queue these messages in its outbound lane, which is dedicated to deliver messages between two parachains. - -Our first planned bridge will connect the Polkadot and Kusama Asset Hubs. A bridge between those two parachains would -allow Asset Hub Polkadot accounts to hold wrapped KSM tokens and Asset Hub Kusama accounts to hold wrapped DOT tokens. 
- -For that bridge (pair of parachains under different consensus systems) we'll be using the lane 00000000. Later, when -other parachains will join the bridge, they will be using other lanes for their messages. - -## Running Relayers - -We are planning to run our own complex relayer for the lane 00000000. The relayer will relay Kusama/Polkadot GRANDPA -justifications to the bridge hubs at the other side. It'll also relay finalized Kusama Bridge Hub and Polkadot Bridge -Hub heads. This will only happen when messages will be queued at hubs. So most of time relayer will be idle. - -There's no any active relayer sets, or something like that. Anyone may start its own relayer and relay queued messages. -We are not against that and, as always, appreciate any community efforts. Of course, running relayer has the cost. Apart -from paying for the CPU and network, the relayer pays for transactions at both sides of the bridge. We have a mechanism -for rewarding relayers. - -### Compensating the Cost of Message Delivery Transactions - -One part of our rewarding scheme is that the cost of message delivery, for honest relayer, is zero. The honest relayer -is the relayer, which is following our rules: - -- we do not reward relayers for submitting GRANDPA finality transactions. The only exception is submitting mandatory - headers (headers which are changing the GRANDPA authorities set) - the cost of such transaction is zero. The relayer - will pay the full cost for submitting all other headers; - -- we do not reward relayers for submitting parachain finality transactions. The relayer will pay the full cost for - submitting parachain finality transactions; - -- we compensate the cost of message delivery transactions that have actually delivered the messages. So if your - transaction has claimed to deliver messages `[42, 43, 44]`, but, because of some reasons, has actually delivered - messages `[42, 43]`, the transaction will be free for relayer. 
If it has not delivered any messages, then the relayer - pays the full cost of the transaction; - -- we compensate the cost of message delivery and all required finality calls, if they are part of the same - [`frame_utility::batch_all`](https://github.com/paritytech/substrate/blob/891d6a5c870ab88521183facafc811a203bb6541/frame/utility/src/lib.rs#L326) - transaction. Of course, the calls inside the batch must be linked - e.g. the submitted parachain head must be used to - prove messages. Relay header must be used to prove parachain head finality. If one of calls fails, or if they are not - linked together, the relayer pays the full transaction cost. - -Please keep in mind that the fee of "zero-cost" transactions is still withdrawn from the relayer account. But the -compensation is registered in the `pallet_bridge_relayers::RelayerRewards` map at the target bridge hub. The relayer may -later claim all its rewards later, using the `pallet_bridge_relayers::claim_rewards` call. - -*A side note*: why we don't simply set the cost of useful transactions to zero? That's because the bridge has its cost. -If we won't take any fees, it would mean that the sender is not obliged to pay for its messages. And Bridge Hub -collators (and, maybe, "treasury") are not receiving any payment for including transactions. More about this later, in -the [Who is Rewarding Relayers](#who-is-rewarding-relayers) section. - -### Message Delivery Confirmation Rewards - -In addition to the "zero-cost" message delivery transactions, the relayer is also rewarded for: - -- delivering every message. The reward is registered during delivery confirmation transaction at the Source Bridge Hub.; - -- submitting delivery confirmation transaction. The relayer may submit delivery confirmation that e.g. confirms delivery - of four messages, of which the only one (or zero) messages is actually delivered by this relayer. It receives some fee - for confirming messages, delivered by other relayers. 
- -Both rewards may be claimed using the `pallet_bridge_relayers::claim_rewards` call at the Source Bridge Hub. - -### Who is Rewarding Relayers - -Obviously, there should be someone who is paying relayer rewards. We want bridge transactions to have a cost, so we -can't use fees for rewards. Instead, the parachains using the bridge, use sovereign accounts on both sides of the bridge -to cover relayer rewards. - -Bridged Parachains will have sovereign accounts at bridge hubs. For example, the Kusama Asset Hub will have an account -at the Polkadot Bridge Hub. The Polkadot Asset Hub will have an account at the Kusama Bridge Hub. The sovereign accounts -are used as a source of funds when the relayer is calling the `pallet_bridge_relayers::claim_rewards`. - -Since messages lane is only used by the pair of parachains, there's no collision between different bridges. E.g. Kusama -Asset Hub will only reward relayers that are delivering messages from Kusama Asset Hub. The Kusama Asset Hub sovereign -account is not used to cover rewards of bridging with some other Polkadot Parachain. - -### Multiple Relayers and Rewards - -Our goal is to incentivize running honest relayers. But we have no relayers sets, so at any time anyone may submit -message delivery transaction, hoping that the cost of this transaction will be compensated. So what if some message is -currently queued and two relayers are submitting two identical message delivery transactions at once? Without any -special means, the cost of first included transaction will be compensated and the cost of the other one won't. A honest, -but unlucky relayer will lose some money. In addition, we'll waste some portion of block size and weight, which may be -used by other useful transactions. - -To solve the problem, we have two signed extensions ([generate_bridge_reject_obsolete_headers_and_messages! 
-{}](../bin/runtime-common/src/lib.rs) and -[RefundRelayerForMessagesFromParachain](../bin/runtime-common/src/refund_relayer_extension.rs)), that are preventing -bridge transactions with obsolete data from including into the block. We are rejecting following transactions: - -- transactions, that are submitting the GRANDPA justification for the best finalized header, or one of its ancestors; - -- transactions, that are submitting the proof of the current best parachain head, or one of its ancestors; - -- transactions, that are delivering already delivered messages. If at least one of messages is not yet delivered, the - transaction is not rejected; - -- transactions, that are confirming delivery of already confirmed messages. If at least one of confirmations is new, the - transaction is not rejected; - -- [`frame_utility::batch_all`](https://github.com/paritytech/substrate/blob/891d6a5c870ab88521183facafc811a203bb6541/frame/utility/src/lib.rs#L326) - transactions, that have both finality and message delivery calls. All restrictions from the [Compensating the Cost of - Message Delivery Transactions](#compensating-the-cost-of-message-delivery-transactions) are applied. diff --git a/docs/polkadot-kusama-bridge.html b/docs/polkadot-kusama-bridge.html deleted file mode 100644 index bf248adb5..000000000 --- a/docs/polkadot-kusama-bridge.html +++ /dev/null @@ -1,67 +0,0 @@ - - - - - - Polkadot <> Kusama Bridge - - -

Polkadot <> Kusama Bridge

-

- Our bridge connects two parachains - Kusama Bridge Hub and Polkadot Bridge Hub. Messages that - are sent over bridge have XCM format and we are using existing architecture to dispatch them. - Since both Polkadot, Kusama and their parachains already have means to exchange XCM messages - within the same consensus system (HRMP, VMP, ...), it means that we are able to connect all those - chains with our bridge. -

-

- In our architecture, the lane that is used to relay messages over the bridge is determined by - the XCM source and destinations. So e.g. bridge between Asset Hubs Polkadot and Kusama (and opposite direction) - will use the lane 00000000, bridge between some other Polkadot Parachain and some other Kusama Parachain - will use the lane 00000001 and so on. -

-
- flowchart LR - subgraph Polkadot Consensus - polkadot(((Polkadot))) - asset_hub_polkadot(((Polkadot Asset Hub))) - polkadot_bh(((Polkadot Bridge Hub))) - - polkadot---asset_hub_polkadot - polkadot---polkadot_bh - - asset_hub_polkadot-->|Send Message Using HRMP|polkadot_bh - - polkadot_bh-->|Send Message Using HRMP|asset_hub_polkadot - asset_hub_polkadot-->|Dispatch the Message|asset_hub_polkadot - end - subgraph Kusama Consensus - kusama_bh(((Kusama Bridge Hub))) - asset_hub_kusama(((Kusama Asset Hub))) - kusama(((Kusama))) - - kusama---asset_hub_kusama - kusama---kusama_bh - - kusama_bh-->|Send Message Using HRMP|asset_hub_kusama - asset_hub_kusama-->|Dispatch the Message|asset_hub_kusama - - asset_hub_kusama-->|Send Message Using HRMP|kusama_bh - end - - polkadot_bh<===>|Message is relayed to the Bridged Chain using lane 00000000|kusama_bh - - linkStyle 2 stroke:red - linkStyle 7 stroke:red - linkStyle 8 stroke:red - - linkStyle 3 stroke:green - linkStyle 4 stroke:green - linkStyle 9 stroke:green -
- - - \ No newline at end of file diff --git a/docs/running-relayer.md b/docs/running-relayer.md deleted file mode 100644 index 710810a47..000000000 --- a/docs/running-relayer.md +++ /dev/null @@ -1,343 +0,0 @@ -# Running your own bridge relayer - -:warning: :construction: Please read the [Disclaimer](#disclaimer) section first :construction: :warning: - -## Disclaimer - -There are several things you should know before running your own relayer: - -- initial bridge version (we call it bridges v1) supports any number of relayers, but **there's no guaranteed -compensation** for running a relayer and/or submitting valid bridge transactions. Most probably you'll end up -spending more funds than getting from rewards - please accept this fact; - -- even if your relayer has managed to submit a valid bridge transaction that has been included into the bridge -hub block, there's no guarantee that you will be able to claim your compensation for that transaction. That's -because compensations are paid from the account, controlled by relay chain governance and it could have no funds -to compensate your useful actions. We'll be working on a proper process to resupply it on-time, but we can't -provide any guarantee until that process is well established. - -## A Brief Introduction into Relayers and our Compensations Scheme - -Omitting details, relayer is an offchain process that is connected to both bridged chains. It looks at the -outbound bridge messages queue and submits message delivery transactions to the target chain. There's a lot -of details behind that simple phrase - you could find more info in the -[High-Level Bridge Overview](./high-level-overview.md) document. - -Reward that is paid to relayer has two parts. The first part static and is controlled by the governance. -It is rather small initially - e.g. you need to deliver `10_000` Kusama -> Polkadot messages to gain single -KSM token. - -The other reward part is dynamic. 
So to deliver an XCM message from one BridgeHub to another, we'll need to -submit two transactions on different chains. Every transaction has its cost, which is: - -- dynamic, because e.g. message size can change and/or fee factor of the target chain may change; - -- quite large, because those transactions are quite heavy (mostly in terms of size, not weight). - -We are compensating the cost of **valid**, **minimal** and **useful** bridge-related transactions to -relayer, that has submitted such transaction. Valid here means that the transaction doesn't fail. Minimal -means that all data within transaction call is actually required for the transaction to succeed. Useful -means that all supplied data in transaction is new and yet unknown to the target chain. - -We have implemented a relayer that is able to craft such transactions. The rest of document contains a detailed -information on how to deploy this software on your own node. - -## Relayers Concurrency - -As it has been said above, we are not compensating cost of transactions that are not **useful**. For -example, if message `100` has already been delivered from Kusama Bridge Hub to Polkadot Bridge Hub, then another -transaction that delivers the same message `100` won't be **useful**. Hence, no compensation to relayer that -has submitted that second transaction. - -But what if there are several relayers running? They are noticing the same queued message `100` and -simultaneously submit identical message delivery transactions. You may expect that there'll be one lucky -relayer, whose transaction would win the "race" and which will receive the compensation and reward. And -there'll be several other relayers, losing some funds on their unuseful transactions. - -But actually, we have a solution that invalidates transactions of "unlucky" relayers before they are -included into the block. So at least you may be sure that you won't waste your funds on duplicate transactions. - -
-Some details? - -All **unuseful** transactions are rejected by our -[transaction extension](https://github.com/paritytech/polkadot-sdk/blob/master/bridges/bin/runtime-common/src/refund_relayer_extension.rs), -which also handles transaction fee compensations. You may find more info on unuseful (aka obsolete) transactions -by lurking in the code. - -We also have the WiP prototype of relayers coordination protocol, where relayers will get some guarantee -that their transactions will be prioritized over other relayers transactions at their assigned slots. -That is planned for the future version of bridge and the progress is -[tracked here](https://github.com/paritytech/parity-bridges-common/issues/2486). - -
- -## Prerequisites - -Let's focus on the bridge between Polkadot and Kusama Bridge Hubs. Let's also assume that we want to start -a relayer that "serves" an initial lane [`0x00000001`](https://github.com/polkadot-fellows/runtimes/blob/9ce1bbbbcd7843b3c76ba4d43c036bc311959e9f/system-parachains/bridge-hubs/bridge-hub-kusama/src/bridge_to_polkadot_config.rs#L54). - -
-Lane? - -Think of lane as a queue of messages that need to be delivered to the other/bridged chain. The lane is -bidirectional, meaning that there are four "endpoints". Two "outbound" endpoints (one at every chain), contain -messages that need to be delivered to the bridged chain. Two "inbound" are accepting messages from the bridged -chain and also remember the relayer, who has delivered message(s) to reward it later. - -
- -The same steps may be performed for other lanes and bridges as well - you'll just need to change several parameters. - -So to start your relayer instance, you'll need to prepare: - -- an address of ws/wss RPC endpoint of the Kusama relay chain; - -- an address of ws/wss RPC endpoint of the Polkadot relay chain; - -- an address of ws/wss RPC endpoint of the Kusama Bridge Hub chain; - -- an address of ws/wss RPC endpoint of the Polkadot Bridge Hub chain; - -- an account on Kusama Bridge Hub; - -- an account on Polkadot Bridge Hub. - -For RPC endpoints, you could start your own nodes, or use some public community nodes. Nodes are not meant to be -archive or provide access to insecure RPC calls. - -To create an account on Bridge Hubs, you could use XCM teleport functionality. E.g. if you have an account on -the relay chain, you could use the `teleportAssets` call of `xcmPallet` and send asset -`V3 { id: Concrete(0, Here), Fungible: }` to beneficiary `V3(0, X1(AccountId32()))` -on destination `V3(0, X1(Parachain(1002)))`. To estimate amounts you need, please refer to the [Costs](#costs) -section of the document. - -## Registering your Relayer Account (Optional, But Please Read) - -Bridge transactions are quite heavy and expensive. We want to minimize block space that can be occupied by -invalid bridge transactions and prioritize valid transactions over invalid. That is achieved by **optional** -relayer registration. Transactions, signed by relayers with active registration, gain huge priority boost. -In exchange, such relayers may be slashed if they submit **invalid** or **non-minimal** transaction. - -Transactions, signed by relayers **without** active registration, on the other hand, receive no priority -boost. It means that if there is active registered relayer, most likely all transactions from unregistered -will be counted as **unuseful**, not included into the block and unregistered relayer won't get any reward -for his operations. 
- -Before registering, you should know several things about your funds: - -- to register, you need to hold significant amount of funds on your relayer account. As of now, it is - [100 KSM](https://github.com/polkadot-fellows/runtimes/blob/9ce1bbbbcd7843b3c76ba4d43c036bc311959e9f/system-parachains/bridge-hubs/bridge-hub-kusama/src/bridge_to_polkadot_config.rs#L71C14-L71C43) - for registration on Kusama Bridge Hub and - [500 DOT](https://github.com/polkadot-fellows/runtimes/blob/9ce1bbbbcd7843b3c76ba4d43c036bc311959e9f/system-parachains/bridge-hubs/bridge-hub-polkadot/src/bridge_to_kusama_config.rs#L71C14-L71C43) - for registration on Polkadot Bridge Hub; - -- when you are registered, those funds are reserved on relayer account and you can't transfer them. - -The registration itself, has three states: active, inactive or expired. Initially, it is active, meaning that all -your transactions that are **validated** on top of block, where it is active get priority boost. Registration -becomes expired when the block with the number you have specified during registration is "mined". It is the -`validTill` parameter of the `register` call (see below). After that `validTill` block, you may unregister and get -your reserved funds back. There's also an intermediate point between those blocks - it is the `validTill - LEASE`, -where `LEASE` is the the chain constant, controlled by the governance. Initially it is set to `300` blocks. -All your transactions, **validated** between the `validTill - LEASE` and `validTill` blocks do not get the -priority boost. Also, it is forbidden to specify `validTill` such that the `validTill - currentBlock` is less -than the `LEASE`. - -
-Example? - -| Bridge Hub Block | Registration State | Comment | -| ----------------- | ------------------ | ------------------------------------------------------ | -| 100 | Active | You have submitted a tx with the `register(1000)` call | -| 101 | Active | Your message delivery transactions are boosted | -| 102 | Active | Your message delivery transactions are boosted | -| ... | Active | Your message delivery transactions are boosted | -| 700 | Inactive | Your message delivery transactions are not boosted | -| 701 | Inactive | Your message delivery transactions are not boosted | -| ... | Inactive | Your message delivery transactions are not boosted | -| 1000 | Expired | Your may submit a tx with the `deregister` call | - -
- -So once you have enough funds on your account and have selected the `validTill` parameter value, you -could use the Polkadot JS apps to submit an extrinsic. If you want priority boost for your transactions -on the Kusama Bridge Hub, open the -[Polkadot JS Apps](https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Fkusama-bridge-hub-rpc.polkadot.io#/extrinsics) -and submit the `register` extrinsic from the `bridgeRelayers` pallet: - -![Register Extrinsic](./bridge-relayers-register.png) - -To deregister, submit the simple `deregister` extrinsic when registration is expired: - -![Deregister Extrinsic](./bridge-relayers-deregister.png) - -At any time, you can prolong your registration by calling the `register` with the larger `validTill`. - -## Costs - -Your relayer account (on both Bridge Hubs) must hold enough funds to be able to pay costs of bridge -transactions. If your relayer behaves correctly, those costs will be compensated and you will be -able to claim it later. - -**IMPORTANT**: you may add tip to your bridge transactions to boost their priority. But our -compensation mechanism never refunds transaction tip, so all tip tokens will be lost. - -
-Types of bridge transactions - -There are two types of bridge transactions: - -- message delivery transaction brings queued message(s) from one Bridge Hub to another. We record - the fact that this specific (your) relayer has delivered those messages; - -- message confirmation transaction confirms that some message have been delivered and also brings - back information on how many messages (your) relayer has delivered. We use this information later - to register delivery rewards on the source chain. - -Several messages/confirmations may be included in a single bridge transaction. Apart from this -data, bridge transaction may include finality and storage proofs, required to prove authenticity of -this data. - -
- -To deliver and get reward for a single message, the relayer needs to submit two transactions. One -at the source Bridge Hub and one at the target Bridge Hub. Below are costs for Polkadot <> Kusama -messages (as of today): - -- to deliver a single Polkadot -> Kusama message, you would need to pay around `0.06 KSM` at Kusama - Bridge Hub and around `1.62 DOT` at Polkadot Bridge Hub; - -- to deliver a single Kusama -> Polkadot message, you would need to pay around `1.70 DOT` at Polkadot - Bridge Hub and around `0.05 KSM` at Kusama Bridge Hub. - -Those values are not constants - they depend on call weights (that may change from release to release), -on transaction sizes (that depends on message size and chain state) and congestion factor. In any -case - it is your duty to make sure that the relayer has enough funds to pay transaction fees. - -## Claiming your Compensations and Rewards - -Hopefully you have successfully delivered some messages and now can claim your compensation and reward. -This requires submitting several transactions. But first, let's check that you actually have something to -claim. For that, let's check the state of the pallet that tracks all rewards. - -To check your rewards at the Kusama Bridge Hub, go to the -[Polkadot JS Apps](https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Fkusama-bridge-hub-rpc.polkadot.io#/chainstate) -targeting Kusama Bridge Hub, select the `bridgeRelayers` pallet, choose `relayerRewards` map and -your relayer account. Then: - -- set the `laneId` to `0x00000001` - -- set the `bridgedChainId` to `bhpd`; - -- check the both variants of the `owner` field: `ThisChain` is used to pay for message delivery transactions - and `BridgedChain` is used to pay for message confirmation transactions. - -If check shows that you have some rewards, you can craft the claim transaction, with similar parameters. 
-For that, go to `Extrinsics` tab of the -[Polkadot JS Apps](https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Fkusama-bridge-hub-rpc.polkadot.io#/extrinsics) -and submit the following transaction (make sure to change `owner` before): - -![Claim Rewards Extrinsic](./bridge-relayers-claim-rewards.png) - -To claim rewards on Polkadot Bridge Hub you can follow the same process. The only difference is that you -need to set value of the `bridgedChainId` to `bhks`. - -## Starting your Relayer - -### Starting your Rococo <> Westend Relayer - -You may find the relayer image reference in the -[Releases](https://github.com/paritytech/parity-bridges-common/releases) -of this repository. Make sure to check supported (bundled) versions -of release there. For Rococo <> Westend bridge, normally you may use the -latest published release. The release notes always contain the docker -image reference and source files, required to build relayer manually. - -Once you have the docker image, update variables and run the following script: -```sh -export DOCKER_IMAGE= - -export ROCOCO_HOST= -export ROCOCO_PORT= -# or set it to '--rococo-secure' if wss is used above -export ROCOCO_IS_SECURE= -export BRIDGE_HUB_ROCOCO_HOST= -export BRIDGE_HUB_ROCOCO_PORT= -# or set it to '--bridge-hub-rococo-secure' if wss is used above -export BRIDGE_HUB_ROCOCO_IS_SECURE= -export BRIDGE_HUB_ROCOCO_KEY_FILE= - -export WESTEND_HOST= -export WESTEND_PORT= -# or set it to '--westend-secure' if wss is used above -export WESTEND_IS_SECURE= -export BRIDGE_HUB_WESTEND_HOST= -export BRIDGE_HUB_WESTEND_PORT= -# or set it to '--bridge-hub-westend-secure ' if wss is used above -export BRIDGE_HUB_WESTEND_IS_SECURE= -export BRIDGE_HUB_WESTEND_KEY_FILE= - -# you can get extended relay logs (e.g. 
for debugging issues) by passing `-e RUST_LOG=bridge=trace` -# argument to the `docker` binary -docker run \ - -v $BRIDGE_HUB_ROCOCO_KEY_FILE:/bhr.key \ - -v $BRIDGE_HUB_WESTEND_KEY_FILE:/bhw.key \ - $DOCKER_IMAGE \ - relay-headers-and-messages bridge-hub-rococo-bridge-hub-westend \ - --rococo-host $ROCOCO_HOST \ - --rococo-port $ROCOCO_PORT \ - $ROCOCO_IS_SECURE \ - --rococo-version-mode Auto \ - --bridge-hub-rococo-host $BRIDGE_HUB_ROCOCO_HOST \ - --bridge-hub-rococo-port $BRIDGE_HUB_ROCOCO_PORT \ - $BRIDGE_HUB_ROCOCO_IS_SECURE \ - --bridge-hub-rococo-version-mode Auto \ - --bridge-hub-rococo-signer-file /bhr.key \ - --bridge-hub-rococo-transactions-mortality 16 \ - --westend-host $WESTEND_HOST \ - --westend-port $WESTEND_PORT \ - $WESTEND_IS_SECURE \ - --westend-version-mode Auto \ - --bridge-hub-westend-host $BRIDGE_HUB_WESTEND_HOST \ - --bridge-hub-westend-port $BRIDGE_HUB_WESTEND_PORT \ - $BRIDGE_HUB_WESTEND_IS_SECURE \ - --bridge-hub-westend-version-mode Auto \ - --bridge-hub-westend-signer-file /bhw.key \ - --bridge-hub-westend-transactions-mortality 16 \ - --lane 00000002 -``` - -### Starting your Polkadot <> Kusama Relayer - -*Work in progress, coming soon* - -### Watching your relayer state - -Our relayer provides some Prometheus metrics that you may convert into some fancy Grafana dashboards -and alerts. By default, metrics are exposed at port `9616`. To expose endpoint to the localhost, change -the docker command by adding following two lines: - -```sh -docker run \ - .. - -p 127.0.0.1:9616:9616 \ # tell Docker to bind container port 9616 to host port 9616 - # and listen for connections on the host' localhost interface - .. - $DOCKER_IMAGE \ - relay-headers-and-messages bridge-hub-rococo-bridge-hub-westend \ - --prometheus-host 0.0.0.0 \ # tell `substrate-relay` binary to accept Prometheus endpoint - # connections from everywhere - .. 
-``` - -You can find more info on configuring Prometheus and Grafana in the -[Monitor your node](https://wiki.polkadot.network/docs/maintain-guides-how-to-monitor-your-node) -guide from Polkadot wiki. - -We have our own set of Grafana dashboards and alerts. You may use them for inspiration. -Please find them in this folder: - -- for Rococo <> Westend bridge: [rococo-westend](https://github.com/paritytech/parity-bridges-common/tree/master/deployments/bridges/rococo-westend). - -- for Polkadot <> Kusama bridge: *work in progress, coming soon* diff --git a/modules/beefy/Cargo.toml b/modules/beefy/Cargo.toml deleted file mode 100644 index 2c552430c..000000000 --- a/modules/beefy/Cargo.toml +++ /dev/null @@ -1,63 +0,0 @@ -[package] -name = "pallet-bridge-beefy" -version = "0.1.0" -description = "Module implementing BEEFY on-chain light client used for bridging consensus of substrate-based chains." -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true -publish = false - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } -log = { workspace = true } -scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -serde = { optional = true, workspace = true } - -# Bridge Dependencies - -bp-beefy = { path = "../../primitives/beefy", default-features = false } -bp-runtime = { path = "../../primitives/runtime", default-features = false } - -# Substrate Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -frame-system = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", 
default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[dev-dependencies] -sp-consensus-beefy = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -mmr-lib = { package = "ckb-merkle-mountain-range", version = "0.5.2" } -pallet-beefy-mmr = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -pallet-mmr = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -rand = "0.8.5" -sp-io = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -bp-test-utils = { path = "../../primitives/test-utils" } - -[features] -default = ["std"] -std = [ - "bp-beefy/std", - "bp-runtime/std", - "codec/std", - "frame-support/std", - "frame-system/std", - "log/std", - "scale-info/std", - "serde/std", - "sp-core/std", - "sp-runtime/std", - "sp-std/std", -] -try-runtime = [ - "frame-support/try-runtime", - "frame-system/try-runtime", - "pallet-beefy-mmr/try-runtime", - "pallet-mmr/try-runtime", - "sp-runtime/try-runtime", -] diff --git a/modules/beefy/src/lib.rs b/modules/beefy/src/lib.rs deleted file mode 100644 index 27c839210..000000000 --- a/modules/beefy/src/lib.rs +++ /dev/null @@ -1,651 +0,0 @@ -// Copyright 2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. 
If not, see . - -//! BEEFY bridge pallet. -//! -//! This pallet is an on-chain BEEFY light client for Substrate-based chains that are using the -//! following pallets bundle: `pallet-mmr`, `pallet-beefy` and `pallet-beefy-mmr`. -//! -//! The pallet is able to verify MMR leaf proofs and BEEFY commitments, so it has access -//! to the following data of the bridged chain: -//! -//! - header hashes -//! - changes of BEEFY authorities -//! - extra data of MMR leafs -//! -//! Given the header hash, other pallets are able to verify header-based proofs -//! (e.g. storage proofs, transaction inclusion proofs, etc.). - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -use bp_beefy::{ChainWithBeefy, InitializationData}; -use sp_std::{boxed::Box, prelude::*}; - -// Re-export in crate namespace for `construct_runtime!` -pub use pallet::*; - -mod utils; - -#[cfg(test)] -mod mock; -#[cfg(test)] -mod mock_chain; - -/// The target that will be used when publishing logs related to this pallet. -pub const LOG_TARGET: &str = "runtime::bridge-beefy"; - -/// Configured bridged chain. -pub type BridgedChain = >::BridgedChain; -/// Block number, used by configured bridged chain. -pub type BridgedBlockNumber = bp_runtime::BlockNumberOf>; -/// Block hash, used by configured bridged chain. -pub type BridgedBlockHash = bp_runtime::HashOf>; - -/// Pallet initialization data. -pub type InitializationDataOf = - InitializationData, bp_beefy::MmrHashOf>>; -/// BEEFY commitment hasher, used by configured bridged chain. -pub type BridgedBeefyCommitmentHasher = bp_beefy::BeefyCommitmentHasher>; -/// BEEFY validator id, used by configured bridged chain. -pub type BridgedBeefyAuthorityId = bp_beefy::BeefyAuthorityIdOf>; -/// BEEFY validator set, used by configured bridged chain. -pub type BridgedBeefyAuthoritySet = bp_beefy::BeefyAuthoritySetOf>; -/// BEEFY authority set, used by configured bridged chain. 
-pub type BridgedBeefyAuthoritySetInfo = bp_beefy::BeefyAuthoritySetInfoOf>; -/// BEEFY signed commitment, used by configured bridged chain. -pub type BridgedBeefySignedCommitment = bp_beefy::BeefySignedCommitmentOf>; -/// MMR hashing algorithm, used by configured bridged chain. -pub type BridgedMmrHashing = bp_beefy::MmrHashingOf>; -/// MMR hashing output type of `BridgedMmrHashing`. -pub type BridgedMmrHash = bp_beefy::MmrHashOf>; -/// The type of the MMR leaf extra data used by the configured bridged chain. -pub type BridgedBeefyMmrLeafExtra = bp_beefy::BeefyMmrLeafExtraOf>; -/// BEEFY MMR proof type used by the pallet -pub type BridgedMmrProof = bp_beefy::MmrProofOf>; -/// MMR leaf type, used by configured bridged chain. -pub type BridgedBeefyMmrLeaf = bp_beefy::BeefyMmrLeafOf>; -/// Imported commitment data, stored by the pallet. -pub type ImportedCommitment = bp_beefy::ImportedCommitment< - BridgedBlockNumber, - BridgedBlockHash, - BridgedMmrHash, ->; - -/// Some high level info about the imported commitments. -#[derive(codec::Encode, codec::Decode, scale_info::TypeInfo)] -pub struct ImportedCommitmentsInfoData { - /// Best known block number, provided in a BEEFY commitment. However this is not - /// the best proven block. The best proven block is this block's parent. - best_block_number: BlockNumber, - /// The head of the `ImportedBlockNumbers` ring buffer. - next_block_number_index: u32, -} - -#[frame_support::pallet(dev_mode)] -pub mod pallet { - use super::*; - use bp_runtime::{BasicOperatingMode, OwnedBridgeModule}; - use frame_support::pallet_prelude::*; - use frame_system::pallet_prelude::*; - - #[pallet::config] - pub trait Config: frame_system::Config { - /// The upper bound on the number of requests allowed by the pallet. - /// - /// A request refers to an action which writes a header to storage. - /// - /// Once this bound is reached the pallet will reject all commitments - /// until the request count has decreased. 
- #[pallet::constant] - type MaxRequests: Get; - - /// Maximal number of imported commitments to keep in the storage. - /// - /// The setting is there to prevent growing the on-chain state indefinitely. Note - /// the setting does not relate to block numbers - we will simply keep as much items - /// in the storage, so it doesn't guarantee any fixed timeframe for imported commitments. - #[pallet::constant] - type CommitmentsToKeep: Get; - - /// The chain we are bridging to here. - type BridgedChain: ChainWithBeefy; - } - - #[pallet::pallet] - #[pallet::without_storage_info] - pub struct Pallet(PhantomData<(T, I)>); - - #[pallet::hooks] - impl, I: 'static> Hooks> for Pallet { - fn on_initialize(_n: BlockNumberFor) -> frame_support::weights::Weight { - >::mutate(|count| *count = count.saturating_sub(1)); - - Weight::from_parts(0, 0) - .saturating_add(T::DbWeight::get().reads(1)) - .saturating_add(T::DbWeight::get().writes(1)) - } - } - - impl, I: 'static> OwnedBridgeModule for Pallet { - const LOG_TARGET: &'static str = LOG_TARGET; - type OwnerStorage = PalletOwner; - type OperatingMode = BasicOperatingMode; - type OperatingModeStorage = PalletOperatingMode; - } - - #[pallet::call] - impl, I: 'static> Pallet - where - BridgedMmrHashing: 'static + Send + Sync, - { - /// Initialize pallet with BEEFY authority set and best known finalized block number. - #[pallet::call_index(0)] - #[pallet::weight((T::DbWeight::get().reads_writes(2, 3), DispatchClass::Operational))] - pub fn initialize( - origin: OriginFor, - init_data: InitializationDataOf, - ) -> DispatchResult { - Self::ensure_owner_or_root(origin)?; - - let is_initialized = >::exists(); - ensure!(!is_initialized, >::AlreadyInitialized); - - log::info!(target: LOG_TARGET, "Initializing bridge BEEFY pallet: {:?}", init_data); - Ok(initialize::(init_data)?) - } - - /// Change `PalletOwner`. - /// - /// May only be called either by root, or by `PalletOwner`. 
- #[pallet::call_index(1)] - #[pallet::weight((T::DbWeight::get().reads_writes(1, 1), DispatchClass::Operational))] - pub fn set_owner(origin: OriginFor, new_owner: Option) -> DispatchResult { - >::set_owner(origin, new_owner) - } - - /// Halt or resume all pallet operations. - /// - /// May only be called either by root, or by `PalletOwner`. - #[pallet::call_index(2)] - #[pallet::weight((T::DbWeight::get().reads_writes(1, 1), DispatchClass::Operational))] - pub fn set_operating_mode( - origin: OriginFor, - operating_mode: BasicOperatingMode, - ) -> DispatchResult { - >::set_operating_mode(origin, operating_mode) - } - - /// Submit a commitment generated by BEEFY authority set. - /// - /// It will use the underlying storage pallet to fetch information about the current - /// authority set and best finalized block number in order to verify that the commitment - /// is valid. - /// - /// If successful in verification, it will update the underlying storage with the data - /// provided in the newly submitted commitment. - #[pallet::call_index(3)] - #[pallet::weight(0)] - pub fn submit_commitment( - origin: OriginFor, - commitment: BridgedBeefySignedCommitment, - validator_set: BridgedBeefyAuthoritySet, - mmr_leaf: Box>, - mmr_proof: BridgedMmrProof, - ) -> DispatchResult - where - BridgedBeefySignedCommitment: Clone, - { - Self::ensure_not_halted().map_err(Error::::BridgeModule)?; - ensure_signed(origin)?; - - ensure!(Self::request_count() < T::MaxRequests::get(), >::TooManyRequests); - - // Ensure that the commitment is for a better block. - let commitments_info = - ImportedCommitmentsInfo::::get().ok_or(Error::::NotInitialized)?; - ensure!( - commitment.commitment.block_number > commitments_info.best_block_number, - Error::::OldCommitment - ); - - // Verify commitment and mmr leaf. 
- let current_authority_set_info = CurrentAuthoritySetInfo::::get(); - let mmr_root = utils::verify_commitment::( - &commitment, - ¤t_authority_set_info, - &validator_set, - )?; - utils::verify_beefy_mmr_leaf::(&mmr_leaf, mmr_proof, mmr_root)?; - - // Update request count. - RequestCount::::mutate(|count| *count += 1); - // Update authority set if needed. - if mmr_leaf.beefy_next_authority_set.id > current_authority_set_info.id { - CurrentAuthoritySetInfo::::put(mmr_leaf.beefy_next_authority_set); - } - - // Import commitment. - let block_number_index = commitments_info.next_block_number_index; - let to_prune = ImportedBlockNumbers::::try_get(block_number_index); - ImportedCommitments::::insert( - commitment.commitment.block_number, - ImportedCommitment:: { - parent_number_and_hash: mmr_leaf.parent_number_and_hash, - mmr_root, - }, - ); - ImportedBlockNumbers::::insert( - block_number_index, - commitment.commitment.block_number, - ); - ImportedCommitmentsInfo::::put(ImportedCommitmentsInfoData { - best_block_number: commitment.commitment.block_number, - next_block_number_index: (block_number_index + 1) % T::CommitmentsToKeep::get(), - }); - if let Ok(old_block_number) = to_prune { - log::debug!( - target: LOG_TARGET, - "Pruning commitment for old block: {:?}.", - old_block_number - ); - ImportedCommitments::::remove(old_block_number); - } - - log::info!( - target: LOG_TARGET, - "Successfully imported commitment for block {:?}", - commitment.commitment.block_number, - ); - - Ok(()) - } - } - - /// The current number of requests which have written to storage. - /// - /// If the `RequestCount` hits `MaxRequests`, no more calls will be allowed to the pallet until - /// the request capacity is increased. - /// - /// The `RequestCount` is decreased by one at the beginning of every block. This is to ensure - /// that the pallet can always make progress. 
- #[pallet::storage] - #[pallet::getter(fn request_count)] - pub type RequestCount, I: 'static = ()> = StorageValue<_, u32, ValueQuery>; - - /// High level info about the imported commitments. - /// - /// Contains the following info: - /// - best known block number of the bridged chain, finalized by BEEFY - /// - the head of the `ImportedBlockNumbers` ring buffer - #[pallet::storage] - pub type ImportedCommitmentsInfo, I: 'static = ()> = - StorageValue<_, ImportedCommitmentsInfoData>>; - - /// A ring buffer containing the block numbers of the commitments that we have imported, - /// ordered by the insertion time. - #[pallet::storage] - pub(super) type ImportedBlockNumbers, I: 'static = ()> = - StorageMap<_, Identity, u32, BridgedBlockNumber>; - - /// All the commitments that we have imported and haven't been pruned yet. - #[pallet::storage] - pub type ImportedCommitments, I: 'static = ()> = - StorageMap<_, Blake2_128Concat, BridgedBlockNumber, ImportedCommitment>; - - /// The current BEEFY authority set at the bridged chain. - #[pallet::storage] - pub type CurrentAuthoritySetInfo, I: 'static = ()> = - StorageValue<_, BridgedBeefyAuthoritySetInfo, ValueQuery>; - - /// Optional pallet owner. - /// - /// Pallet owner has the right to halt all pallet operations and then resume it. If it is - /// `None`, then there are no direct ways to halt/resume pallet operations, but other - /// runtime methods may still be used to do that (i.e. `democracy::referendum` to update halt - /// flag directly or calling `halt_operations`). - #[pallet::storage] - pub type PalletOwner, I: 'static = ()> = - StorageValue<_, T::AccountId, OptionQuery>; - - /// The current operating mode of the pallet. - /// - /// Depending on the mode either all, or no transactions will be allowed. 
- #[pallet::storage] - pub type PalletOperatingMode, I: 'static = ()> = - StorageValue<_, BasicOperatingMode, ValueQuery>; - - #[pallet::genesis_config] - #[derive(frame_support::DefaultNoBound)] - pub struct GenesisConfig, I: 'static = ()> { - /// Optional module owner account. - pub owner: Option, - /// Optional module initialization data. - pub init_data: Option>, - } - - #[pallet::genesis_build] - impl, I: 'static> BuildGenesisConfig for GenesisConfig { - fn build(&self) { - if let Some(ref owner) = self.owner { - >::put(owner); - } - - if let Some(init_data) = self.init_data.clone() { - initialize::(init_data) - .expect("invalid initialization data of BEEFY bridge pallet"); - } else { - // Since the bridge hasn't been initialized we shouldn't allow anyone to perform - // transactions. - >::put(BasicOperatingMode::Halted); - } - } - } - - #[pallet::error] - pub enum Error { - /// The pallet has not been initialized yet. - NotInitialized, - /// The pallet has already been initialized. - AlreadyInitialized, - /// Invalid initial authority set. - InvalidInitialAuthoritySet, - /// There are too many requests for the current window to handle. - TooManyRequests, - /// The imported commitment is older than the best commitment known to the pallet. - OldCommitment, - /// The commitment is signed by unknown validator set. - InvalidCommitmentValidatorSetId, - /// The id of the provided validator set is invalid. - InvalidValidatorSetId, - /// The number of signatures in the commitment is invalid. - InvalidCommitmentSignaturesLen, - /// The number of validator ids provided is invalid. - InvalidValidatorSetLen, - /// There aren't enough correct signatures in the commitment to finalize the block. - NotEnoughCorrectSignatures, - /// MMR root is missing from the commitment. - MmrRootMissingFromCommitment, - /// MMR proof verification has failed. - MmrProofVerificationFailed, - /// The validators are not matching the merkle tree root of the authority set. 
- InvalidValidatorSetRoot, - /// Error generated by the `OwnedBridgeModule` trait. - BridgeModule(bp_runtime::OwnedBridgeModuleError), - } - - /// Initialize pallet with given parameters. - pub(super) fn initialize, I: 'static>( - init_data: InitializationDataOf, - ) -> Result<(), Error> { - if init_data.authority_set.len == 0 { - return Err(Error::::InvalidInitialAuthoritySet) - } - CurrentAuthoritySetInfo::::put(init_data.authority_set); - - >::put(init_data.operating_mode); - ImportedCommitmentsInfo::::put(ImportedCommitmentsInfoData { - best_block_number: init_data.best_block_number, - next_block_number_index: 0, - }); - - Ok(()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use bp_runtime::{BasicOperatingMode, OwnedBridgeModuleError}; - use bp_test_utils::generate_owned_bridge_module_tests; - use frame_support::{assert_noop, assert_ok, traits::Get}; - use mock::*; - use mock_chain::*; - use sp_consensus_beefy::mmr::BeefyAuthoritySet; - use sp_runtime::DispatchError; - - fn next_block() { - use frame_support::traits::OnInitialize; - - let current_number = frame_system::Pallet::::block_number(); - frame_system::Pallet::::set_block_number(current_number + 1); - let _ = Pallet::::on_initialize(current_number); - } - - fn import_header_chain(headers: Vec) { - for header in headers { - if header.commitment.is_some() { - assert_ok!(import_commitment(header)); - } - } - } - - #[test] - fn fails_to_initialize_if_already_initialized() { - run_test_with_initialize(32, || { - assert_noop!( - Pallet::::initialize( - RuntimeOrigin::root(), - InitializationData { - operating_mode: BasicOperatingMode::Normal, - best_block_number: 0, - authority_set: BeefyAuthoritySet { - id: 0, - len: 1, - keyset_commitment: [0u8; 32].into() - } - } - ), - Error::::AlreadyInitialized, - ); - }); - } - - #[test] - fn fails_to_initialize_if_authority_set_is_empty() { - run_test(|| { - assert_noop!( - Pallet::::initialize( - RuntimeOrigin::root(), - InitializationData { - operating_mode: 
BasicOperatingMode::Normal, - best_block_number: 0, - authority_set: BeefyAuthoritySet { - id: 0, - len: 0, - keyset_commitment: [0u8; 32].into() - } - } - ), - Error::::InvalidInitialAuthoritySet, - ); - }); - } - - #[test] - fn fails_to_import_commitment_if_halted() { - run_test_with_initialize(1, || { - assert_ok!(Pallet::::set_operating_mode( - RuntimeOrigin::root(), - BasicOperatingMode::Halted - )); - assert_noop!( - import_commitment(ChainBuilder::new(1).append_finalized_header().to_header()), - Error::::BridgeModule(OwnedBridgeModuleError::Halted), - ); - }) - } - - #[test] - fn fails_to_import_commitment_if_too_many_requests() { - run_test_with_initialize(1, || { - let max_requests = <::MaxRequests as Get>::get() as u64; - let mut chain = ChainBuilder::new(1); - for _ in 0..max_requests + 2 { - chain = chain.append_finalized_header(); - } - - // import `max_request` headers - for i in 0..max_requests { - assert_ok!(import_commitment(chain.header(i + 1))); - } - - // try to import next header: it fails because we are no longer accepting commitments - assert_noop!( - import_commitment(chain.header(max_requests + 1)), - Error::::TooManyRequests, - ); - - // when next block is "started", we allow import of next header - next_block(); - assert_ok!(import_commitment(chain.header(max_requests + 1))); - - // but we can't import two headers until next block and so on - assert_noop!( - import_commitment(chain.header(max_requests + 2)), - Error::::TooManyRequests, - ); - }) - } - - #[test] - fn fails_to_import_commitment_if_not_initialized() { - run_test(|| { - assert_noop!( - import_commitment(ChainBuilder::new(1).append_finalized_header().to_header()), - Error::::NotInitialized, - ); - }) - } - - #[test] - fn submit_commitment_works_with_long_chain_with_handoffs() { - run_test_with_initialize(3, || { - let chain = ChainBuilder::new(3) - .append_finalized_header() - .append_default_headers(16) // 2..17 - .append_finalized_header() // 18 - .append_default_headers(16) 
// 19..34 - .append_handoff_header(9) // 35 - .append_default_headers(8) // 36..43 - .append_finalized_header() // 44 - .append_default_headers(8) // 45..52 - .append_handoff_header(17) // 53 - .append_default_headers(4) // 54..57 - .append_finalized_header() // 58 - .append_default_headers(4); // 59..63 - import_header_chain(chain.to_chain()); - - assert_eq!( - ImportedCommitmentsInfo::::get().unwrap().best_block_number, - 58 - ); - assert_eq!(CurrentAuthoritySetInfo::::get().id, 2); - assert_eq!(CurrentAuthoritySetInfo::::get().len, 17); - - let imported_commitment = ImportedCommitments::::get(58).unwrap(); - assert_eq!( - imported_commitment, - bp_beefy::ImportedCommitment { - parent_number_and_hash: (57, chain.header(57).header.hash()), - mmr_root: chain.header(58).mmr_root, - }, - ); - }) - } - - #[test] - fn commitment_pruning_works() { - run_test_with_initialize(3, || { - let commitments_to_keep = >::CommitmentsToKeep::get(); - let commitments_to_import: Vec = ChainBuilder::new(3) - .append_finalized_headers(commitments_to_keep as usize + 2) - .to_chain(); - - // import exactly `CommitmentsToKeep` commitments - for index in 0..commitments_to_keep { - next_block(); - import_commitment(commitments_to_import[index as usize].clone()) - .expect("must succeed"); - assert_eq!( - ImportedCommitmentsInfo::::get().unwrap().next_block_number_index, - (index + 1) % commitments_to_keep - ); - } - - // ensure that all commitments are in the storage - assert_eq!( - ImportedCommitmentsInfo::::get().unwrap().best_block_number, - commitments_to_keep as TestBridgedBlockNumber - ); - assert_eq!( - ImportedCommitmentsInfo::::get().unwrap().next_block_number_index, - 0 - ); - for index in 0..commitments_to_keep { - assert!(ImportedCommitments::::get( - index as TestBridgedBlockNumber + 1 - ) - .is_some()); - assert_eq!( - ImportedBlockNumbers::::get(index), - Some(Into::into(index + 1)), - ); - } - - // import next commitment - next_block(); - 
import_commitment(commitments_to_import[commitments_to_keep as usize].clone()) - .expect("must succeed"); - assert_eq!( - ImportedCommitmentsInfo::::get().unwrap().next_block_number_index, - 1 - ); - assert!(ImportedCommitments::::get( - commitments_to_keep as TestBridgedBlockNumber + 1 - ) - .is_some()); - assert_eq!( - ImportedBlockNumbers::::get(0), - Some(Into::into(commitments_to_keep + 1)), - ); - // the side effect of the import is that the commitment#1 is pruned - assert!(ImportedCommitments::::get(1).is_none()); - - // import next commitment - next_block(); - import_commitment(commitments_to_import[commitments_to_keep as usize + 1].clone()) - .expect("must succeed"); - assert_eq!( - ImportedCommitmentsInfo::::get().unwrap().next_block_number_index, - 2 - ); - assert!(ImportedCommitments::::get( - commitments_to_keep as TestBridgedBlockNumber + 2 - ) - .is_some()); - assert_eq!( - ImportedBlockNumbers::::get(1), - Some(Into::into(commitments_to_keep + 2)), - ); - // the side effect of the import is that the commitment#2 is pruned - assert!(ImportedCommitments::::get(1).is_none()); - assert!(ImportedCommitments::::get(2).is_none()); - }); - } - - generate_owned_bridge_module_tests!(BasicOperatingMode::Normal, BasicOperatingMode::Halted); -} diff --git a/modules/beefy/src/mock.rs b/modules/beefy/src/mock.rs deleted file mode 100644 index c99566b6b..000000000 --- a/modules/beefy/src/mock.rs +++ /dev/null @@ -1,193 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -use crate as beefy; -use crate::{ - utils::get_authorities_mmr_root, BridgedBeefyAuthoritySet, BridgedBeefyAuthoritySetInfo, - BridgedBeefyCommitmentHasher, BridgedBeefyMmrLeafExtra, BridgedBeefySignedCommitment, - BridgedMmrHash, BridgedMmrHashing, BridgedMmrProof, -}; - -use bp_beefy::{BeefyValidatorSignatureOf, ChainWithBeefy, Commitment, MmrDataOrHash}; -use bp_runtime::{BasicOperatingMode, Chain, ChainId}; -use codec::Encode; -use frame_support::{construct_runtime, derive_impl, weights::Weight}; -use sp_core::{sr25519::Signature, Pair}; -use sp_runtime::{ - testing::{Header, H256}, - traits::{BlakeTwo256, Hash}, -}; - -pub use sp_consensus_beefy::ecdsa_crypto::{AuthorityId as BeefyId, Pair as BeefyPair}; -use sp_core::crypto::Wraps; -use sp_runtime::traits::Keccak256; - -pub type TestAccountId = u64; -pub type TestBridgedBlockNumber = u64; -pub type TestBridgedBlockHash = H256; -pub type TestBridgedHeader = Header; -pub type TestBridgedAuthoritySetInfo = BridgedBeefyAuthoritySetInfo; -pub type TestBridgedValidatorSet = BridgedBeefyAuthoritySet; -pub type TestBridgedCommitment = BridgedBeefySignedCommitment; -pub type TestBridgedValidatorSignature = BeefyValidatorSignatureOf; -pub type TestBridgedCommitmentHasher = BridgedBeefyCommitmentHasher; -pub type TestBridgedMmrHashing = BridgedMmrHashing; -pub type TestBridgedMmrHash = BridgedMmrHash; -pub type TestBridgedBeefyMmrLeafExtra = BridgedBeefyMmrLeafExtra; -pub type TestBridgedMmrProof = BridgedMmrProof; -pub type TestBridgedRawMmrLeaf = sp_consensus_beefy::mmr::MmrLeaf< - TestBridgedBlockNumber, - TestBridgedBlockHash, - 
TestBridgedMmrHash, - TestBridgedBeefyMmrLeafExtra, ->; -pub type TestBridgedMmrNode = MmrDataOrHash; - -type Block = frame_system::mocking::MockBlock; - -construct_runtime! { - pub enum TestRuntime - { - System: frame_system::{Pallet, Call, Config, Storage, Event}, - Beefy: beefy::{Pallet}, - } -} - -#[derive_impl(frame_system::config_preludes::TestDefaultConfig as frame_system::DefaultConfig)] -impl frame_system::Config for TestRuntime { - type Block = Block; -} - -impl beefy::Config for TestRuntime { - type MaxRequests = frame_support::traits::ConstU32<16>; - type BridgedChain = TestBridgedChain; - type CommitmentsToKeep = frame_support::traits::ConstU32<16>; -} - -#[derive(Debug)] -pub struct TestBridgedChain; - -impl Chain for TestBridgedChain { - const ID: ChainId = *b"tbch"; - - type BlockNumber = TestBridgedBlockNumber; - type Hash = H256; - type Hasher = BlakeTwo256; - type Header = sp_runtime::testing::Header; - - type AccountId = TestAccountId; - type Balance = u64; - type Nonce = u64; - type Signature = Signature; - - fn max_extrinsic_size() -> u32 { - unreachable!() - } - fn max_extrinsic_weight() -> Weight { - unreachable!() - } -} - -impl ChainWithBeefy for TestBridgedChain { - type CommitmentHasher = Keccak256; - type MmrHashing = Keccak256; - type MmrHash = ::Output; - type BeefyMmrLeafExtra = (); - type AuthorityId = BeefyId; - type AuthorityIdToMerkleLeaf = pallet_beefy_mmr::BeefyEcdsaToEthereum; -} - -/// Run test within test runtime. -pub fn run_test(test: impl FnOnce() -> T) -> T { - sp_io::TestExternalities::new(Default::default()).execute_with(test) -} - -/// Initialize pallet and run test. 
-pub fn run_test_with_initialize(initial_validators_count: u32, test: impl FnOnce() -> T) -> T { - run_test(|| { - let validators = validator_ids(0, initial_validators_count); - let authority_set = authority_set_info(0, &validators); - - crate::Pallet::::initialize( - RuntimeOrigin::root(), - bp_beefy::InitializationData { - operating_mode: BasicOperatingMode::Normal, - best_block_number: 0, - authority_set, - }, - ) - .expect("initialization data is correct"); - - test() - }) -} - -/// Import given commitment. -pub fn import_commitment( - header: crate::mock_chain::HeaderAndCommitment, -) -> sp_runtime::DispatchResult { - crate::Pallet::::submit_commitment( - RuntimeOrigin::signed(1), - header - .commitment - .expect("thou shall not call import_commitment on header without commitment"), - header.validator_set, - Box::new(header.leaf), - header.leaf_proof, - ) -} - -pub fn validator_pairs(index: u32, count: u32) -> Vec { - (index..index + count) - .map(|index| { - let mut seed = [1u8; 32]; - seed[0..8].copy_from_slice(&(index as u64).encode()); - BeefyPair::from_seed(&seed) - }) - .collect() -} - -/// Return identifiers of validators, starting at given index. -pub fn validator_ids(index: u32, count: u32) -> Vec { - validator_pairs(index, count).into_iter().map(|pair| pair.public()).collect() -} - -pub fn authority_set_info(id: u64, validators: &[BeefyId]) -> TestBridgedAuthoritySetInfo { - let merkle_root = get_authorities_mmr_root::(validators.iter()); - - TestBridgedAuthoritySetInfo { id, len: validators.len() as u32, keyset_commitment: merkle_root } -} - -/// Sign BEEFY commitment. 
-pub fn sign_commitment( - commitment: Commitment, - validator_pairs: &[BeefyPair], - signature_count: usize, -) -> TestBridgedCommitment { - let total_validators = validator_pairs.len(); - let random_validators = - rand::seq::index::sample(&mut rand::thread_rng(), total_validators, signature_count); - - let commitment_hash = TestBridgedCommitmentHasher::hash(&commitment.encode()); - let mut signatures = vec![None; total_validators]; - for validator_idx in random_validators.iter() { - let validator = &validator_pairs[validator_idx]; - signatures[validator_idx] = - Some(validator.as_inner_ref().sign_prehashed(commitment_hash.as_fixed_bytes()).into()); - } - - TestBridgedCommitment { commitment, signatures } -} diff --git a/modules/beefy/src/mock_chain.rs b/modules/beefy/src/mock_chain.rs deleted file mode 100644 index c83907f83..000000000 --- a/modules/beefy/src/mock_chain.rs +++ /dev/null @@ -1,299 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Utilities to build bridged chain and BEEFY+MMR structures. 
- -use crate::{ - mock::{ - sign_commitment, validator_pairs, BeefyPair, TestBridgedBlockNumber, TestBridgedCommitment, - TestBridgedHeader, TestBridgedMmrHash, TestBridgedMmrHashing, TestBridgedMmrNode, - TestBridgedMmrProof, TestBridgedRawMmrLeaf, TestBridgedValidatorSet, - TestBridgedValidatorSignature, TestRuntime, - }, - utils::get_authorities_mmr_root, -}; - -use bp_beefy::{BeefyPayload, Commitment, ValidatorSetId, MMR_ROOT_PAYLOAD_ID}; -use codec::Encode; -use pallet_mmr::NodeIndex; -use rand::Rng; -use sp_consensus_beefy::mmr::{BeefyNextAuthoritySet, MmrLeafVersion}; -use sp_core::Pair; -use sp_runtime::traits::{Hash, Header as HeaderT}; -use std::collections::HashMap; - -#[derive(Debug, Clone)] -pub struct HeaderAndCommitment { - pub header: TestBridgedHeader, - pub commitment: Option, - pub validator_set: TestBridgedValidatorSet, - pub leaf: TestBridgedRawMmrLeaf, - pub leaf_proof: TestBridgedMmrProof, - pub mmr_root: TestBridgedMmrHash, -} - -impl HeaderAndCommitment { - pub fn customize_signatures( - &mut self, - f: impl FnOnce(&mut Vec>), - ) { - if let Some(commitment) = &mut self.commitment { - f(&mut commitment.signatures); - } - } - - pub fn customize_commitment( - &mut self, - f: impl FnOnce(&mut Commitment), - validator_pairs: &[BeefyPair], - signature_count: usize, - ) { - if let Some(mut commitment) = self.commitment.take() { - f(&mut commitment.commitment); - self.commitment = - Some(sign_commitment(commitment.commitment, validator_pairs, signature_count)); - } - } -} - -pub struct ChainBuilder { - headers: Vec, - validator_set_id: ValidatorSetId, - validator_keys: Vec, - mmr: mmr_lib::MMR, -} - -struct BridgedMmrStorage { - nodes: HashMap, -} - -impl mmr_lib::MMRStore for BridgedMmrStorage { - fn get_elem(&self, pos: NodeIndex) -> mmr_lib::Result> { - Ok(self.nodes.get(&pos).cloned()) - } - - fn append(&mut self, pos: NodeIndex, elems: Vec) -> mmr_lib::Result<()> { - for (i, elem) in elems.into_iter().enumerate() { - self.nodes.insert(pos + i 
as NodeIndex, elem); - } - Ok(()) - } -} - -impl ChainBuilder { - /// Creates new chain builder with given validator set size. - pub fn new(initial_validators_count: u32) -> Self { - ChainBuilder { - headers: Vec::new(), - validator_set_id: 0, - validator_keys: validator_pairs(0, initial_validators_count), - mmr: mmr_lib::MMR::new(0, BridgedMmrStorage { nodes: HashMap::new() }), - } - } - - /// Get header with given number. - pub fn header(&self, number: TestBridgedBlockNumber) -> HeaderAndCommitment { - self.headers[number as usize - 1].clone() - } - - /// Returns single built header. - pub fn to_header(&self) -> HeaderAndCommitment { - assert_eq!(self.headers.len(), 1); - self.headers[0].clone() - } - - /// Returns built chain. - pub fn to_chain(&self) -> Vec { - self.headers.clone() - } - - /// Appends header, that has been finalized by BEEFY (so it has a linked signed commitment). - pub fn append_finalized_header(self) -> Self { - let next_validator_set_id = self.validator_set_id; - let next_validator_keys = self.validator_keys.clone(); - HeaderBuilder::with_chain(self, next_validator_set_id, next_validator_keys).finalize() - } - - /// Append multiple finalized headers at once. - pub fn append_finalized_headers(mut self, count: usize) -> Self { - for _ in 0..count { - self = self.append_finalized_header(); - } - self - } - - /// Appends header, that enacts new validator set. - /// - /// Such headers are explicitly finalized by BEEFY. - pub fn append_handoff_header(self, next_validators_len: u32) -> Self { - let new_validator_set_id = self.validator_set_id + 1; - let new_validator_pairs = - validator_pairs(rand::thread_rng().gen::() % (u32::MAX / 2), next_validators_len); - - HeaderBuilder::with_chain(self, new_validator_set_id, new_validator_pairs).finalize() - } - - /// Append several default header without commitment. 
- pub fn append_default_headers(mut self, count: usize) -> Self { - for _ in 0..count { - let next_validator_set_id = self.validator_set_id; - let next_validator_keys = self.validator_keys.clone(); - self = - HeaderBuilder::with_chain(self, next_validator_set_id, next_validator_keys).build() - } - self - } -} - -/// Custom header builder. -pub struct HeaderBuilder { - chain: ChainBuilder, - header: TestBridgedHeader, - leaf: TestBridgedRawMmrLeaf, - leaf_proof: Option, - next_validator_set_id: ValidatorSetId, - next_validator_keys: Vec, -} - -impl HeaderBuilder { - fn with_chain( - chain: ChainBuilder, - next_validator_set_id: ValidatorSetId, - next_validator_keys: Vec, - ) -> Self { - // we're starting with header#1, since header#0 is always finalized - let header_number = chain.headers.len() as TestBridgedBlockNumber + 1; - let header = TestBridgedHeader::new( - header_number, - Default::default(), - Default::default(), - chain.headers.last().map(|h| h.header.hash()).unwrap_or_default(), - Default::default(), - ); - - let next_validators = - next_validator_keys.iter().map(|pair| pair.public()).collect::>(); - let next_validators_mmr_root = - get_authorities_mmr_root::(next_validators.iter()); - let leaf = sp_consensus_beefy::mmr::MmrLeaf { - version: MmrLeafVersion::new(1, 0), - parent_number_and_hash: (header.number().saturating_sub(1), *header.parent_hash()), - beefy_next_authority_set: BeefyNextAuthoritySet { - id: next_validator_set_id, - len: next_validators.len() as u32, - keyset_commitment: next_validators_mmr_root, - }, - leaf_extra: (), - }; - - HeaderBuilder { - chain, - header, - leaf, - leaf_proof: None, - next_validator_keys, - next_validator_set_id, - } - } - - /// Customize generated proof of header MMR leaf. - /// - /// Can only be called once. 
- pub fn customize_proof( - mut self, - f: impl FnOnce(TestBridgedMmrProof) -> TestBridgedMmrProof, - ) -> Self { - assert!(self.leaf_proof.is_none()); - - let leaf_hash = TestBridgedMmrHashing::hash(&self.leaf.encode()); - let node = TestBridgedMmrNode::Hash(leaf_hash); - let leaf_position = self.chain.mmr.push(node).unwrap(); - - let proof = self.chain.mmr.gen_proof(vec![leaf_position]).unwrap(); - // genesis has no leaf => leaf index is header number minus 1 - let leaf_index = *self.header.number() - 1; - let leaf_count = *self.header.number(); - self.leaf_proof = Some(f(TestBridgedMmrProof { - leaf_indices: vec![leaf_index], - leaf_count, - items: proof.proof_items().iter().map(|i| i.hash()).collect(), - })); - - self - } - - /// Build header without commitment. - pub fn build(mut self) -> ChainBuilder { - if self.leaf_proof.is_none() { - self = self.customize_proof(|proof| proof); - } - - let validators = - self.chain.validator_keys.iter().map(|pair| pair.public()).collect::>(); - self.chain.headers.push(HeaderAndCommitment { - header: self.header, - commitment: None, - validator_set: TestBridgedValidatorSet::new(validators, self.chain.validator_set_id) - .unwrap(), - leaf: self.leaf, - leaf_proof: self.leaf_proof.expect("guaranteed by the customize_proof call above; qed"), - mmr_root: self.chain.mmr.get_root().unwrap().hash(), - }); - - self.chain.validator_set_id = self.next_validator_set_id; - self.chain.validator_keys = self.next_validator_keys; - - self.chain - } - - /// Build header with commitment. 
- pub fn finalize(self) -> ChainBuilder { - let validator_count = self.chain.validator_keys.len(); - let current_validator_set_id = self.chain.validator_set_id; - let current_validator_set_keys = self.chain.validator_keys.clone(); - let mut chain = self.build(); - - let last_header = chain.headers.last_mut().expect("added by append_header; qed"); - last_header.commitment = Some(sign_commitment( - Commitment { - payload: BeefyPayload::from_single_entry( - MMR_ROOT_PAYLOAD_ID, - chain.mmr.get_root().unwrap().hash().encode(), - ), - block_number: *last_header.header.number(), - validator_set_id: current_validator_set_id, - }, - ¤t_validator_set_keys, - validator_count * 2 / 3 + 1, - )); - - chain - } -} - -/// Default Merging & Hashing behavior for MMR. -pub struct BridgedMmrHashMerge; - -impl mmr_lib::Merge for BridgedMmrHashMerge { - type Item = TestBridgedMmrNode; - - fn merge(left: &Self::Item, right: &Self::Item) -> mmr_lib::Result { - let mut concat = left.hash().as_ref().to_vec(); - concat.extend_from_slice(right.hash().as_ref()); - - Ok(TestBridgedMmrNode::Hash(TestBridgedMmrHashing::hash(&concat))) - } -} diff --git a/modules/beefy/src/utils.rs b/modules/beefy/src/utils.rs deleted file mode 100644 index ce7a11630..000000000 --- a/modules/beefy/src/utils.rs +++ /dev/null @@ -1,361 +0,0 @@ -use crate::{ - BridgedBeefyAuthorityId, BridgedBeefyAuthoritySet, BridgedBeefyAuthoritySetInfo, - BridgedBeefyMmrLeaf, BridgedBeefySignedCommitment, BridgedChain, BridgedMmrHash, - BridgedMmrHashing, BridgedMmrProof, Config, Error, LOG_TARGET, -}; -use bp_beefy::{merkle_root, verify_mmr_leaves_proof, BeefyAuthorityId, MmrDataOrHash}; -use codec::Encode; -use frame_support::ensure; -use sp_runtime::traits::{Convert, Hash}; -use sp_std::{vec, vec::Vec}; - -type BridgedMmrDataOrHash = MmrDataOrHash, BridgedBeefyMmrLeaf>; -/// A way to encode validator id to the BEEFY merkle tree leaf. 
-type BridgedBeefyAuthorityIdToMerkleLeaf = - bp_beefy::BeefyAuthorityIdToMerkleLeafOf>; - -/// Get the MMR root for a collection of validators. -pub(crate) fn get_authorities_mmr_root< - 'a, - T: Config, - I: 'static, - V: Iterator>, ->( - authorities: V, -) -> BridgedMmrHash { - let merkle_leafs = authorities - .cloned() - .map(BridgedBeefyAuthorityIdToMerkleLeaf::::convert) - .collect::>(); - merkle_root::, _>(merkle_leafs) -} - -fn verify_authority_set, I: 'static>( - authority_set_info: &BridgedBeefyAuthoritySetInfo, - authority_set: &BridgedBeefyAuthoritySet, -) -> Result<(), Error> { - ensure!(authority_set.id() == authority_set_info.id, Error::::InvalidValidatorSetId); - ensure!( - authority_set.len() == authority_set_info.len as usize, - Error::::InvalidValidatorSetLen - ); - - // Ensure that the authority set that signed the commitment is the expected one. - let root = get_authorities_mmr_root::(authority_set.validators().iter()); - ensure!(root == authority_set_info.keyset_commitment, Error::::InvalidValidatorSetRoot); - - Ok(()) -} - -/// Number of correct signatures, required from given validators set to accept signed -/// commitment. -/// -/// We're using 'conservative' approach here, where signatures of `2/3+1` validators are -/// required.. -pub(crate) fn signatures_required(validators_len: usize) -> usize { - validators_len - validators_len.saturating_sub(1) / 3 -} - -fn verify_signatures, I: 'static>( - commitment: &BridgedBeefySignedCommitment, - authority_set: &BridgedBeefyAuthoritySet, -) -> Result<(), Error> { - ensure!( - commitment.signatures.len() == authority_set.len(), - Error::::InvalidCommitmentSignaturesLen - ); - - // Ensure that the commitment was signed by enough authorities. 
- let msg = commitment.commitment.encode(); - let mut missing_signatures = signatures_required(authority_set.len()); - for (idx, (authority, maybe_sig)) in - authority_set.validators().iter().zip(commitment.signatures.iter()).enumerate() - { - if let Some(sig) = maybe_sig { - if authority.verify(sig, &msg) { - missing_signatures = missing_signatures.saturating_sub(1); - if missing_signatures == 0 { - break - } - } else { - log::debug!( - target: LOG_TARGET, - "Signed commitment contains incorrect signature of validator {} ({:?}): {:?}", - idx, - authority, - sig, - ); - } - } - } - ensure!(missing_signatures == 0, Error::::NotEnoughCorrectSignatures); - - Ok(()) -} - -/// Extract MMR root from commitment payload. -fn extract_mmr_root, I: 'static>( - commitment: &BridgedBeefySignedCommitment, -) -> Result, Error> { - commitment - .commitment - .payload - .get_decoded(&bp_beefy::MMR_ROOT_PAYLOAD_ID) - .ok_or(Error::MmrRootMissingFromCommitment) -} - -pub(crate) fn verify_commitment, I: 'static>( - commitment: &BridgedBeefySignedCommitment, - authority_set_info: &BridgedBeefyAuthoritySetInfo, - authority_set: &BridgedBeefyAuthoritySet, -) -> Result, Error> { - // Ensure that the commitment is signed by the best known BEEFY validator set. - ensure!( - commitment.commitment.validator_set_id == authority_set_info.id, - Error::::InvalidCommitmentValidatorSetId - ); - ensure!( - commitment.signatures.len() == authority_set_info.len as usize, - Error::::InvalidCommitmentSignaturesLen - ); - - verify_authority_set(authority_set_info, authority_set)?; - verify_signatures(commitment, authority_set)?; - - extract_mmr_root(commitment) -} - -/// Verify MMR proof of given leaf. 
-pub(crate) fn verify_beefy_mmr_leaf, I: 'static>( - mmr_leaf: &BridgedBeefyMmrLeaf, - mmr_proof: BridgedMmrProof, - mmr_root: BridgedMmrHash, -) -> Result<(), Error> { - let mmr_proof_leaf_count = mmr_proof.leaf_count; - let mmr_proof_length = mmr_proof.items.len(); - - // Verify the mmr proof for the provided leaf. - let mmr_leaf_hash = BridgedMmrHashing::::hash(&mmr_leaf.encode()); - verify_mmr_leaves_proof( - mmr_root, - vec![BridgedMmrDataOrHash::::Hash(mmr_leaf_hash)], - mmr_proof, - ) - .map_err(|e| { - log::error!( - target: LOG_TARGET, - "MMR proof of leaf {:?} (root: {:?}, leaf count: {}, len: {}) \ - verification has failed with error: {:?}", - mmr_leaf_hash, - mmr_root, - mmr_proof_leaf_count, - mmr_proof_length, - e, - ); - - Error::::MmrProofVerificationFailed - }) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{mock::*, mock_chain::*, *}; - use bp_beefy::{BeefyPayload, MMR_ROOT_PAYLOAD_ID}; - use frame_support::{assert_noop, assert_ok}; - use sp_consensus_beefy::ValidatorSet; - - #[test] - fn submit_commitment_checks_metadata() { - run_test_with_initialize(8, || { - // Fails if `commitment.commitment.validator_set_id` differs. - let mut header = ChainBuilder::new(8).append_finalized_header().to_header(); - header.customize_commitment( - |commitment| { - commitment.validator_set_id += 1; - }, - &validator_pairs(0, 8), - 6, - ); - assert_noop!( - import_commitment(header), - Error::::InvalidCommitmentValidatorSetId, - ); - - // Fails if `commitment.signatures.len()` differs. - let mut header = ChainBuilder::new(8).append_finalized_header().to_header(); - header.customize_signatures(|signatures| { - signatures.pop(); - }); - assert_noop!( - import_commitment(header), - Error::::InvalidCommitmentSignaturesLen, - ); - }); - } - - #[test] - fn submit_commitment_checks_validator_set() { - run_test_with_initialize(8, || { - // Fails if `ValidatorSet::id` differs. 
- let mut header = ChainBuilder::new(8).append_finalized_header().to_header(); - header.validator_set = ValidatorSet::new(validator_ids(0, 8), 1).unwrap(); - assert_noop!( - import_commitment(header), - Error::::InvalidValidatorSetId, - ); - - // Fails if `ValidatorSet::len()` differs. - let mut header = ChainBuilder::new(8).append_finalized_header().to_header(); - header.validator_set = ValidatorSet::new(validator_ids(0, 5), 0).unwrap(); - assert_noop!( - import_commitment(header), - Error::::InvalidValidatorSetLen, - ); - - // Fails if the validators differ. - let mut header = ChainBuilder::new(8).append_finalized_header().to_header(); - header.validator_set = ValidatorSet::new(validator_ids(3, 8), 0).unwrap(); - assert_noop!( - import_commitment(header), - Error::::InvalidValidatorSetRoot, - ); - }); - } - - #[test] - fn submit_commitment_checks_signatures() { - run_test_with_initialize(20, || { - // Fails when there aren't enough signatures. - let mut header = ChainBuilder::new(20).append_finalized_header().to_header(); - header.customize_signatures(|signatures| { - let first_signature_idx = signatures.iter().position(Option::is_some).unwrap(); - signatures[first_signature_idx] = None; - }); - assert_noop!( - import_commitment(header), - Error::::NotEnoughCorrectSignatures, - ); - - // Fails when there aren't enough correct signatures. - let mut header = ChainBuilder::new(20).append_finalized_header().to_header(); - header.customize_signatures(|signatures| { - let first_signature_idx = signatures.iter().position(Option::is_some).unwrap(); - let last_signature_idx = signatures.len() - - signatures.iter().rev().position(Option::is_some).unwrap() - - 1; - signatures[first_signature_idx] = signatures[last_signature_idx].clone(); - }); - assert_noop!( - import_commitment(header), - Error::::NotEnoughCorrectSignatures, - ); - - // Returns Ok(()) when there are enough signatures, even if some are incorrect. 
- let mut header = ChainBuilder::new(20).append_finalized_header().to_header(); - header.customize_signatures(|signatures| { - let first_signature_idx = signatures.iter().position(Option::is_some).unwrap(); - let first_missing_signature_idx = - signatures.iter().position(Option::is_none).unwrap(); - signatures[first_missing_signature_idx] = signatures[first_signature_idx].clone(); - }); - assert_ok!(import_commitment(header)); - }); - } - - #[test] - fn submit_commitment_checks_mmr_proof() { - run_test_with_initialize(1, || { - let validators = validator_pairs(0, 1); - - // Fails if leaf is not for parent. - let mut header = ChainBuilder::new(1).append_finalized_header().to_header(); - header.leaf.parent_number_and_hash.0 += 1; - assert_noop!( - import_commitment(header), - Error::::MmrProofVerificationFailed, - ); - - // Fails if mmr proof is incorrect. - let mut header = ChainBuilder::new(1).append_finalized_header().to_header(); - header.leaf_proof.leaf_indices[0] += 1; - assert_noop!( - import_commitment(header), - Error::::MmrProofVerificationFailed, - ); - - // Fails if mmr root is incorrect. - let mut header = ChainBuilder::new(1).append_finalized_header().to_header(); - // Replace MMR root with zeroes. - header.customize_commitment( - |commitment| { - commitment.payload = - BeefyPayload::from_single_entry(MMR_ROOT_PAYLOAD_ID, [0u8; 32].encode()); - }, - &validators, - 1, - ); - assert_noop!( - import_commitment(header), - Error::::MmrProofVerificationFailed, - ); - }); - } - - #[test] - fn submit_commitment_extracts_mmr_root() { - run_test_with_initialize(1, || { - let validators = validator_pairs(0, 1); - - // Fails if there is no mmr root in the payload. - let mut header = ChainBuilder::new(1).append_finalized_header().to_header(); - // Remove MMR root from the payload. 
- header.customize_commitment( - |commitment| { - commitment.payload = BeefyPayload::from_single_entry(*b"xy", vec![]); - }, - &validators, - 1, - ); - assert_noop!( - import_commitment(header), - Error::::MmrRootMissingFromCommitment, - ); - - // Fails if mmr root can't be decoded. - let mut header = ChainBuilder::new(1).append_finalized_header().to_header(); - // MMR root is a 32-byte array and we have replaced it with single byte - header.customize_commitment( - |commitment| { - commitment.payload = - BeefyPayload::from_single_entry(MMR_ROOT_PAYLOAD_ID, vec![42]); - }, - &validators, - 1, - ); - assert_noop!( - import_commitment(header), - Error::::MmrRootMissingFromCommitment, - ); - }); - } - - #[test] - fn submit_commitment_stores_valid_data() { - run_test_with_initialize(20, || { - let header = ChainBuilder::new(20).append_handoff_header(30).to_header(); - assert_ok!(import_commitment(header.clone())); - - assert_eq!(ImportedCommitmentsInfo::::get().unwrap().best_block_number, 1); - assert_eq!(CurrentAuthoritySetInfo::::get().id, 1); - assert_eq!(CurrentAuthoritySetInfo::::get().len, 30); - assert_eq!( - ImportedCommitments::::get(1).unwrap(), - bp_beefy::ImportedCommitment { - parent_number_and_hash: (0, [0; 32].into()), - mmr_root: header.mmr_root, - }, - ); - }); - } -} diff --git a/modules/grandpa/Cargo.toml b/modules/grandpa/Cargo.toml deleted file mode 100644 index b3deefc87..000000000 --- a/modules/grandpa/Cargo.toml +++ /dev/null @@ -1,72 +0,0 @@ -[package] -name = "pallet-bridge-grandpa" -version = "0.7.0" -description = "Module implementing GRANDPA on-chain light client used for bridging consensus of substrate-based chains." 
-authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } -finality-grandpa = { version = "0.16.2", default-features = false } -log = { workspace = true } -scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } - -# Bridge Dependencies - -bp-runtime = { path = "../../primitives/runtime", default-features = false } -bp-header-chain = { path = "../../primitives/header-chain", default-features = false } - -# Substrate Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -frame-system = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false, features = ["serde"] } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false, features = ["serde"] } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-trie = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -# Optional Benchmarking Dependencies -bp-test-utils = { path = "../../primitives/test-utils", default-features = false, optional = true } -frame-benchmarking = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false, optional = true } - -[dev-dependencies] -sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-io = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } - 
-[features] -default = ["std"] -std = [ - "bp-header-chain/std", - "bp-runtime/std", - "bp-test-utils/std", - "codec/std", - "finality-grandpa/std", - "frame-benchmarking/std", - "frame-support/std", - "frame-system/std", - "log/std", - "scale-info/std", - "sp-consensus-grandpa/std", - "sp-runtime/std", - "sp-std/std", - "sp-trie/std", -] -runtime-benchmarks = [ - "bp-test-utils", - "frame-benchmarking/runtime-benchmarks", - "frame-support/runtime-benchmarks", - "frame-system/runtime-benchmarks", - "sp-runtime/runtime-benchmarks", -] -try-runtime = [ - "frame-support/try-runtime", - "frame-system/try-runtime", - "sp-runtime/try-runtime", -] diff --git a/modules/grandpa/README.md b/modules/grandpa/README.md deleted file mode 100644 index 4a3099b8a..000000000 --- a/modules/grandpa/README.md +++ /dev/null @@ -1,101 +0,0 @@ -# Bridge GRANDPA Pallet - -The bridge GRANDPA pallet is a light client for the GRANDPA finality gadget, running at the bridged chain. -It may import headers and their GRANDPA finality proofs (justifications) of the bridged chain. Imported -headers then may be used to verify storage proofs by other pallets. This makes the bridge GRANDPA pallet -a basic pallet of all bridges with Substrate-based chains. It is used by all bridge types (bridge between -standalone chains, between parachains and any combination of those) and is used by other bridge pallets. -It is used by the parachains light client (bridge parachains pallet) and by messages pallet. - -## A Brief Introduction into GRANDPA Finality - -You can find detailed information on GRANDPA, by exploring its [repository](https://github.com/paritytech/finality-grandpa). -Here is the minimal required GRANDPA information to understand how pallet works. - -Any Substrate chain may use different block authorship algorithms (like BABE or Aura) to determine block producers and -generate blocks. This has nothing common with finality, though - the task of block authorship is to coordinate -blocks generation. 
Any block may be reverted (if there's a fork) if it is not finalized. The finality solution -for (standalone) Substrate-based chains is the GRANDPA finality gadget. If some block is finalized by the gadget, it -can't be reverted. - -In GRANDPA, there are validators, identified by their public keys. They select some generated block and produce -signatures on this block hash. If there are enough (more than `2 / 3 * N`, where `N` is number of validators) -signatures, then the block is considered finalized. The set of signatures for the block is called justification. -Anyone who knows the public keys of validators is able to verify GRANDPA justification and that it is generated -for provided header. - -There are two main things in GRANDPA that help building light clients: - -- there's no need to import all headers of the bridged chain. Light client may import finalized headers or just - some of finalized headers that it consider useful. While the validators set stays the same, the client may - import any header that is finalized by this set; - -- when validators set changes, the GRANDPA gadget adds next set to the header. So light client doesn't need to - verify storage proofs when this happens - it only needs to look at the header and see if it changes the set. - Once set is changed, all following justifications are generated by the new set. Header that is changing the - set is called "mandatory" in the pallet. As the name says, the light client need to import all such headers - to be able to operate properly. - -## Pallet Operations - -The main entrypoint of the pallet is the `submit_finality_proof_ex` call. It has three arguments - the finalized -headers, associated GRANDPA justification and ID of the authority set that has generated this justification. The -call simply verifies the justification using current validators set and checks if header is better than the -previous best header. 
If both checks are passed, the header (only its useful fields) is inserted into the runtime -storage and may be used by other pallets to verify storage proofs. - -The submitter pays regular fee for submitting all headers, except for the mandatory header. Since it is -required for the pallet operations, submitting such header is free. So if you're ok with session-length -lags (meaning that there's exactly 1 mandatory header per session), the cost of pallet calls is zero. - -When the pallet sees mandatory header, it updates the validators set with the set from the header. All -following justifications (until next mandatory header) must be generated by this new set. - -## Pallet Initialization - -As the previous section states, there are two things that are mandatory for pallet operations: best finalized -header and the current validators set. Without it the pallet can't import any headers. But how to provide -initial values for these fields? There are two options. - -First option, while it is easier, doesn't work in all cases. It is to start chain with initial header and -validators set specified in the chain specification. This won't work, however, if we want to add bridge -to already started chain. - -For the latter case we have the `initialize` call. It accepts the initial header and initial validators set. -The call may be called by the governance, root or by the pallet owner (if it is set). - -## Non-Essential Functionality - -There may be a special account in every runtime where the bridge GRANDPA module is deployed. This -account, named 'module owner', is like a module-level sudo account - he's able to halt and -resume all module operations without requiring runtime upgrade. Calls that are related to this -account are: - -- `fn set_owner()`: current module owner may call it to transfer "ownership" to another account; - -- `fn set_operating_mode()`: the module owner (or sudo account) may call this function to stop all - module operations. 
After this call, all finality proofs will be rejected until further `set_operating_mode` call'. - This call may be used when something extraordinary happens with the bridge; - -- `fn initialize()`: module owner may call this function to initialize the bridge. - -If pallet owner is not defined, the governance may be used to make those calls. - -## Signed Extension to Reject Obsolete Headers - -It'd be better for anyone (for chain and for submitters) to reject all transactions that are submitting -already known headers to the pallet. This way, we leave block space to other useful transactions and -we don't charge concurrent submitters for their honest actions. - -To deal with that, we have a [signed extension](./src/call_ext) that may be added to the runtime. -It does exactly what is required - rejects all transactions with already known headers. The submitter -pays nothing for such transactions - they're simply removed from the transaction pool, when the block -is built. - -You may also take a look at the [`generate_bridge_reject_obsolete_headers_and_messages`](../../bin/runtime-common/src/lib.rs) -macro that bundles several similar signed extensions in a single one. - -## GRANDPA Finality Relay - -We have an offchain actor, who is watching for GRANDPA justifications and submits them to the bridged chain. -It is the finality relay - you may look at the [crate level documentation and the code](../../relays/finality/). diff --git a/modules/grandpa/src/benchmarking.rs b/modules/grandpa/src/benchmarking.rs deleted file mode 100644 index 11033373c..000000000 --- a/modules/grandpa/src/benchmarking.rs +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Benchmarks for the GRANDPA Pallet. -//! -//! The main dispatchable for the GRANDPA pallet is `submit_finality_proof_ex`. Our benchmarks -//! are based around `submit_finality_proof`, though - from weight PoV they are the same calls. -//! There are to main factors which affect finality proof verification: -//! -//! 1. The number of `votes-ancestries` in the justification -//! 2. The number of `pre-commits` in the justification -//! -//! Vote ancestries are the headers between (`finality_target`, `head_of_chain`], where -//! `header_of_chain` is a descendant of `finality_target`. -//! -//! Pre-commits are messages which are signed by validators at the head of the chain they think is -//! the best. -//! -//! Consider the following: -//! -//! / B <- C' -//! A <- B <- C -//! -//! The common ancestor of both forks is block A, so this is what GRANDPA will finalize. In order to -//! verify this we will have vote ancestries of `[B, C, B', C']` and pre-commits `[C, C']`. -//! -//! Note that the worst case scenario here would be a justification where each validator has it's -//! own fork which is `SESSION_LENGTH` blocks long. 
- -use crate::*; - -use bp_header_chain::justification::required_justification_precommits; -use bp_runtime::BasicOperatingMode; -use bp_test_utils::{ - accounts, make_justification_for_header, JustificationGeneratorParams, TEST_GRANDPA_ROUND, - TEST_GRANDPA_SET_ID, -}; -use frame_benchmarking::{benchmarks_instance_pallet, whitelisted_caller}; -use frame_system::RawOrigin; -use sp_consensus_grandpa::AuthorityId; -use sp_runtime::traits::{One, Zero}; -use sp_std::vec::Vec; - -/// The maximum number of vote ancestries to include in a justification. -/// -/// In practice this would be limited by the session length (number of blocks a single authority set -/// can produce) of a given chain. -const MAX_VOTE_ANCESTRIES: u32 = 1000; - -// `1..MAX_VOTE_ANCESTRIES` is too large && benchmarks are running for almost 40m (steps=50, -// repeat=20) on a decent laptop, which is too much. Since we're building linear function here, -// let's just select some limited subrange for benchmarking. -const MAX_VOTE_ANCESTRIES_RANGE_BEGIN: u32 = MAX_VOTE_ANCESTRIES / 20; -const MAX_VOTE_ANCESTRIES_RANGE_END: u32 = - MAX_VOTE_ANCESTRIES_RANGE_BEGIN + MAX_VOTE_ANCESTRIES_RANGE_BEGIN; - -// the same with validators - if there are too much validators, let's run benchmarks on subrange -fn precommits_range_end, I: 'static>() -> u32 { - let max_bridged_authorities = T::BridgedChain::MAX_AUTHORITIES_COUNT; - if max_bridged_authorities > 128 { - sp_std::cmp::max(128, max_bridged_authorities / 5) - } else { - max_bridged_authorities - }; - required_justification_precommits(max_bridged_authorities) -} - -/// Prepare header and its justification to submit using `submit_finality_proof`. 
-fn prepare_benchmark_data, I: 'static>( - precommits: u32, - ancestors: u32, -) -> (BridgedHeader, GrandpaJustification>) { - // going from precommits to total authorities count - let total_authorities_count = (3 * precommits - 1) / 2; - - let authority_list = accounts(total_authorities_count as u16) - .iter() - .map(|id| (AuthorityId::from(*id), 1)) - .collect::>(); - - let genesis_header: BridgedHeader = bp_test_utils::test_header(Zero::zero()); - let genesis_hash = genesis_header.hash(); - let init_data = InitializationData { - header: Box::new(genesis_header), - authority_list, - set_id: TEST_GRANDPA_SET_ID, - operating_mode: BasicOperatingMode::Normal, - }; - - bootstrap_bridge::(init_data); - assert!(>::contains_key(genesis_hash)); - - let header: BridgedHeader = bp_test_utils::test_header(One::one()); - let params = JustificationGeneratorParams { - header: header.clone(), - round: TEST_GRANDPA_ROUND, - set_id: TEST_GRANDPA_SET_ID, - authorities: accounts(precommits as u16).iter().map(|k| (*k, 1)).collect::>(), - ancestors, - forks: 1, - }; - let justification = make_justification_for_header(params); - (header, justification) -} - -benchmarks_instance_pallet! { - // This is the "gold standard" benchmark for this extrinsic, and it's what should be used to - // annotate the weight in the pallet. - submit_finality_proof { - let p in 1 .. 
precommits_range_end::(); - let v in MAX_VOTE_ANCESTRIES_RANGE_BEGIN..MAX_VOTE_ANCESTRIES_RANGE_END; - let caller: T::AccountId = whitelisted_caller(); - let (header, justification) = prepare_benchmark_data::(p, v); - }: submit_finality_proof(RawOrigin::Signed(caller), Box::new(header), justification) - verify { - let genesis_header: BridgedHeader = bp_test_utils::test_header(Zero::zero()); - let header: BridgedHeader = bp_test_utils::test_header(One::one()); - let expected_hash = header.hash(); - - // check that the header#1 has been inserted - assert_eq!(>::get().unwrap().1, expected_hash); - assert!(>::contains_key(expected_hash)); - - // check that the header#0 has been pruned - assert!(!>::contains_key(genesis_header.hash())); - } - - impl_benchmark_test_suite!(Pallet, crate::mock::new_test_ext(), crate::mock::TestRuntime) -} diff --git a/modules/grandpa/src/call_ext.rs b/modules/grandpa/src/call_ext.rs deleted file mode 100644 index 4a7ebb3cc..000000000 --- a/modules/grandpa/src/call_ext.rs +++ /dev/null @@ -1,426 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . 
- -use crate::{ - weights::WeightInfo, BridgedBlockNumber, BridgedHeader, Config, CurrentAuthoritySet, Error, - Pallet, -}; -use bp_header_chain::{ - justification::GrandpaJustification, max_expected_submit_finality_proof_arguments_size, - ChainWithGrandpa, GrandpaConsensusLogReader, -}; -use bp_runtime::{BlockNumberOf, OwnedBridgeModule}; -use codec::Encode; -use frame_support::{dispatch::CallableCallFor, traits::IsSubType, weights::Weight}; -use sp_consensus_grandpa::SetId; -use sp_runtime::{ - traits::{Header, Zero}, - transaction_validity::{InvalidTransaction, TransactionValidity, ValidTransaction}, - RuntimeDebug, SaturatedConversion, -}; - -/// Info about a `SubmitParachainHeads` call which tries to update a single parachain. -#[derive(Copy, Clone, PartialEq, RuntimeDebug)] -pub struct SubmitFinalityProofInfo { - /// Number of the finality target. - pub block_number: N, - /// An identifier of the validators set that has signed the submitted justification. - /// It might be `None` if deprecated version of the `submit_finality_proof` is used. - pub current_set_id: Option, - /// Extra weight that we assume is included in the call. - /// - /// We have some assumptions about headers and justifications of the bridged chain. - /// We know that if our assumptions are correct, then the call must not have the - /// weight above some limit. The fee paid for weight above that limit, is never refunded. - pub extra_weight: Weight, - /// Extra size (in bytes) that we assume are included in the call. - /// - /// We have some assumptions about headers and justifications of the bridged chain. - /// We know that if our assumptions are correct, then the call must not have the - /// weight above some limit. The fee paid for bytes above that limit, is never refunded. - pub extra_size: u32, -} - -impl SubmitFinalityProofInfo { - /// Returns `true` if call size/weight is below our estimations for regular calls. 
- pub fn fits_limits(&self) -> bool { - self.extra_weight.is_zero() && self.extra_size.is_zero() - } -} - -/// Helper struct that provides methods for working with the `SubmitFinalityProof` call. -pub struct SubmitFinalityProofHelper, I: 'static> { - _phantom_data: sp_std::marker::PhantomData<(T, I)>, -} - -impl, I: 'static> SubmitFinalityProofHelper { - /// Check that the GRANDPA head provided by the `SubmitFinalityProof` is better than the best - /// one we know. Additionally, checks if `current_set_id` matches the current authority set - /// id, if specified. - pub fn check_obsolete( - finality_target: BlockNumberOf, - current_set_id: Option, - ) -> Result<(), Error> { - let best_finalized = crate::BestFinalized::::get().ok_or_else(|| { - log::trace!( - target: crate::LOG_TARGET, - "Cannot finalize header {:?} because pallet is not yet initialized", - finality_target, - ); - >::NotInitialized - })?; - - if best_finalized.number() >= finality_target { - log::trace!( - target: crate::LOG_TARGET, - "Cannot finalize obsolete header: bundled {:?}, best {:?}", - finality_target, - best_finalized, - ); - - return Err(Error::::OldHeader) - } - - if let Some(current_set_id) = current_set_id { - let actual_set_id = >::get().set_id; - if current_set_id != actual_set_id { - log::trace!( - target: crate::LOG_TARGET, - "Cannot finalize header signed by unknown authority set: bundled {:?}, best {:?}", - current_set_id, - actual_set_id, - ); - - return Err(Error::::InvalidAuthoritySetId) - } - } - - Ok(()) - } - - /// Check if the `SubmitFinalityProof` was successfully executed. - pub fn was_successful(finality_target: BlockNumberOf) -> bool { - match crate::BestFinalized::::get() { - Some(best_finalized) => best_finalized.number() == finality_target, - None => false, - } - } -} - -/// Trait representing a call that is a sub type of this pallet's call. -pub trait CallSubType, I: 'static>: - IsSubType, T>> -{ - /// Extract finality proof info from a runtime call. 
- fn submit_finality_proof_info( - &self, - ) -> Option>> { - if let Some(crate::Call::::submit_finality_proof { finality_target, justification }) = - self.is_sub_type() - { - return Some(submit_finality_proof_info_from_args::( - finality_target, - justification, - None, - )) - } else if let Some(crate::Call::::submit_finality_proof_ex { - finality_target, - justification, - current_set_id, - }) = self.is_sub_type() - { - return Some(submit_finality_proof_info_from_args::( - finality_target, - justification, - Some(*current_set_id), - )) - } - - None - } - - /// Validate Grandpa headers in order to avoid "mining" transactions that provide outdated - /// bridged chain headers. Without this validation, even honest relayers may lose their funds - /// if there are multiple relays running and submitting the same information. - fn check_obsolete_submit_finality_proof(&self) -> TransactionValidity - where - Self: Sized, - { - let finality_target = match self.submit_finality_proof_info() { - Some(finality_proof) => finality_proof, - _ => return Ok(ValidTransaction::default()), - }; - - if Pallet::::ensure_not_halted().is_err() { - return InvalidTransaction::Call.into() - } - - match SubmitFinalityProofHelper::::check_obsolete( - finality_target.block_number, - finality_target.current_set_id, - ) { - Ok(_) => Ok(ValidTransaction::default()), - Err(Error::::OldHeader) => InvalidTransaction::Stale.into(), - Err(_) => InvalidTransaction::Call.into(), - } - } -} - -impl, I: 'static> CallSubType for T::RuntimeCall where - T::RuntimeCall: IsSubType, T>> -{ -} - -/// Extract finality proof info from the submitted header and justification. 
-pub(crate) fn submit_finality_proof_info_from_args, I: 'static>( - finality_target: &BridgedHeader, - justification: &GrandpaJustification>, - current_set_id: Option, -) -> SubmitFinalityProofInfo> { - let block_number = *finality_target.number(); - - // the `submit_finality_proof` call will reject justifications with invalid, duplicate, - // unknown and extra signatures. It'll also reject justifications with less than necessary - // signatures. So we do not care about extra weight because of additional signatures here. - let precommits_len = justification.commit.precommits.len().saturated_into(); - let required_precommits = precommits_len; - - // We do care about extra weight because of more-than-expected headers in the votes - // ancestries. But we have problems computing extra weight for additional headers (weight of - // additional header is too small, so that our benchmarks aren't detecting that). So if there - // are more than expected headers in votes ancestries, we will treat the whole call weight - // as an extra weight. - let votes_ancestries_len = justification.votes_ancestries.len().saturated_into(); - let extra_weight = - if votes_ancestries_len > T::BridgedChain::REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY { - T::WeightInfo::submit_finality_proof(precommits_len, votes_ancestries_len) - } else { - Weight::zero() - }; - - // check if the `finality_target` is a mandatory header. 
If so, we are ready to refund larger - // size - let is_mandatory_finality_target = - GrandpaConsensusLogReader::>::find_scheduled_change( - finality_target.digest(), - ) - .is_some(); - - // we can estimate extra call size easily, without any additional significant overhead - let actual_call_size: u32 = finality_target - .encoded_size() - .saturating_add(justification.encoded_size()) - .saturated_into(); - let max_expected_call_size = max_expected_submit_finality_proof_arguments_size::( - is_mandatory_finality_target, - required_precommits, - ); - let extra_size = actual_call_size.saturating_sub(max_expected_call_size); - - SubmitFinalityProofInfo { block_number, current_set_id, extra_weight, extra_size } -} - -#[cfg(test)] -mod tests { - use crate::{ - call_ext::CallSubType, - mock::{run_test, test_header, RuntimeCall, TestBridgedChain, TestNumber, TestRuntime}, - BestFinalized, Config, CurrentAuthoritySet, PalletOperatingMode, StoredAuthoritySet, - SubmitFinalityProofInfo, WeightInfo, - }; - use bp_header_chain::ChainWithGrandpa; - use bp_runtime::{BasicOperatingMode, HeaderId}; - use bp_test_utils::{ - make_default_justification, make_justification_for_header, JustificationGeneratorParams, - TEST_GRANDPA_SET_ID, - }; - use frame_support::weights::Weight; - use sp_runtime::{testing::DigestItem, traits::Header as _, SaturatedConversion}; - - fn validate_block_submit(num: TestNumber) -> bool { - let bridge_grandpa_call = crate::Call::::submit_finality_proof_ex { - finality_target: Box::new(test_header(num)), - justification: make_default_justification(&test_header(num)), - // not initialized => zero - current_set_id: 0, - }; - RuntimeCall::check_obsolete_submit_finality_proof(&RuntimeCall::Grandpa( - bridge_grandpa_call, - )) - .is_ok() - } - - fn sync_to_header_10() { - let header10_hash = sp_core::H256::default(); - BestFinalized::::put(HeaderId(10, header10_hash)); - } - - #[test] - fn extension_rejects_obsolete_header() { - run_test(|| { - // when current best 
finalized is #10 and we're trying to import header#5 => tx is - // rejected - sync_to_header_10(); - assert!(!validate_block_submit(5)); - }); - } - - #[test] - fn extension_rejects_same_header() { - run_test(|| { - // when current best finalized is #10 and we're trying to import header#10 => tx is - // rejected - sync_to_header_10(); - assert!(!validate_block_submit(10)); - }); - } - - #[test] - fn extension_rejects_new_header_if_pallet_is_halted() { - run_test(|| { - // when pallet is halted => tx is rejected - sync_to_header_10(); - PalletOperatingMode::::put(BasicOperatingMode::Halted); - - assert!(!validate_block_submit(15)); - }); - } - - #[test] - fn extension_rejects_new_header_if_set_id_is_invalid() { - run_test(|| { - // when set id is different from the passed one => tx is rejected - sync_to_header_10(); - let next_set = StoredAuthoritySet::::try_new(vec![], 0x42).unwrap(); - CurrentAuthoritySet::::put(next_set); - - assert!(!validate_block_submit(15)); - }); - } - - #[test] - fn extension_accepts_new_header() { - run_test(|| { - // when current best finalized is #10 and we're trying to import header#15 => tx is - // accepted - sync_to_header_10(); - assert!(validate_block_submit(15)); - }); - } - - #[test] - fn submit_finality_proof_info_is_parsed() { - // when `submit_finality_proof` is used, `current_set_id` is set to `None` - let deprecated_call = - RuntimeCall::Grandpa(crate::Call::::submit_finality_proof { - finality_target: Box::new(test_header(42)), - justification: make_default_justification(&test_header(42)), - }); - assert_eq!( - deprecated_call.submit_finality_proof_info(), - Some(SubmitFinalityProofInfo { - block_number: 42, - current_set_id: None, - extra_weight: Weight::zero(), - extra_size: 0, - }) - ); - - // when `submit_finality_proof_ex` is used, `current_set_id` is set to `Some` - let deprecated_call = - RuntimeCall::Grandpa(crate::Call::::submit_finality_proof_ex { - finality_target: Box::new(test_header(42)), - justification: 
make_default_justification(&test_header(42)), - current_set_id: 777, - }); - assert_eq!( - deprecated_call.submit_finality_proof_info(), - Some(SubmitFinalityProofInfo { - block_number: 42, - current_set_id: Some(777), - extra_weight: Weight::zero(), - extra_size: 0, - }) - ); - } - - #[test] - fn extension_returns_correct_extra_size_if_call_arguments_are_too_large() { - // when call arguments are below our limit => no refund - let small_finality_target = test_header(1); - let justification_params = JustificationGeneratorParams { - header: small_finality_target.clone(), - ..Default::default() - }; - let small_justification = make_justification_for_header(justification_params); - let small_call = RuntimeCall::Grandpa(crate::Call::submit_finality_proof_ex { - finality_target: Box::new(small_finality_target), - justification: small_justification, - current_set_id: TEST_GRANDPA_SET_ID, - }); - assert_eq!(small_call.submit_finality_proof_info().unwrap().extra_size, 0); - - // when call arguments are too large => partial refund - let mut large_finality_target = test_header(1); - large_finality_target - .digest_mut() - .push(DigestItem::Other(vec![42u8; 1024 * 1024])); - let justification_params = JustificationGeneratorParams { - header: large_finality_target.clone(), - ..Default::default() - }; - let large_justification = make_justification_for_header(justification_params); - let large_call = RuntimeCall::Grandpa(crate::Call::submit_finality_proof_ex { - finality_target: Box::new(large_finality_target), - justification: large_justification, - current_set_id: TEST_GRANDPA_SET_ID, - }); - assert_ne!(large_call.submit_finality_proof_info().unwrap().extra_size, 0); - } - - #[test] - fn extension_returns_correct_extra_weight_if_there_are_too_many_headers_in_votes_ancestry() { - let finality_target = test_header(1); - let mut justification_params = JustificationGeneratorParams { - header: finality_target.clone(), - ancestors: 
TestBridgedChain::REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY, - ..Default::default() - }; - - // when there are `REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY` headers => no refund - let justification = make_justification_for_header(justification_params.clone()); - let call = RuntimeCall::Grandpa(crate::Call::submit_finality_proof_ex { - finality_target: Box::new(finality_target.clone()), - justification, - current_set_id: TEST_GRANDPA_SET_ID, - }); - assert_eq!(call.submit_finality_proof_info().unwrap().extra_weight, Weight::zero()); - - // when there are `REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY + 1` headers => full refund - justification_params.ancestors += 1; - let justification = make_justification_for_header(justification_params); - let call_weight = ::WeightInfo::submit_finality_proof( - justification.commit.precommits.len().saturated_into(), - justification.votes_ancestries.len().saturated_into(), - ); - let call = RuntimeCall::Grandpa(crate::Call::submit_finality_proof_ex { - finality_target: Box::new(finality_target), - justification, - current_set_id: TEST_GRANDPA_SET_ID, - }); - assert_eq!(call.submit_finality_proof_info().unwrap().extra_weight, call_weight); - } -} diff --git a/modules/grandpa/src/lib.rs b/modules/grandpa/src/lib.rs deleted file mode 100644 index 9e095651e..000000000 --- a/modules/grandpa/src/lib.rs +++ /dev/null @@ -1,1527 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Substrate GRANDPA Pallet -//! -//! This pallet is an on-chain GRANDPA light client for Substrate based chains. -//! -//! This pallet achieves this by trustlessly verifying GRANDPA finality proofs on-chain. Once -//! verified, finalized headers are stored in the pallet, thereby creating a sparse header chain. -//! This sparse header chain can be used as a source of truth for other higher-level applications. -//! -//! The pallet is responsible for tracking GRANDPA validator set hand-offs. We only import headers -//! with justifications signed by the current validator set we know of. The header is inspected for -//! a `ScheduledChanges` digest item, which is then used to update to next validator set. -//! -//! Since this pallet only tracks finalized headers it does not deal with forks. Forks can only -//! occur if the GRANDPA validator set on the bridged chain is either colluding or there is a severe -//! bug causing resulting in an equivocation. Such events are outside the scope of this pallet. -//! Shall the fork occur on the bridged chain governance intervention will be required to -//! re-initialize the bridge and track the right fork. 
- -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -pub use storage_types::StoredAuthoritySet; - -use bp_header_chain::{ - justification::GrandpaJustification, AuthoritySet, ChainWithGrandpa, GrandpaConsensusLogReader, - HeaderChain, InitializationData, StoredHeaderData, StoredHeaderDataBuilder, - StoredHeaderGrandpaInfo, -}; -use bp_runtime::{BlockNumberOf, HashOf, HasherOf, HeaderId, HeaderOf, OwnedBridgeModule}; -use frame_support::{dispatch::PostDispatchInfo, ensure, DefaultNoBound}; -use sp_runtime::{ - traits::{Header as HeaderT, Zero}, - SaturatedConversion, -}; -use sp_std::{boxed::Box, convert::TryInto, prelude::*}; - -mod call_ext; -#[cfg(test)] -mod mock; -mod storage_types; - -/// Module, containing weights for this pallet. -pub mod weights; - -#[cfg(feature = "runtime-benchmarks")] -pub mod benchmarking; - -// Re-export in crate namespace for `construct_runtime!` -pub use call_ext::*; -pub use pallet::*; -pub use weights::WeightInfo; - -/// The target that will be used when publishing logs related to this pallet. -pub const LOG_TARGET: &str = "runtime::bridge-grandpa"; - -/// Bridged chain from the pallet configuration. -pub type BridgedChain = >::BridgedChain; -/// Block number of the bridged chain. -pub type BridgedBlockNumber = BlockNumberOf<>::BridgedChain>; -/// Block hash of the bridged chain. -pub type BridgedBlockHash = HashOf<>::BridgedChain>; -/// Block id of the bridged chain. -pub type BridgedBlockId = HeaderId, BridgedBlockNumber>; -/// Hasher of the bridged chain. -pub type BridgedBlockHasher = HasherOf<>::BridgedChain>; -/// Header of the bridged chain. -pub type BridgedHeader = HeaderOf<>::BridgedChain>; -/// Header data of the bridged chain that is stored at this chain by this pallet. 
-pub type BridgedStoredHeaderData = - StoredHeaderData, BridgedBlockHash>; - -#[frame_support::pallet] -pub mod pallet { - use super::*; - use bp_runtime::BasicOperatingMode; - use frame_support::pallet_prelude::*; - use frame_system::pallet_prelude::*; - - #[pallet::config] - pub trait Config: frame_system::Config { - /// The overarching event type. - type RuntimeEvent: From> - + IsType<::RuntimeEvent>; - - /// The chain we are bridging to here. - type BridgedChain: ChainWithGrandpa; - - /// Maximal number of "free" mandatory header transactions per block. - /// - /// To be able to track the bridged chain, the pallet requires all headers that are - /// changing GRANDPA authorities set at the bridged chain (we call them mandatory). - /// So it is a common good deed to submit mandatory headers to the pallet. However, if the - /// bridged chain gets compromised, its validators may generate as many mandatory headers - /// as they want. And they may fill the whole block (at this chain) for free. This constants - /// limits number of calls that we may refund in a single block. All calls above this - /// limit are accepted, but are not refunded. - #[pallet::constant] - type MaxFreeMandatoryHeadersPerBlock: Get; - - /// Maximal number of finalized headers to keep in the storage. - /// - /// The setting is there to prevent growing the on-chain state indefinitely. Note - /// the setting does not relate to block numbers - we will simply keep as much items - /// in the storage, so it doesn't guarantee any fixed timeframe for finality headers. - /// - /// Incautious change of this constant may lead to orphan entries in the runtime storage. - #[pallet::constant] - type HeadersToKeep: Get; - - /// Weights gathered through benchmarking. 
- type WeightInfo: WeightInfo; - } - - #[pallet::pallet] - pub struct Pallet(PhantomData<(T, I)>); - - #[pallet::hooks] - impl, I: 'static> Hooks> for Pallet { - fn on_initialize(_n: BlockNumberFor) -> Weight { - FreeMandatoryHeadersRemaining::::put(T::MaxFreeMandatoryHeadersPerBlock::get()); - Weight::zero() - } - - fn on_finalize(_n: BlockNumberFor) { - FreeMandatoryHeadersRemaining::::kill(); - } - } - - impl, I: 'static> OwnedBridgeModule for Pallet { - const LOG_TARGET: &'static str = LOG_TARGET; - type OwnerStorage = PalletOwner; - type OperatingMode = BasicOperatingMode; - type OperatingModeStorage = PalletOperatingMode; - } - - #[pallet::call] - impl, I: 'static> Pallet { - /// This call is deprecated and will be removed around May 2024. Use the - /// `submit_finality_proof_ex` instead. Semantically, this call is an equivalent of the - /// `submit_finality_proof_ex` call without current authority set id check. - #[pallet::call_index(0)] - #[pallet::weight(::submit_finality_proof( - justification.commit.precommits.len().saturated_into(), - justification.votes_ancestries.len().saturated_into(), - ))] - #[allow(deprecated)] - #[deprecated( - note = "`submit_finality_proof` will be removed in May 2024. Use `submit_finality_proof_ex` instead." - )] - pub fn submit_finality_proof( - origin: OriginFor, - finality_target: Box>, - justification: GrandpaJustification>, - ) -> DispatchResultWithPostInfo { - Self::submit_finality_proof_ex( - origin, - finality_target, - justification, - // the `submit_finality_proof_ex` also reads this value, but it is done from the - // cache, so we don't treat it as an additional db access - >::get().set_id, - ) - } - - /// Bootstrap the bridge pallet with an initial header and authority set from which to sync. - /// - /// The initial configuration provided does not need to be the genesis header of the bridged - /// chain, it can be any arbitrary header. You can also provide the next scheduled set - /// change if it is already know. 
- /// - /// This function is only allowed to be called from a trusted origin and writes to storage - /// with practically no checks in terms of the validity of the data. It is important that - /// you ensure that valid data is being passed in. - #[pallet::call_index(1)] - #[pallet::weight((T::DbWeight::get().reads_writes(2, 5), DispatchClass::Operational))] - pub fn initialize( - origin: OriginFor, - init_data: super::InitializationData>, - ) -> DispatchResultWithPostInfo { - Self::ensure_owner_or_root(origin)?; - - let init_allowed = !>::exists(); - ensure!(init_allowed, >::AlreadyInitialized); - initialize_bridge::(init_data.clone())?; - - log::info!( - target: LOG_TARGET, - "Pallet has been initialized with the following parameters: {:?}", - init_data - ); - - Ok(().into()) - } - - /// Change `PalletOwner`. - /// - /// May only be called either by root, or by `PalletOwner`. - #[pallet::call_index(2)] - #[pallet::weight((T::DbWeight::get().reads_writes(1, 1), DispatchClass::Operational))] - pub fn set_owner(origin: OriginFor, new_owner: Option) -> DispatchResult { - >::set_owner(origin, new_owner) - } - - /// Halt or resume all pallet operations. - /// - /// May only be called either by root, or by `PalletOwner`. - #[pallet::call_index(3)] - #[pallet::weight((T::DbWeight::get().reads_writes(1, 1), DispatchClass::Operational))] - pub fn set_operating_mode( - origin: OriginFor, - operating_mode: BasicOperatingMode, - ) -> DispatchResult { - >::set_operating_mode(origin, operating_mode) - } - - /// Verify a target header is finalized according to the given finality proof. The proof - /// is assumed to be signed by GRANDPA authorities set with `current_set_id` id. - /// - /// It will use the underlying storage pallet to fetch information about the current - /// authorities and best finalized header in order to verify that the header is finalized. - /// - /// If successful in verification, it will write the target header to the underlying storage - /// pallet. 
- /// - /// The call fails if: - /// - /// - the pallet is halted; - /// - /// - the pallet knows better header than the `finality_target`; - /// - /// - the id of best GRANDPA authority set, known to the pallet is not equal to the - /// `current_set_id`; - /// - /// - verification is not optimized or invalid; - /// - /// - header contains forced authorities set change or change with non-zero delay. - #[pallet::call_index(4)] - #[pallet::weight(::submit_finality_proof( - justification.commit.precommits.len().saturated_into(), - justification.votes_ancestries.len().saturated_into(), - ))] - pub fn submit_finality_proof_ex( - origin: OriginFor, - finality_target: Box>, - justification: GrandpaJustification>, - current_set_id: sp_consensus_grandpa::SetId, - ) -> DispatchResultWithPostInfo { - Self::ensure_not_halted().map_err(Error::::BridgeModule)?; - ensure_signed(origin)?; - - let (hash, number) = (finality_target.hash(), *finality_target.number()); - log::trace!( - target: LOG_TARGET, - "Going to try and finalize header {:?}", - finality_target - ); - - // it checks whether the `number` is better than the current best block number - // and whether the `current_set_id` matches the best known set id - SubmitFinalityProofHelper::::check_obsolete(number, Some(current_set_id))?; - - let authority_set = >::get(); - let unused_proof_size = authority_set.unused_proof_size(); - let set_id = authority_set.set_id; - let authority_set: AuthoritySet = authority_set.into(); - verify_justification::(&justification, hash, number, authority_set)?; - - let maybe_new_authority_set = - try_enact_authority_change::(&finality_target, set_id)?; - let may_refund_call_fee = maybe_new_authority_set.is_some() && - // if we have seen too many mandatory headers in this block, we don't want to refund - Self::free_mandatory_headers_remaining() > 0 && - // if arguments out of expected bounds, we don't want to refund - submit_finality_proof_info_from_args::(&finality_target, &justification, 
Some(current_set_id)) - .fits_limits(); - if may_refund_call_fee { - FreeMandatoryHeadersRemaining::::mutate(|count| { - *count = count.saturating_sub(1) - }); - } - insert_header::(*finality_target, hash); - log::info!( - target: LOG_TARGET, - "Successfully imported finalized header with hash {:?}!", - hash - ); - - // mandatory header is a header that changes authorities set. The pallet can't go - // further without importing this header. So every bridge MUST import mandatory headers. - // - // We don't want to charge extra costs for mandatory operations. So relayer is not - // paying fee for mandatory headers import transactions. - // - // If size/weight of the call is exceeds our estimated limits, the relayer still needs - // to pay for the transaction. - let pays_fee = if may_refund_call_fee { Pays::No } else { Pays::Yes }; - - // the proof size component of the call weight assumes that there are - // `MaxBridgedAuthorities` in the `CurrentAuthoritySet` (we use `MaxEncodedLen` - // estimation). But if their number is lower, then we may "refund" some `proof_size`, - // making proof smaller and leaving block space to other useful transactions - let pre_dispatch_weight = T::WeightInfo::submit_finality_proof( - justification.commit.precommits.len().saturated_into(), - justification.votes_ancestries.len().saturated_into(), - ); - let actual_weight = pre_dispatch_weight - .set_proof_size(pre_dispatch_weight.proof_size().saturating_sub(unused_proof_size)); - - Self::deposit_event(Event::UpdatedBestFinalizedHeader { - number, - hash, - grandpa_info: StoredHeaderGrandpaInfo { - finality_proof: justification, - new_verification_context: maybe_new_authority_set, - }, - }); - - Ok(PostDispatchInfo { actual_weight: Some(actual_weight), pays_fee }) - } - } - - /// Number mandatory headers that we may accept in the current block for free (returning - /// `Pays::No`). 
- /// - /// If the `FreeMandatoryHeadersRemaining` hits zero, all following mandatory headers in the - /// current block are accepted with fee (`Pays::Yes` is returned). - /// - /// The `FreeMandatoryHeadersRemaining` is an ephemeral value that is set to - /// `MaxFreeMandatoryHeadersPerBlock` at each block initialization and is killed on block - /// finalization. So it never ends up in the storage trie. - #[pallet::storage] - #[pallet::whitelist_storage] - #[pallet::getter(fn free_mandatory_headers_remaining)] - pub(super) type FreeMandatoryHeadersRemaining, I: 'static = ()> = - StorageValue<_, u32, ValueQuery>; - - /// Hash of the header used to bootstrap the pallet. - #[pallet::storage] - pub(super) type InitialHash, I: 'static = ()> = - StorageValue<_, BridgedBlockHash, ValueQuery>; - - /// Hash of the best finalized header. - #[pallet::storage] - #[pallet::getter(fn best_finalized)] - pub type BestFinalized, I: 'static = ()> = - StorageValue<_, BridgedBlockId, OptionQuery>; - - /// A ring buffer of imported hashes. Ordered by the insertion time. - #[pallet::storage] - pub(super) type ImportedHashes, I: 'static = ()> = StorageMap< - Hasher = Identity, - Key = u32, - Value = BridgedBlockHash, - QueryKind = OptionQuery, - OnEmpty = GetDefault, - MaxValues = MaybeHeadersToKeep, - >; - - /// Current ring buffer position. - #[pallet::storage] - pub(super) type ImportedHashesPointer, I: 'static = ()> = - StorageValue<_, u32, ValueQuery>; - - /// Relevant fields of imported headers. - #[pallet::storage] - pub type ImportedHeaders, I: 'static = ()> = StorageMap< - Hasher = Identity, - Key = BridgedBlockHash, - Value = BridgedStoredHeaderData, - QueryKind = OptionQuery, - OnEmpty = GetDefault, - MaxValues = MaybeHeadersToKeep, - >; - - /// The current GRANDPA Authority set. - #[pallet::storage] - pub type CurrentAuthoritySet, I: 'static = ()> = - StorageValue<_, StoredAuthoritySet, ValueQuery>; - - /// Optional pallet owner. 
- /// - /// Pallet owner has a right to halt all pallet operations and then resume it. If it is - /// `None`, then there are no direct ways to halt/resume pallet operations, but other - /// runtime methods may still be used to do that (i.e. democracy::referendum to update halt - /// flag directly or call the `halt_operations`). - #[pallet::storage] - pub type PalletOwner, I: 'static = ()> = - StorageValue<_, T::AccountId, OptionQuery>; - - /// The current operating mode of the pallet. - /// - /// Depending on the mode either all, or no transactions will be allowed. - #[pallet::storage] - pub type PalletOperatingMode, I: 'static = ()> = - StorageValue<_, BasicOperatingMode, ValueQuery>; - - #[pallet::genesis_config] - #[derive(DefaultNoBound)] - pub struct GenesisConfig, I: 'static = ()> { - /// Optional module owner account. - pub owner: Option, - /// Optional module initialization data. - pub init_data: Option>>, - } - - #[pallet::genesis_build] - impl, I: 'static> BuildGenesisConfig for GenesisConfig { - fn build(&self) { - if let Some(ref owner) = self.owner { - >::put(owner); - } - - if let Some(init_data) = self.init_data.clone() { - initialize_bridge::(init_data).expect("genesis config is correct; qed"); - } else { - // Since the bridge hasn't been initialized we shouldn't allow anyone to perform - // transactions. - >::put(BasicOperatingMode::Halted); - } - } - } - - #[pallet::event] - #[pallet::generate_deposit(pub(super) fn deposit_event)] - pub enum Event, I: 'static = ()> { - /// Best finalized chain header has been updated to the header with given number and hash. - UpdatedBestFinalizedHeader { - /// Number of the new best finalized header. - number: BridgedBlockNumber, - /// Hash of the new best finalized header. - hash: BridgedBlockHash, - /// The Grandpa info associated to the new best finalized header. 
- grandpa_info: StoredHeaderGrandpaInfo>, - }, - } - - #[pallet::error] - pub enum Error { - /// The given justification is invalid for the given header. - InvalidJustification, - /// The authority set from the underlying header chain is invalid. - InvalidAuthoritySet, - /// The header being imported is older than the best finalized header known to the pallet. - OldHeader, - /// The scheduled authority set change found in the header is unsupported by the pallet. - /// - /// This is the case for non-standard (e.g forced) authority set changes. - UnsupportedScheduledChange, - /// The pallet is not yet initialized. - NotInitialized, - /// The pallet has already been initialized. - AlreadyInitialized, - /// Too many authorities in the set. - TooManyAuthoritiesInSet, - /// Error generated by the `OwnedBridgeModule` trait. - BridgeModule(bp_runtime::OwnedBridgeModuleError), - /// The `current_set_id` argument of the `submit_finality_proof_ex` doesn't match - /// the id of the current set, known to the pallet. - InvalidAuthoritySetId, - } - - /// Check the given header for a GRANDPA scheduled authority set change. If a change - /// is found it will be enacted immediately. - /// - /// This function does not support forced changes, or scheduled changes with delays - /// since these types of changes are indicative of abnormal behavior from GRANDPA. - /// - /// Returned value will indicate if a change was enacted or not. - pub(crate) fn try_enact_authority_change, I: 'static>( - header: &BridgedHeader, - current_set_id: sp_consensus_grandpa::SetId, - ) -> Result, DispatchError> { - // We don't support forced changes - at that point governance intervention is required. 
- ensure!( - GrandpaConsensusLogReader::>::find_forced_change( - header.digest() - ) - .is_none(), - >::UnsupportedScheduledChange - ); - - if let Some(change) = - GrandpaConsensusLogReader::>::find_scheduled_change( - header.digest(), - ) { - // GRANDPA only includes a `delay` for forced changes, so this isn't valid. - ensure!(change.delay == Zero::zero(), >::UnsupportedScheduledChange); - - // TODO [#788]: Stop manually increasing the `set_id` here. - let next_authorities = StoredAuthoritySet:: { - authorities: change - .next_authorities - .try_into() - .map_err(|_| Error::::TooManyAuthoritiesInSet)?, - set_id: current_set_id + 1, - }; - - // Since our header schedules a change and we know the delay is 0, it must also enact - // the change. - >::put(&next_authorities); - - log::info!( - target: LOG_TARGET, - "Transitioned from authority set {} to {}! New authorities are: {:?}", - current_set_id, - current_set_id + 1, - next_authorities, - ); - - return Ok(Some(next_authorities.into())) - }; - - Ok(None) - } - - /// Verify a GRANDPA justification (finality proof) for a given header. - /// - /// Will use the GRANDPA current authorities known to the pallet. - /// - /// If successful it returns the decoded GRANDPA justification so we can refund any weight which - /// was overcharged in the initial call. - pub(crate) fn verify_justification, I: 'static>( - justification: &GrandpaJustification>, - hash: BridgedBlockHash, - number: BridgedBlockNumber, - authority_set: bp_header_chain::AuthoritySet, - ) -> Result<(), sp_runtime::DispatchError> { - use bp_header_chain::justification::verify_justification; - - Ok(verify_justification::>( - (hash, number), - &authority_set.try_into().map_err(|_| >::InvalidAuthoritySet)?, - justification, - ) - .map_err(|e| { - log::error!( - target: LOG_TARGET, - "Received invalid justification for {:?}: {:?}", - hash, - e, - ); - >::InvalidJustification - })?) - } - - /// Import a previously verified header to the storage. 
- /// - /// Note this function solely takes care of updating the storage and pruning old entries, - /// but does not verify the validity of such import. - pub(crate) fn insert_header, I: 'static>( - header: BridgedHeader, - hash: BridgedBlockHash, - ) { - let index = >::get(); - let pruning = >::try_get(index); - >::put(HeaderId(*header.number(), hash)); - >::insert(hash, header.build()); - >::insert(index, hash); - - // Update ring buffer pointer and remove old header. - >::put((index + 1) % T::HeadersToKeep::get()); - if let Ok(hash) = pruning { - log::debug!(target: LOG_TARGET, "Pruning old header: {:?}.", hash); - >::remove(hash); - } - } - - /// Since this writes to storage with no real checks this should only be used in functions that - /// were called by a trusted origin. - pub(crate) fn initialize_bridge, I: 'static>( - init_params: super::InitializationData>, - ) -> Result<(), Error> { - let super::InitializationData { header, authority_list, set_id, operating_mode } = - init_params; - let authority_set_length = authority_list.len(); - let authority_set = StoredAuthoritySet::::try_new(authority_list, set_id) - .map_err(|e| { - log::error!( - target: LOG_TARGET, - "Failed to initialize bridge. Number of authorities in the set {} is larger than the configured value {}", - authority_set_length, - T::BridgedChain::MAX_AUTHORITIES_COUNT, - ); - - e - })?; - let initial_hash = header.hash(); - - >::put(initial_hash); - >::put(0); - insert_header::(*header, initial_hash); - - >::put(authority_set); - - >::put(operating_mode); - - Ok(()) - } - - /// Adapter for using `Config::HeadersToKeep` as `MaxValues` bound in our storage maps. - pub struct MaybeHeadersToKeep(PhantomData<(T, I)>); - - // this implementation is required to use the struct as `MaxValues` - impl, I: 'static> Get> for MaybeHeadersToKeep { - fn get() -> Option { - Some(T::HeadersToKeep::get()) - } - } - - /// Initialize pallet so that it is ready for inserting new header. 
- /// - /// The function makes sure that the new insertion will cause the pruning of some old header. - /// - /// Returns parent header for the new header. - #[cfg(feature = "runtime-benchmarks")] - pub(crate) fn bootstrap_bridge, I: 'static>( - init_params: super::InitializationData>, - ) -> BridgedHeader { - let start_header = init_params.header.clone(); - initialize_bridge::(init_params).expect("benchmarks are correct"); - - // the most obvious way to cause pruning during next insertion would be to insert - // `HeadersToKeep` headers. But it'll make our benchmarks slow. So we will just play with - // our pruning ring-buffer. - assert_eq!(ImportedHashesPointer::::get(), 1); - ImportedHashesPointer::::put(0); - - *start_header - } -} - -impl, I: 'static> Pallet -where - ::RuntimeEvent: TryInto>, -{ - /// Get the GRANDPA justifications accepted in the current block. - pub fn synced_headers_grandpa_info() -> Vec>> { - frame_system::Pallet::::read_events_no_consensus() - .filter_map(|event| { - if let Event::::UpdatedBestFinalizedHeader { grandpa_info, .. } = - event.event.try_into().ok()? - { - return Some(grandpa_info) - } - None - }) - .collect() - } -} - -/// Bridge GRANDPA pallet as header chain. -pub type GrandpaChainHeaders = Pallet; - -impl, I: 'static> HeaderChain> for GrandpaChainHeaders { - fn finalized_header_state_root( - header_hash: HashOf>, - ) -> Option>> { - ImportedHeaders::::get(header_hash).map(|h| h.state_root) - } -} - -/// (Re)initialize bridge with given header for using it in `pallet-bridge-messages` benchmarks. 
-#[cfg(feature = "runtime-benchmarks")] -pub fn initialize_for_benchmarks, I: 'static>(header: BridgedHeader) { - initialize_bridge::(InitializationData { - header: Box::new(header), - authority_list: sp_std::vec::Vec::new(), /* we don't verify any proofs in external - * benchmarks */ - set_id: 0, - operating_mode: bp_runtime::BasicOperatingMode::Normal, - }) - .expect("only used from benchmarks; benchmarks are correct; qed"); -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::mock::{ - run_test, test_header, RuntimeEvent as TestEvent, RuntimeOrigin, System, TestBridgedChain, - TestHeader, TestNumber, TestRuntime, MAX_BRIDGED_AUTHORITIES, - }; - use bp_header_chain::BridgeGrandpaCall; - use bp_runtime::BasicOperatingMode; - use bp_test_utils::{ - authority_list, generate_owned_bridge_module_tests, make_default_justification, - make_justification_for_header, JustificationGeneratorParams, ALICE, BOB, - TEST_GRANDPA_SET_ID, - }; - use codec::Encode; - use frame_support::{ - assert_err, assert_noop, assert_ok, - dispatch::{Pays, PostDispatchInfo}, - storage::generator::StorageValue, - }; - use frame_system::{EventRecord, Phase}; - use sp_consensus_grandpa::{ConsensusLog, GRANDPA_ENGINE_ID}; - use sp_core::Get; - use sp_runtime::{Digest, DigestItem, DispatchError}; - - fn initialize_substrate_bridge() { - System::set_block_number(1); - System::reset_events(); - - assert_ok!(init_with_origin(RuntimeOrigin::root())); - } - - fn init_with_origin( - origin: RuntimeOrigin, - ) -> Result< - InitializationData, - sp_runtime::DispatchErrorWithPostInfo, - > { - let genesis = test_header(0); - - let init_data = InitializationData { - header: Box::new(genesis), - authority_list: authority_list(), - set_id: TEST_GRANDPA_SET_ID, - operating_mode: BasicOperatingMode::Normal, - }; - - Pallet::::initialize(origin, init_data.clone()).map(|_| init_data) - } - - fn submit_finality_proof(header: u8) -> frame_support::dispatch::DispatchResultWithPostInfo { - let header = 
test_header(header.into()); - let justification = make_default_justification(&header); - - Pallet::::submit_finality_proof_ex( - RuntimeOrigin::signed(1), - Box::new(header), - justification, - TEST_GRANDPA_SET_ID, - ) - } - - fn submit_finality_proof_with_set_id( - header: u8, - set_id: u64, - ) -> frame_support::dispatch::DispatchResultWithPostInfo { - let header = test_header(header.into()); - let justification = make_justification_for_header(JustificationGeneratorParams { - header: header.clone(), - set_id, - ..Default::default() - }); - - Pallet::::submit_finality_proof_ex( - RuntimeOrigin::signed(1), - Box::new(header), - justification, - set_id, - ) - } - - fn submit_mandatory_finality_proof( - number: u8, - set_id: u64, - ) -> frame_support::dispatch::DispatchResultWithPostInfo { - let mut header = test_header(number.into()); - // to ease tests that are using `submit_mandatory_finality_proof`, we'll be using the - // same set for all sessions - let consensus_log = - ConsensusLog::::ScheduledChange(sp_consensus_grandpa::ScheduledChange { - next_authorities: authority_list(), - delay: 0, - }); - header.digest = - Digest { logs: vec![DigestItem::Consensus(GRANDPA_ENGINE_ID, consensus_log.encode())] }; - let justification = make_justification_for_header(JustificationGeneratorParams { - header: header.clone(), - set_id, - ..Default::default() - }); - - Pallet::::submit_finality_proof_ex( - RuntimeOrigin::signed(1), - Box::new(header), - justification, - set_id, - ) - } - - fn next_block() { - use frame_support::traits::OnInitialize; - - let current_number = frame_system::Pallet::::block_number(); - frame_system::Pallet::::set_block_number(current_number + 1); - let _ = Pallet::::on_initialize(current_number); - } - - fn change_log(delay: u64) -> Digest { - let consensus_log = - ConsensusLog::::ScheduledChange(sp_consensus_grandpa::ScheduledChange { - next_authorities: vec![(ALICE.into(), 1), (BOB.into(), 1)], - delay, - }); - - Digest { logs: 
vec![DigestItem::Consensus(GRANDPA_ENGINE_ID, consensus_log.encode())] } - } - - fn forced_change_log(delay: u64) -> Digest { - let consensus_log = ConsensusLog::::ForcedChange( - delay, - sp_consensus_grandpa::ScheduledChange { - next_authorities: vec![(ALICE.into(), 1), (BOB.into(), 1)], - delay, - }, - ); - - Digest { logs: vec![DigestItem::Consensus(GRANDPA_ENGINE_ID, consensus_log.encode())] } - } - - fn many_authorities_log() -> Digest { - let consensus_log = - ConsensusLog::::ScheduledChange(sp_consensus_grandpa::ScheduledChange { - next_authorities: std::iter::repeat((ALICE.into(), 1)) - .take(MAX_BRIDGED_AUTHORITIES as usize + 1) - .collect(), - delay: 0, - }); - - Digest { logs: vec![DigestItem::Consensus(GRANDPA_ENGINE_ID, consensus_log.encode())] } - } - - #[test] - fn init_root_or_owner_origin_can_initialize_pallet() { - run_test(|| { - assert_noop!(init_with_origin(RuntimeOrigin::signed(1)), DispatchError::BadOrigin); - assert_ok!(init_with_origin(RuntimeOrigin::root())); - - // Reset storage so we can initialize the pallet again - BestFinalized::::kill(); - PalletOwner::::put(2); - assert_ok!(init_with_origin(RuntimeOrigin::signed(2))); - }) - } - - #[test] - fn init_storage_entries_are_correctly_initialized() { - run_test(|| { - assert_eq!(BestFinalized::::get(), None,); - assert_eq!(Pallet::::best_finalized(), None); - assert_eq!(PalletOperatingMode::::try_get(), Err(())); - - let init_data = init_with_origin(RuntimeOrigin::root()).unwrap(); - - assert!(>::contains_key(init_data.header.hash())); - assert_eq!(BestFinalized::::get().unwrap().1, init_data.header.hash()); - assert_eq!( - CurrentAuthoritySet::::get().authorities, - init_data.authority_list - ); - assert_eq!( - PalletOperatingMode::::try_get(), - Ok(BasicOperatingMode::Normal) - ); - }) - } - - #[test] - fn init_can_only_initialize_pallet_once() { - run_test(|| { - initialize_substrate_bridge(); - assert_noop!( - init_with_origin(RuntimeOrigin::root()), - >::AlreadyInitialized - ); - }) 
- } - - #[test] - fn init_fails_if_there_are_too_many_authorities_in_the_set() { - run_test(|| { - let genesis = test_header(0); - let init_data = InitializationData { - header: Box::new(genesis), - authority_list: std::iter::repeat(authority_list().remove(0)) - .take(MAX_BRIDGED_AUTHORITIES as usize + 1) - .collect(), - set_id: 1, - operating_mode: BasicOperatingMode::Normal, - }; - - assert_noop!( - Pallet::::initialize(RuntimeOrigin::root(), init_data), - Error::::TooManyAuthoritiesInSet, - ); - }); - } - - #[test] - fn pallet_rejects_transactions_if_halted() { - run_test(|| { - initialize_substrate_bridge(); - - assert_ok!(Pallet::::set_operating_mode( - RuntimeOrigin::root(), - BasicOperatingMode::Halted - )); - assert_noop!( - submit_finality_proof(1), - Error::::BridgeModule(bp_runtime::OwnedBridgeModuleError::Halted) - ); - - assert_ok!(Pallet::::set_operating_mode( - RuntimeOrigin::root(), - BasicOperatingMode::Normal - )); - assert_ok!(submit_finality_proof(1)); - }) - } - - #[test] - fn pallet_rejects_header_if_not_initialized_yet() { - run_test(|| { - assert_noop!(submit_finality_proof(1), Error::::NotInitialized); - }); - } - - #[test] - fn successfully_imports_header_with_valid_finality() { - run_test(|| { - initialize_substrate_bridge(); - - let header_number = 1; - let header = test_header(header_number.into()); - let justification = make_default_justification(&header); - - let pre_dispatch_weight = ::WeightInfo::submit_finality_proof( - justification.commit.precommits.len().try_into().unwrap_or(u32::MAX), - justification.votes_ancestries.len().try_into().unwrap_or(u32::MAX), - ); - - let result = submit_finality_proof(header_number); - assert_ok!(result); - assert_eq!(result.unwrap().pays_fee, frame_support::dispatch::Pays::Yes); - // our test config assumes 2048 max authorities and we are just using couple - let pre_dispatch_proof_size = pre_dispatch_weight.proof_size(); - let actual_proof_size = 
result.unwrap().actual_weight.unwrap().proof_size(); - assert!(actual_proof_size > 0); - assert!( - actual_proof_size < pre_dispatch_proof_size, - "Actual proof size {actual_proof_size} must be less than the pre-dispatch {pre_dispatch_proof_size}", - ); - - let header = test_header(1); - assert_eq!(>::get().unwrap().1, header.hash()); - assert!(>::contains_key(header.hash())); - - assert_eq!( - System::events(), - vec![EventRecord { - phase: Phase::Initialization, - event: TestEvent::Grandpa(Event::UpdatedBestFinalizedHeader { - number: *header.number(), - hash: header.hash(), - grandpa_info: StoredHeaderGrandpaInfo { - finality_proof: justification.clone(), - new_verification_context: None, - }, - }), - topics: vec![], - }], - ); - assert_eq!( - Pallet::::synced_headers_grandpa_info(), - vec![StoredHeaderGrandpaInfo { - finality_proof: justification, - new_verification_context: None - }] - ); - }) - } - - #[test] - fn rejects_justification_that_skips_authority_set_transition() { - run_test(|| { - initialize_substrate_bridge(); - - let header = test_header(1); - - let next_set_id = 2; - let params = JustificationGeneratorParams:: { - set_id: next_set_id, - ..Default::default() - }; - let justification = make_justification_for_header(params); - - assert_err!( - Pallet::::submit_finality_proof_ex( - RuntimeOrigin::signed(1), - Box::new(header.clone()), - justification.clone(), - TEST_GRANDPA_SET_ID, - ), - >::InvalidJustification - ); - assert_err!( - Pallet::::submit_finality_proof_ex( - RuntimeOrigin::signed(1), - Box::new(header), - justification, - next_set_id, - ), - >::InvalidAuthoritySetId - ); - }) - } - - #[test] - fn does_not_import_header_with_invalid_finality_proof() { - run_test(|| { - initialize_substrate_bridge(); - - let header = test_header(1); - let mut justification = make_default_justification(&header); - justification.round = 42; - - assert_err!( - Pallet::::submit_finality_proof_ex( - RuntimeOrigin::signed(1), - Box::new(header), - 
justification, - TEST_GRANDPA_SET_ID, - ), - >::InvalidJustification - ); - }) - } - - #[test] - fn disallows_invalid_authority_set() { - run_test(|| { - let genesis = test_header(0); - - let invalid_authority_list = vec![(ALICE.into(), u64::MAX), (BOB.into(), u64::MAX)]; - let init_data = InitializationData { - header: Box::new(genesis), - authority_list: invalid_authority_list, - set_id: 1, - operating_mode: BasicOperatingMode::Normal, - }; - - assert_ok!(Pallet::::initialize(RuntimeOrigin::root(), init_data)); - - let header = test_header(1); - let justification = make_default_justification(&header); - - assert_err!( - Pallet::::submit_finality_proof_ex( - RuntimeOrigin::signed(1), - Box::new(header), - justification, - TEST_GRANDPA_SET_ID, - ), - >::InvalidAuthoritySet - ); - }) - } - - #[test] - fn importing_header_ensures_that_chain_is_extended() { - run_test(|| { - initialize_substrate_bridge(); - - assert_ok!(submit_finality_proof(4)); - assert_err!(submit_finality_proof(3), Error::::OldHeader); - assert_ok!(submit_finality_proof(5)); - }) - } - - #[test] - fn importing_header_enacts_new_authority_set() { - run_test(|| { - initialize_substrate_bridge(); - - let next_set_id = 2; - let next_authorities = vec![(ALICE.into(), 1), (BOB.into(), 1)]; - - // Need to update the header digest to indicate that our header signals an authority set - // change. The change will be enacted when we import our header. 
- let mut header = test_header(2); - header.digest = change_log(0); - - // Create a valid justification for the header - let justification = make_default_justification(&header); - - // Let's import our test header - let result = Pallet::::submit_finality_proof_ex( - RuntimeOrigin::signed(1), - Box::new(header.clone()), - justification.clone(), - TEST_GRANDPA_SET_ID, - ); - assert_ok!(result); - assert_eq!(result.unwrap().pays_fee, frame_support::dispatch::Pays::No); - - // Make sure that our header is the best finalized - assert_eq!(>::get().unwrap().1, header.hash()); - assert!(>::contains_key(header.hash())); - - // Make sure that the authority set actually changed upon importing our header - assert_eq!( - >::get(), - StoredAuthoritySet::::try_new(next_authorities, next_set_id) - .unwrap(), - ); - - // Here - assert_eq!( - System::events(), - vec![EventRecord { - phase: Phase::Initialization, - event: TestEvent::Grandpa(Event::UpdatedBestFinalizedHeader { - number: *header.number(), - hash: header.hash(), - grandpa_info: StoredHeaderGrandpaInfo { - finality_proof: justification.clone(), - new_verification_context: Some( - >::get().into() - ), - }, - }), - topics: vec![], - }], - ); - assert_eq!( - Pallet::::synced_headers_grandpa_info(), - vec![StoredHeaderGrandpaInfo { - finality_proof: justification, - new_verification_context: Some( - >::get().into() - ), - }] - ); - }) - } - - #[test] - fn relayer_pays_tx_fee_when_submitting_huge_mandatory_header() { - run_test(|| { - initialize_substrate_bridge(); - - // let's prepare a huge authorities change header, which is definitely above size limits - let mut header = test_header(2); - header.digest = change_log(0); - header.digest.push(DigestItem::Other(vec![42u8; 1024 * 1024])); - let justification = make_default_justification(&header); - - // without large digest item ^^^ the relayer would have paid zero transaction fee - // (`Pays::No`) - let result = Pallet::::submit_finality_proof_ex( - RuntimeOrigin::signed(1), 
- Box::new(header.clone()), - justification, - TEST_GRANDPA_SET_ID, - ); - assert_ok!(result); - assert_eq!(result.unwrap().pays_fee, frame_support::dispatch::Pays::Yes); - - // Make sure that our header is the best finalized - assert_eq!(>::get().unwrap().1, header.hash()); - assert!(>::contains_key(header.hash())); - }) - } - - #[test] - fn relayer_pays_tx_fee_when_submitting_justification_with_long_ancestry_votes() { - run_test(|| { - initialize_substrate_bridge(); - - // let's prepare a huge authorities change header, which is definitely above weight - // limits - let mut header = test_header(2); - header.digest = change_log(0); - let justification = make_justification_for_header(JustificationGeneratorParams { - header: header.clone(), - ancestors: TestBridgedChain::REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY + 1, - ..Default::default() - }); - - // without many headers in votes ancestries ^^^ the relayer would have paid zero - // transaction fee (`Pays::No`) - let result = Pallet::::submit_finality_proof_ex( - RuntimeOrigin::signed(1), - Box::new(header.clone()), - justification, - TEST_GRANDPA_SET_ID, - ); - assert_ok!(result); - assert_eq!(result.unwrap().pays_fee, frame_support::dispatch::Pays::Yes); - - // Make sure that our header is the best finalized - assert_eq!(>::get().unwrap().1, header.hash()); - assert!(>::contains_key(header.hash())); - }) - } - - #[test] - fn importing_header_rejects_header_with_scheduled_change_delay() { - run_test(|| { - initialize_substrate_bridge(); - - // Need to update the header digest to indicate that our header signals an authority set - // change. However, the change doesn't happen until the next block. 
- let mut header = test_header(2); - header.digest = change_log(1); - - // Create a valid justification for the header - let justification = make_default_justification(&header); - - // Should not be allowed to import this header - assert_err!( - Pallet::::submit_finality_proof_ex( - RuntimeOrigin::signed(1), - Box::new(header), - justification, - TEST_GRANDPA_SET_ID, - ), - >::UnsupportedScheduledChange - ); - }) - } - - #[test] - fn importing_header_rejects_header_with_forced_changes() { - run_test(|| { - initialize_substrate_bridge(); - - // Need to update the header digest to indicate that it signals a forced authority set - // change. - let mut header = test_header(2); - header.digest = forced_change_log(0); - - // Create a valid justification for the header - let justification = make_default_justification(&header); - - // Should not be allowed to import this header - assert_err!( - Pallet::::submit_finality_proof_ex( - RuntimeOrigin::signed(1), - Box::new(header), - justification, - TEST_GRANDPA_SET_ID, - ), - >::UnsupportedScheduledChange - ); - }) - } - - #[test] - fn importing_header_rejects_header_with_too_many_authorities() { - run_test(|| { - initialize_substrate_bridge(); - - // Need to update the header digest to indicate that our header signals an authority set - // change. However, the change doesn't happen until the next block. 
- let mut header = test_header(2); - header.digest = many_authorities_log(); - - // Create a valid justification for the header - let justification = make_default_justification(&header); - - // Should not be allowed to import this header - assert_err!( - Pallet::::submit_finality_proof_ex( - RuntimeOrigin::signed(1), - Box::new(header), - justification, - TEST_GRANDPA_SET_ID, - ), - >::TooManyAuthoritiesInSet - ); - }); - } - - #[test] - fn parse_finalized_storage_proof_rejects_proof_on_unknown_header() { - run_test(|| { - assert_noop!( - Pallet::::storage_proof_checker(Default::default(), vec![],) - .map(|_| ()), - bp_header_chain::HeaderChainError::UnknownHeader, - ); - }); - } - - #[test] - fn parse_finalized_storage_accepts_valid_proof() { - run_test(|| { - let (state_root, storage_proof) = bp_runtime::craft_valid_storage_proof(); - - let mut header = test_header(2); - header.set_state_root(state_root); - - let hash = header.hash(); - >::put(HeaderId(2, hash)); - >::insert(hash, header.build()); - - assert_ok!( - Pallet::::storage_proof_checker(hash, storage_proof).map(|_| ()) - ); - }); - } - - #[test] - fn rate_limiter_disallows_free_imports_once_limit_is_hit_in_single_block() { - run_test(|| { - initialize_substrate_bridge(); - - let result = submit_mandatory_finality_proof(1, 1); - assert_eq!(result.expect("call failed").pays_fee, Pays::No); - - let result = submit_mandatory_finality_proof(2, 2); - assert_eq!(result.expect("call failed").pays_fee, Pays::No); - - let result = submit_mandatory_finality_proof(3, 3); - assert_eq!(result.expect("call failed").pays_fee, Pays::Yes); - }) - } - - #[test] - fn rate_limiter_invalid_requests_do_not_count_towards_request_count() { - run_test(|| { - let submit_invalid_request = || { - let mut header = test_header(1); - header.digest = change_log(0); - let mut invalid_justification = make_default_justification(&header); - invalid_justification.round = 42; - - Pallet::::submit_finality_proof_ex( - 
RuntimeOrigin::signed(1), - Box::new(header), - invalid_justification, - TEST_GRANDPA_SET_ID, - ) - }; - - initialize_substrate_bridge(); - - for _ in 0..::MaxFreeMandatoryHeadersPerBlock::get() + 1 { - assert_err!(submit_invalid_request(), >::InvalidJustification); - } - - // Can still submit free mandatory headers afterwards - let result = submit_mandatory_finality_proof(1, 1); - assert_eq!(result.expect("call failed").pays_fee, Pays::No); - - let result = submit_mandatory_finality_proof(2, 2); - assert_eq!(result.expect("call failed").pays_fee, Pays::No); - - let result = submit_mandatory_finality_proof(3, 3); - assert_eq!(result.expect("call failed").pays_fee, Pays::Yes); - }) - } - - #[test] - fn rate_limiter_allows_request_after_new_block_has_started() { - run_test(|| { - initialize_substrate_bridge(); - - let result = submit_mandatory_finality_proof(1, 1); - assert_eq!(result.expect("call failed").pays_fee, Pays::No); - - let result = submit_mandatory_finality_proof(2, 2); - assert_eq!(result.expect("call failed").pays_fee, Pays::No); - - let result = submit_mandatory_finality_proof(3, 3); - assert_eq!(result.expect("call failed").pays_fee, Pays::Yes); - - next_block(); - - let result = submit_mandatory_finality_proof(4, 4); - assert_eq!(result.expect("call failed").pays_fee, Pays::No); - - let result = submit_mandatory_finality_proof(5, 5); - assert_eq!(result.expect("call failed").pays_fee, Pays::No); - - let result = submit_mandatory_finality_proof(6, 6); - assert_eq!(result.expect("call failed").pays_fee, Pays::Yes); - }) - } - - #[test] - fn rate_limiter_ignores_non_mandatory_headers() { - run_test(|| { - initialize_substrate_bridge(); - - let result = submit_finality_proof(1); - assert_eq!(result.expect("call failed").pays_fee, Pays::Yes); - - let result = submit_mandatory_finality_proof(2, 1); - assert_eq!(result.expect("call failed").pays_fee, Pays::No); - - let result = submit_finality_proof_with_set_id(3, 2); - assert_eq!(result.expect("call 
failed").pays_fee, Pays::Yes); - - let result = submit_mandatory_finality_proof(4, 2); - assert_eq!(result.expect("call failed").pays_fee, Pays::No); - - let result = submit_finality_proof_with_set_id(5, 3); - assert_eq!(result.expect("call failed").pays_fee, Pays::Yes); - - let result = submit_mandatory_finality_proof(6, 3); - assert_eq!(result.expect("call failed").pays_fee, Pays::Yes); - }) - } - - #[test] - fn should_prune_headers_over_headers_to_keep_parameter() { - run_test(|| { - initialize_substrate_bridge(); - assert_ok!(submit_finality_proof(1)); - let first_header_hash = Pallet::::best_finalized().unwrap().hash(); - next_block(); - - assert_ok!(submit_finality_proof(2)); - next_block(); - assert_ok!(submit_finality_proof(3)); - next_block(); - assert_ok!(submit_finality_proof(4)); - next_block(); - assert_ok!(submit_finality_proof(5)); - next_block(); - - assert_ok!(submit_finality_proof(6)); - - assert!( - !ImportedHeaders::::contains_key(first_header_hash), - "First header should be pruned.", - ); - }) - } - - #[test] - fn storage_keys_computed_properly() { - assert_eq!( - PalletOperatingMode::::storage_value_final_key().to_vec(), - bp_header_chain::storage_keys::pallet_operating_mode_key("Grandpa").0, - ); - - assert_eq!( - CurrentAuthoritySet::::storage_value_final_key().to_vec(), - bp_header_chain::storage_keys::current_authority_set_key("Grandpa").0, - ); - - assert_eq!( - BestFinalized::::storage_value_final_key().to_vec(), - bp_header_chain::storage_keys::best_finalized_key("Grandpa").0, - ); - } - - #[test] - fn test_bridge_grandpa_call_is_correctly_defined() { - let header = test_header(0); - let init_data = InitializationData { - header: Box::new(header.clone()), - authority_list: authority_list(), - set_id: 1, - operating_mode: BasicOperatingMode::Normal, - }; - let justification = make_default_justification(&header); - - let direct_initialize_call = - Call::::initialize { init_data: init_data.clone() }; - let indirect_initialize_call = 
BridgeGrandpaCall::::initialize { init_data }; - assert_eq!(direct_initialize_call.encode(), indirect_initialize_call.encode()); - - let direct_submit_finality_proof_call = Call::::submit_finality_proof { - finality_target: Box::new(header.clone()), - justification: justification.clone(), - }; - let indirect_submit_finality_proof_call = - BridgeGrandpaCall::::submit_finality_proof { - finality_target: Box::new(header), - justification, - }; - assert_eq!( - direct_submit_finality_proof_call.encode(), - indirect_submit_finality_proof_call.encode() - ); - } - - generate_owned_bridge_module_tests!(BasicOperatingMode::Normal, BasicOperatingMode::Halted); - - #[test] - fn maybe_headers_to_keep_returns_correct_value() { - assert_eq!(MaybeHeadersToKeep::::get(), Some(mock::HeadersToKeep::get())); - } - - #[test] - fn submit_finality_proof_requires_signed_origin() { - run_test(|| { - initialize_substrate_bridge(); - - let header = test_header(1); - let justification = make_default_justification(&header); - - assert_noop!( - Pallet::::submit_finality_proof_ex( - RuntimeOrigin::root(), - Box::new(header), - justification, - TEST_GRANDPA_SET_ID, - ), - DispatchError::BadOrigin, - ); - }) - } -} diff --git a/modules/grandpa/src/mock.rs b/modules/grandpa/src/mock.rs deleted file mode 100644 index e689e520c..000000000 --- a/modules/grandpa/src/mock.rs +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -// From construct_runtime macro -#![allow(clippy::from_over_into)] - -use bp_header_chain::ChainWithGrandpa; -use bp_runtime::{Chain, ChainId}; -use frame_support::{ - construct_runtime, derive_impl, parameter_types, traits::Hooks, weights::Weight, -}; -use sp_core::sr25519::Signature; - -pub type AccountId = u64; -pub type TestHeader = sp_runtime::testing::Header; -pub type TestNumber = u64; - -type Block = frame_system::mocking::MockBlock; - -pub const MAX_BRIDGED_AUTHORITIES: u32 = 5; - -use crate as grandpa; - -construct_runtime! { - pub enum TestRuntime - { - System: frame_system::{Pallet, Call, Config, Storage, Event}, - Grandpa: grandpa::{Pallet, Call, Event}, - } -} - -#[derive_impl(frame_system::config_preludes::TestDefaultConfig)] -impl frame_system::Config for TestRuntime { - type Block = Block; -} - -parameter_types! 
{ - pub const MaxFreeMandatoryHeadersPerBlock: u32 = 2; - pub const HeadersToKeep: u32 = 5; -} - -impl grandpa::Config for TestRuntime { - type RuntimeEvent = RuntimeEvent; - type BridgedChain = TestBridgedChain; - type MaxFreeMandatoryHeadersPerBlock = MaxFreeMandatoryHeadersPerBlock; - type HeadersToKeep = HeadersToKeep; - type WeightInfo = (); -} - -#[derive(Debug)] -pub struct TestBridgedChain; - -impl Chain for TestBridgedChain { - const ID: ChainId = *b"tbch"; - - type BlockNumber = frame_system::pallet_prelude::BlockNumberFor; - type Hash = ::Hash; - type Hasher = ::Hashing; - type Header = TestHeader; - - type AccountId = AccountId; - type Balance = u64; - type Nonce = u64; - type Signature = Signature; - - fn max_extrinsic_size() -> u32 { - unreachable!() - } - fn max_extrinsic_weight() -> Weight { - unreachable!() - } -} - -impl ChainWithGrandpa for TestBridgedChain { - const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = ""; - const MAX_AUTHORITIES_COUNT: u32 = MAX_BRIDGED_AUTHORITIES; - const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = 8; - const MAX_MANDATORY_HEADER_SIZE: u32 = 256; - const AVERAGE_HEADER_SIZE: u32 = 64; -} - -/// Return test externalities to use in tests. -pub fn new_test_ext() -> sp_io::TestExternalities { - sp_io::TestExternalities::new(Default::default()) -} - -/// Return test within default test externalities context. -pub fn run_test(test: impl FnOnce() -> T) -> T { - new_test_ext().execute_with(|| { - let _ = Grandpa::on_initialize(0); - test() - }) -} - -/// Return test header with given number. -pub fn test_header(num: TestNumber) -> TestHeader { - // We wrap the call to avoid explicit type annotations in our tests - bp_test_utils::test_header(num) -} diff --git a/modules/grandpa/src/storage_types.rs b/modules/grandpa/src/storage_types.rs deleted file mode 100644 index 6d1a7882d..000000000 --- a/modules/grandpa/src/storage_types.rs +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. 
-// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Wrappers for public types that are implementing `MaxEncodedLen` - -use crate::{Config, Error}; - -use bp_header_chain::{AuthoritySet, ChainWithGrandpa}; -use codec::{Decode, Encode, MaxEncodedLen}; -use frame_support::{traits::Get, BoundedVec, CloneNoBound, RuntimeDebugNoBound}; -use scale_info::TypeInfo; -use sp_consensus_grandpa::{AuthorityId, AuthorityList, AuthorityWeight, SetId}; -use sp_std::marker::PhantomData; - -/// A bounded list of Grandpa authorities with associated weights. -pub type StoredAuthorityList = - BoundedVec<(AuthorityId, AuthorityWeight), MaxBridgedAuthorities>; - -/// Adapter for using `T::BridgedChain::MAX_BRIDGED_AUTHORITIES` in `BoundedVec`. -pub struct StoredAuthorityListLimit(PhantomData<(T, I)>); - -impl, I: 'static> Get for StoredAuthorityListLimit { - fn get() -> u32 { - T::BridgedChain::MAX_AUTHORITIES_COUNT - } -} - -/// A bounded GRANDPA Authority List and ID. -#[derive(CloneNoBound, Decode, Encode, Eq, TypeInfo, MaxEncodedLen, RuntimeDebugNoBound)] -#[scale_info(skip_type_params(T, I))] -pub struct StoredAuthoritySet, I: 'static> { - /// List of GRANDPA authorities for the current round. - pub authorities: StoredAuthorityList>, - /// Monotonic identifier of the current GRANDPA authority set. 
- pub set_id: SetId, -} - -impl, I: 'static> StoredAuthoritySet { - /// Try to create a new bounded GRANDPA Authority Set from unbounded list. - /// - /// Returns error if number of authorities in the provided list is too large. - pub fn try_new(authorities: AuthorityList, set_id: SetId) -> Result> { - Ok(Self { - authorities: TryFrom::try_from(authorities) - .map_err(|_| Error::TooManyAuthoritiesInSet)?, - set_id, - }) - } - - /// Returns number of bytes that may be subtracted from the PoV component of - /// `submit_finality_proof` call, because the actual authorities set is smaller than the maximal - /// configured. - /// - /// Maximal authorities set size is configured by the `MaxBridgedAuthorities` constant from - /// the pallet configuration. The PoV of the call includes the size of maximal authorities - /// count. If the actual size is smaller, we may subtract extra bytes from this component. - pub fn unused_proof_size(&self) -> u64 { - // we can only safely estimate bytes that are occupied by the authority data itself. 
We have - // no means here to compute PoV bytes, occupied by extra trie nodes or extra bytes in the - // whole set encoding - let single_authority_max_encoded_len = - <(AuthorityId, AuthorityWeight)>::max_encoded_len() as u64; - let extra_authorities = - T::BridgedChain::MAX_AUTHORITIES_COUNT.saturating_sub(self.authorities.len() as _); - single_authority_max_encoded_len.saturating_mul(extra_authorities as u64) - } -} - -impl, I: 'static> PartialEq for StoredAuthoritySet { - fn eq(&self, other: &Self) -> bool { - self.set_id == other.set_id && self.authorities == other.authorities - } -} - -impl, I: 'static> Default for StoredAuthoritySet { - fn default() -> Self { - StoredAuthoritySet { authorities: BoundedVec::default(), set_id: 0 } - } -} - -impl, I: 'static> From> for AuthoritySet { - fn from(t: StoredAuthoritySet) -> Self { - AuthoritySet { authorities: t.authorities.into(), set_id: t.set_id } - } -} - -#[cfg(test)] -mod tests { - use crate::mock::{TestRuntime, MAX_BRIDGED_AUTHORITIES}; - use bp_test_utils::authority_list; - - type StoredAuthoritySet = super::StoredAuthoritySet; - - #[test] - fn unused_proof_size_works() { - let authority_entry = authority_list().pop().unwrap(); - - // when we have exactly `MaxBridgedAuthorities` authorities - assert_eq!( - StoredAuthoritySet::try_new( - vec![authority_entry.clone(); MAX_BRIDGED_AUTHORITIES as usize], - 0, - ) - .unwrap() - .unused_proof_size(), - 0, - ); - - // when we have less than `MaxBridgedAuthorities` authorities - assert_eq!( - StoredAuthoritySet::try_new( - vec![authority_entry; MAX_BRIDGED_AUTHORITIES as usize - 1], - 0, - ) - .unwrap() - .unused_proof_size(), - 40, - ); - - // and we can't have more than `MaxBridgedAuthorities` authorities in the bounded vec, so - // no test for this case - } -} diff --git a/modules/grandpa/src/weights.rs b/modules/grandpa/src/weights.rs deleted file mode 100644 index a75e7b5a8..000000000 --- a/modules/grandpa/src/weights.rs +++ /dev/null @@ -1,167 +0,0 @@ -// 
Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Autogenerated weights for pallet_bridge_grandpa -//! -//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2023-03-02, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` -//! WORST CASE MAP SIZE: `1000000` -//! HOSTNAME: `covid`, CPU: `11th Gen Intel(R) Core(TM) i7-11800H @ 2.30GHz` -//! EXECUTION: Some(Wasm), WASM-EXECUTION: Compiled, CHAIN: Some("dev"), DB CACHE: 1024 - -// Executed Command: -// target/release/unknown-bridge-node -// benchmark -// pallet -// --chain=dev -// --steps=50 -// --repeat=20 -// --pallet=pallet_bridge_grandpa -// --extrinsic=* -// --execution=wasm -// --wasm-execution=Compiled -// --heap-pages=4096 -// --output=./modules/grandpa/src/weights.rs -// --template=./.maintain/bridge-weight-template.hbs - -#![allow(clippy::all)] -#![allow(unused_parens)] -#![allow(unused_imports)] -#![allow(missing_docs)] - -use frame_support::{ - traits::Get, - weights::{constants::RocksDbWeight, Weight}, -}; -use sp_std::marker::PhantomData; - -/// Weight functions needed for pallet_bridge_grandpa. -pub trait WeightInfo { - fn submit_finality_proof(p: u32, v: u32) -> Weight; -} - -/// Weights for `pallet_bridge_grandpa` that are generated using one of the Bridge testnets. 
-/// -/// Those weights are test only and must never be used in production. -pub struct BridgeWeight(PhantomData); -impl WeightInfo for BridgeWeight { - /// Storage: BridgeUnknownGrandpa PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa PalletOperatingMode (max_values: Some(1), max_size: Some(1), - /// added: 496, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa RequestCount (r:1 w:1) - /// - /// Proof: BridgeUnknownGrandpa RequestCount (max_values: Some(1), max_size: Some(4), added: - /// 499, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa BestFinalized (r:1 w:1) - /// - /// Proof: BridgeUnknownGrandpa BestFinalized (max_values: Some(1), max_size: Some(36), added: - /// 531, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa CurrentAuthoritySet (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa CurrentAuthoritySet (max_values: Some(1), max_size: Some(209), - /// added: 704, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHashesPointer (r:1 w:1) - /// - /// Proof: BridgeUnknownGrandpa ImportedHashesPointer (max_values: Some(1), max_size: Some(4), - /// added: 499, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHashes (r:1 w:1) - /// - /// Proof: BridgeUnknownGrandpa ImportedHashes (max_values: Some(14400), max_size: Some(36), - /// added: 2016, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:0 w:2) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// The range of component `p` is `[1, 4]`. - /// - /// The range of component `v` is `[50, 100]`. - fn submit_finality_proof(p: u32, v: u32) -> Weight { - // Proof Size summary in bytes: - // Measured: `394 + p * (60 ±0)` - // Estimated: `4745` - // Minimum execution time: 228_072 nanoseconds. 
- Weight::from_parts(57_853_228, 4745) - // Standard Error: 149_421 - .saturating_add(Weight::from_parts(36_708_702, 0).saturating_mul(p.into())) - // Standard Error: 10_625 - .saturating_add(Weight::from_parts(1_469_032, 0).saturating_mul(v.into())) - .saturating_add(T::DbWeight::get().reads(6_u64)) - .saturating_add(T::DbWeight::get().writes(6_u64)) - } -} - -// For backwards compatibility and tests -impl WeightInfo for () { - /// Storage: BridgeUnknownGrandpa PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa PalletOperatingMode (max_values: Some(1), max_size: Some(1), - /// added: 496, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa RequestCount (r:1 w:1) - /// - /// Proof: BridgeUnknownGrandpa RequestCount (max_values: Some(1), max_size: Some(4), added: - /// 499, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa BestFinalized (r:1 w:1) - /// - /// Proof: BridgeUnknownGrandpa BestFinalized (max_values: Some(1), max_size: Some(36), added: - /// 531, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa CurrentAuthoritySet (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa CurrentAuthoritySet (max_values: Some(1), max_size: Some(209), - /// added: 704, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHashesPointer (r:1 w:1) - /// - /// Proof: BridgeUnknownGrandpa ImportedHashesPointer (max_values: Some(1), max_size: Some(4), - /// added: 499, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHashes (r:1 w:1) - /// - /// Proof: BridgeUnknownGrandpa ImportedHashes (max_values: Some(14400), max_size: Some(36), - /// added: 2016, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:0 w:2) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// The range of component `p` is `[1, 4]`. - /// - /// The range of component `v` is `[50, 100]`. 
- fn submit_finality_proof(p: u32, v: u32) -> Weight { - // Proof Size summary in bytes: - // Measured: `394 + p * (60 ±0)` - // Estimated: `4745` - // Minimum execution time: 228_072 nanoseconds. - Weight::from_parts(57_853_228, 4745) - // Standard Error: 149_421 - .saturating_add(Weight::from_parts(36_708_702, 0).saturating_mul(p.into())) - // Standard Error: 10_625 - .saturating_add(Weight::from_parts(1_469_032, 0).saturating_mul(v.into())) - .saturating_add(RocksDbWeight::get().reads(6_u64)) - .saturating_add(RocksDbWeight::get().writes(6_u64)) - } -} diff --git a/modules/messages/Cargo.toml b/modules/messages/Cargo.toml deleted file mode 100644 index 24ad437be..000000000 --- a/modules/messages/Cargo.toml +++ /dev/null @@ -1,64 +0,0 @@ -[package] -name = "pallet-bridge-messages" -description = "Module that allows bridged chains to exchange messages using lane concept." -version = "0.7.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } -log = { workspace = true } -num-traits = { version = "0.2", default-features = false } -scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } - -# Bridge dependencies - -bp-messages = { path = "../../primitives/messages", default-features = false } -bp-runtime = { path = "../../primitives/runtime", default-features = false } - -# Substrate Dependencies - -frame-benchmarking = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false, optional = true } -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -frame-system = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-runtime = { git = 
"https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[dev-dependencies] -bp-test-utils = { path = "../../primitives/test-utils" } -pallet-balances = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-io = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } - -[features] -default = ["std"] -std = [ - "bp-messages/std", - "bp-runtime/std", - "codec/std", - "frame-benchmarking/std", - "frame-support/std", - "frame-system/std", - "log/std", - "num-traits/std", - "scale-info/std", - "sp-runtime/std", - "sp-std/std", -] -runtime-benchmarks = [ - "frame-benchmarking/runtime-benchmarks", - "frame-support/runtime-benchmarks", - "frame-system/runtime-benchmarks", - "pallet-balances/runtime-benchmarks", - "sp-runtime/runtime-benchmarks", -] -try-runtime = [ - "frame-support/try-runtime", - "frame-system/try-runtime", - "pallet-balances/try-runtime", - "sp-runtime/try-runtime", -] diff --git a/modules/messages/README.md b/modules/messages/README.md deleted file mode 100644 index fe6230574..000000000 --- a/modules/messages/README.md +++ /dev/null @@ -1,201 +0,0 @@ -# Bridge Messages Pallet - -The messages pallet is used to deliver messages from source chain to target chain. Message is (almost) opaque to the -module and the final goal is to hand message to the message dispatch mechanism. - -## Contents - -- [Overview](#overview) -- [Message Workflow](#message-workflow) -- [Integrating Message Lane Module into Runtime](#integrating-messages-module-into-runtime) -- [Non-Essential Functionality](#non-essential-functionality) -- [Weights of Module Extrinsics](#weights-of-module-extrinsics) - -## Overview - -Message lane is an unidirectional channel, where messages are sent from source chain to the target chain. 
At the same -time, a single instance of messages module supports both outbound lanes and inbound lanes. So the chain where the module -is deployed (this chain), may act as a source chain for outbound messages (heading to a bridged chain) and as a target -chain for inbound messages (coming from a bridged chain). - -Messages module supports multiple message lanes. Every message lane is identified with a 4-byte identifier. Messages -sent through the lane are assigned unique (for this lane) increasing integer value that is known as nonce ("number that -can only be used once"). Messages that are sent over the same lane are guaranteed to be delivered to the target chain in -the same order they're sent from the source chain. In other words, message with nonce `N` will be delivered right before -delivering a message with nonce `N+1`. - -Single message lane may be seen as a transport channel for single application (onchain, offchain or mixed). At the same -time the module itself never dictates any lane or message rules. In the end, it is the runtime developer who defines -what message lane and message mean for this runtime. - -In our [Kusama<>Polkadot bridge](../../docs/polkadot-kusama-bridge-overview.md) we are using lane as a channel of -communication between two parachains of different relay chains. For example, lane `[0, 0, 0, 0]` is used for Polkadot <> -Kusama Asset Hub communications. Other lanes may be used to bridge other parachains. - -## Message Workflow - -The pallet is not intended to be used by end users and provides no public calls to send the message. Instead, it -provides runtime-internal method that allows other pallets (or other runtime code) to queue outbound messages. - -The message "appears" when some runtime code calls the `send_message()` method of the pallet. The submitter specifies -the lane that they're willing to use and the message itself. If some fee must be paid for sending the message, it must -be paid outside of the pallet. 
If a message passes all checks (that include, for example, message size check, disabled -lane check, ...), the nonce is assigned and the message is stored in the module storage. The message is in an -"undelivered" state now. - -We assume that there are external, offchain actors, called relayers, that are submitting module related transactions to -both target and source chains. The pallet itself has no assumptions about relayers incentivization scheme, but it has -some callbacks for paying rewards. See [Integrating Messages Module into -runtime](#Integrating-Messages-Module-into-runtime) for details. - -Eventually, some relayer would notice this message in the "undelivered" state and it would decide to deliver this -message. Relayer then crafts `receive_messages_proof()` transaction (aka delivery transaction) for the messages module -instance, deployed at the target chain. Relayer provides its account id at the source chain, the proof of message (or -several messages), the number of messages in the transaction and their cumulative dispatch weight. Once a transaction is -mined, the message is considered "delivered". - -Once a message is delivered, the relayer may want to confirm delivery back to the source chain. There are two reasons -why it would want to do that. The first is that we intentionally limit number of "delivered", but not yet "confirmed" -messages at inbound lanes (see [What about other Constants in the Messages Module Configuration -Trait](#What-about-other-Constants-in-the-Messages-Module-Configuration-Trait) for explanation). So at some point, the -target chain may stop accepting new messages until relayers confirm some of these. The second is that if the relayer -wants to be rewarded for delivery, it must prove the fact that it has actually delivered the message. And this proof may -only be generated after the delivery transaction is mined. 
So relayer crafts the `receive_messages_delivery_proof()` -transaction (aka confirmation transaction) for the messages module instance, deployed at the source chain. Once this -transaction is mined, the message is considered "confirmed". - -The "confirmed" state is the final state of the message. But there's one last thing related to the message - the fact -that it is now "confirmed" and reward has been paid to the relayer (or at least callback for this has been called), must -be confirmed to the target chain. Otherwise, we may reach the limit of "unconfirmed" messages at the target chain and it -will stop accepting new messages. So relayer sometimes includes a nonce of the latest "confirmed" message in the next -`receive_messages_proof()` transaction, proving that some messages have been confirmed. - -## Integrating Messages Module into Runtime - -As it has been said above, the messages module supports both outbound and inbound message lanes. So if we will integrate -a module in some runtime, it may act as the source chain runtime for outbound messages and as the target chain runtime -for inbound messages. In this section, we'll sometimes refer to the chain we're currently integrating with, as "this -chain" and the other chain as "bridged chain". - -Messages module doesn't simply accept transactions that are claiming that the bridged chain has some updated data for -us. Instead of this, the module assumes that the bridged chain is able to prove that updated data in some way. The proof -is abstracted from the module and may be of any kind. In our Substrate-to-Substrate bridge we're using runtime storage -proofs. Other bridges may use transaction proofs, Substrate header digests or anything else that may be proved. - -**IMPORTANT NOTE**: everything below in this chapter describes details of the messages module configuration. 
But if -you're interested in well-probed and relatively easy integration of two Substrate-based chains, you may want to look at -the [bridge-runtime-common](../../bin/runtime-common/) crate. This crate is providing a lot of helpers for integration, -which may be directly used from within your runtime. Then if you'll decide to change something in this scheme, get back -here for detailed information. - -### General Information - -The messages module supports instances. Every module instance is supposed to bridge this chain and some bridged chain. -To bridge with another chain, using another instance is suggested (this isn't forced anywhere in the code, though). Keep -in mind, that the pallet may be used to build virtual channels between multiple chains, as we do in our [Polkadot <> -Kusama bridge](../../docs/polkadot-kusama-bridge-overview.md). There, the pallet actually bridges only two parachains - -Kusama Bridge Hub and Polkadot Bridge Hub. However, other Kusama and Polkadot parachains are able to send (XCM) messages -to their Bridge Hubs. The messages will be delivered to the other side of the bridge and routed to the proper -destination parachain within the bridged chain consensus. - -Message submitters may track message progress by inspecting module events. When Message is accepted, the -`MessageAccepted` event is emitted. The event contains both message lane identifier and nonce that has been assigned to -the message. When a message is delivered to the target chain, the `MessagesDelivered` event is emitted from the -`receive_messages_delivery_proof()` transaction. The `MessagesDelivered` contains the message lane identifier and -inclusive range of delivered message nonces. - -The pallet provides no means to get the result of message dispatch at the target chain. If that is required, it must be -done outside of the pallet. For example, XCM messages, when dispatched, have special instructions to send some data back -to the sender. 
Other dispatchers may use similar mechanism for that. -### How to plug-in Messages Module to Send Messages to the Bridged Chain? - -The `pallet_bridge_messages::Config` trait has 3 main associated types that are used to work with outbound messages. The -`pallet_bridge_messages::Config::TargetHeaderChain` defines how we see the bridged chain as the target for our outbound -messages. It must be able to check that the bridged chain may accept our message - like that the message has size below -maximal possible transaction size of the chain and so on. And when the relayer sends us a confirmation transaction, this -implementation must be able to parse and verify the proof of messages delivery. Normally, you would reuse the same -(configurable) type on all chains that are sending messages to the same bridged chain. - -The last type is the `pallet_bridge_messages::Config::DeliveryConfirmationPayments`. When confirmation -transaction is received, we call the `pay_reward()` method, passing the range of delivered messages. -You may use the [`pallet-bridge-relayers`](../relayers/) pallet and its -[`DeliveryConfirmationPaymentsAdapter`](../relayers/src/payment_adapter.rs) adapter as a possible -implementation. It allows you to pay fixed reward for relaying the message and some of its portion -for confirming delivery. - -### I have a Messages Module in my Runtime, but I Want to Reject all Outbound Messages. What shall I do? - -You should be looking at the `bp_messages::source_chain::ForbidOutboundMessages` structure -[`bp_messages::source_chain`](../../primitives/messages/src/source_chain.rs). It implements all required traits and will -simply reject all transactions, related to outbound messages. - -### How to plug-in Messages Module to Receive Messages from the Bridged Chain? - -The `pallet_bridge_messages::Config` trait has 2 main associated types that are used to work with inbound messages. 
The -`pallet_bridge_messages::Config::SourceHeaderChain` defines how we see the bridged chain as the source of our inbound -messages. When relayer sends us a delivery transaction, this implementation must be able to parse and verify the proof -of messages wrapped in this transaction. Normally, you would reuse the same (configurable) type on all chains that are -sending messages to the same bridged chain. - -The `pallet_bridge_messages::Config::MessageDispatch` defines a way on how to dispatch delivered messages. Apart from -actually dispatching the message, the implementation must return the correct dispatch weight of the message before -dispatch is called. - -### I have a Messages Module in my Runtime, but I Want to Reject all Inbound Messages. What shall I do? - -You should be looking at the `bp_messages::target_chain::ForbidInboundMessages` structure from the -[`bp_messages::target_chain`](../../primitives/messages/src/target_chain.rs) module. It implements all required traits -and will simply reject all transactions, related to inbound messages. - -### What about other Constants in the Messages Module Configuration Trait? - -Two settings that are used to check messages in the `send_message()` function. The -`pallet_bridge_messages::Config::ActiveOutboundLanes` is an array of all message lanes, that may be used to send -messages. All messages sent using other lanes are rejected. All messages that have size above -`pallet_bridge_messages::Config::MaximalOutboundPayloadSize` will also be rejected. - -To be able to reward the relayer for delivering messages, we store a map of message nonces range => identifier of the -relayer that has delivered this range at the target chain runtime storage. If a relayer delivers multiple consequent -ranges, they're merged into single entry. So there may be more than one entry for the same relayer. Eventually, this -whole map must be delivered back to the source chain to confirm delivery and pay rewards. 
So to make sure we are able to
-craft this confirmation transaction, we need to: (1) keep the size of this map below a certain limit and (2) make sure
-that the weight of processing this map is below a certain limit. Both size and processing weight mostly depend on the
-number of entries. The number of entries is limited with the
-`pallet_bridge_messages::Config::MaxUnrewardedRelayerEntriesAtInboundLane` parameter. Processing weight also depends on
-the total number of messages that are being confirmed, because every confirmed message needs to be read. So there's
-another `pallet_bridge_messages::Config::MaxUnconfirmedMessagesAtInboundLane` parameter for that.
-
-When choosing values for these parameters, you must also keep in mind that if proof in your scheme is based on finality
-of headers (and it is the most obvious option for Substrate-based chains with finality notion), then choosing too small
-values for these parameters may cause significant delays in message delivery. That's because there are too many actors
-involved in this scheme: 1) authorities that are finalizing headers of the target chain need to finalize header with
-non-empty map; 2) the headers relayer then needs to submit this header and its finality proof to the source chain; 3)
-the messages relayer must then send confirmation transaction (storage proof of this map) to the source chain; 4) when
-the confirmation transaction will be mined at some header, source chain authorities must finalize this header; 5) the
-headers relay then needs to submit this header and its finality proof to the target chain; 6) only now the messages
-relayer may submit new messages from the source to target chain and prune the entry from the map.
-
-Delivery transaction requires the relayer to provide both number of entries and total number of messages in the map. 
-This means that the module never charges an extra cost for delivering a map - the relayer would need to pay exactly for
-the number of entries+messages it has delivered. So the best guess for values of these parameters would be the pair that
-would occupy `N` percent of the maximal transaction size and weight of the source chain. The `N` should be large enough
-to process large maps, at the same time keeping reserve for future source chain upgrades.
-
-## Non-Essential Functionality
-
-There may be a special account in every runtime where the messages module is deployed. This account, named 'module
-owner', is like a module-level sudo account - he's able to halt and resume all module operations without requiring
-runtime upgrade. Calls that are related to this account are:
-- `fn set_owner()`: current module owner may call it to transfer "ownership" to another account;
-- `fn halt_operations()`: the module owner (or sudo account) may call this function to stop all module operations. After
- this call, all message-related transactions will be rejected until further `resume_operations` call. This call may be
- used when something extraordinary happens with the bridge;
-- `fn resume_operations()`: module owner may call this function to resume bridge operations. The module will resume its
- regular operations after this call.
-
-If pallet owner is not defined, the governance may be used to make those calls.
-
-## Messages Relay
-
-We have an offchain actor, who is watching for new messages and submits them to the bridged chain. It is the messages
-relay - you may look at the [crate level documentation and the code](../../relays/messages/).
diff --git a/modules/messages/src/benchmarking.rs b/modules/messages/src/benchmarking.rs
deleted file mode 100644
index 4f13c4409..000000000
--- a/modules/messages/src/benchmarking.rs
+++ /dev/null
@@ -1,461 +0,0 @@
-// Copyright (C) Parity Technologies (UK) Ltd.
-// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Messages pallet benchmarking. - -use crate::{ - inbound_lane::InboundLaneStorage, outbound_lane, weights_ext::EXPECTED_DEFAULT_MESSAGE_LENGTH, - Call, OutboundLanes, RuntimeInboundLaneStorage, -}; - -use bp_messages::{ - source_chain::TargetHeaderChain, target_chain::SourceHeaderChain, DeliveredMessages, - InboundLaneData, LaneId, MessageNonce, OutboundLaneData, UnrewardedRelayer, - UnrewardedRelayersState, -}; -use bp_runtime::StorageProofSize; -use codec::Decode; -use frame_benchmarking::{account, benchmarks_instance_pallet}; -use frame_support::weights::Weight; -use frame_system::RawOrigin; -use sp_runtime::{traits::TrailingZeroInput, BoundedVec}; -use sp_std::{ops::RangeInclusive, prelude::*}; - -const SEED: u32 = 0; - -/// Pallet we're benchmarking here. -pub struct Pallet, I: 'static = ()>(crate::Pallet); - -/// Benchmark-specific message proof parameters. -#[derive(Debug)] -pub struct MessageProofParams { - /// Id of the lane. - pub lane: LaneId, - /// Range of messages to include in the proof. - pub message_nonces: RangeInclusive, - /// If `Some`, the proof needs to include this outbound lane data. - pub outbound_lane_data: Option, - /// If `true`, the caller expects that the proof will contain correct messages that will - /// be successfully dispatched. 
This is only called from the "optional" - /// `receive_single_message_proof_with_dispatch` benchmark. If you don't need it, just - /// return `true` from the `is_message_successfully_dispatched`. - pub is_successful_dispatch_expected: bool, - /// Proof size requirements. - pub size: StorageProofSize, -} - -/// Benchmark-specific message delivery proof parameters. -#[derive(Debug)] -pub struct MessageDeliveryProofParams { - /// Id of the lane. - pub lane: LaneId, - /// The proof needs to include this inbound lane data. - pub inbound_lane_data: InboundLaneData, - /// Proof size requirements. - pub size: StorageProofSize, -} - -/// Trait that must be implemented by runtime. -pub trait Config: crate::Config { - /// Lane id to use in benchmarks. - /// - /// By default, lane 00000000 is used. - fn bench_lane_id() -> LaneId { - LaneId([0, 0, 0, 0]) - } - - /// Return id of relayer account at the bridged chain. - /// - /// By default, zero account is returned. - fn bridged_relayer_id() -> Self::InboundRelayer { - Self::InboundRelayer::decode(&mut TrailingZeroInput::zeroes()).unwrap() - } - - /// Create given account and give it enough balance for test purposes. Used to create - /// relayer account at the target chain. Is strictly necessary when your rewards scheme - /// assumes that the relayer account must exist. - /// - /// Does nothing by default. - fn endow_account(_account: &Self::AccountId) {} - - /// Prepare messages proof to receive by the module. - fn prepare_message_proof( - params: MessageProofParams, - ) -> (::MessagesProof, Weight); - /// Prepare messages delivery proof to receive by the module. - fn prepare_message_delivery_proof( - params: MessageDeliveryProofParams, - ) -> >::MessagesDeliveryProof; - - /// Returns true if message has been successfully dispatched or not. - fn is_message_successfully_dispatched(_nonce: MessageNonce) -> bool { - true - } - - /// Returns true if given relayer has been rewarded for some of its actions. 
- fn is_relayer_rewarded(relayer: &Self::AccountId) -> bool; -} - -benchmarks_instance_pallet! { - // - // Benchmarks that are used directly by the runtime calls weight formulae. - // - - // Benchmark `receive_messages_proof` extrinsic with single minimal-weight message and following conditions: - // * proof does not include outbound lane state proof; - // * inbound lane already has state, so it needs to be read and decoded; - // * message is dispatched (reminder: dispatch weight should be minimal); - // * message requires all heavy checks done by dispatcher. - // - // This is base benchmark for all other message delivery benchmarks. - receive_single_message_proof { - let relayer_id_on_source = T::bridged_relayer_id(); - let relayer_id_on_target = account("relayer", 0, SEED); - T::endow_account(&relayer_id_on_target); - - // mark messages 1..=20 as delivered - receive_messages::(20); - - let (proof, dispatch_weight) = T::prepare_message_proof(MessageProofParams { - lane: T::bench_lane_id(), - message_nonces: 21..=21, - outbound_lane_data: None, - is_successful_dispatch_expected: false, - size: StorageProofSize::Minimal(EXPECTED_DEFAULT_MESSAGE_LENGTH), - }); - }: receive_messages_proof(RawOrigin::Signed(relayer_id_on_target), relayer_id_on_source, proof, 1, dispatch_weight) - verify { - assert_eq!( - crate::InboundLanes::::get(&T::bench_lane_id()).last_delivered_nonce(), - 21, - ); - } - - // Benchmark `receive_messages_proof` extrinsic with two minimal-weight messages and following conditions: - // * proof does not include outbound lane state proof; - // * inbound lane already has state, so it needs to be read and decoded; - // * message is dispatched (reminder: dispatch weight should be minimal); - // * message requires all heavy checks done by dispatcher. - // - // The weight of single message delivery could be approximated as - // `weight(receive_two_messages_proof) - weight(receive_single_message_proof)`. 
- // This won't be super-accurate if message has non-zero dispatch weight, but estimation should - // be close enough to real weight. - receive_two_messages_proof { - let relayer_id_on_source = T::bridged_relayer_id(); - let relayer_id_on_target = account("relayer", 0, SEED); - T::endow_account(&relayer_id_on_target); - - // mark messages 1..=20 as delivered - receive_messages::(20); - - let (proof, dispatch_weight) = T::prepare_message_proof(MessageProofParams { - lane: T::bench_lane_id(), - message_nonces: 21..=22, - outbound_lane_data: None, - is_successful_dispatch_expected: false, - size: StorageProofSize::Minimal(EXPECTED_DEFAULT_MESSAGE_LENGTH), - }); - }: receive_messages_proof(RawOrigin::Signed(relayer_id_on_target), relayer_id_on_source, proof, 2, dispatch_weight) - verify { - assert_eq!( - crate::InboundLanes::::get(&T::bench_lane_id()).last_delivered_nonce(), - 22, - ); - } - - // Benchmark `receive_messages_proof` extrinsic with single minimal-weight message and following conditions: - // * proof includes outbound lane state proof; - // * inbound lane already has state, so it needs to be read and decoded; - // * message is successfully dispatched (reminder: dispatch weight should be minimal); - // * message requires all heavy checks done by dispatcher. - // - // The weight of outbound lane state delivery would be - // `weight(receive_single_message_proof_with_outbound_lane_state) - weight(receive_single_message_proof)`. - // This won't be super-accurate if message has non-zero dispatch weight, but estimation should - // be close enough to real weight. 
- receive_single_message_proof_with_outbound_lane_state { - let relayer_id_on_source = T::bridged_relayer_id(); - let relayer_id_on_target = account("relayer", 0, SEED); - T::endow_account(&relayer_id_on_target); - - // mark messages 1..=20 as delivered - receive_messages::(20); - - let (proof, dispatch_weight) = T::prepare_message_proof(MessageProofParams { - lane: T::bench_lane_id(), - message_nonces: 21..=21, - outbound_lane_data: Some(OutboundLaneData { - oldest_unpruned_nonce: 21, - latest_received_nonce: 20, - latest_generated_nonce: 21, - }), - is_successful_dispatch_expected: false, - size: StorageProofSize::Minimal(EXPECTED_DEFAULT_MESSAGE_LENGTH), - }); - }: receive_messages_proof(RawOrigin::Signed(relayer_id_on_target), relayer_id_on_source, proof, 1, dispatch_weight) - verify { - let lane_state = crate::InboundLanes::::get(&T::bench_lane_id()); - assert_eq!(lane_state.last_delivered_nonce(), 21); - assert_eq!(lane_state.last_confirmed_nonce, 20); - } - - // Benchmark `receive_messages_proof` extrinsic with single minimal-weight message and following conditions: - // * the proof has large leaf with total size of approximately 1KB; - // * proof does not include outbound lane state proof; - // * inbound lane already has state, so it needs to be read and decoded; - // * message is dispatched (reminder: dispatch weight should be minimal); - // * message requires all heavy checks done by dispatcher. - // - // With single KB of messages proof, the weight of the call is increased (roughly) by - // `(receive_single_message_proof_16KB - receive_single_message_proof_1_kb) / 15`. 
- receive_single_message_proof_1_kb { - let relayer_id_on_source = T::bridged_relayer_id(); - let relayer_id_on_target = account("relayer", 0, SEED); - T::endow_account(&relayer_id_on_target); - - // mark messages 1..=20 as delivered - receive_messages::(20); - - let (proof, dispatch_weight) = T::prepare_message_proof(MessageProofParams { - lane: T::bench_lane_id(), - message_nonces: 21..=21, - outbound_lane_data: None, - is_successful_dispatch_expected: false, - size: StorageProofSize::HasLargeLeaf(1024), - }); - }: receive_messages_proof(RawOrigin::Signed(relayer_id_on_target), relayer_id_on_source, proof, 1, dispatch_weight) - verify { - assert_eq!( - crate::InboundLanes::::get(&T::bench_lane_id()).last_delivered_nonce(), - 21, - ); - } - - // Benchmark `receive_messages_proof` extrinsic with single minimal-weight message and following conditions: - // * the proof has large leaf with total size of approximately 16KB; - // * proof does not include outbound lane state proof; - // * inbound lane already has state, so it needs to be read and decoded; - // * message is dispatched (reminder: dispatch weight should be minimal); - // * message requires all heavy checks done by dispatcher. - // - // Size of proof grows because it contains extra trie nodes in it. - // - // With single KB of messages proof, the weight of the call is increased (roughly) by - // `(receive_single_message_proof_16KB - receive_single_message_proof) / 15`. 
- receive_single_message_proof_16_kb { - let relayer_id_on_source = T::bridged_relayer_id(); - let relayer_id_on_target = account("relayer", 0, SEED); - T::endow_account(&relayer_id_on_target); - - // mark messages 1..=20 as delivered - receive_messages::(20); - - let (proof, dispatch_weight) = T::prepare_message_proof(MessageProofParams { - lane: T::bench_lane_id(), - message_nonces: 21..=21, - outbound_lane_data: None, - is_successful_dispatch_expected: false, - size: StorageProofSize::HasLargeLeaf(16 * 1024), - }); - }: receive_messages_proof(RawOrigin::Signed(relayer_id_on_target), relayer_id_on_source, proof, 1, dispatch_weight) - verify { - assert_eq!( - crate::InboundLanes::::get(&T::bench_lane_id()).last_delivered_nonce(), - 21, - ); - } - - // Benchmark `receive_messages_delivery_proof` extrinsic with following conditions: - // * single relayer is rewarded for relaying single message; - // * relayer account does not exist (in practice it needs to exist in production environment). - // - // This is base benchmark for all other confirmations delivery benchmarks. 
- receive_delivery_proof_for_single_message { - let relayer_id: T::AccountId = account("relayer", 0, SEED); - - // send message that we're going to confirm - send_regular_message::(); - - let relayers_state = UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - messages_in_oldest_entry: 1, - total_messages: 1, - last_delivered_nonce: 1, - }; - let proof = T::prepare_message_delivery_proof(MessageDeliveryProofParams { - lane: T::bench_lane_id(), - inbound_lane_data: InboundLaneData { - relayers: vec![UnrewardedRelayer { - relayer: relayer_id.clone(), - messages: DeliveredMessages::new(1), - }].into_iter().collect(), - last_confirmed_nonce: 0, - }, - size: StorageProofSize::Minimal(0), - }); - }: receive_messages_delivery_proof(RawOrigin::Signed(relayer_id.clone()), proof, relayers_state) - verify { - assert_eq!(OutboundLanes::::get(T::bench_lane_id()).latest_received_nonce, 1); - assert!(T::is_relayer_rewarded(&relayer_id)); - } - - // Benchmark `receive_messages_delivery_proof` extrinsic with following conditions: - // * single relayer is rewarded for relaying two messages; - // * relayer account does not exist (in practice it needs to exist in production environment). - // - // Additional weight for paying single-message reward to the same relayer could be computed - // as `weight(receive_delivery_proof_for_two_messages_by_single_relayer) - // - weight(receive_delivery_proof_for_single_message)`. 
- receive_delivery_proof_for_two_messages_by_single_relayer { - let relayer_id: T::AccountId = account("relayer", 0, SEED); - - // send message that we're going to confirm - send_regular_message::(); - send_regular_message::(); - - let relayers_state = UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - messages_in_oldest_entry: 2, - total_messages: 2, - last_delivered_nonce: 2, - }; - let mut delivered_messages = DeliveredMessages::new(1); - delivered_messages.note_dispatched_message(); - let proof = T::prepare_message_delivery_proof(MessageDeliveryProofParams { - lane: T::bench_lane_id(), - inbound_lane_data: InboundLaneData { - relayers: vec![UnrewardedRelayer { - relayer: relayer_id.clone(), - messages: delivered_messages, - }].into_iter().collect(), - last_confirmed_nonce: 0, - }, - size: StorageProofSize::Minimal(0), - }); - }: receive_messages_delivery_proof(RawOrigin::Signed(relayer_id.clone()), proof, relayers_state) - verify { - assert_eq!(OutboundLanes::::get(T::bench_lane_id()).latest_received_nonce, 2); - assert!(T::is_relayer_rewarded(&relayer_id)); - } - - // Benchmark `receive_messages_delivery_proof` extrinsic with following conditions: - // * two relayers are rewarded for relaying single message each; - // * relayer account does not exist (in practice it needs to exist in production environment). - // - // Additional weight for paying reward to the next relayer could be computed - // as `weight(receive_delivery_proof_for_two_messages_by_two_relayers) - // - weight(receive_delivery_proof_for_two_messages_by_single_relayer)`. 
- receive_delivery_proof_for_two_messages_by_two_relayers { - let relayer1_id: T::AccountId = account("relayer1", 1, SEED); - let relayer2_id: T::AccountId = account("relayer2", 2, SEED); - - // send message that we're going to confirm - send_regular_message::(); - send_regular_message::(); - - let relayers_state = UnrewardedRelayersState { - unrewarded_relayer_entries: 2, - messages_in_oldest_entry: 1, - total_messages: 2, - last_delivered_nonce: 2, - }; - let proof = T::prepare_message_delivery_proof(MessageDeliveryProofParams { - lane: T::bench_lane_id(), - inbound_lane_data: InboundLaneData { - relayers: vec![ - UnrewardedRelayer { - relayer: relayer1_id.clone(), - messages: DeliveredMessages::new(1), - }, - UnrewardedRelayer { - relayer: relayer2_id.clone(), - messages: DeliveredMessages::new(2), - }, - ].into_iter().collect(), - last_confirmed_nonce: 0, - }, - size: StorageProofSize::Minimal(0), - }); - }: receive_messages_delivery_proof(RawOrigin::Signed(relayer1_id.clone()), proof, relayers_state) - verify { - assert_eq!(OutboundLanes::::get(T::bench_lane_id()).latest_received_nonce, 2); - assert!(T::is_relayer_rewarded(&relayer1_id)); - assert!(T::is_relayer_rewarded(&relayer2_id)); - } - - // - // Benchmarks that the runtime developers may use for proper pallet configuration. - // - - // This benchmark is optional and may be used when runtime developer need a way to compute - // message dispatch weight. In this case, he needs to provide messages that can go the whole - // dispatch - // - // Benchmark `receive_messages_proof` extrinsic with single message and following conditions: - // - // * proof does not include outbound lane state proof; - // * inbound lane already has state, so it needs to be read and decoded; - // * message is **SUCCESSFULLY** dispatched; - // * message requires all heavy checks done by dispatcher. - receive_single_message_proof_with_dispatch { - // maybe dispatch weight relies on the message size too? 
- let i in EXPECTED_DEFAULT_MESSAGE_LENGTH .. EXPECTED_DEFAULT_MESSAGE_LENGTH * 16; - - let relayer_id_on_source = T::bridged_relayer_id(); - let relayer_id_on_target = account("relayer", 0, SEED); - T::endow_account(&relayer_id_on_target); - - // mark messages 1..=20 as delivered - receive_messages::(20); - - let (proof, dispatch_weight) = T::prepare_message_proof(MessageProofParams { - lane: T::bench_lane_id(), - message_nonces: 21..=21, - outbound_lane_data: None, - is_successful_dispatch_expected: true, - size: StorageProofSize::Minimal(i), - }); - }: receive_messages_proof(RawOrigin::Signed(relayer_id_on_target), relayer_id_on_source, proof, 1, dispatch_weight) - verify { - assert_eq!( - crate::InboundLanes::::get(&T::bench_lane_id()).last_delivered_nonce(), - 21, - ); - assert!(T::is_message_successfully_dispatched(21)); - } - - impl_benchmark_test_suite!(Pallet, crate::mock::new_test_ext(), crate::mock::TestRuntime) -} - -fn send_regular_message, I: 'static>() { - let mut outbound_lane = outbound_lane::(T::bench_lane_id()); - outbound_lane.send_message(BoundedVec::try_from(vec![]).expect("We craft valid messages")); -} - -fn receive_messages, I: 'static>(nonce: MessageNonce) { - let mut inbound_lane_storage = - RuntimeInboundLaneStorage::::from_lane_id(T::bench_lane_id()); - inbound_lane_storage.set_data(InboundLaneData { - relayers: vec![UnrewardedRelayer { - relayer: T::bridged_relayer_id(), - messages: DeliveredMessages::new(nonce), - }] - .into_iter() - .collect(), - last_confirmed_nonce: 0, - }); -} diff --git a/modules/messages/src/inbound_lane.rs b/modules/messages/src/inbound_lane.rs deleted file mode 100644 index da1698e6e..000000000 --- a/modules/messages/src/inbound_lane.rs +++ /dev/null @@ -1,556 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Everything about incoming messages receival. - -use crate::Config; - -use bp_messages::{ - target_chain::{DispatchMessage, DispatchMessageData, MessageDispatch}, - DeliveredMessages, InboundLaneData, LaneId, MessageKey, MessageNonce, OutboundLaneData, - ReceptionResult, UnrewardedRelayer, -}; -use codec::{Decode, Encode, EncodeLike, MaxEncodedLen}; -use frame_support::traits::Get; -use scale_info::{Type, TypeInfo}; -use sp_runtime::RuntimeDebug; -use sp_std::prelude::PartialEq; - -/// Inbound lane storage. -pub trait InboundLaneStorage { - /// Id of relayer on source chain. - type Relayer: Clone + PartialEq; - - /// Lane id. - fn id(&self) -> LaneId; - /// Return maximal number of unrewarded relayer entries in inbound lane. - fn max_unrewarded_relayer_entries(&self) -> MessageNonce; - /// Return maximal number of unconfirmed messages in inbound lane. - fn max_unconfirmed_messages(&self) -> MessageNonce; - /// Get lane data from the storage. - fn get_or_init_data(&mut self) -> InboundLaneData; - /// Update lane data in the storage. - fn set_data(&mut self, data: InboundLaneData); -} - -/// Inbound lane data wrapper that implements `MaxEncodedLen`. 
-/// -/// We have already had `MaxEncodedLen`-like functionality before, but its usage has -/// been localized and we haven't been passing bounds (maximal count of unrewarded relayer entries, -/// maximal count of unconfirmed messages) everywhere. This wrapper allows us to avoid passing -/// these generic bounds all over the code. -/// -/// The encoding of this type matches encoding of the corresponding `MessageData`. -#[derive(Encode, Decode, Clone, RuntimeDebug, PartialEq, Eq)] -pub struct StoredInboundLaneData, I: 'static>(pub InboundLaneData); - -impl, I: 'static> sp_std::ops::Deref for StoredInboundLaneData { - type Target = InboundLaneData; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl, I: 'static> sp_std::ops::DerefMut for StoredInboundLaneData { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -impl, I: 'static> Default for StoredInboundLaneData { - fn default() -> Self { - StoredInboundLaneData(Default::default()) - } -} - -impl, I: 'static> From> - for InboundLaneData -{ - fn from(data: StoredInboundLaneData) -> Self { - data.0 - } -} - -impl, I: 'static> EncodeLike> - for InboundLaneData -{ -} - -impl, I: 'static> TypeInfo for StoredInboundLaneData { - type Identity = Self; - - fn type_info() -> Type { - InboundLaneData::::type_info() - } -} - -impl, I: 'static> MaxEncodedLen for StoredInboundLaneData { - fn max_encoded_len() -> usize { - InboundLaneData::::encoded_size_hint( - T::MaxUnrewardedRelayerEntriesAtInboundLane::get() as usize, - ) - .unwrap_or(usize::MAX) - } -} - -/// Inbound messages lane. -pub struct InboundLane { - storage: S, -} - -impl InboundLane { - /// Create new inbound lane backed by given storage. - pub fn new(storage: S) -> Self { - InboundLane { storage } - } - - /// Returns `mut` storage reference. - pub fn storage_mut(&mut self) -> &mut S { - &mut self.storage - } - - /// Receive state of the corresponding outbound lane. 
- pub fn receive_state_update( - &mut self, - outbound_lane_data: OutboundLaneData, - ) -> Option { - let mut data = self.storage.get_or_init_data(); - let last_delivered_nonce = data.last_delivered_nonce(); - - if outbound_lane_data.latest_received_nonce > last_delivered_nonce { - // this is something that should never happen if proofs are correct - return None - } - if outbound_lane_data.latest_received_nonce <= data.last_confirmed_nonce { - return None - } - - let new_confirmed_nonce = outbound_lane_data.latest_received_nonce; - data.last_confirmed_nonce = new_confirmed_nonce; - // Firstly, remove all of the records where higher nonce <= new confirmed nonce - while data - .relayers - .front() - .map(|entry| entry.messages.end <= new_confirmed_nonce) - .unwrap_or(false) - { - data.relayers.pop_front(); - } - // Secondly, update the next record with lower nonce equal to new confirmed nonce if needed. - // Note: There will be max. 1 record to update as we don't allow messages from relayers to - // overlap. - match data.relayers.front_mut() { - Some(entry) if entry.messages.begin <= new_confirmed_nonce => { - entry.messages.begin = new_confirmed_nonce + 1; - }, - _ => {}, - } - - self.storage.set_data(data); - Some(outbound_lane_data.latest_received_nonce) - } - - /// Receive new message. 
- pub fn receive_message( - &mut self, - relayer_at_bridged_chain: &S::Relayer, - nonce: MessageNonce, - message_data: DispatchMessageData, - ) -> ReceptionResult { - let mut data = self.storage.get_or_init_data(); - if Some(nonce) != data.last_delivered_nonce().checked_add(1) { - return ReceptionResult::InvalidNonce - } - - // if there are more unrewarded relayer entries than we may accept, reject this message - if data.relayers.len() as MessageNonce >= self.storage.max_unrewarded_relayer_entries() { - return ReceptionResult::TooManyUnrewardedRelayers - } - - // if there are more unconfirmed messages than we may accept, reject this message - let unconfirmed_messages_count = nonce.saturating_sub(data.last_confirmed_nonce); - if unconfirmed_messages_count > self.storage.max_unconfirmed_messages() { - return ReceptionResult::TooManyUnconfirmedMessages - } - - // then, dispatch message - let dispatch_result = Dispatch::dispatch(DispatchMessage { - key: MessageKey { lane_id: self.storage.id(), nonce }, - data: message_data, - }); - - // now let's update inbound lane storage - match data.relayers.back_mut() { - Some(entry) if entry.relayer == *relayer_at_bridged_chain => { - entry.messages.note_dispatched_message(); - }, - _ => { - data.relayers.push_back(UnrewardedRelayer { - relayer: relayer_at_bridged_chain.clone(), - messages: DeliveredMessages::new(nonce), - }); - }, - }; - self.storage.set_data(data); - - ReceptionResult::Dispatched(dispatch_result) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{ - inbound_lane, - mock::{ - dispatch_result, inbound_message_data, inbound_unrewarded_relayers_state, run_test, - unrewarded_relayer, TestMessageDispatch, TestRuntime, REGULAR_PAYLOAD, TEST_LANE_ID, - TEST_RELAYER_A, TEST_RELAYER_B, TEST_RELAYER_C, - }, - RuntimeInboundLaneStorage, - }; - use bp_messages::UnrewardedRelayersState; - - fn receive_regular_message( - lane: &mut InboundLane>, - nonce: MessageNonce, - ) { - assert_eq!( - 
lane.receive_message::( - &TEST_RELAYER_A, - nonce, - inbound_message_data(REGULAR_PAYLOAD) - ), - ReceptionResult::Dispatched(dispatch_result(0)) - ); - } - - #[test] - fn receive_status_update_ignores_status_from_the_future() { - run_test(|| { - let mut lane = inbound_lane::(TEST_LANE_ID); - receive_regular_message(&mut lane, 1); - assert_eq!( - lane.receive_state_update(OutboundLaneData { - latest_received_nonce: 10, - ..Default::default() - }), - None, - ); - - assert_eq!(lane.storage.get_or_init_data().last_confirmed_nonce, 0); - }); - } - - #[test] - fn receive_status_update_ignores_obsolete_status() { - run_test(|| { - let mut lane = inbound_lane::(TEST_LANE_ID); - receive_regular_message(&mut lane, 1); - receive_regular_message(&mut lane, 2); - receive_regular_message(&mut lane, 3); - assert_eq!( - lane.receive_state_update(OutboundLaneData { - latest_received_nonce: 3, - ..Default::default() - }), - Some(3), - ); - assert_eq!(lane.storage.get_or_init_data().last_confirmed_nonce, 3); - - assert_eq!( - lane.receive_state_update(OutboundLaneData { - latest_received_nonce: 3, - ..Default::default() - }), - None, - ); - assert_eq!(lane.storage.get_or_init_data().last_confirmed_nonce, 3); - }); - } - - #[test] - fn receive_status_update_works() { - run_test(|| { - let mut lane = inbound_lane::(TEST_LANE_ID); - receive_regular_message(&mut lane, 1); - receive_regular_message(&mut lane, 2); - receive_regular_message(&mut lane, 3); - assert_eq!(lane.storage.get_or_init_data().last_confirmed_nonce, 0); - assert_eq!( - lane.storage.get_or_init_data().relayers, - vec![unrewarded_relayer(1, 3, TEST_RELAYER_A)] - ); - - assert_eq!( - lane.receive_state_update(OutboundLaneData { - latest_received_nonce: 2, - ..Default::default() - }), - Some(2), - ); - assert_eq!(lane.storage.get_or_init_data().last_confirmed_nonce, 2); - assert_eq!( - lane.storage.get_or_init_data().relayers, - vec![unrewarded_relayer(3, 3, TEST_RELAYER_A)] - ); - - assert_eq!( - 
lane.receive_state_update(OutboundLaneData { - latest_received_nonce: 3, - ..Default::default() - }), - Some(3), - ); - assert_eq!(lane.storage.get_or_init_data().last_confirmed_nonce, 3); - assert_eq!(lane.storage.get_or_init_data().relayers, vec![]); - }); - } - - #[test] - fn receive_status_update_works_with_batches_from_relayers() { - run_test(|| { - let mut lane = inbound_lane::(TEST_LANE_ID); - let mut seed_storage_data = lane.storage.get_or_init_data(); - // Prepare data - seed_storage_data.last_confirmed_nonce = 0; - seed_storage_data.relayers.push_back(unrewarded_relayer(1, 1, TEST_RELAYER_A)); - // Simulate messages batch (2, 3, 4) from relayer #2 - seed_storage_data.relayers.push_back(unrewarded_relayer(2, 4, TEST_RELAYER_B)); - seed_storage_data.relayers.push_back(unrewarded_relayer(5, 5, TEST_RELAYER_C)); - lane.storage.set_data(seed_storage_data); - // Check - assert_eq!( - lane.receive_state_update(OutboundLaneData { - latest_received_nonce: 3, - ..Default::default() - }), - Some(3), - ); - assert_eq!(lane.storage.get_or_init_data().last_confirmed_nonce, 3); - assert_eq!( - lane.storage.get_or_init_data().relayers, - vec![ - unrewarded_relayer(4, 4, TEST_RELAYER_B), - unrewarded_relayer(5, 5, TEST_RELAYER_C) - ] - ); - }); - } - - #[test] - fn fails_to_receive_message_with_incorrect_nonce() { - run_test(|| { - let mut lane = inbound_lane::(TEST_LANE_ID); - assert_eq!( - lane.receive_message::( - &TEST_RELAYER_A, - 10, - inbound_message_data(REGULAR_PAYLOAD) - ), - ReceptionResult::InvalidNonce - ); - assert_eq!(lane.storage.get_or_init_data().last_delivered_nonce(), 0); - }); - } - - #[test] - fn fails_to_receive_messages_above_unrewarded_relayer_entries_limit_per_lane() { - run_test(|| { - let mut lane = inbound_lane::(TEST_LANE_ID); - let max_nonce = - ::MaxUnrewardedRelayerEntriesAtInboundLane::get(); - for current_nonce in 1..max_nonce + 1 { - assert_eq!( - lane.receive_message::( - &(TEST_RELAYER_A + current_nonce), - current_nonce, - 
inbound_message_data(REGULAR_PAYLOAD) - ), - ReceptionResult::Dispatched(dispatch_result(0)) - ); - } - // Fails to dispatch new message from different than latest relayer. - assert_eq!( - lane.receive_message::( - &(TEST_RELAYER_A + max_nonce + 1), - max_nonce + 1, - inbound_message_data(REGULAR_PAYLOAD) - ), - ReceptionResult::TooManyUnrewardedRelayers, - ); - // Fails to dispatch new messages from latest relayer. Prevents griefing attacks. - assert_eq!( - lane.receive_message::( - &(TEST_RELAYER_A + max_nonce), - max_nonce + 1, - inbound_message_data(REGULAR_PAYLOAD) - ), - ReceptionResult::TooManyUnrewardedRelayers, - ); - }); - } - - #[test] - fn fails_to_receive_messages_above_unconfirmed_messages_limit_per_lane() { - run_test(|| { - let mut lane = inbound_lane::(TEST_LANE_ID); - let max_nonce = ::MaxUnconfirmedMessagesAtInboundLane::get(); - for current_nonce in 1..=max_nonce { - assert_eq!( - lane.receive_message::( - &TEST_RELAYER_A, - current_nonce, - inbound_message_data(REGULAR_PAYLOAD) - ), - ReceptionResult::Dispatched(dispatch_result(0)) - ); - } - // Fails to dispatch new message from different than latest relayer. - assert_eq!( - lane.receive_message::( - &TEST_RELAYER_B, - max_nonce + 1, - inbound_message_data(REGULAR_PAYLOAD) - ), - ReceptionResult::TooManyUnconfirmedMessages, - ); - // Fails to dispatch new messages from latest relayer. 
- assert_eq!( - lane.receive_message::( - &TEST_RELAYER_A, - max_nonce + 1, - inbound_message_data(REGULAR_PAYLOAD) - ), - ReceptionResult::TooManyUnconfirmedMessages, - ); - }); - } - - #[test] - fn correctly_receives_following_messages_from_two_relayers_alternately() { - run_test(|| { - let mut lane = inbound_lane::(TEST_LANE_ID); - assert_eq!( - lane.receive_message::( - &TEST_RELAYER_A, - 1, - inbound_message_data(REGULAR_PAYLOAD) - ), - ReceptionResult::Dispatched(dispatch_result(0)) - ); - assert_eq!( - lane.receive_message::( - &TEST_RELAYER_B, - 2, - inbound_message_data(REGULAR_PAYLOAD) - ), - ReceptionResult::Dispatched(dispatch_result(0)) - ); - assert_eq!( - lane.receive_message::( - &TEST_RELAYER_A, - 3, - inbound_message_data(REGULAR_PAYLOAD) - ), - ReceptionResult::Dispatched(dispatch_result(0)) - ); - assert_eq!( - lane.storage.get_or_init_data().relayers, - vec![ - unrewarded_relayer(1, 1, TEST_RELAYER_A), - unrewarded_relayer(2, 2, TEST_RELAYER_B), - unrewarded_relayer(3, 3, TEST_RELAYER_A) - ] - ); - }); - } - - #[test] - fn rejects_same_message_from_two_different_relayers() { - run_test(|| { - let mut lane = inbound_lane::(TEST_LANE_ID); - assert_eq!( - lane.receive_message::( - &TEST_RELAYER_A, - 1, - inbound_message_data(REGULAR_PAYLOAD) - ), - ReceptionResult::Dispatched(dispatch_result(0)) - ); - assert_eq!( - lane.receive_message::( - &TEST_RELAYER_B, - 1, - inbound_message_data(REGULAR_PAYLOAD) - ), - ReceptionResult::InvalidNonce, - ); - }); - } - - #[test] - fn correct_message_is_processed_instantly() { - run_test(|| { - let mut lane = inbound_lane::(TEST_LANE_ID); - receive_regular_message(&mut lane, 1); - assert_eq!(lane.storage.get_or_init_data().last_delivered_nonce(), 1); - }); - } - - #[test] - fn unspent_weight_is_returned_by_receive_message() { - run_test(|| { - let mut lane = inbound_lane::(TEST_LANE_ID); - let mut payload = REGULAR_PAYLOAD; - *payload.dispatch_result.unspent_weight.ref_time_mut() = 1; - assert_eq!( - 
lane.receive_message::( - &TEST_RELAYER_A, - 1, - inbound_message_data(payload) - ), - ReceptionResult::Dispatched(dispatch_result(1)) - ); - }); - } - - #[test] - fn first_message_is_confirmed_correctly() { - run_test(|| { - let mut lane = inbound_lane::(TEST_LANE_ID); - receive_regular_message(&mut lane, 1); - receive_regular_message(&mut lane, 2); - assert_eq!( - lane.receive_state_update(OutboundLaneData { - latest_received_nonce: 1, - ..Default::default() - }), - Some(1), - ); - assert_eq!( - inbound_unrewarded_relayers_state(TEST_LANE_ID), - UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - messages_in_oldest_entry: 1, - total_messages: 1, - last_delivered_nonce: 2, - }, - ); - }); - } -} diff --git a/modules/messages/src/lib.rs b/modules/messages/src/lib.rs deleted file mode 100644 index bc00db9eb..000000000 --- a/modules/messages/src/lib.rs +++ /dev/null @@ -1,2117 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Runtime module that allows sending and receiving messages using lane concept: -//! -//! 1) the message is sent using `send_message()` call; -//! 2) every outbound message is assigned nonce; -//! 3) the messages are stored in the storage; -//! 4) external component (relay) delivers messages to bridged chain; -//! 
5) messages are processed in order (ordered by assigned nonce); -//! 6) relay may send proof-of-delivery back to this chain. -//! -//! Once message is sent, its progress can be tracked by looking at module events. -//! The assigned nonce is reported using `MessageAccepted` event. When message is -//! delivered to the the bridged chain, it is reported using `MessagesDelivered` event. -//! -//! **IMPORTANT NOTE**: after generating weights (custom `WeighInfo` implementation) for -//! your runtime (where this module is plugged to), please add test for these weights. -//! The test should call the `ensure_weights_are_correct` function from this module. -//! If this test fails with your weights, then either weights are computed incorrectly, -//! or some benchmarks assumptions are broken for your runtime. - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -pub use inbound_lane::StoredInboundLaneData; -pub use outbound_lane::StoredMessagePayload; -pub use weights::WeightInfo; -pub use weights_ext::{ - ensure_able_to_receive_confirmation, ensure_able_to_receive_message, - ensure_weights_are_correct, WeightInfoExt, EXPECTED_DEFAULT_MESSAGE_LENGTH, - EXTRA_STORAGE_PROOF_SIZE, -}; - -use crate::{ - inbound_lane::{InboundLane, InboundLaneStorage}, - outbound_lane::{OutboundLane, OutboundLaneStorage, ReceptionConfirmationError}, -}; - -use bp_messages::{ - source_chain::{ - DeliveryConfirmationPayments, OnMessagesDelivered, SendMessageArtifacts, TargetHeaderChain, - }, - target_chain::{ - DeliveryPayments, DispatchMessage, MessageDispatch, ProvedLaneMessages, ProvedMessages, - SourceHeaderChain, - }, - DeliveredMessages, InboundLaneData, InboundMessageDetails, LaneId, MessageKey, MessageNonce, - MessagePayload, MessagesOperatingMode, OutboundLaneData, OutboundMessageDetails, - UnrewardedRelayersState, VerificationError, -}; -use bp_runtime::{ - BasicOperatingMode, ChainId, OwnedBridgeModule, PreComputedSize, RangeInclusiveExt, Size, -}; -use codec::{Decode, 
Encode, MaxEncodedLen}; -use frame_support::{dispatch::PostDispatchInfo, ensure, fail, traits::Get, DefaultNoBound}; -use sp_runtime::traits::UniqueSaturatedFrom; -use sp_std::{marker::PhantomData, prelude::*}; - -mod inbound_lane; -mod outbound_lane; -mod weights_ext; - -pub mod weights; - -#[cfg(feature = "runtime-benchmarks")] -pub mod benchmarking; - -#[cfg(test)] -mod mock; - -pub use pallet::*; - -/// The target that will be used when publishing logs related to this pallet. -pub const LOG_TARGET: &str = "runtime::bridge-messages"; - -#[frame_support::pallet] -pub mod pallet { - use super::*; - use bp_messages::{ReceivedMessages, ReceptionResult}; - use bp_runtime::RangeInclusiveExt; - use frame_support::pallet_prelude::*; - use frame_system::pallet_prelude::*; - - #[pallet::config] - pub trait Config: frame_system::Config { - // General types - - /// The overarching event type. - type RuntimeEvent: From> - + IsType<::RuntimeEvent>; - /// Benchmarks results from runtime we're plugged into. - type WeightInfo: WeightInfoExt; - - /// Gets the chain id value from the instance. - #[pallet::constant] - type BridgedChainId: Get; - - /// Get all active outbound lanes that the message pallet is serving. - type ActiveOutboundLanes: Get<&'static [LaneId]>; - /// Maximal number of unrewarded relayer entries at inbound lane. Unrewarded means that the - /// relayer has delivered messages, but either confirmations haven't been delivered back to - /// the source chain, or we haven't received reward confirmations yet. - /// - /// This constant limits maximal number of entries in the `InboundLaneData::relayers`. Keep - /// in mind that the same relayer account may take several (non-consecutive) entries in this - /// set. - type MaxUnrewardedRelayerEntriesAtInboundLane: Get; - /// Maximal number of unconfirmed messages at inbound lane. 
Unconfirmed means that the - /// message has been delivered, but either confirmations haven't been delivered back to the - /// source chain, or we haven't received reward confirmations for these messages yet. - /// - /// This constant limits difference between last message from last entry of the - /// `InboundLaneData::relayers` and first message at the first entry. - /// - /// There is no point of making this parameter lesser than - /// MaxUnrewardedRelayerEntriesAtInboundLane, because then maximal number of relayer entries - /// will be limited by maximal number of messages. - /// - /// This value also represents maximal number of messages in single delivery transaction. - /// Transaction that is declaring more messages than this value, will be rejected. Even if - /// these messages are from different lanes. - type MaxUnconfirmedMessagesAtInboundLane: Get; - - /// Maximal encoded size of the outbound payload. - #[pallet::constant] - type MaximalOutboundPayloadSize: Get; - /// Payload type of outbound messages. This payload is dispatched on the bridged chain. - type OutboundPayload: Parameter + Size; - - /// Payload type of inbound messages. This payload is dispatched on this chain. - type InboundPayload: Decode; - /// Identifier of relayer that deliver messages to this chain. Relayer reward is paid on the - /// bridged chain. - type InboundRelayer: Parameter + MaxEncodedLen; - /// Delivery payments. - type DeliveryPayments: DeliveryPayments; - - // Types that are used by outbound_lane (on source chain). - - /// Target header chain. - type TargetHeaderChain: TargetHeaderChain; - /// Delivery confirmation payments. - type DeliveryConfirmationPayments: DeliveryConfirmationPayments; - /// Delivery confirmation callback. - type OnMessagesDelivered: OnMessagesDelivered; - - // Types that are used by inbound_lane (on target chain). - - /// Source header chain, as it is represented on target chain. - type SourceHeaderChain: SourceHeaderChain; - /// Message dispatch. 
- type MessageDispatch: MessageDispatch; - } - - /// Shortcut to messages proof type for Config. - pub type MessagesProofOf = - <>::SourceHeaderChain as SourceHeaderChain>::MessagesProof; - /// Shortcut to messages delivery proof type for Config. - pub type MessagesDeliveryProofOf = - <>::TargetHeaderChain as TargetHeaderChain< - >::OutboundPayload, - ::AccountId, - >>::MessagesDeliveryProof; - - #[pallet::pallet] - pub struct Pallet(PhantomData<(T, I)>); - - impl, I: 'static> OwnedBridgeModule for Pallet { - const LOG_TARGET: &'static str = LOG_TARGET; - type OwnerStorage = PalletOwner; - type OperatingMode = MessagesOperatingMode; - type OperatingModeStorage = PalletOperatingMode; - } - - #[pallet::hooks] - impl, I: 'static> Hooks> for Pallet - where - u32: TryFrom>, - { - fn on_idle(_block: BlockNumberFor, remaining_weight: Weight) -> Weight { - // we'll need at least to read outbound lane state, kill a message and update lane state - let db_weight = T::DbWeight::get(); - if !remaining_weight.all_gte(db_weight.reads_writes(1, 2)) { - return Weight::zero() - } - - // messages from lane with index `i` in `ActiveOutboundLanes` are pruned when - // `System::block_number() % lanes.len() == i`. 
Otherwise we need to read lane states on - // every block, wasting the whole `remaining_weight` for nothing and causing starvation - // of the last lane pruning - let active_lanes = T::ActiveOutboundLanes::get(); - let active_lanes_len = (active_lanes.len() as u32).into(); - let active_lane_index = u32::unique_saturated_from( - frame_system::Pallet::::block_number() % active_lanes_len, - ); - let active_lane_id = active_lanes[active_lane_index as usize]; - - // first db read - outbound lane state - let mut active_lane = outbound_lane::(active_lane_id); - let mut used_weight = db_weight.reads(1); - // and here we'll have writes - used_weight += active_lane.prune_messages(db_weight, remaining_weight - used_weight); - - // we already checked we have enough `remaining_weight` to cover this `used_weight` - used_weight - } - } - - #[pallet::call] - impl, I: 'static> Pallet { - /// Change `PalletOwner`. - /// - /// May only be called either by root, or by `PalletOwner`. - #[pallet::call_index(0)] - #[pallet::weight((T::DbWeight::get().reads_writes(1, 1), DispatchClass::Operational))] - pub fn set_owner(origin: OriginFor, new_owner: Option) -> DispatchResult { - >::set_owner(origin, new_owner) - } - - /// Halt or resume all/some pallet operations. - /// - /// May only be called either by root, or by `PalletOwner`. - #[pallet::call_index(1)] - #[pallet::weight((T::DbWeight::get().reads_writes(1, 1), DispatchClass::Operational))] - pub fn set_operating_mode( - origin: OriginFor, - operating_mode: MessagesOperatingMode, - ) -> DispatchResult { - >::set_operating_mode(origin, operating_mode) - } - - /// Receive messages proof from bridged chain. - /// - /// The weight of the call assumes that the transaction always brings outbound lane - /// state update. Because of that, the submitter (relayer) has no benefit of not including - /// this data in the transaction, so reward confirmations lags should be minimal. 
- /// - /// The call fails if: - /// - /// - the pallet is halted; - /// - /// - the call origin is not `Signed(_)`; - /// - /// - there are too many messages in the proof; - /// - /// - the proof verification procedure returns an error - e.g. because header used to craft - /// proof is not imported by the associated finality pallet; - /// - /// - the `dispatch_weight` argument is not sufficient to dispatch all bundled messages. - /// - /// The call may succeed, but some messages may not be delivered e.g. if they are not fit - /// into the unrewarded relayers vector. - #[pallet::call_index(2)] - #[pallet::weight(T::WeightInfo::receive_messages_proof_weight(proof, *messages_count, *dispatch_weight))] - pub fn receive_messages_proof( - origin: OriginFor, - relayer_id_at_bridged_chain: T::InboundRelayer, - proof: MessagesProofOf, - messages_count: u32, - dispatch_weight: Weight, - ) -> DispatchResultWithPostInfo { - Self::ensure_not_halted().map_err(Error::::BridgeModule)?; - let relayer_id_at_this_chain = ensure_signed(origin)?; - - // reject transactions that are declaring too many messages - ensure!( - MessageNonce::from(messages_count) <= T::MaxUnconfirmedMessagesAtInboundLane::get(), - Error::::TooManyMessagesInTheProof - ); - - // if message dispatcher is currently inactive, we won't accept any messages - ensure!(T::MessageDispatch::is_active(), Error::::MessageDispatchInactive); - - // why do we need to know the weight of this (`receive_messages_proof`) call? Because - // we may want to return some funds for not-dispatching (or partially dispatching) some - // messages to the call origin (relayer). And this is done by returning actual weight - // from the call. But we only know dispatch weight of every messages. So to refund - // relayer because we have not dispatched Message, we need to: - // - // ActualWeight = DeclaredWeight - Message.DispatchWeight - // - // The DeclaredWeight is exactly what's computed here. 
Unfortunately it is impossible - // to get pre-computed value (and it has been already computed by the executive). - let declared_weight = T::WeightInfo::receive_messages_proof_weight( - &proof, - messages_count, - dispatch_weight, - ); - let mut actual_weight = declared_weight; - - // verify messages proof && convert proof into messages - let messages = verify_and_decode_messages_proof::< - T::SourceHeaderChain, - T::InboundPayload, - >(proof, messages_count) - .map_err(|err| { - log::trace!(target: LOG_TARGET, "Rejecting invalid messages proof: {:?}", err,); - - Error::::InvalidMessagesProof - })?; - - // dispatch messages and (optionally) update lane(s) state(s) - let mut total_messages = 0; - let mut valid_messages = 0; - let mut messages_received_status = Vec::with_capacity(messages.len()); - let mut dispatch_weight_left = dispatch_weight; - for (lane_id, lane_data) in messages { - let mut lane = inbound_lane::(lane_id); - - // subtract extra storage proof bytes from the actual PoV size - there may be - // less unrewarded relayers than the maximal configured value - let lane_extra_proof_size_bytes = lane.storage_mut().extra_proof_size_bytes(); - actual_weight = actual_weight.set_proof_size( - actual_weight.proof_size().saturating_sub(lane_extra_proof_size_bytes), - ); - - if let Some(lane_state) = lane_data.lane_state { - let updated_latest_confirmed_nonce = lane.receive_state_update(lane_state); - if let Some(updated_latest_confirmed_nonce) = updated_latest_confirmed_nonce { - log::trace!( - target: LOG_TARGET, - "Received lane {:?} state update: latest_confirmed_nonce={}. 
Unrewarded relayers: {:?}", - lane_id, - updated_latest_confirmed_nonce, - UnrewardedRelayersState::from(&lane.storage_mut().get_or_init_data()), - ); - } - } - - let mut lane_messages_received_status = - ReceivedMessages::new(lane_id, Vec::with_capacity(lane_data.messages.len())); - for mut message in lane_data.messages { - debug_assert_eq!(message.key.lane_id, lane_id); - total_messages += 1; - - // ensure that relayer has declared enough weight for dispatching next message - // on this lane. We can't dispatch lane messages out-of-order, so if declared - // weight is not enough, let's move to next lane - let message_dispatch_weight = T::MessageDispatch::dispatch_weight(&mut message); - if message_dispatch_weight.any_gt(dispatch_weight_left) { - log::trace!( - target: LOG_TARGET, - "Cannot dispatch any more messages on lane {:?}. Weight: declared={}, left={}", - lane_id, - message_dispatch_weight, - dispatch_weight_left, - ); - - fail!(Error::::InsufficientDispatchWeight); - } - - let receival_result = lane.receive_message::( - &relayer_id_at_bridged_chain, - message.key.nonce, - message.data, - ); - - // note that we're returning unspent weight to relayer even if message has been - // rejected by the lane. This allows relayers to submit spam transactions with - // e.g. the same set of already delivered messages over and over again, without - // losing funds for messages dispatch. But keep in mind that relayer pays base - // delivery transaction cost anyway. And base cost covers everything except - // dispatch, so we have a balance here. 
- let unspent_weight = match &receival_result { - ReceptionResult::Dispatched(dispatch_result) => { - valid_messages += 1; - dispatch_result.unspent_weight - }, - ReceptionResult::InvalidNonce | - ReceptionResult::TooManyUnrewardedRelayers | - ReceptionResult::TooManyUnconfirmedMessages => message_dispatch_weight, - }; - lane_messages_received_status.push(message.key.nonce, receival_result); - - let unspent_weight = unspent_weight.min(message_dispatch_weight); - dispatch_weight_left -= message_dispatch_weight - unspent_weight; - actual_weight = actual_weight.saturating_sub(unspent_weight); - } - - messages_received_status.push(lane_messages_received_status); - } - - // let's now deal with relayer payments - T::DeliveryPayments::pay_reward( - relayer_id_at_this_chain, - total_messages, - valid_messages, - actual_weight, - ); - - log::debug!( - target: LOG_TARGET, - "Received messages: total={}, valid={}. Weight used: {}/{}.", - total_messages, - valid_messages, - actual_weight, - declared_weight, - ); - - Self::deposit_event(Event::MessagesReceived(messages_received_status)); - - Ok(PostDispatchInfo { actual_weight: Some(actual_weight), pays_fee: Pays::Yes }) - } - - /// Receive messages delivery proof from bridged chain. 
- #[pallet::call_index(3)] - #[pallet::weight(T::WeightInfo::receive_messages_delivery_proof_weight( - proof, - relayers_state, - ))] - pub fn receive_messages_delivery_proof( - origin: OriginFor, - proof: MessagesDeliveryProofOf, - mut relayers_state: UnrewardedRelayersState, - ) -> DispatchResultWithPostInfo { - Self::ensure_not_halted().map_err(Error::::BridgeModule)?; - - let proof_size = proof.size(); - let confirmation_relayer = ensure_signed(origin)?; - let (lane_id, lane_data) = T::TargetHeaderChain::verify_messages_delivery_proof(proof) - .map_err(|err| { - log::trace!( - target: LOG_TARGET, - "Rejecting invalid messages delivery proof: {:?}", - err, - ); - - Error::::InvalidMessagesDeliveryProof - })?; - ensure!( - relayers_state.is_valid(&lane_data), - Error::::InvalidUnrewardedRelayersState - ); - - // mark messages as delivered - let mut lane = outbound_lane::(lane_id); - let last_delivered_nonce = lane_data.last_delivered_nonce(); - let confirmed_messages = lane - .confirm_delivery( - relayers_state.total_messages, - last_delivered_nonce, - &lane_data.relayers, - ) - .map_err(Error::::ReceptionConfirmation)?; - - if let Some(confirmed_messages) = confirmed_messages { - // emit 'delivered' event - let received_range = confirmed_messages.begin..=confirmed_messages.end; - Self::deposit_event(Event::MessagesDelivered { - lane_id, - messages: confirmed_messages, - }); - - // if some new messages have been confirmed, reward relayers - let actually_rewarded_relayers = T::DeliveryConfirmationPayments::pay_reward( - lane_id, - lane_data.relayers, - &confirmation_relayer, - &received_range, - ); - - // update relayers state with actual numbers to compute actual weight below - relayers_state.unrewarded_relayer_entries = sp_std::cmp::min( - relayers_state.unrewarded_relayer_entries, - actually_rewarded_relayers, - ); - relayers_state.total_messages = sp_std::cmp::min( - relayers_state.total_messages, - received_range.checked_len().unwrap_or(MessageNonce::MAX), - 
); - }; - - log::trace!( - target: LOG_TARGET, - "Received messages delivery proof up to (and including) {} at lane {:?}", - last_delivered_nonce, - lane_id, - ); - - // notify others about messages delivery - T::OnMessagesDelivered::on_messages_delivered( - lane_id, - lane.data().queued_messages().saturating_len(), - ); - - // because of lags, the inbound lane state (`lane_data`) may have entries for - // already rewarded relayers and messages (if all entries are duplicated, then - // this transaction must be filtered out by our signed extension) - let actual_weight = T::WeightInfo::receive_messages_delivery_proof_weight( - &PreComputedSize(proof_size as usize), - &relayers_state, - ); - - Ok(PostDispatchInfo { actual_weight: Some(actual_weight), pays_fee: Pays::Yes }) - } - } - - #[pallet::event] - #[pallet::generate_deposit(pub(super) fn deposit_event)] - pub enum Event, I: 'static = ()> { - /// Message has been accepted and is waiting to be delivered. - MessageAccepted { - /// Lane, which has accepted the message. - lane_id: LaneId, - /// Nonce of accepted message. - nonce: MessageNonce, - }, - /// Messages have been received from the bridged chain. - MessagesReceived( - /// Result of received messages dispatch. - Vec::DispatchLevelResult>>, - ), - /// Messages in the inclusive range have been delivered to the bridged chain. - MessagesDelivered { - /// Lane for which the delivery has been confirmed. - lane_id: LaneId, - /// Delivered messages. - messages: DeliveredMessages, - }, - } - - #[pallet::error] - #[derive(PartialEq, Eq)] - pub enum Error { - /// Pallet is not in Normal operating mode. - NotOperatingNormally, - /// The outbound lane is inactive. - InactiveOutboundLane, - /// The inbound message dispatcher is inactive. - MessageDispatchInactive, - /// Message has been treated as invalid by chain verifier. - MessageRejectedByChainVerifier(VerificationError), - /// Message has been treated as invalid by the pallet logic. 
- MessageRejectedByPallet(VerificationError), - /// Submitter has failed to pay fee for delivering and dispatching messages. - FailedToWithdrawMessageFee, - /// The transaction brings too many messages. - TooManyMessagesInTheProof, - /// Invalid messages has been submitted. - InvalidMessagesProof, - /// Invalid messages delivery proof has been submitted. - InvalidMessagesDeliveryProof, - /// The relayer has declared invalid unrewarded relayers state in the - /// `receive_messages_delivery_proof` call. - InvalidUnrewardedRelayersState, - /// The cumulative dispatch weight, passed by relayer is not enough to cover dispatch - /// of all bundled messages. - InsufficientDispatchWeight, - /// The message someone is trying to work with (i.e. increase fee) is not yet sent. - MessageIsNotYetSent, - /// Error confirming messages receival. - ReceptionConfirmation(ReceptionConfirmationError), - /// Error generated by the `OwnedBridgeModule` trait. - BridgeModule(bp_runtime::OwnedBridgeModuleError), - } - - /// Optional pallet owner. - /// - /// Pallet owner has a right to halt all pallet operations and then resume it. If it is - /// `None`, then there are no direct ways to halt/resume pallet operations, but other - /// runtime methods may still be used to do that (i.e. democracy::referendum to update halt - /// flag directly or call the `halt_operations`). - #[pallet::storage] - #[pallet::getter(fn module_owner)] - pub type PalletOwner, I: 'static = ()> = StorageValue<_, T::AccountId>; - - /// The current operating mode of the pallet. - /// - /// Depending on the mode either all, some, or no transactions will be allowed. - #[pallet::storage] - #[pallet::getter(fn operating_mode)] - pub type PalletOperatingMode, I: 'static = ()> = - StorageValue<_, MessagesOperatingMode, ValueQuery>; - - /// Map of lane id => inbound lane data. 
- #[pallet::storage] - pub type InboundLanes, I: 'static = ()> = - StorageMap<_, Blake2_128Concat, LaneId, StoredInboundLaneData, ValueQuery>; - - /// Map of lane id => outbound lane data. - #[pallet::storage] - pub type OutboundLanes, I: 'static = ()> = StorageMap< - Hasher = Blake2_128Concat, - Key = LaneId, - Value = OutboundLaneData, - QueryKind = ValueQuery, - OnEmpty = GetDefault, - MaxValues = MaybeOutboundLanesCount, - >; - - /// Map of lane id => is congested signal sent. It is managed by the - /// `bridge_runtime_common::LocalXcmQueueManager`. - /// - /// **bridges-v1**: this map is a temporary hack and will be dropped in the `v2`. We can emulate - /// a storage map using `sp_io::unhashed` storage functions, but then benchmarks are not - /// accounting its `proof_size`, so it is missing from the final weights. So we need to make it - /// a map inside some pallet. We could use a simply value instead of map here, because - /// in `v1` we'll only have a single lane. But in the case of adding another lane before `v2`, - /// it'll be easier to deal with the isolated storage map instead. - #[pallet::storage] - pub type OutboundLanesCongestedSignals, I: 'static = ()> = StorageMap< - Hasher = Blake2_128Concat, - Key = LaneId, - Value = bool, - QueryKind = ValueQuery, - OnEmpty = GetDefault, - MaxValues = MaybeOutboundLanesCount, - >; - - /// All queued outbound messages. - #[pallet::storage] - pub type OutboundMessages, I: 'static = ()> = - StorageMap<_, Blake2_128Concat, MessageKey, StoredMessagePayload>; - - #[pallet::genesis_config] - #[derive(DefaultNoBound)] - pub struct GenesisConfig, I: 'static = ()> { - /// Initial pallet operating mode. - pub operating_mode: MessagesOperatingMode, - /// Initial pallet owner. - pub owner: Option, - /// Dummy marker. 
- pub phantom: sp_std::marker::PhantomData, - } - - #[pallet::genesis_build] - impl, I: 'static> BuildGenesisConfig for GenesisConfig { - fn build(&self) { - PalletOperatingMode::::put(self.operating_mode); - if let Some(ref owner) = self.owner { - PalletOwner::::put(owner); - } - } - } - - impl, I: 'static> Pallet { - /// Get stored data of the outbound message with given nonce. - pub fn outbound_message_data(lane: LaneId, nonce: MessageNonce) -> Option { - OutboundMessages::::get(MessageKey { lane_id: lane, nonce }).map(Into::into) - } - - /// Prepare data, related to given inbound message. - pub fn inbound_message_data( - lane: LaneId, - payload: MessagePayload, - outbound_details: OutboundMessageDetails, - ) -> InboundMessageDetails { - let mut dispatch_message = DispatchMessage { - key: MessageKey { lane_id: lane, nonce: outbound_details.nonce }, - data: payload.into(), - }; - InboundMessageDetails { - dispatch_weight: T::MessageDispatch::dispatch_weight(&mut dispatch_message), - } - } - - /// Return outbound lane data. - pub fn outbound_lane_data(lane: LaneId) -> OutboundLaneData { - OutboundLanes::::get(lane) - } - - /// Return inbound lane data. - pub fn inbound_lane_data(lane: LaneId) -> InboundLaneData { - InboundLanes::::get(lane).0 - } - } - - /// Get-parameter that returns number of active outbound lanes that the pallet maintains. - pub struct MaybeOutboundLanesCount(PhantomData<(T, I)>); - - impl, I: 'static> Get> for MaybeOutboundLanesCount { - fn get() -> Option { - Some(T::ActiveOutboundLanes::get().len() as u32) - } - } -} - -/// Structure, containing a validated message payload and all the info required -/// to send it on the bridge. 
-#[derive(Debug, PartialEq, Eq)] -pub struct SendMessageArgs, I: 'static> { - lane_id: LaneId, - payload: StoredMessagePayload, -} - -impl bp_messages::source_chain::MessagesBridge for Pallet -where - T: Config, - I: 'static, -{ - type Error = Error; - type SendMessageArgs = SendMessageArgs; - - fn validate_message( - lane: LaneId, - message: &T::OutboundPayload, - ) -> Result, Self::Error> { - ensure_normal_operating_mode::()?; - - // let's check if outbound lane is active - ensure!(T::ActiveOutboundLanes::get().contains(&lane), Error::::InactiveOutboundLane); - - // let's first check if message can be delivered to target chain - T::TargetHeaderChain::verify_message(message).map_err(|err| { - log::trace!( - target: LOG_TARGET, - "Message to lane {:?} is rejected by target chain: {:?}", - lane, - err, - ); - - Error::::MessageRejectedByChainVerifier(err) - })?; - - Ok(SendMessageArgs { - lane_id: lane, - payload: StoredMessagePayload::::try_from(message.encode()).map_err(|_| { - Error::::MessageRejectedByPallet(VerificationError::MessageTooLarge) - })?, - }) - } - - fn send_message(args: SendMessageArgs) -> SendMessageArtifacts { - // save message in outbound storage and emit event - let mut lane = outbound_lane::(args.lane_id); - let message_len = args.payload.len(); - let nonce = lane.send_message(args.payload); - - // return number of messages in the queue to let sender know about its state - let enqueued_messages = lane.data().queued_messages().saturating_len(); - - log::trace!( - target: LOG_TARGET, - "Accepted message {} to lane {:?}. Message size: {:?}", - nonce, - args.lane_id, - message_len, - ); - - Pallet::::deposit_event(Event::MessageAccepted { lane_id: args.lane_id, nonce }); - - SendMessageArtifacts { nonce, enqueued_messages } - } -} - -/// Ensure that the pallet is in normal operational mode. 
-fn ensure_normal_operating_mode, I: 'static>() -> Result<(), Error> { - if PalletOperatingMode::::get() == - MessagesOperatingMode::Basic(BasicOperatingMode::Normal) - { - return Ok(()) - } - - Err(Error::::NotOperatingNormally) -} - -/// Creates new inbound lane object, backed by runtime storage. -fn inbound_lane, I: 'static>( - lane_id: LaneId, -) -> InboundLane> { - InboundLane::new(RuntimeInboundLaneStorage::from_lane_id(lane_id)) -} - -/// Creates new outbound lane object, backed by runtime storage. -fn outbound_lane, I: 'static>( - lane_id: LaneId, -) -> OutboundLane> { - OutboundLane::new(RuntimeOutboundLaneStorage { lane_id, _phantom: Default::default() }) -} - -/// Runtime inbound lane storage. -struct RuntimeInboundLaneStorage, I: 'static = ()> { - lane_id: LaneId, - cached_data: Option>, - _phantom: PhantomData, -} - -impl, I: 'static> RuntimeInboundLaneStorage { - /// Creates new runtime inbound lane storage. - fn from_lane_id(lane_id: LaneId) -> RuntimeInboundLaneStorage { - RuntimeInboundLaneStorage { lane_id, cached_data: None, _phantom: Default::default() } - } -} - -impl, I: 'static> RuntimeInboundLaneStorage { - /// Returns number of bytes that may be subtracted from the PoV component of - /// `receive_messages_proof` call, because the actual inbound lane state is smaller than the - /// maximal configured. - /// - /// Maximal inbound lane state set size is configured by the - /// `MaxUnrewardedRelayerEntriesAtInboundLane` constant from the pallet configuration. The PoV - /// of the call includes the maximal size of inbound lane state. If the actual size is smaller, - /// we may subtract extra bytes from this component. 
- pub fn extra_proof_size_bytes(&mut self) -> u64 { - let max_encoded_len = StoredInboundLaneData::::max_encoded_len(); - let relayers_count = self.get_or_init_data().relayers.len(); - let actual_encoded_len = - InboundLaneData::::encoded_size_hint(relayers_count) - .unwrap_or(usize::MAX); - max_encoded_len.saturating_sub(actual_encoded_len) as _ - } -} - -impl, I: 'static> InboundLaneStorage for RuntimeInboundLaneStorage { - type Relayer = T::InboundRelayer; - - fn id(&self) -> LaneId { - self.lane_id - } - - fn max_unrewarded_relayer_entries(&self) -> MessageNonce { - T::MaxUnrewardedRelayerEntriesAtInboundLane::get() - } - - fn max_unconfirmed_messages(&self) -> MessageNonce { - T::MaxUnconfirmedMessagesAtInboundLane::get() - } - - fn get_or_init_data(&mut self) -> InboundLaneData { - match self.cached_data { - Some(ref data) => data.clone(), - None => { - let data: InboundLaneData = - InboundLanes::::get(self.lane_id).into(); - self.cached_data = Some(data.clone()); - data - }, - } - } - - fn set_data(&mut self, data: InboundLaneData) { - self.cached_data = Some(data.clone()); - InboundLanes::::insert(self.lane_id, StoredInboundLaneData::(data)) - } -} - -/// Runtime outbound lane storage. 
-struct RuntimeOutboundLaneStorage { - lane_id: LaneId, - _phantom: PhantomData<(T, I)>, -} - -impl, I: 'static> OutboundLaneStorage for RuntimeOutboundLaneStorage { - type StoredMessagePayload = StoredMessagePayload; - - fn id(&self) -> LaneId { - self.lane_id - } - - fn data(&self) -> OutboundLaneData { - OutboundLanes::::get(self.lane_id) - } - - fn set_data(&mut self, data: OutboundLaneData) { - OutboundLanes::::insert(self.lane_id, data) - } - - #[cfg(test)] - fn message(&self, nonce: &MessageNonce) -> Option { - OutboundMessages::::get(MessageKey { lane_id: self.lane_id, nonce: *nonce }) - } - - fn save_message(&mut self, nonce: MessageNonce, message_payload: Self::StoredMessagePayload) { - OutboundMessages::::insert( - MessageKey { lane_id: self.lane_id, nonce }, - message_payload, - ); - } - - fn remove_message(&mut self, nonce: &MessageNonce) { - OutboundMessages::::remove(MessageKey { lane_id: self.lane_id, nonce: *nonce }); - } -} - -/// Verify messages proof and return proved messages with decoded payload. -fn verify_and_decode_messages_proof( - proof: Chain::MessagesProof, - messages_count: u32, -) -> Result>, VerificationError> { - // `receive_messages_proof` weight formula and `MaxUnconfirmedMessagesAtInboundLane` check - // guarantees that the `message_count` is sane and Vec may be allocated. 
- // (tx with too many messages will either be rejected from the pool, or will fail earlier) - Chain::verify_messages_proof(proof, messages_count).map(|messages_by_lane| { - messages_by_lane - .into_iter() - .map(|(lane, lane_data)| { - ( - lane, - ProvedLaneMessages { - lane_state: lane_data.lane_state, - messages: lane_data.messages.into_iter().map(Into::into).collect(), - }, - ) - }) - .collect() - }) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{ - mock::{ - inbound_unrewarded_relayers_state, message, message_payload, run_test, - unrewarded_relayer, AccountId, DbWeight, RuntimeEvent as TestEvent, RuntimeOrigin, - TestDeliveryConfirmationPayments, TestDeliveryPayments, TestMessageDispatch, - TestMessagesDeliveryProof, TestMessagesProof, TestOnMessagesDelivered, TestRelayer, - TestRuntime, TestWeightInfo, MAX_OUTBOUND_PAYLOAD_SIZE, - PAYLOAD_REJECTED_BY_TARGET_CHAIN, REGULAR_PAYLOAD, TEST_LANE_ID, TEST_LANE_ID_2, - TEST_LANE_ID_3, TEST_RELAYER_A, TEST_RELAYER_B, - }, - outbound_lane::ReceptionConfirmationError, - }; - use bp_messages::{ - source_chain::MessagesBridge, BridgeMessagesCall, UnrewardedRelayer, - UnrewardedRelayersState, - }; - use bp_test_utils::generate_owned_bridge_module_tests; - use frame_support::{ - assert_noop, assert_ok, - dispatch::Pays, - storage::generator::{StorageMap, StorageValue}, - traits::Hooks, - weights::Weight, - }; - use frame_system::{EventRecord, Pallet as System, Phase}; - use sp_runtime::DispatchError; - - fn get_ready_for_events() { - System::::set_block_number(1); - System::::reset_events(); - } - - fn send_regular_message(lane_id: LaneId) { - get_ready_for_events(); - - let outbound_lane = outbound_lane::(lane_id); - let message_nonce = outbound_lane.data().latest_generated_nonce + 1; - let prev_enqueued_messages = outbound_lane.data().queued_messages().saturating_len(); - let valid_message = Pallet::::validate_message(lane_id, ®ULAR_PAYLOAD) - .expect("validate_message has failed"); - let artifacts = 
Pallet::::send_message(valid_message); - assert_eq!(artifacts.enqueued_messages, prev_enqueued_messages + 1); - - // check event with assigned nonce - assert_eq!( - System::::events(), - vec![EventRecord { - phase: Phase::Initialization, - event: TestEvent::Messages(Event::MessageAccepted { - lane_id, - nonce: message_nonce - }), - topics: vec![], - }], - ); - } - - fn receive_messages_delivery_proof() { - System::::set_block_number(1); - System::::reset_events(); - - assert_ok!(Pallet::::receive_messages_delivery_proof( - RuntimeOrigin::signed(1), - TestMessagesDeliveryProof(Ok(( - TEST_LANE_ID, - InboundLaneData { - last_confirmed_nonce: 1, - relayers: vec![UnrewardedRelayer { - relayer: 0, - messages: DeliveredMessages::new(1), - }] - .into_iter() - .collect(), - }, - ))), - UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - messages_in_oldest_entry: 1, - total_messages: 1, - last_delivered_nonce: 1, - }, - )); - - assert_eq!( - System::::events(), - vec![EventRecord { - phase: Phase::Initialization, - event: TestEvent::Messages(Event::MessagesDelivered { - lane_id: TEST_LANE_ID, - messages: DeliveredMessages::new(1), - }), - topics: vec![], - }], - ); - } - - #[test] - fn pallet_rejects_transactions_if_halted() { - run_test(|| { - // send message first to be able to check that delivery_proof fails later - send_regular_message(TEST_LANE_ID); - - PalletOperatingMode::::put(MessagesOperatingMode::Basic( - BasicOperatingMode::Halted, - )); - - assert_noop!( - Pallet::::validate_message(TEST_LANE_ID, ®ULAR_PAYLOAD), - Error::::NotOperatingNormally, - ); - - assert_noop!( - Pallet::::receive_messages_proof( - RuntimeOrigin::signed(1), - TEST_RELAYER_A, - Ok(vec![message(2, REGULAR_PAYLOAD)]).into(), - 1, - REGULAR_PAYLOAD.declared_weight, - ), - Error::::BridgeModule(bp_runtime::OwnedBridgeModuleError::Halted), - ); - - assert_noop!( - Pallet::::receive_messages_delivery_proof( - RuntimeOrigin::signed(1), - TestMessagesDeliveryProof(Ok(( - TEST_LANE_ID, - 
InboundLaneData { - last_confirmed_nonce: 1, - relayers: vec![unrewarded_relayer(1, 1, TEST_RELAYER_A)] - .into_iter() - .collect(), - }, - ))), - UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - messages_in_oldest_entry: 1, - total_messages: 1, - last_delivered_nonce: 1, - }, - ), - Error::::BridgeModule(bp_runtime::OwnedBridgeModuleError::Halted), - ); - }); - } - - #[test] - fn pallet_rejects_new_messages_in_rejecting_outbound_messages_operating_mode() { - run_test(|| { - // send message first to be able to check that delivery_proof fails later - send_regular_message(TEST_LANE_ID); - - PalletOperatingMode::::put( - MessagesOperatingMode::RejectingOutboundMessages, - ); - - assert_noop!( - Pallet::::validate_message(TEST_LANE_ID, ®ULAR_PAYLOAD), - Error::::NotOperatingNormally, - ); - - assert_ok!(Pallet::::receive_messages_proof( - RuntimeOrigin::signed(1), - TEST_RELAYER_A, - Ok(vec![message(1, REGULAR_PAYLOAD)]).into(), - 1, - REGULAR_PAYLOAD.declared_weight, - ),); - - assert_ok!(Pallet::::receive_messages_delivery_proof( - RuntimeOrigin::signed(1), - TestMessagesDeliveryProof(Ok(( - TEST_LANE_ID, - InboundLaneData { - last_confirmed_nonce: 1, - relayers: vec![unrewarded_relayer(1, 1, TEST_RELAYER_A)] - .into_iter() - .collect(), - }, - ))), - UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - messages_in_oldest_entry: 1, - total_messages: 1, - last_delivered_nonce: 1, - }, - )); - }); - } - - #[test] - fn send_message_works() { - run_test(|| { - send_regular_message(TEST_LANE_ID); - }); - } - - #[test] - fn send_message_rejects_too_large_message() { - run_test(|| { - let mut message_payload = message_payload(1, 0); - // the payload isn't simply extra, so it'll definitely overflow - // `MAX_OUTBOUND_PAYLOAD_SIZE` if we add `MAX_OUTBOUND_PAYLOAD_SIZE` bytes to extra - message_payload - .extra - .extend_from_slice(&[0u8; MAX_OUTBOUND_PAYLOAD_SIZE as usize]); - assert_noop!( - Pallet::::validate_message(TEST_LANE_ID, 
&message_payload.clone(),), - Error::::MessageRejectedByPallet( - VerificationError::MessageTooLarge - ), - ); - - // let's check that we're able to send `MAX_OUTBOUND_PAYLOAD_SIZE` messages - while message_payload.encoded_size() as u32 > MAX_OUTBOUND_PAYLOAD_SIZE { - message_payload.extra.pop(); - } - assert_eq!(message_payload.encoded_size() as u32, MAX_OUTBOUND_PAYLOAD_SIZE); - - let valid_message = - Pallet::::validate_message(TEST_LANE_ID, &message_payload) - .expect("validate_message has failed"); - Pallet::::send_message(valid_message); - }) - } - - #[test] - fn chain_verifier_rejects_invalid_message_in_send_message() { - run_test(|| { - // messages with this payload are rejected by target chain verifier - assert_noop!( - Pallet::::validate_message( - TEST_LANE_ID, - &PAYLOAD_REJECTED_BY_TARGET_CHAIN, - ), - Error::::MessageRejectedByChainVerifier(VerificationError::Other( - mock::TEST_ERROR - )), - ); - }); - } - - #[test] - fn receive_messages_proof_works() { - run_test(|| { - assert_ok!(Pallet::::receive_messages_proof( - RuntimeOrigin::signed(1), - TEST_RELAYER_A, - Ok(vec![message(1, REGULAR_PAYLOAD)]).into(), - 1, - REGULAR_PAYLOAD.declared_weight, - )); - - assert_eq!(InboundLanes::::get(TEST_LANE_ID).0.last_delivered_nonce(), 1); - - assert!(TestDeliveryPayments::is_reward_paid(1)); - }); - } - - #[test] - fn receive_messages_proof_updates_confirmed_message_nonce() { - run_test(|| { - // say we have received 10 messages && last confirmed message is 8 - InboundLanes::::insert( - TEST_LANE_ID, - InboundLaneData { - last_confirmed_nonce: 8, - relayers: vec![ - unrewarded_relayer(9, 9, TEST_RELAYER_A), - unrewarded_relayer(10, 10, TEST_RELAYER_B), - ] - .into_iter() - .collect(), - }, - ); - assert_eq!( - inbound_unrewarded_relayers_state(TEST_LANE_ID), - UnrewardedRelayersState { - unrewarded_relayer_entries: 2, - messages_in_oldest_entry: 1, - total_messages: 2, - last_delivered_nonce: 10, - }, - ); - - // message proof includes outbound lane state 
with latest confirmed message updated to 9 - let mut message_proof: TestMessagesProof = - Ok(vec![message(11, REGULAR_PAYLOAD)]).into(); - message_proof.result.as_mut().unwrap()[0].1.lane_state = - Some(OutboundLaneData { latest_received_nonce: 9, ..Default::default() }); - - assert_ok!(Pallet::::receive_messages_proof( - RuntimeOrigin::signed(1), - TEST_RELAYER_A, - message_proof, - 1, - REGULAR_PAYLOAD.declared_weight, - )); - - assert_eq!( - InboundLanes::::get(TEST_LANE_ID).0, - InboundLaneData { - last_confirmed_nonce: 9, - relayers: vec![ - unrewarded_relayer(10, 10, TEST_RELAYER_B), - unrewarded_relayer(11, 11, TEST_RELAYER_A) - ] - .into_iter() - .collect(), - }, - ); - assert_eq!( - inbound_unrewarded_relayers_state(TEST_LANE_ID), - UnrewardedRelayersState { - unrewarded_relayer_entries: 2, - messages_in_oldest_entry: 1, - total_messages: 2, - last_delivered_nonce: 11, - }, - ); - }); - } - - #[test] - fn receive_messages_fails_if_dispatcher_is_inactive() { - run_test(|| { - TestMessageDispatch::deactivate(); - assert_noop!( - Pallet::::receive_messages_proof( - RuntimeOrigin::signed(1), - TEST_RELAYER_A, - Ok(vec![message(1, REGULAR_PAYLOAD)]).into(), - 1, - REGULAR_PAYLOAD.declared_weight, - ), - Error::::MessageDispatchInactive, - ); - }); - } - - #[test] - fn receive_messages_proof_does_not_accept_message_if_dispatch_weight_is_not_enough() { - run_test(|| { - let mut declared_weight = REGULAR_PAYLOAD.declared_weight; - *declared_weight.ref_time_mut() -= 1; - assert_noop!( - Pallet::::receive_messages_proof( - RuntimeOrigin::signed(1), - TEST_RELAYER_A, - Ok(vec![message(1, REGULAR_PAYLOAD)]).into(), - 1, - declared_weight, - ), - Error::::InsufficientDispatchWeight - ); - assert_eq!(InboundLanes::::get(TEST_LANE_ID).last_delivered_nonce(), 0); - }); - } - - #[test] - fn receive_messages_proof_rejects_invalid_proof() { - run_test(|| { - assert_noop!( - Pallet::::receive_messages_proof( - RuntimeOrigin::signed(1), - TEST_RELAYER_A, - Err(()).into(), - 1, 
- Weight::zero(), - ), - Error::::InvalidMessagesProof, - ); - }); - } - - #[test] - fn receive_messages_proof_rejects_proof_with_too_many_messages() { - run_test(|| { - assert_noop!( - Pallet::::receive_messages_proof( - RuntimeOrigin::signed(1), - TEST_RELAYER_A, - Ok(vec![message(1, REGULAR_PAYLOAD)]).into(), - u32::MAX, - Weight::zero(), - ), - Error::::TooManyMessagesInTheProof, - ); - }); - } - - #[test] - fn receive_messages_delivery_proof_works() { - run_test(|| { - send_regular_message(TEST_LANE_ID); - receive_messages_delivery_proof(); - - assert_eq!( - OutboundLanes::::get(TEST_LANE_ID).latest_received_nonce, - 1, - ); - }); - } - - #[test] - fn receive_messages_delivery_proof_rewards_relayers() { - run_test(|| { - send_regular_message(TEST_LANE_ID); - send_regular_message(TEST_LANE_ID); - - // this reports delivery of message 1 => reward is paid to TEST_RELAYER_A - let single_message_delivery_proof = TestMessagesDeliveryProof(Ok(( - TEST_LANE_ID, - InboundLaneData { - relayers: vec![unrewarded_relayer(1, 1, TEST_RELAYER_A)].into_iter().collect(), - ..Default::default() - }, - ))); - let single_message_delivery_proof_size = single_message_delivery_proof.size(); - let result = Pallet::::receive_messages_delivery_proof( - RuntimeOrigin::signed(1), - single_message_delivery_proof, - UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - messages_in_oldest_entry: 1, - total_messages: 1, - last_delivered_nonce: 1, - }, - ); - assert_ok!(result); - assert_eq!( - result.unwrap().actual_weight.unwrap(), - TestWeightInfo::receive_messages_delivery_proof_weight( - &PreComputedSize(single_message_delivery_proof_size as _), - &UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - total_messages: 1, - ..Default::default() - }, - ) - ); - assert!(TestDeliveryConfirmationPayments::is_reward_paid(TEST_RELAYER_A, 1)); - assert!(!TestDeliveryConfirmationPayments::is_reward_paid(TEST_RELAYER_B, 1)); - assert_eq!(TestOnMessagesDelivered::call_arguments(), 
Some((TEST_LANE_ID, 1))); - - // this reports delivery of both message 1 and message 2 => reward is paid only to - // TEST_RELAYER_B - let two_messages_delivery_proof = TestMessagesDeliveryProof(Ok(( - TEST_LANE_ID, - InboundLaneData { - relayers: vec![ - unrewarded_relayer(1, 1, TEST_RELAYER_A), - unrewarded_relayer(2, 2, TEST_RELAYER_B), - ] - .into_iter() - .collect(), - ..Default::default() - }, - ))); - let two_messages_delivery_proof_size = two_messages_delivery_proof.size(); - let result = Pallet::::receive_messages_delivery_proof( - RuntimeOrigin::signed(1), - two_messages_delivery_proof, - UnrewardedRelayersState { - unrewarded_relayer_entries: 2, - messages_in_oldest_entry: 1, - total_messages: 2, - last_delivered_nonce: 2, - }, - ); - assert_ok!(result); - // even though the pre-dispatch weight was for two messages, the actual weight is - // for single message only - assert_eq!( - result.unwrap().actual_weight.unwrap(), - TestWeightInfo::receive_messages_delivery_proof_weight( - &PreComputedSize(two_messages_delivery_proof_size as _), - &UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - total_messages: 1, - ..Default::default() - }, - ) - ); - assert!(!TestDeliveryConfirmationPayments::is_reward_paid(TEST_RELAYER_A, 1)); - assert!(TestDeliveryConfirmationPayments::is_reward_paid(TEST_RELAYER_B, 1)); - assert_eq!(TestOnMessagesDelivered::call_arguments(), Some((TEST_LANE_ID, 0))); - }); - } - - #[test] - fn receive_messages_delivery_proof_rejects_invalid_proof() { - run_test(|| { - assert_noop!( - Pallet::::receive_messages_delivery_proof( - RuntimeOrigin::signed(1), - TestMessagesDeliveryProof(Err(())), - Default::default(), - ), - Error::::InvalidMessagesDeliveryProof, - ); - }); - } - - #[test] - fn receive_messages_delivery_proof_rejects_proof_if_declared_relayers_state_is_invalid() { - run_test(|| { - // when number of relayers entries is invalid - assert_noop!( - Pallet::::receive_messages_delivery_proof( - RuntimeOrigin::signed(1), - 
TestMessagesDeliveryProof(Ok(( - TEST_LANE_ID, - InboundLaneData { - relayers: vec![ - unrewarded_relayer(1, 1, TEST_RELAYER_A), - unrewarded_relayer(2, 2, TEST_RELAYER_B) - ] - .into_iter() - .collect(), - ..Default::default() - } - ))), - UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - total_messages: 2, - last_delivered_nonce: 2, - ..Default::default() - }, - ), - Error::::InvalidUnrewardedRelayersState, - ); - - // when number of messages is invalid - assert_noop!( - Pallet::::receive_messages_delivery_proof( - RuntimeOrigin::signed(1), - TestMessagesDeliveryProof(Ok(( - TEST_LANE_ID, - InboundLaneData { - relayers: vec![ - unrewarded_relayer(1, 1, TEST_RELAYER_A), - unrewarded_relayer(2, 2, TEST_RELAYER_B) - ] - .into_iter() - .collect(), - ..Default::default() - } - ))), - UnrewardedRelayersState { - unrewarded_relayer_entries: 2, - total_messages: 1, - last_delivered_nonce: 2, - ..Default::default() - }, - ), - Error::::InvalidUnrewardedRelayersState, - ); - - // when last delivered nonce is invalid - assert_noop!( - Pallet::::receive_messages_delivery_proof( - RuntimeOrigin::signed(1), - TestMessagesDeliveryProof(Ok(( - TEST_LANE_ID, - InboundLaneData { - relayers: vec![ - unrewarded_relayer(1, 1, TEST_RELAYER_A), - unrewarded_relayer(2, 2, TEST_RELAYER_B) - ] - .into_iter() - .collect(), - ..Default::default() - } - ))), - UnrewardedRelayersState { - unrewarded_relayer_entries: 2, - total_messages: 2, - last_delivered_nonce: 8, - ..Default::default() - }, - ), - Error::::InvalidUnrewardedRelayersState, - ); - }); - } - - #[test] - fn receive_messages_accepts_single_message_with_invalid_payload() { - run_test(|| { - let mut invalid_message = message(1, REGULAR_PAYLOAD); - invalid_message.payload = Vec::new(); - - assert_ok!(Pallet::::receive_messages_proof( - RuntimeOrigin::signed(1), - TEST_RELAYER_A, - Ok(vec![invalid_message]).into(), - 1, - Weight::zero(), /* weight may be zero in this case (all messages are - * improperly encoded) */ - 
),); - - assert_eq!(InboundLanes::::get(TEST_LANE_ID).last_delivered_nonce(), 1,); - }); - } - - #[test] - fn receive_messages_accepts_batch_with_message_with_invalid_payload() { - run_test(|| { - let mut invalid_message = message(2, REGULAR_PAYLOAD); - invalid_message.payload = Vec::new(); - - assert_ok!(Pallet::::receive_messages_proof( - RuntimeOrigin::signed(1), - TEST_RELAYER_A, - Ok( - vec![message(1, REGULAR_PAYLOAD), invalid_message, message(3, REGULAR_PAYLOAD),] - ) - .into(), - 3, - REGULAR_PAYLOAD.declared_weight + REGULAR_PAYLOAD.declared_weight, - ),); - - assert_eq!(InboundLanes::::get(TEST_LANE_ID).last_delivered_nonce(), 3,); - }); - } - - #[test] - fn actual_dispatch_weight_does_not_overflow() { - run_test(|| { - let message1 = message(1, message_payload(0, u64::MAX / 2)); - let message2 = message(2, message_payload(0, u64::MAX / 2)); - let message3 = message(3, message_payload(0, u64::MAX / 2)); - - assert_noop!( - Pallet::::receive_messages_proof( - RuntimeOrigin::signed(1), - TEST_RELAYER_A, - // this may cause overflow if source chain storage is invalid - Ok(vec![message1, message2, message3]).into(), - 3, - Weight::MAX, - ), - Error::::InsufficientDispatchWeight - ); - assert_eq!(InboundLanes::::get(TEST_LANE_ID).last_delivered_nonce(), 0); - }); - } - - #[test] - fn ref_time_refund_from_receive_messages_proof_works() { - run_test(|| { - fn submit_with_unspent_weight( - nonce: MessageNonce, - unspent_weight: u64, - ) -> (Weight, Weight) { - let mut payload = REGULAR_PAYLOAD; - *payload.dispatch_result.unspent_weight.ref_time_mut() = unspent_weight; - let proof = Ok(vec![message(nonce, payload)]).into(); - let messages_count = 1; - let pre_dispatch_weight = - ::WeightInfo::receive_messages_proof_weight( - &proof, - messages_count, - REGULAR_PAYLOAD.declared_weight, - ); - let result = Pallet::::receive_messages_proof( - RuntimeOrigin::signed(1), - TEST_RELAYER_A, - proof, - messages_count, - REGULAR_PAYLOAD.declared_weight, - ) - 
.expect("delivery has failed"); - let post_dispatch_weight = - result.actual_weight.expect("receive_messages_proof always returns Some"); - - // message delivery transactions are never free - assert_eq!(result.pays_fee, Pays::Yes); - - (pre_dispatch_weight, post_dispatch_weight) - } - - // when dispatch is returning `unspent_weight < declared_weight` - let (pre, post) = submit_with_unspent_weight(1, 1); - assert_eq!(post.ref_time(), pre.ref_time() - 1); - - // when dispatch is returning `unspent_weight = declared_weight` - let (pre, post) = - submit_with_unspent_weight(2, REGULAR_PAYLOAD.declared_weight.ref_time()); - assert_eq!( - post.ref_time(), - pre.ref_time() - REGULAR_PAYLOAD.declared_weight.ref_time() - ); - - // when dispatch is returning `unspent_weight > declared_weight` - let (pre, post) = - submit_with_unspent_weight(3, REGULAR_PAYLOAD.declared_weight.ref_time() + 1); - assert_eq!( - post.ref_time(), - pre.ref_time() - REGULAR_PAYLOAD.declared_weight.ref_time() - ); - - // when there's no unspent weight - let (pre, post) = submit_with_unspent_weight(4, 0); - assert_eq!(post.ref_time(), pre.ref_time()); - - // when dispatch is returning `unspent_weight < declared_weight` - let (pre, post) = submit_with_unspent_weight(5, 1); - assert_eq!(post.ref_time(), pre.ref_time() - 1); - }); - } - - #[test] - fn proof_size_refund_from_receive_messages_proof_works() { - run_test(|| { - let max_entries = crate::mock::MaxUnrewardedRelayerEntriesAtInboundLane::get() as usize; - - // if there's maximal number of unrewarded relayer entries at the inbound lane, then - // `proof_size` is unchanged in post-dispatch weight - let proof: TestMessagesProof = Ok(vec![message(101, REGULAR_PAYLOAD)]).into(); - let messages_count = 1; - let pre_dispatch_weight = - ::WeightInfo::receive_messages_proof_weight( - &proof, - messages_count, - REGULAR_PAYLOAD.declared_weight, - ); - InboundLanes::::insert( - TEST_LANE_ID, - StoredInboundLaneData(InboundLaneData { - relayers: vec![ - 
UnrewardedRelayer { - relayer: 42, - messages: DeliveredMessages { begin: 0, end: 100 } - }; - max_entries - ] - .into_iter() - .collect(), - last_confirmed_nonce: 0, - }), - ); - let post_dispatch_weight = Pallet::::receive_messages_proof( - RuntimeOrigin::signed(1), - TEST_RELAYER_A, - proof.clone(), - messages_count, - REGULAR_PAYLOAD.declared_weight, - ) - .unwrap() - .actual_weight - .unwrap(); - assert_eq!(post_dispatch_weight.proof_size(), pre_dispatch_weight.proof_size()); - - // if count of unrewarded relayer entries is less than maximal, then some `proof_size` - // must be refunded - InboundLanes::::insert( - TEST_LANE_ID, - StoredInboundLaneData(InboundLaneData { - relayers: vec![ - UnrewardedRelayer { - relayer: 42, - messages: DeliveredMessages { begin: 0, end: 100 } - }; - max_entries - 1 - ] - .into_iter() - .collect(), - last_confirmed_nonce: 0, - }), - ); - let post_dispatch_weight = Pallet::::receive_messages_proof( - RuntimeOrigin::signed(1), - TEST_RELAYER_A, - proof, - messages_count, - REGULAR_PAYLOAD.declared_weight, - ) - .unwrap() - .actual_weight - .unwrap(); - assert!( - post_dispatch_weight.proof_size() < pre_dispatch_weight.proof_size(), - "Expected post-dispatch PoV {} to be less than pre-dispatch PoV {}", - post_dispatch_weight.proof_size(), - pre_dispatch_weight.proof_size(), - ); - }); - } - - #[test] - fn messages_delivered_callbacks_are_called() { - run_test(|| { - send_regular_message(TEST_LANE_ID); - send_regular_message(TEST_LANE_ID); - send_regular_message(TEST_LANE_ID); - - // messages 1+2 are confirmed in 1 tx, message 3 in a separate tx - // dispatch of message 2 has failed - let mut delivered_messages_1_and_2 = DeliveredMessages::new(1); - delivered_messages_1_and_2.note_dispatched_message(); - let messages_1_and_2_proof = Ok(( - TEST_LANE_ID, - InboundLaneData { - last_confirmed_nonce: 0, - relayers: vec![UnrewardedRelayer { - relayer: 0, - messages: delivered_messages_1_and_2.clone(), - }] - .into_iter() - .collect(), - 
}, - )); - let delivered_message_3 = DeliveredMessages::new(3); - let messages_3_proof = Ok(( - TEST_LANE_ID, - InboundLaneData { - last_confirmed_nonce: 0, - relayers: vec![UnrewardedRelayer { relayer: 0, messages: delivered_message_3 }] - .into_iter() - .collect(), - }, - )); - - // first tx with messages 1+2 - assert_ok!(Pallet::::receive_messages_delivery_proof( - RuntimeOrigin::signed(1), - TestMessagesDeliveryProof(messages_1_and_2_proof), - UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - messages_in_oldest_entry: 2, - total_messages: 2, - last_delivered_nonce: 2, - }, - )); - // second tx with message 3 - assert_ok!(Pallet::::receive_messages_delivery_proof( - RuntimeOrigin::signed(1), - TestMessagesDeliveryProof(messages_3_proof), - UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - messages_in_oldest_entry: 1, - total_messages: 1, - last_delivered_nonce: 3, - }, - )); - }); - } - - #[test] - fn receive_messages_delivery_proof_rejects_proof_if_trying_to_confirm_more_messages_than_expected( - ) { - run_test(|| { - // send message first to be able to check that delivery_proof fails later - send_regular_message(TEST_LANE_ID); - - // 1) InboundLaneData declares that the `last_confirmed_nonce` is 1; - // 2) InboundLaneData has no entries => `InboundLaneData::last_delivered_nonce()` - // returns `last_confirmed_nonce`; - // 3) it means that we're going to confirm delivery of messages 1..=1; - // 4) so the number of declared messages (see `UnrewardedRelayersState`) is `0` and - // number of actually confirmed messages is `1`. 
- assert_noop!( - Pallet::::receive_messages_delivery_proof( - RuntimeOrigin::signed(1), - TestMessagesDeliveryProof(Ok(( - TEST_LANE_ID, - InboundLaneData { last_confirmed_nonce: 1, relayers: Default::default() }, - ))), - UnrewardedRelayersState { last_delivered_nonce: 1, ..Default::default() }, - ), - Error::::ReceptionConfirmation( - ReceptionConfirmationError::TryingToConfirmMoreMessagesThanExpected - ), - ); - }); - } - - #[test] - fn storage_keys_computed_properly() { - assert_eq!( - PalletOperatingMode::::storage_value_final_key().to_vec(), - bp_messages::storage_keys::operating_mode_key("Messages").0, - ); - - assert_eq!( - OutboundMessages::::storage_map_final_key(MessageKey { - lane_id: TEST_LANE_ID, - nonce: 42 - }), - bp_messages::storage_keys::message_key("Messages", &TEST_LANE_ID, 42).0, - ); - - assert_eq!( - OutboundLanes::::storage_map_final_key(TEST_LANE_ID), - bp_messages::storage_keys::outbound_lane_data_key("Messages", &TEST_LANE_ID).0, - ); - - assert_eq!( - InboundLanes::::storage_map_final_key(TEST_LANE_ID), - bp_messages::storage_keys::inbound_lane_data_key("Messages", &TEST_LANE_ID).0, - ); - } - - #[test] - fn inbound_message_details_works() { - run_test(|| { - assert_eq!( - Pallet::::inbound_message_data( - TEST_LANE_ID, - REGULAR_PAYLOAD.encode(), - OutboundMessageDetails { nonce: 0, dispatch_weight: Weight::zero(), size: 0 }, - ), - InboundMessageDetails { dispatch_weight: REGULAR_PAYLOAD.declared_weight }, - ); - }); - } - - #[test] - fn on_idle_callback_respects_remaining_weight() { - run_test(|| { - send_regular_message(TEST_LANE_ID); - send_regular_message(TEST_LANE_ID); - send_regular_message(TEST_LANE_ID); - send_regular_message(TEST_LANE_ID); - - assert_ok!(Pallet::::receive_messages_delivery_proof( - RuntimeOrigin::signed(1), - TestMessagesDeliveryProof(Ok(( - TEST_LANE_ID, - InboundLaneData { - last_confirmed_nonce: 4, - relayers: vec![unrewarded_relayer(1, 4, TEST_RELAYER_A)] - .into_iter() - .collect(), - }, - ))), - 
UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - messages_in_oldest_entry: 4, - total_messages: 4, - last_delivered_nonce: 4, - }, - )); - - // all 4 messages may be pruned now - assert_eq!( - outbound_lane::(TEST_LANE_ID).data().latest_received_nonce, - 4 - ); - assert_eq!( - outbound_lane::(TEST_LANE_ID).data().oldest_unpruned_nonce, - 1 - ); - System::::set_block_number(2); - - // if passed wight is too low to do anything - let dbw = DbWeight::get(); - assert_eq!( - Pallet::::on_idle(0, dbw.reads_writes(1, 1)), - Weight::zero(), - ); - assert_eq!( - outbound_lane::(TEST_LANE_ID).data().oldest_unpruned_nonce, - 1 - ); - - // if passed wight is enough to prune single message - assert_eq!( - Pallet::::on_idle(0, dbw.reads_writes(1, 2)), - dbw.reads_writes(1, 2), - ); - assert_eq!( - outbound_lane::(TEST_LANE_ID).data().oldest_unpruned_nonce, - 2 - ); - - // if passed wight is enough to prune two more messages - assert_eq!( - Pallet::::on_idle(0, dbw.reads_writes(1, 3)), - dbw.reads_writes(1, 3), - ); - assert_eq!( - outbound_lane::(TEST_LANE_ID).data().oldest_unpruned_nonce, - 4 - ); - - // if passed wight is enough to prune many messages - assert_eq!( - Pallet::::on_idle(0, dbw.reads_writes(100, 100)), - dbw.reads_writes(1, 2), - ); - assert_eq!( - outbound_lane::(TEST_LANE_ID).data().oldest_unpruned_nonce, - 5 - ); - }); - } - - #[test] - fn on_idle_callback_is_rotating_lanes_to_prune() { - run_test(|| { - // send + receive confirmation for lane 1 - send_regular_message(TEST_LANE_ID); - receive_messages_delivery_proof(); - // send + receive confirmation for lane 2 - send_regular_message(TEST_LANE_ID_2); - assert_ok!(Pallet::::receive_messages_delivery_proof( - RuntimeOrigin::signed(1), - TestMessagesDeliveryProof(Ok(( - TEST_LANE_ID_2, - InboundLaneData { - last_confirmed_nonce: 1, - relayers: vec![unrewarded_relayer(1, 1, TEST_RELAYER_A)] - .into_iter() - .collect(), - }, - ))), - UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - 
messages_in_oldest_entry: 1, - total_messages: 1, - last_delivered_nonce: 1, - }, - )); - - // nothing is pruned yet - assert_eq!( - outbound_lane::(TEST_LANE_ID).data().latest_received_nonce, - 1 - ); - assert_eq!( - outbound_lane::(TEST_LANE_ID).data().oldest_unpruned_nonce, - 1 - ); - assert_eq!( - outbound_lane::(TEST_LANE_ID_2).data().latest_received_nonce, - 1 - ); - assert_eq!( - outbound_lane::(TEST_LANE_ID_2).data().oldest_unpruned_nonce, - 1 - ); - - // in block#2.on_idle lane messages of lane 1 are pruned - let dbw = DbWeight::get(); - System::::set_block_number(2); - assert_eq!( - Pallet::::on_idle(0, dbw.reads_writes(100, 100)), - dbw.reads_writes(1, 2), - ); - assert_eq!( - outbound_lane::(TEST_LANE_ID).data().oldest_unpruned_nonce, - 2 - ); - assert_eq!( - outbound_lane::(TEST_LANE_ID_2).data().oldest_unpruned_nonce, - 1 - ); - - // in block#3.on_idle lane messages of lane 2 are pruned - System::::set_block_number(3); - - assert_eq!( - Pallet::::on_idle(0, dbw.reads_writes(100, 100)), - dbw.reads_writes(1, 2), - ); - assert_eq!( - outbound_lane::(TEST_LANE_ID).data().oldest_unpruned_nonce, - 2 - ); - assert_eq!( - outbound_lane::(TEST_LANE_ID_2).data().oldest_unpruned_nonce, - 2 - ); - }); - } - - #[test] - fn outbound_message_from_unconfigured_lane_is_rejected() { - run_test(|| { - assert_noop!( - Pallet::::validate_message(TEST_LANE_ID_3, ®ULAR_PAYLOAD,), - Error::::InactiveOutboundLane, - ); - }); - } - - #[test] - fn test_bridge_messages_call_is_correctly_defined() { - let account_id = 1; - let message_proof: TestMessagesProof = Ok(vec![message(1, REGULAR_PAYLOAD)]).into(); - let message_delivery_proof = TestMessagesDeliveryProof(Ok(( - TEST_LANE_ID, - InboundLaneData { - last_confirmed_nonce: 1, - relayers: vec![UnrewardedRelayer { - relayer: 0, - messages: DeliveredMessages::new(1), - }] - .into_iter() - .collect(), - }, - ))); - let unrewarded_relayer_state = UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - total_messages: 1, - 
last_delivered_nonce: 1, - ..Default::default() - }; - - let direct_receive_messages_proof_call = Call::::receive_messages_proof { - relayer_id_at_bridged_chain: account_id, - proof: message_proof.clone(), - messages_count: 1, - dispatch_weight: REGULAR_PAYLOAD.declared_weight, - }; - let indirect_receive_messages_proof_call = BridgeMessagesCall::< - AccountId, - TestMessagesProof, - TestMessagesDeliveryProof, - >::receive_messages_proof { - relayer_id_at_bridged_chain: account_id, - proof: message_proof, - messages_count: 1, - dispatch_weight: REGULAR_PAYLOAD.declared_weight, - }; - assert_eq!( - direct_receive_messages_proof_call.encode(), - indirect_receive_messages_proof_call.encode() - ); - - let direct_receive_messages_delivery_proof_call = - Call::::receive_messages_delivery_proof { - proof: message_delivery_proof.clone(), - relayers_state: unrewarded_relayer_state.clone(), - }; - let indirect_receive_messages_delivery_proof_call = BridgeMessagesCall::< - AccountId, - TestMessagesProof, - TestMessagesDeliveryProof, - >::receive_messages_delivery_proof { - proof: message_delivery_proof, - relayers_state: unrewarded_relayer_state, - }; - assert_eq!( - direct_receive_messages_delivery_proof_call.encode(), - indirect_receive_messages_delivery_proof_call.encode() - ); - } - - generate_owned_bridge_module_tests!( - MessagesOperatingMode::Basic(BasicOperatingMode::Normal), - MessagesOperatingMode::Basic(BasicOperatingMode::Halted) - ); - - #[test] - fn inbound_storage_extra_proof_size_bytes_works() { - fn relayer_entry() -> UnrewardedRelayer { - UnrewardedRelayer { relayer: 42u64, messages: DeliveredMessages { begin: 0, end: 100 } } - } - - fn storage(relayer_entries: usize) -> RuntimeInboundLaneStorage { - RuntimeInboundLaneStorage { - lane_id: Default::default(), - cached_data: Some(InboundLaneData { - relayers: vec![relayer_entry(); relayer_entries].into_iter().collect(), - last_confirmed_nonce: 0, - }), - _phantom: Default::default(), - } - } - - let 
max_entries = crate::mock::MaxUnrewardedRelayerEntriesAtInboundLane::get() as usize; - - // when we have exactly `MaxUnrewardedRelayerEntriesAtInboundLane` unrewarded relayers - assert_eq!(storage(max_entries).extra_proof_size_bytes(), 0); - - // when we have less than `MaxUnrewardedRelayerEntriesAtInboundLane` unrewarded relayers - assert_eq!( - storage(max_entries - 1).extra_proof_size_bytes(), - relayer_entry().encode().len() as u64 - ); - assert_eq!( - storage(max_entries - 2).extra_proof_size_bytes(), - 2 * relayer_entry().encode().len() as u64 - ); - - // when we have more than `MaxUnrewardedRelayerEntriesAtInboundLane` unrewarded relayers - // (shall not happen in practice) - assert_eq!(storage(max_entries + 1).extra_proof_size_bytes(), 0); - } - - #[test] - fn maybe_outbound_lanes_count_returns_correct_value() { - assert_eq!( - MaybeOutboundLanesCount::::get(), - Some(mock::ActiveOutboundLanes::get().len() as u32) - ); - } -} diff --git a/modules/messages/src/mock.rs b/modules/messages/src/mock.rs deleted file mode 100644 index ec63f15b9..000000000 --- a/modules/messages/src/mock.rs +++ /dev/null @@ -1,461 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . 
- -// From construct_runtime macro -#![allow(clippy::from_over_into)] - -use crate::{Config, StoredMessagePayload}; - -use bp_messages::{ - calc_relayers_rewards, - source_chain::{DeliveryConfirmationPayments, OnMessagesDelivered, TargetHeaderChain}, - target_chain::{ - DeliveryPayments, DispatchMessage, DispatchMessageData, MessageDispatch, - ProvedLaneMessages, ProvedMessages, SourceHeaderChain, - }, - DeliveredMessages, InboundLaneData, LaneId, Message, MessageKey, MessageNonce, - UnrewardedRelayer, UnrewardedRelayersState, VerificationError, -}; -use bp_runtime::{messages::MessageDispatchResult, Size}; -use codec::{Decode, Encode}; -use frame_support::{ - derive_impl, parameter_types, - weights::{constants::RocksDbWeight, Weight}, -}; -use scale_info::TypeInfo; -use sp_runtime::BuildStorage; -use std::{ - collections::{BTreeMap, VecDeque}, - ops::RangeInclusive, -}; - -pub type AccountId = u64; -pub type Balance = u64; -#[derive(Decode, Encode, Clone, Debug, PartialEq, Eq, TypeInfo)] -pub struct TestPayload { - /// Field that may be used to identify messages. - pub id: u64, - /// Dispatch weight that is declared by the message sender. - pub declared_weight: Weight, - /// Message dispatch result. - /// - /// Note: in correct code `dispatch_result.unspent_weight` will always be <= `declared_weight`, - /// but for test purposes we'll be making it larger than `declared_weight` sometimes. - pub dispatch_result: MessageDispatchResult, - /// Extra bytes that affect payload size. - pub extra: Vec, -} -pub type TestMessageFee = u64; -pub type TestRelayer = u64; -pub type TestDispatchLevelResult = (); - -type Block = frame_system::mocking::MockBlock; - -use crate as pallet_bridge_messages; - -frame_support::construct_runtime! 
{ - pub enum TestRuntime - { - System: frame_system::{Pallet, Call, Config, Storage, Event}, - Balances: pallet_balances::{Pallet, Call, Event}, - Messages: pallet_bridge_messages::{Pallet, Call, Event}, - } -} - -pub type DbWeight = RocksDbWeight; - -#[derive_impl(frame_system::config_preludes::TestDefaultConfig)] -impl frame_system::Config for TestRuntime { - type Block = Block; - type AccountData = pallet_balances::AccountData; - type DbWeight = DbWeight; -} - -#[derive_impl(pallet_balances::config_preludes::TestDefaultConfig)] -impl pallet_balances::Config for TestRuntime { - type ReserveIdentifier = [u8; 8]; - type AccountStore = System; -} - -parameter_types! { - pub const MaxMessagesToPruneAtOnce: u64 = 10; - pub const MaxUnrewardedRelayerEntriesAtInboundLane: u64 = 16; - pub const MaxUnconfirmedMessagesAtInboundLane: u64 = 128; - pub const TestBridgedChainId: bp_runtime::ChainId = *b"test"; - pub const ActiveOutboundLanes: &'static [LaneId] = &[TEST_LANE_ID, TEST_LANE_ID_2]; -} - -/// weights of messages pallet calls we use in tests. 
-pub type TestWeightInfo = (); - -impl Config for TestRuntime { - type RuntimeEvent = RuntimeEvent; - type WeightInfo = TestWeightInfo; - type ActiveOutboundLanes = ActiveOutboundLanes; - type MaxUnrewardedRelayerEntriesAtInboundLane = MaxUnrewardedRelayerEntriesAtInboundLane; - type MaxUnconfirmedMessagesAtInboundLane = MaxUnconfirmedMessagesAtInboundLane; - - type MaximalOutboundPayloadSize = frame_support::traits::ConstU32; - type OutboundPayload = TestPayload; - - type InboundPayload = TestPayload; - type InboundRelayer = TestRelayer; - type DeliveryPayments = TestDeliveryPayments; - - type TargetHeaderChain = TestTargetHeaderChain; - type DeliveryConfirmationPayments = TestDeliveryConfirmationPayments; - type OnMessagesDelivered = TestOnMessagesDelivered; - - type SourceHeaderChain = TestSourceHeaderChain; - type MessageDispatch = TestMessageDispatch; - type BridgedChainId = TestBridgedChainId; -} - -#[cfg(feature = "runtime-benchmarks")] -impl crate::benchmarking::Config<()> for TestRuntime { - fn bench_lane_id() -> LaneId { - TEST_LANE_ID - } - - fn prepare_message_proof( - params: crate::benchmarking::MessageProofParams, - ) -> (TestMessagesProof, Weight) { - // in mock run we only care about benchmarks correctness, not the benchmark results - // => ignore size related arguments - let (messages, total_dispatch_weight) = - params.message_nonces.into_iter().map(|n| message(n, REGULAR_PAYLOAD)).fold( - (Vec::new(), Weight::zero()), - |(mut messages, total_dispatch_weight), message| { - let weight = REGULAR_PAYLOAD.declared_weight; - messages.push(message); - (messages, total_dispatch_weight.saturating_add(weight)) - }, - ); - let mut proof: TestMessagesProof = Ok(messages).into(); - proof.result.as_mut().unwrap().get_mut(0).unwrap().1.lane_state = params.outbound_lane_data; - (proof, total_dispatch_weight) - } - - fn prepare_message_delivery_proof( - params: crate::benchmarking::MessageDeliveryProofParams, - ) -> TestMessagesDeliveryProof { - // in mock run we 
only care about benchmarks correctness, not the benchmark results - // => ignore size related arguments - TestMessagesDeliveryProof(Ok((params.lane, params.inbound_lane_data))) - } - - fn is_relayer_rewarded(_relayer: &AccountId) -> bool { - true - } -} - -impl Size for TestPayload { - fn size(&self) -> u32 { - 16 + self.extra.len() as u32 - } -} - -/// Maximal outbound payload size. -pub const MAX_OUTBOUND_PAYLOAD_SIZE: u32 = 4096; - -/// Account that has balance to use in tests. -pub const ENDOWED_ACCOUNT: AccountId = 0xDEAD; - -/// Account id of test relayer. -pub const TEST_RELAYER_A: AccountId = 100; - -/// Account id of additional test relayer - B. -pub const TEST_RELAYER_B: AccountId = 101; - -/// Account id of additional test relayer - C. -pub const TEST_RELAYER_C: AccountId = 102; - -/// Error that is returned by all test implementations. -pub const TEST_ERROR: &str = "Test error"; - -/// Lane that we're using in tests. -pub const TEST_LANE_ID: LaneId = LaneId([0, 0, 0, 1]); - -/// Secondary lane that we're using in tests. -pub const TEST_LANE_ID_2: LaneId = LaneId([0, 0, 0, 2]); - -/// Inactive outbound lane. -pub const TEST_LANE_ID_3: LaneId = LaneId([0, 0, 0, 3]); - -/// Regular message payload. -pub const REGULAR_PAYLOAD: TestPayload = message_payload(0, 50); - -/// Payload that is rejected by `TestTargetHeaderChain`. -pub const PAYLOAD_REJECTED_BY_TARGET_CHAIN: TestPayload = message_payload(1, 50); - -/// Vec of proved messages, grouped by lane. -pub type MessagesByLaneVec = Vec<(LaneId, ProvedLaneMessages)>; - -/// Test messages proof. 
-#[derive(Debug, Encode, Decode, Clone, PartialEq, Eq, TypeInfo)] -pub struct TestMessagesProof { - pub result: Result, -} - -impl Size for TestMessagesProof { - fn size(&self) -> u32 { - 0 - } -} - -impl From, ()>> for TestMessagesProof { - fn from(result: Result, ()>) -> Self { - Self { - result: result.map(|messages| { - let mut messages_by_lane: BTreeMap> = - BTreeMap::new(); - for message in messages { - messages_by_lane.entry(message.key.lane_id).or_default().messages.push(message); - } - messages_by_lane.into_iter().collect() - }), - } - } -} - -/// Messages delivery proof used in tests. -#[derive(Debug, Encode, Decode, Eq, Clone, PartialEq, TypeInfo)] -pub struct TestMessagesDeliveryProof(pub Result<(LaneId, InboundLaneData), ()>); - -impl Size for TestMessagesDeliveryProof { - fn size(&self) -> u32 { - 0 - } -} - -/// Target header chain that is used in tests. -#[derive(Debug, Default)] -pub struct TestTargetHeaderChain; - -impl TargetHeaderChain for TestTargetHeaderChain { - type MessagesDeliveryProof = TestMessagesDeliveryProof; - - fn verify_message(payload: &TestPayload) -> Result<(), VerificationError> { - if *payload == PAYLOAD_REJECTED_BY_TARGET_CHAIN { - Err(VerificationError::Other(TEST_ERROR)) - } else { - Ok(()) - } - } - - fn verify_messages_delivery_proof( - proof: Self::MessagesDeliveryProof, - ) -> Result<(LaneId, InboundLaneData), VerificationError> { - proof.0.map_err(|_| VerificationError::Other(TEST_ERROR)) - } -} - -/// Reward payments at the target chain during delivery transaction. -#[derive(Debug, Default)] -pub struct TestDeliveryPayments; - -impl TestDeliveryPayments { - /// Returns true if given relayer has been rewarded with given balance. The reward-paid flag is - /// cleared after the call. 
- pub fn is_reward_paid(relayer: AccountId) -> bool { - let key = (b":delivery-relayer-reward:", relayer).encode(); - frame_support::storage::unhashed::take::(&key).is_some() - } -} - -impl DeliveryPayments for TestDeliveryPayments { - type Error = &'static str; - - fn pay_reward( - relayer: AccountId, - _total_messages: MessageNonce, - _valid_messages: MessageNonce, - _actual_weight: Weight, - ) { - let key = (b":delivery-relayer-reward:", relayer).encode(); - frame_support::storage::unhashed::put(&key, &true); - } -} - -/// Reward payments at the source chain during delivery confirmation transaction. -#[derive(Debug, Default)] -pub struct TestDeliveryConfirmationPayments; - -impl TestDeliveryConfirmationPayments { - /// Returns true if given relayer has been rewarded with given balance. The reward-paid flag is - /// cleared after the call. - pub fn is_reward_paid(relayer: AccountId, fee: TestMessageFee) -> bool { - let key = (b":relayer-reward:", relayer, fee).encode(); - frame_support::storage::unhashed::take::(&key).is_some() - } -} - -impl DeliveryConfirmationPayments for TestDeliveryConfirmationPayments { - type Error = &'static str; - - fn pay_reward( - _lane_id: LaneId, - messages_relayers: VecDeque>, - _confirmation_relayer: &AccountId, - received_range: &RangeInclusive, - ) -> MessageNonce { - let relayers_rewards = calc_relayers_rewards(messages_relayers, received_range); - let rewarded_relayers = relayers_rewards.len(); - for (relayer, reward) in &relayers_rewards { - let key = (b":relayer-reward:", relayer, reward).encode(); - frame_support::storage::unhashed::put(&key, &true); - } - - rewarded_relayers as _ - } -} - -/// Source header chain that is used in tests. 
-#[derive(Debug)] -pub struct TestSourceHeaderChain; - -impl SourceHeaderChain for TestSourceHeaderChain { - type MessagesProof = TestMessagesProof; - - fn verify_messages_proof( - proof: Self::MessagesProof, - _messages_count: u32, - ) -> Result, VerificationError> { - proof - .result - .map(|proof| proof.into_iter().collect()) - .map_err(|_| VerificationError::Other(TEST_ERROR)) - } -} - -/// Test message dispatcher. -#[derive(Debug)] -pub struct TestMessageDispatch; - -impl TestMessageDispatch { - pub fn deactivate() { - frame_support::storage::unhashed::put(b"TestMessageDispatch.IsCongested", &true) - } -} - -impl MessageDispatch for TestMessageDispatch { - type DispatchPayload = TestPayload; - type DispatchLevelResult = TestDispatchLevelResult; - - fn is_active() -> bool { - !frame_support::storage::unhashed::get_or_default::( - b"TestMessageDispatch.IsCongested", - ) - } - - fn dispatch_weight(message: &mut DispatchMessage) -> Weight { - match message.data.payload.as_ref() { - Ok(payload) => payload.declared_weight, - Err(_) => Weight::zero(), - } - } - - fn dispatch( - message: DispatchMessage, - ) -> MessageDispatchResult { - match message.data.payload.as_ref() { - Ok(payload) => payload.dispatch_result.clone(), - Err(_) => dispatch_result(0), - } - } -} - -/// Test callback, called during message delivery confirmation transaction. -pub struct TestOnMessagesDelivered; - -impl TestOnMessagesDelivered { - pub fn call_arguments() -> Option<(LaneId, MessageNonce)> { - frame_support::storage::unhashed::get(b"TestOnMessagesDelivered.OnMessagesDelivered") - } -} - -impl OnMessagesDelivered for TestOnMessagesDelivered { - fn on_messages_delivered(lane: LaneId, enqueued_messages: MessageNonce) { - frame_support::storage::unhashed::put( - b"TestOnMessagesDelivered.OnMessagesDelivered", - &(lane, enqueued_messages), - ); - } -} - -/// Return test lane message with given nonce and payload. 
-pub fn message(nonce: MessageNonce, payload: TestPayload) -> Message { - Message { key: MessageKey { lane_id: TEST_LANE_ID, nonce }, payload: payload.encode() } -} - -/// Return valid outbound message data, constructed from given payload. -pub fn outbound_message_data(payload: TestPayload) -> StoredMessagePayload { - StoredMessagePayload::::try_from(payload.encode()).expect("payload too large") -} - -/// Return valid inbound (dispatch) message data, constructed from given payload. -pub fn inbound_message_data(payload: TestPayload) -> DispatchMessageData { - DispatchMessageData { payload: Ok(payload) } -} - -/// Constructs message payload using given arguments and zero unspent weight. -pub const fn message_payload(id: u64, declared_weight: u64) -> TestPayload { - TestPayload { - id, - declared_weight: Weight::from_parts(declared_weight, 0), - dispatch_result: dispatch_result(0), - extra: Vec::new(), - } -} - -/// Returns message dispatch result with given unspent weight. -pub const fn dispatch_result( - unspent_weight: u64, -) -> MessageDispatchResult { - MessageDispatchResult { - unspent_weight: Weight::from_parts(unspent_weight, 0), - dispatch_level_result: (), - } -} - -/// Constructs unrewarded relayer entry from nonces range and relayer id. -pub fn unrewarded_relayer( - begin: MessageNonce, - end: MessageNonce, - relayer: TestRelayer, -) -> UnrewardedRelayer { - UnrewardedRelayer { relayer, messages: DeliveredMessages { begin, end } } -} - -/// Returns unrewarded relayers state at given lane. -pub fn inbound_unrewarded_relayers_state(lane: bp_messages::LaneId) -> UnrewardedRelayersState { - let inbound_lane_data = crate::InboundLanes::::get(lane).0; - UnrewardedRelayersState::from(&inbound_lane_data) -} - -/// Return test externalities to use in tests. 
-pub fn new_test_ext() -> sp_io::TestExternalities { - let mut t = frame_system::GenesisConfig::::default().build_storage().unwrap(); - pallet_balances::GenesisConfig:: { balances: vec![(ENDOWED_ACCOUNT, 1_000_000)] } - .assimilate_storage(&mut t) - .unwrap(); - sp_io::TestExternalities::new(t) -} - -/// Run pallet test. -pub fn run_test(test: impl FnOnce() -> T) -> T { - new_test_ext().execute_with(test) -} diff --git a/modules/messages/src/outbound_lane.rs b/modules/messages/src/outbound_lane.rs deleted file mode 100644 index acef5546d..000000000 --- a/modules/messages/src/outbound_lane.rs +++ /dev/null @@ -1,424 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Everything about outgoing messages sending. - -use crate::{Config, LOG_TARGET}; - -use bp_messages::{DeliveredMessages, LaneId, MessageNonce, OutboundLaneData, UnrewardedRelayer}; -use codec::{Decode, Encode}; -use frame_support::{ - weights::{RuntimeDbWeight, Weight}, - BoundedVec, PalletError, -}; -use num_traits::Zero; -use scale_info::TypeInfo; -use sp_runtime::RuntimeDebug; -use sp_std::collections::vec_deque::VecDeque; - -/// Outbound lane storage. -pub trait OutboundLaneStorage { - type StoredMessagePayload; - - /// Lane id. - fn id(&self) -> LaneId; - /// Get lane data from the storage. 
- fn data(&self) -> OutboundLaneData; - /// Update lane data in the storage. - fn set_data(&mut self, data: OutboundLaneData); - /// Returns saved outbound message payload. - #[cfg(test)] - fn message(&self, nonce: &MessageNonce) -> Option; - /// Save outbound message in the storage. - fn save_message(&mut self, nonce: MessageNonce, message_payload: Self::StoredMessagePayload); - /// Remove outbound message from the storage. - fn remove_message(&mut self, nonce: &MessageNonce); -} - -/// Outbound message data wrapper that implements `MaxEncodedLen`. -pub type StoredMessagePayload = BoundedVec>::MaximalOutboundPayloadSize>; - -/// Result of messages receival confirmation. -#[derive(Encode, Decode, RuntimeDebug, PartialEq, Eq, PalletError, TypeInfo)] -pub enum ReceptionConfirmationError { - /// Bridged chain is trying to confirm more messages than we have generated. May be a result - /// of invalid bridged chain storage. - FailedToConfirmFutureMessages, - /// The unrewarded relayers vec contains an empty entry. May be a result of invalid bridged - /// chain storage. - EmptyUnrewardedRelayerEntry, - /// The unrewarded relayers vec contains non-consecutive entries. May be a result of invalid - /// bridged chain storage. - NonConsecutiveUnrewardedRelayerEntries, - /// The chain has more messages that need to be confirmed than there is in the proof. - TryingToConfirmMoreMessagesThanExpected, -} - -/// Outbound messages lane. -pub struct OutboundLane { - storage: S, -} - -impl OutboundLane { - /// Create new outbound lane backed by given storage. - pub fn new(storage: S) -> Self { - OutboundLane { storage } - } - - /// Get this lane data. - pub fn data(&self) -> OutboundLaneData { - self.storage.data() - } - - /// Send message over lane. - /// - /// Returns new message nonce. 
- pub fn send_message(&mut self, message_payload: S::StoredMessagePayload) -> MessageNonce { - let mut data = self.storage.data(); - let nonce = data.latest_generated_nonce + 1; - data.latest_generated_nonce = nonce; - - self.storage.save_message(nonce, message_payload); - self.storage.set_data(data); - - nonce - } - - /// Confirm messages delivery. - pub fn confirm_delivery( - &mut self, - max_allowed_messages: MessageNonce, - latest_delivered_nonce: MessageNonce, - relayers: &VecDeque>, - ) -> Result, ReceptionConfirmationError> { - let mut data = self.storage.data(); - let confirmed_messages = DeliveredMessages { - begin: data.latest_received_nonce.saturating_add(1), - end: latest_delivered_nonce, - }; - if confirmed_messages.total_messages() == 0 { - return Ok(None) - } - if confirmed_messages.end > data.latest_generated_nonce { - return Err(ReceptionConfirmationError::FailedToConfirmFutureMessages) - } - if confirmed_messages.total_messages() > max_allowed_messages { - // that the relayer has declared correct number of messages that the proof contains (it - // is checked outside of the function). But it may happen (but only if this/bridged - // chain storage is corrupted, though) that the actual number of confirmed messages if - // larger than declared. This would mean that 'reward loop' will take more time than the - // weight formula accounts, so we can't allow that. - log::trace!( - target: LOG_TARGET, - "Messages delivery proof contains too many messages to confirm: {} vs declared {}", - confirmed_messages.total_messages(), - max_allowed_messages, - ); - return Err(ReceptionConfirmationError::TryingToConfirmMoreMessagesThanExpected) - } - - ensure_unrewarded_relayers_are_correct(confirmed_messages.end, relayers)?; - - data.latest_received_nonce = confirmed_messages.end; - self.storage.set_data(data); - - Ok(Some(confirmed_messages)) - } - - /// Prune at most `max_messages_to_prune` already received messages. 
- /// - /// Returns weight, consumed by messages pruning and lane state update. - pub fn prune_messages( - &mut self, - db_weight: RuntimeDbWeight, - mut remaining_weight: Weight, - ) -> Weight { - let write_weight = db_weight.writes(1); - let two_writes_weight = write_weight + write_weight; - let mut spent_weight = Weight::zero(); - let mut data = self.storage.data(); - while remaining_weight.all_gte(two_writes_weight) && - data.oldest_unpruned_nonce <= data.latest_received_nonce - { - self.storage.remove_message(&data.oldest_unpruned_nonce); - - spent_weight += write_weight; - remaining_weight -= write_weight; - data.oldest_unpruned_nonce += 1; - } - - if !spent_weight.is_zero() { - spent_weight += write_weight; - self.storage.set_data(data); - } - - spent_weight - } -} - -/// Verifies unrewarded relayers vec. -/// -/// Returns `Err(_)` if unrewarded relayers vec contains invalid data, meaning that the bridged -/// chain has invalid runtime storage. -fn ensure_unrewarded_relayers_are_correct( - latest_received_nonce: MessageNonce, - relayers: &VecDeque>, -) -> Result<(), ReceptionConfirmationError> { - let mut expected_entry_begin = relayers.front().map(|entry| entry.messages.begin); - for entry in relayers { - // unrewarded relayer entry must have at least 1 unconfirmed message - // (guaranteed by the `InboundLane::receive_message()`) - if entry.messages.end < entry.messages.begin { - return Err(ReceptionConfirmationError::EmptyUnrewardedRelayerEntry) - } - // every entry must confirm range of messages that follows previous entry range - // (guaranteed by the `InboundLane::receive_message()`) - if expected_entry_begin != Some(entry.messages.begin) { - return Err(ReceptionConfirmationError::NonConsecutiveUnrewardedRelayerEntries) - } - expected_entry_begin = entry.messages.end.checked_add(1); - // entry can't confirm messages larger than `inbound_lane_data.latest_received_nonce()` - // (guaranteed by the `InboundLane::receive_message()`) - if entry.messages.end > 
latest_received_nonce { - return Err(ReceptionConfirmationError::FailedToConfirmFutureMessages) - } - } - - Ok(()) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{ - mock::{ - outbound_message_data, run_test, unrewarded_relayer, TestRelayer, TestRuntime, - REGULAR_PAYLOAD, TEST_LANE_ID, - }, - outbound_lane, - }; - use frame_support::weights::constants::RocksDbWeight; - use sp_std::ops::RangeInclusive; - - fn unrewarded_relayers( - nonces: RangeInclusive, - ) -> VecDeque> { - vec![unrewarded_relayer(*nonces.start(), *nonces.end(), 0)] - .into_iter() - .collect() - } - - fn delivered_messages(nonces: RangeInclusive) -> DeliveredMessages { - DeliveredMessages { begin: *nonces.start(), end: *nonces.end() } - } - - fn assert_3_messages_confirmation_fails( - latest_received_nonce: MessageNonce, - relayers: &VecDeque>, - ) -> Result, ReceptionConfirmationError> { - run_test(|| { - let mut lane = outbound_lane::(TEST_LANE_ID); - lane.send_message(outbound_message_data(REGULAR_PAYLOAD)); - lane.send_message(outbound_message_data(REGULAR_PAYLOAD)); - lane.send_message(outbound_message_data(REGULAR_PAYLOAD)); - assert_eq!(lane.storage.data().latest_generated_nonce, 3); - assert_eq!(lane.storage.data().latest_received_nonce, 0); - let result = lane.confirm_delivery(3, latest_received_nonce, relayers); - assert_eq!(lane.storage.data().latest_generated_nonce, 3); - assert_eq!(lane.storage.data().latest_received_nonce, 0); - result - }) - } - - #[test] - fn send_message_works() { - run_test(|| { - let mut lane = outbound_lane::(TEST_LANE_ID); - assert_eq!(lane.storage.data().latest_generated_nonce, 0); - assert_eq!(lane.send_message(outbound_message_data(REGULAR_PAYLOAD)), 1); - assert!(lane.storage.message(&1).is_some()); - assert_eq!(lane.storage.data().latest_generated_nonce, 1); - }); - } - - #[test] - fn confirm_delivery_works() { - run_test(|| { - let mut lane = outbound_lane::(TEST_LANE_ID); - 
assert_eq!(lane.send_message(outbound_message_data(REGULAR_PAYLOAD)), 1); - assert_eq!(lane.send_message(outbound_message_data(REGULAR_PAYLOAD)), 2); - assert_eq!(lane.send_message(outbound_message_data(REGULAR_PAYLOAD)), 3); - assert_eq!(lane.storage.data().latest_generated_nonce, 3); - assert_eq!(lane.storage.data().latest_received_nonce, 0); - assert_eq!( - lane.confirm_delivery(3, 3, &unrewarded_relayers(1..=3)), - Ok(Some(delivered_messages(1..=3))), - ); - assert_eq!(lane.storage.data().latest_generated_nonce, 3); - assert_eq!(lane.storage.data().latest_received_nonce, 3); - }); - } - - #[test] - fn confirm_delivery_rejects_nonce_lesser_than_latest_received() { - run_test(|| { - let mut lane = outbound_lane::(TEST_LANE_ID); - lane.send_message(outbound_message_data(REGULAR_PAYLOAD)); - lane.send_message(outbound_message_data(REGULAR_PAYLOAD)); - lane.send_message(outbound_message_data(REGULAR_PAYLOAD)); - assert_eq!(lane.storage.data().latest_generated_nonce, 3); - assert_eq!(lane.storage.data().latest_received_nonce, 0); - assert_eq!( - lane.confirm_delivery(3, 3, &unrewarded_relayers(1..=3)), - Ok(Some(delivered_messages(1..=3))), - ); - assert_eq!(lane.confirm_delivery(3, 3, &unrewarded_relayers(1..=3)), Ok(None),); - assert_eq!(lane.storage.data().latest_generated_nonce, 3); - assert_eq!(lane.storage.data().latest_received_nonce, 3); - - assert_eq!(lane.confirm_delivery(1, 2, &unrewarded_relayers(1..=1)), Ok(None),); - assert_eq!(lane.storage.data().latest_generated_nonce, 3); - assert_eq!(lane.storage.data().latest_received_nonce, 3); - }); - } - - #[test] - fn confirm_delivery_rejects_nonce_larger_than_last_generated() { - assert_eq!( - assert_3_messages_confirmation_fails(10, &unrewarded_relayers(1..=10),), - Err(ReceptionConfirmationError::FailedToConfirmFutureMessages), - ); - } - - #[test] - fn confirm_delivery_fails_if_entry_confirms_future_messages() { - assert_eq!( - assert_3_messages_confirmation_fails( - 3, - &unrewarded_relayers(1..=1) - 
.into_iter() - .chain(unrewarded_relayers(2..=30).into_iter()) - .chain(unrewarded_relayers(3..=3).into_iter()) - .collect(), - ), - Err(ReceptionConfirmationError::FailedToConfirmFutureMessages), - ); - } - - #[test] - #[allow(clippy::reversed_empty_ranges)] - fn confirm_delivery_fails_if_entry_is_empty() { - assert_eq!( - assert_3_messages_confirmation_fails( - 3, - &unrewarded_relayers(1..=1) - .into_iter() - .chain(unrewarded_relayers(2..=1).into_iter()) - .chain(unrewarded_relayers(2..=3).into_iter()) - .collect(), - ), - Err(ReceptionConfirmationError::EmptyUnrewardedRelayerEntry), - ); - } - - #[test] - fn confirm_delivery_fails_if_entries_are_non_consecutive() { - assert_eq!( - assert_3_messages_confirmation_fails( - 3, - &unrewarded_relayers(1..=1) - .into_iter() - .chain(unrewarded_relayers(3..=3).into_iter()) - .chain(unrewarded_relayers(2..=2).into_iter()) - .collect(), - ), - Err(ReceptionConfirmationError::NonConsecutiveUnrewardedRelayerEntries), - ); - } - - #[test] - fn prune_messages_works() { - run_test(|| { - let mut lane = outbound_lane::(TEST_LANE_ID); - // when lane is empty, nothing is pruned - assert_eq!( - lane.prune_messages(RocksDbWeight::get(), RocksDbWeight::get().writes(101)), - Weight::zero() - ); - assert_eq!(lane.storage.data().oldest_unpruned_nonce, 1); - // when nothing is confirmed, nothing is pruned - lane.send_message(outbound_message_data(REGULAR_PAYLOAD)); - lane.send_message(outbound_message_data(REGULAR_PAYLOAD)); - lane.send_message(outbound_message_data(REGULAR_PAYLOAD)); - assert!(lane.storage.message(&1).is_some()); - assert!(lane.storage.message(&2).is_some()); - assert!(lane.storage.message(&3).is_some()); - assert_eq!( - lane.prune_messages(RocksDbWeight::get(), RocksDbWeight::get().writes(101)), - Weight::zero() - ); - assert_eq!(lane.storage.data().oldest_unpruned_nonce, 1); - // after confirmation, some messages are received - assert_eq!( - lane.confirm_delivery(2, 2, &unrewarded_relayers(1..=2)), - 
Ok(Some(delivered_messages(1..=2))), - ); - assert_eq!( - lane.prune_messages(RocksDbWeight::get(), RocksDbWeight::get().writes(101)), - RocksDbWeight::get().writes(3), - ); - assert!(lane.storage.message(&1).is_none()); - assert!(lane.storage.message(&2).is_none()); - assert!(lane.storage.message(&3).is_some()); - assert_eq!(lane.storage.data().oldest_unpruned_nonce, 3); - // after last message is confirmed, everything is pruned - assert_eq!( - lane.confirm_delivery(1, 3, &unrewarded_relayers(3..=3)), - Ok(Some(delivered_messages(3..=3))), - ); - assert_eq!( - lane.prune_messages(RocksDbWeight::get(), RocksDbWeight::get().writes(101)), - RocksDbWeight::get().writes(2), - ); - assert!(lane.storage.message(&1).is_none()); - assert!(lane.storage.message(&2).is_none()); - assert!(lane.storage.message(&3).is_none()); - assert_eq!(lane.storage.data().oldest_unpruned_nonce, 4); - }); - } - - #[test] - fn confirm_delivery_detects_when_more_than_expected_messages_are_confirmed() { - run_test(|| { - let mut lane = outbound_lane::(TEST_LANE_ID); - lane.send_message(outbound_message_data(REGULAR_PAYLOAD)); - lane.send_message(outbound_message_data(REGULAR_PAYLOAD)); - lane.send_message(outbound_message_data(REGULAR_PAYLOAD)); - assert_eq!( - lane.confirm_delivery(0, 3, &unrewarded_relayers(1..=3)), - Err(ReceptionConfirmationError::TryingToConfirmMoreMessagesThanExpected), - ); - assert_eq!( - lane.confirm_delivery(2, 3, &unrewarded_relayers(1..=3)), - Err(ReceptionConfirmationError::TryingToConfirmMoreMessagesThanExpected), - ); - assert_eq!( - lane.confirm_delivery(3, 3, &unrewarded_relayers(1..=3)), - Ok(Some(delivered_messages(1..=3))), - ); - }); - } -} diff --git a/modules/messages/src/weights.rs b/modules/messages/src/weights.rs deleted file mode 100644 index 5bf7d5675..000000000 --- a/modules/messages/src/weights.rs +++ /dev/null @@ -1,525 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Autogenerated weights for pallet_bridge_messages -//! -//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2023-03-23, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` -//! WORST CASE MAP SIZE: `1000000` -//! HOSTNAME: `covid`, CPU: `11th Gen Intel(R) Core(TM) i7-11800H @ 2.30GHz` -//! EXECUTION: Some(Wasm), WASM-EXECUTION: Compiled, CHAIN: Some("dev"), DB CACHE: 1024 - -// Executed Command: -// target/release/unknown-bridge-node -// benchmark -// pallet -// --chain=dev -// --steps=50 -// --repeat=20 -// --pallet=pallet_bridge_messages -// --extrinsic=* -// --execution=wasm -// --wasm-execution=Compiled -// --heap-pages=4096 -// --output=./modules/messages/src/weights.rs -// --template=./.maintain/bridge-weight-template.hbs - -#![allow(clippy::all)] -#![allow(unused_parens)] -#![allow(unused_imports)] -#![allow(missing_docs)] - -use frame_support::{ - traits::Get, - weights::{constants::RocksDbWeight, Weight}, -}; -use sp_std::marker::PhantomData; - -/// Weight functions needed for pallet_bridge_messages. 
-pub trait WeightInfo { - fn receive_single_message_proof() -> Weight; - fn receive_two_messages_proof() -> Weight; - fn receive_single_message_proof_with_outbound_lane_state() -> Weight; - fn receive_single_message_proof_1_kb() -> Weight; - fn receive_single_message_proof_16_kb() -> Weight; - fn receive_delivery_proof_for_single_message() -> Weight; - fn receive_delivery_proof_for_two_messages_by_single_relayer() -> Weight; - fn receive_delivery_proof_for_two_messages_by_two_relayers() -> Weight; - fn receive_single_message_proof_with_dispatch(i: u32) -> Weight; -} - -/// Weights for `pallet_bridge_messages` that are generated using one of the Bridge testnets. -/// -/// Those weights are test only and must never be used in production. -pub struct BridgeWeight(PhantomData); -impl WeightInfo for BridgeWeight { - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages InboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages InboundLanes (max_values: None, max_size: Some(49180), added: - /// 51655, mode: MaxEncodedLen) - fn receive_single_message_proof() -> Weight { - // Proof Size summary in bytes: - // Measured: `618` - // Estimated: `57170` - // Minimum execution time: 52_321 nanoseconds. 
- Weight::from_parts(54_478_000, 57170) - .saturating_add(T::DbWeight::get().reads(3_u64)) - .saturating_add(T::DbWeight::get().writes(1_u64)) - } - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages InboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages InboundLanes (max_values: None, max_size: Some(49180), added: - /// 51655, mode: MaxEncodedLen) - fn receive_two_messages_proof() -> Weight { - // Proof Size summary in bytes: - // Measured: `618` - // Estimated: `57170` - // Minimum execution time: 64_597 nanoseconds. - Weight::from_parts(69_267_000, 57170) - .saturating_add(T::DbWeight::get().reads(3_u64)) - .saturating_add(T::DbWeight::get().writes(1_u64)) - } - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages InboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages InboundLanes (max_values: None, max_size: Some(49180), added: - /// 51655, mode: MaxEncodedLen) - fn receive_single_message_proof_with_outbound_lane_state() -> Weight { - // Proof Size summary in bytes: - // Measured: `618` - // Estimated: `57170` - // Minimum execution time: 64_079 nanoseconds. 
- Weight::from_parts(65_905_000, 57170) - .saturating_add(T::DbWeight::get().reads(3_u64)) - .saturating_add(T::DbWeight::get().writes(1_u64)) - } - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages InboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages InboundLanes (max_values: None, max_size: Some(49180), added: - /// 51655, mode: MaxEncodedLen) - fn receive_single_message_proof_1_kb() -> Weight { - // Proof Size summary in bytes: - // Measured: `618` - // Estimated: `57170` - // Minimum execution time: 50_588 nanoseconds. - Weight::from_parts(53_544_000, 57170) - .saturating_add(T::DbWeight::get().reads(3_u64)) - .saturating_add(T::DbWeight::get().writes(1_u64)) - } - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages InboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages InboundLanes (max_values: None, max_size: Some(49180), added: - /// 51655, mode: MaxEncodedLen) - fn receive_single_message_proof_16_kb() -> Weight { - // Proof Size summary in bytes: - // Measured: `618` - // Estimated: `57170` - // Minimum execution time: 78_269 nanoseconds. 
- Weight::from_parts(81_748_000, 57170) - .saturating_add(T::DbWeight::get().reads(3_u64)) - .saturating_add(T::DbWeight::get().writes(1_u64)) - } - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages OutboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages OutboundLanes (max_values: Some(1), max_size: Some(44), added: - /// 539, mode: MaxEncodedLen) - /// - /// Storage: BridgeRelayers RelayerRewards (r:1 w:1) - /// - /// Proof: BridgeRelayers RelayerRewards (max_values: None, max_size: Some(65), added: 2540, - /// mode: MaxEncodedLen) - fn receive_delivery_proof_for_single_message() -> Weight { - // Proof Size summary in bytes: - // Measured: `579` - // Estimated: `9584` - // Minimum execution time: 45_786 nanoseconds. 
- Weight::from_parts(47_382_000, 9584) - .saturating_add(T::DbWeight::get().reads(4_u64)) - .saturating_add(T::DbWeight::get().writes(2_u64)) - } - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages OutboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages OutboundLanes (max_values: Some(1), max_size: Some(44), added: - /// 539, mode: MaxEncodedLen) - /// - /// Storage: BridgeRelayers RelayerRewards (r:1 w:1) - /// - /// Proof: BridgeRelayers RelayerRewards (max_values: None, max_size: Some(65), added: 2540, - /// mode: MaxEncodedLen) - fn receive_delivery_proof_for_two_messages_by_single_relayer() -> Weight { - // Proof Size summary in bytes: - // Measured: `596` - // Estimated: `9584` - // Minimum execution time: 44_544 nanoseconds. 
- Weight::from_parts(45_451_000, 9584) - .saturating_add(T::DbWeight::get().reads(4_u64)) - .saturating_add(T::DbWeight::get().writes(2_u64)) - } - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages OutboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages OutboundLanes (max_values: Some(1), max_size: Some(44), added: - /// 539, mode: MaxEncodedLen) - /// - /// Storage: BridgeRelayers RelayerRewards (r:2 w:2) - /// - /// Proof: BridgeRelayers RelayerRewards (max_values: None, max_size: Some(65), added: 2540, - /// mode: MaxEncodedLen) - fn receive_delivery_proof_for_two_messages_by_two_relayers() -> Weight { - // Proof Size summary in bytes: - // Measured: `596` - // Estimated: `12124` - // Minimum execution time: 47_344 nanoseconds. - Weight::from_parts(48_311_000, 12124) - .saturating_add(T::DbWeight::get().reads(5_u64)) - .saturating_add(T::DbWeight::get().writes(3_u64)) - } - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages InboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages InboundLanes (max_values: None, max_size: Some(49180), added: - /// 51655, mode: MaxEncodedLen) - /// - /// The range of component `i` is `[128, 2048]`. 
- fn receive_single_message_proof_with_dispatch(i: u32) -> Weight { - // Proof Size summary in bytes: - // Measured: `618` - // Estimated: `57170` - // Minimum execution time: 52_385 nanoseconds. - Weight::from_parts(54_919_468, 57170) - // Standard Error: 108 - .saturating_add(Weight::from_parts(3_286, 0).saturating_mul(i.into())) - .saturating_add(T::DbWeight::get().reads(3_u64)) - .saturating_add(T::DbWeight::get().writes(1_u64)) - } -} - -// For backwards compatibility and tests -impl WeightInfo for () { - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages InboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages InboundLanes (max_values: None, max_size: Some(49180), added: - /// 51655, mode: MaxEncodedLen) - fn receive_single_message_proof() -> Weight { - // Proof Size summary in bytes: - // Measured: `618` - // Estimated: `57170` - // Minimum execution time: 52_321 nanoseconds. 
- Weight::from_parts(54_478_000, 57170) - .saturating_add(RocksDbWeight::get().reads(3_u64)) - .saturating_add(RocksDbWeight::get().writes(1_u64)) - } - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages InboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages InboundLanes (max_values: None, max_size: Some(49180), added: - /// 51655, mode: MaxEncodedLen) - fn receive_two_messages_proof() -> Weight { - // Proof Size summary in bytes: - // Measured: `618` - // Estimated: `57170` - // Minimum execution time: 64_597 nanoseconds. - Weight::from_parts(69_267_000, 57170) - .saturating_add(RocksDbWeight::get().reads(3_u64)) - .saturating_add(RocksDbWeight::get().writes(1_u64)) - } - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages InboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages InboundLanes (max_values: None, max_size: Some(49180), added: - /// 51655, mode: MaxEncodedLen) - fn receive_single_message_proof_with_outbound_lane_state() -> Weight { - // Proof Size summary in bytes: - // Measured: `618` - // Estimated: `57170` - // Minimum execution time: 64_079 nanoseconds. 
- Weight::from_parts(65_905_000, 57170) - .saturating_add(RocksDbWeight::get().reads(3_u64)) - .saturating_add(RocksDbWeight::get().writes(1_u64)) - } - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages InboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages InboundLanes (max_values: None, max_size: Some(49180), added: - /// 51655, mode: MaxEncodedLen) - fn receive_single_message_proof_1_kb() -> Weight { - // Proof Size summary in bytes: - // Measured: `618` - // Estimated: `57170` - // Minimum execution time: 50_588 nanoseconds. - Weight::from_parts(53_544_000, 57170) - .saturating_add(RocksDbWeight::get().reads(3_u64)) - .saturating_add(RocksDbWeight::get().writes(1_u64)) - } - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages InboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages InboundLanes (max_values: None, max_size: Some(49180), added: - /// 51655, mode: MaxEncodedLen) - fn receive_single_message_proof_16_kb() -> Weight { - // Proof Size summary in bytes: - // Measured: `618` - // Estimated: `57170` - // Minimum execution time: 78_269 nanoseconds. 
- Weight::from_parts(81_748_000, 57170) - .saturating_add(RocksDbWeight::get().reads(3_u64)) - .saturating_add(RocksDbWeight::get().writes(1_u64)) - } - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages OutboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages OutboundLanes (max_values: Some(1), max_size: Some(44), added: - /// 539, mode: MaxEncodedLen) - /// - /// Storage: BridgeRelayers RelayerRewards (r:1 w:1) - /// - /// Proof: BridgeRelayers RelayerRewards (max_values: None, max_size: Some(65), added: 2540, - /// mode: MaxEncodedLen) - fn receive_delivery_proof_for_single_message() -> Weight { - // Proof Size summary in bytes: - // Measured: `579` - // Estimated: `9584` - // Minimum execution time: 45_786 nanoseconds. 
- Weight::from_parts(47_382_000, 9584) - .saturating_add(RocksDbWeight::get().reads(4_u64)) - .saturating_add(RocksDbWeight::get().writes(2_u64)) - } - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages OutboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages OutboundLanes (max_values: Some(1), max_size: Some(44), added: - /// 539, mode: MaxEncodedLen) - /// - /// Storage: BridgeRelayers RelayerRewards (r:1 w:1) - /// - /// Proof: BridgeRelayers RelayerRewards (max_values: None, max_size: Some(65), added: 2540, - /// mode: MaxEncodedLen) - fn receive_delivery_proof_for_two_messages_by_single_relayer() -> Weight { - // Proof Size summary in bytes: - // Measured: `596` - // Estimated: `9584` - // Minimum execution time: 44_544 nanoseconds. 
- Weight::from_parts(45_451_000, 9584) - .saturating_add(RocksDbWeight::get().reads(4_u64)) - .saturating_add(RocksDbWeight::get().writes(2_u64)) - } - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages OutboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages OutboundLanes (max_values: Some(1), max_size: Some(44), added: - /// 539, mode: MaxEncodedLen) - /// - /// Storage: BridgeRelayers RelayerRewards (r:2 w:2) - /// - /// Proof: BridgeRelayers RelayerRewards (max_values: None, max_size: Some(65), added: 2540, - /// mode: MaxEncodedLen) - fn receive_delivery_proof_for_two_messages_by_two_relayers() -> Weight { - // Proof Size summary in bytes: - // Measured: `596` - // Estimated: `12124` - // Minimum execution time: 47_344 nanoseconds. - Weight::from_parts(48_311_000, 12124) - .saturating_add(RocksDbWeight::get().reads(5_u64)) - .saturating_add(RocksDbWeight::get().writes(3_u64)) - } - /// Storage: BridgeUnknownMessages PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownMessages PalletOperatingMode (max_values: Some(1), max_size: Some(2), - /// added: 497, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownMessages InboundLanes (r:1 w:1) - /// - /// Proof: BridgeUnknownMessages InboundLanes (max_values: None, max_size: Some(49180), added: - /// 51655, mode: MaxEncodedLen) - /// - /// The range of component `i` is `[128, 2048]`. 
- fn receive_single_message_proof_with_dispatch(i: u32) -> Weight { - // Proof Size summary in bytes: - // Measured: `618` - // Estimated: `57170` - // Minimum execution time: 52_385 nanoseconds. - Weight::from_parts(54_919_468, 57170) - // Standard Error: 108 - .saturating_add(Weight::from_parts(3_286, 0).saturating_mul(i.into())) - .saturating_add(RocksDbWeight::get().reads(3_u64)) - .saturating_add(RocksDbWeight::get().writes(1_u64)) - } -} diff --git a/modules/messages/src/weights_ext.rs b/modules/messages/src/weights_ext.rs deleted file mode 100644 index c12e04f69..000000000 --- a/modules/messages/src/weights_ext.rs +++ /dev/null @@ -1,488 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Weight-related utilities. - -use crate::weights::WeightInfo; - -use bp_messages::{MessageNonce, UnrewardedRelayersState}; -use bp_runtime::{PreComputedSize, Size}; -use frame_support::weights::Weight; - -/// Size of the message being delivered in benchmarks. -pub const EXPECTED_DEFAULT_MESSAGE_LENGTH: u32 = 128; - -/// We assume that size of signed extensions on all our chains and size of all 'small' arguments of -/// calls we're checking here would fit 1KB. 
-const SIGNED_EXTENSIONS_SIZE: u32 = 1024; - -/// Number of extra bytes (excluding size of storage value itself) of storage proof. -/// This mostly depends on number of entries (and their density) in the storage trie. -/// Some reserve is reserved to account future chain growth. -pub const EXTRA_STORAGE_PROOF_SIZE: u32 = 1024; - -/// Ensure that weights from `WeightInfoExt` implementation are looking correct. -pub fn ensure_weights_are_correct() { - // all components of weight formulae must have zero `proof_size`, because the `proof_size` is - // benchmarked using `MaxEncodedLen` approach and there are no components that cause additional - // db reads - - // verify `receive_messages_proof` weight components - assert_ne!(W::receive_messages_proof_overhead().ref_time(), 0); - assert_ne!(W::receive_messages_proof_overhead().proof_size(), 0); - // W::receive_messages_proof_messages_overhead(1).ref_time() may be zero because: - // the message processing code (`InboundLane::receive_message`) is minimal and may not be - // accounted by our benchmarks - assert_eq!(W::receive_messages_proof_messages_overhead(1).proof_size(), 0); - // W::receive_messages_proof_outbound_lane_state_overhead().ref_time() may be zero because: - // the outbound lane state processing code (`InboundLane::receive_state_update`) is minimal and - // may not be accounted by our benchmarks - assert_eq!(W::receive_messages_proof_outbound_lane_state_overhead().proof_size(), 0); - assert_ne!(W::storage_proof_size_overhead(1).ref_time(), 0); - assert_eq!(W::storage_proof_size_overhead(1).proof_size(), 0); - - // verify `receive_messages_delivery_proof` weight components - assert_ne!(W::receive_messages_delivery_proof_overhead().ref_time(), 0); - assert_ne!(W::receive_messages_delivery_proof_overhead().proof_size(), 0); - // W::receive_messages_delivery_proof_messages_overhead(1).ref_time() may be zero because: - // there's no code that iterates over confirmed messages in confirmation transaction - 
assert_eq!(W::receive_messages_delivery_proof_messages_overhead(1).proof_size(), 0); - // W::receive_messages_delivery_proof_relayers_overhead(1).ref_time() may be zero because: - // runtime **can** choose not to pay any rewards to relayers - // W::receive_messages_delivery_proof_relayers_overhead(1).proof_size() is an exception - // it may or may not cause additional db reads, so proof size may vary - assert_ne!(W::storage_proof_size_overhead(1).ref_time(), 0); - assert_eq!(W::storage_proof_size_overhead(1).proof_size(), 0); - - // verify `receive_message_proof` weight - let receive_messages_proof_weight = - W::receive_messages_proof_weight(&PreComputedSize(1), 10, Weight::zero()); - assert_ne!(receive_messages_proof_weight.ref_time(), 0); - assert_ne!(receive_messages_proof_weight.proof_size(), 0); - messages_proof_size_does_not_affect_proof_size::(); - messages_count_does_not_affect_proof_size::(); - - // verify `receive_message_proof` weight - let receive_messages_delivery_proof_weight = W::receive_messages_delivery_proof_weight( - &PreComputedSize(1), - &UnrewardedRelayersState::default(), - ); - assert_ne!(receive_messages_delivery_proof_weight.ref_time(), 0); - assert_ne!(receive_messages_delivery_proof_weight.proof_size(), 0); - messages_delivery_proof_size_does_not_affect_proof_size::(); - total_messages_in_delivery_proof_does_not_affect_proof_size::(); -} - -/// Ensure that we're able to receive maximal (by-size and by-weight) message from other chain. 
-pub fn ensure_able_to_receive_message( - max_extrinsic_size: u32, - max_extrinsic_weight: Weight, - max_incoming_message_proof_size: u32, - max_incoming_message_dispatch_weight: Weight, -) { - // verify that we're able to receive proof of maximal-size message - let max_delivery_transaction_size = - max_incoming_message_proof_size.saturating_add(SIGNED_EXTENSIONS_SIZE); - assert!( - max_delivery_transaction_size <= max_extrinsic_size, - "Size of maximal message delivery transaction {max_incoming_message_proof_size} + {SIGNED_EXTENSIONS_SIZE} is larger than maximal possible transaction size {max_extrinsic_size}", - ); - - // verify that we're able to receive proof of maximal-size message with maximal dispatch weight - let max_delivery_transaction_dispatch_weight = W::receive_messages_proof_weight( - &PreComputedSize( - (max_incoming_message_proof_size + W::expected_extra_storage_proof_size()) as usize, - ), - 1, - max_incoming_message_dispatch_weight, - ); - assert!( - max_delivery_transaction_dispatch_weight.all_lte(max_extrinsic_weight), - "Weight of maximal message delivery transaction + {max_delivery_transaction_dispatch_weight} is larger than maximal possible transaction weight {max_extrinsic_weight}", - ); -} - -/// Ensure that we're able to receive maximal confirmation from other chain. 
-pub fn ensure_able_to_receive_confirmation( - max_extrinsic_size: u32, - max_extrinsic_weight: Weight, - max_inbound_lane_data_proof_size_from_peer_chain: u32, - max_unrewarded_relayer_entries_at_peer_inbound_lane: MessageNonce, - max_unconfirmed_messages_at_inbound_lane: MessageNonce, -) { - // verify that we're able to receive confirmation of maximal-size - let max_confirmation_transaction_size = - max_inbound_lane_data_proof_size_from_peer_chain.saturating_add(SIGNED_EXTENSIONS_SIZE); - assert!( - max_confirmation_transaction_size <= max_extrinsic_size, - "Size of maximal message delivery confirmation transaction {max_inbound_lane_data_proof_size_from_peer_chain} + {SIGNED_EXTENSIONS_SIZE} is larger than maximal possible transaction size {max_extrinsic_size}", - ); - - // verify that we're able to reward maximal number of relayers that have delivered maximal - // number of messages - let max_confirmation_transaction_dispatch_weight = W::receive_messages_delivery_proof_weight( - &PreComputedSize(max_inbound_lane_data_proof_size_from_peer_chain as usize), - &UnrewardedRelayersState { - unrewarded_relayer_entries: max_unrewarded_relayer_entries_at_peer_inbound_lane, - total_messages: max_unconfirmed_messages_at_inbound_lane, - ..Default::default() - }, - ); - assert!( - max_confirmation_transaction_dispatch_weight.all_lte(max_extrinsic_weight), - "Weight of maximal confirmation transaction {max_confirmation_transaction_dispatch_weight} is larger than maximal possible transaction weight {max_extrinsic_weight}", - ); -} - -/// Panics if `proof_size` of message delivery call depends on the message proof size. 
-fn messages_proof_size_does_not_affect_proof_size() { - let dispatch_weight = Weight::zero(); - let weight_when_proof_size_is_8k = - W::receive_messages_proof_weight(&PreComputedSize(8 * 1024), 1, dispatch_weight); - let weight_when_proof_size_is_16k = - W::receive_messages_proof_weight(&PreComputedSize(16 * 1024), 1, dispatch_weight); - - ensure_weight_components_are_not_zero(weight_when_proof_size_is_8k); - ensure_weight_components_are_not_zero(weight_when_proof_size_is_16k); - ensure_proof_size_is_the_same( - weight_when_proof_size_is_8k, - weight_when_proof_size_is_16k, - "Messages proof size does not affect values that we read from our storage", - ); -} - -/// Panics if `proof_size` of message delivery call depends on the messages count. -/// -/// In practice, it will depend on the messages count, because most probably every -/// message will read something from db during dispatch. But this must be accounted -/// by the `dispatch_weight`. -fn messages_count_does_not_affect_proof_size() { - let messages_proof_size = PreComputedSize(8 * 1024); - let dispatch_weight = Weight::zero(); - let weight_of_one_incoming_message = - W::receive_messages_proof_weight(&messages_proof_size, 1, dispatch_weight); - let weight_of_two_incoming_messages = - W::receive_messages_proof_weight(&messages_proof_size, 2, dispatch_weight); - - ensure_weight_components_are_not_zero(weight_of_one_incoming_message); - ensure_weight_components_are_not_zero(weight_of_two_incoming_messages); - ensure_proof_size_is_the_same( - weight_of_one_incoming_message, - weight_of_two_incoming_messages, - "Number of same-lane incoming messages does not affect values that we read from our storage", - ); -} - -/// Panics if `proof_size` of delivery confirmation call depends on the delivery proof size. 
-fn messages_delivery_proof_size_does_not_affect_proof_size() { - let relayers_state = UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - messages_in_oldest_entry: 1, - total_messages: 1, - last_delivered_nonce: 1, - }; - let weight_when_proof_size_is_8k = - W::receive_messages_delivery_proof_weight(&PreComputedSize(8 * 1024), &relayers_state); - let weight_when_proof_size_is_16k = - W::receive_messages_delivery_proof_weight(&PreComputedSize(16 * 1024), &relayers_state); - - ensure_weight_components_are_not_zero(weight_when_proof_size_is_8k); - ensure_weight_components_are_not_zero(weight_when_proof_size_is_16k); - ensure_proof_size_is_the_same( - weight_when_proof_size_is_8k, - weight_when_proof_size_is_16k, - "Messages delivery proof size does not affect values that we read from our storage", - ); -} - -/// Panics if `proof_size` of delivery confirmation call depends on the number of confirmed -/// messages. -fn total_messages_in_delivery_proof_does_not_affect_proof_size() { - let proof_size = PreComputedSize(8 * 1024); - let weight_when_1k_messages_confirmed = W::receive_messages_delivery_proof_weight( - &proof_size, - &UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - messages_in_oldest_entry: 1, - total_messages: 1024, - last_delivered_nonce: 1, - }, - ); - let weight_when_2k_messages_confirmed = W::receive_messages_delivery_proof_weight( - &proof_size, - &UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - messages_in_oldest_entry: 1, - total_messages: 2048, - last_delivered_nonce: 1, - }, - ); - - ensure_weight_components_are_not_zero(weight_when_1k_messages_confirmed); - ensure_weight_components_are_not_zero(weight_when_2k_messages_confirmed); - ensure_proof_size_is_the_same( - weight_when_1k_messages_confirmed, - weight_when_2k_messages_confirmed, - "More messages in delivery proof does not affect values that we read from our storage", - ); -} - -/// Panics if either Weight' `proof_size` or `ref_time` are zero. 
-fn ensure_weight_components_are_not_zero(weight: Weight) { - assert_ne!(weight.ref_time(), 0); - assert_ne!(weight.proof_size(), 0); -} - -/// Panics if `proof_size` of `weight1` is not equal to `proof_size` of `weight2`. -fn ensure_proof_size_is_the_same(weight1: Weight, weight2: Weight, msg: &str) { - assert_eq!( - weight1.proof_size(), - weight2.proof_size(), - "{msg}: {} must be equal to {}", - weight1.proof_size(), - weight2.proof_size(), - ); -} - -/// Extended weight info. -pub trait WeightInfoExt: WeightInfo { - /// Size of proof that is already included in the single message delivery weight. - /// - /// The message submitter (at source chain) has already covered this cost. But there are two - /// factors that may increase proof size: (1) the message size may be larger than predefined - /// and (2) relayer may add extra trie nodes to the proof. So if proof size is larger than - /// this value, we're going to charge relayer for that. - fn expected_extra_storage_proof_size() -> u32; - - // Our configuration assumes that the runtime has special signed extensions used to: - // - // 1) reject obsolete delivery and confirmation transactions; - // - // 2) refund transaction cost to relayer and register his rewards. - // - // The checks in (1) are trivial, so its computation weight may be ignored. And we only touch - // storage values that are read during the call. So we may ignore the weight of this check. - // - // However, during (2) we read and update storage values of other pallets - // (`pallet-bridge-relayers` and balances/assets pallet). So we need to add this weight to the - // weight of our call. Hence two following methods. - - /// Extra weight that is added to the `receive_messages_proof` call weight by signed extensions - /// that are declared at runtime level. 
- fn receive_messages_proof_overhead_from_runtime() -> Weight; - - /// Extra weight that is added to the `receive_messages_delivery_proof` call weight by signed - /// extensions that are declared at runtime level. - fn receive_messages_delivery_proof_overhead_from_runtime() -> Weight; - - // Functions that are directly mapped to extrinsics weights. - - /// Weight of message delivery extrinsic. - fn receive_messages_proof_weight( - proof: &impl Size, - messages_count: u32, - dispatch_weight: Weight, - ) -> Weight { - // basic components of extrinsic weight - let transaction_overhead = Self::receive_messages_proof_overhead(); - let transaction_overhead_from_runtime = - Self::receive_messages_proof_overhead_from_runtime(); - let outbound_state_delivery_weight = - Self::receive_messages_proof_outbound_lane_state_overhead(); - let messages_delivery_weight = - Self::receive_messages_proof_messages_overhead(MessageNonce::from(messages_count)); - let messages_dispatch_weight = dispatch_weight; - - // proof size overhead weight - let expected_proof_size = EXPECTED_DEFAULT_MESSAGE_LENGTH - .saturating_mul(messages_count.saturating_sub(1)) - .saturating_add(Self::expected_extra_storage_proof_size()); - let actual_proof_size = proof.size(); - let proof_size_overhead = Self::storage_proof_size_overhead( - actual_proof_size.saturating_sub(expected_proof_size), - ); - - transaction_overhead - .saturating_add(transaction_overhead_from_runtime) - .saturating_add(outbound_state_delivery_weight) - .saturating_add(messages_delivery_weight) - .saturating_add(messages_dispatch_weight) - .saturating_add(proof_size_overhead) - } - - /// Weight of confirmation delivery extrinsic. 
- fn receive_messages_delivery_proof_weight( - proof: &impl Size, - relayers_state: &UnrewardedRelayersState, - ) -> Weight { - // basic components of extrinsic weight - let transaction_overhead = Self::receive_messages_delivery_proof_overhead(); - let transaction_overhead_from_runtime = - Self::receive_messages_delivery_proof_overhead_from_runtime(); - let messages_overhead = - Self::receive_messages_delivery_proof_messages_overhead(relayers_state.total_messages); - let relayers_overhead = Self::receive_messages_delivery_proof_relayers_overhead( - relayers_state.unrewarded_relayer_entries, - ); - - // proof size overhead weight - let expected_proof_size = Self::expected_extra_storage_proof_size(); - let actual_proof_size = proof.size(); - let proof_size_overhead = Self::storage_proof_size_overhead( - actual_proof_size.saturating_sub(expected_proof_size), - ); - - transaction_overhead - .saturating_add(transaction_overhead_from_runtime) - .saturating_add(messages_overhead) - .saturating_add(relayers_overhead) - .saturating_add(proof_size_overhead) - } - - // Functions that are used by extrinsics weights formulas. - - /// Returns weight overhead of message delivery transaction (`receive_messages_proof`). - fn receive_messages_proof_overhead() -> Weight { - let weight_of_two_messages_and_two_tx_overheads = - Self::receive_single_message_proof().saturating_mul(2); - let weight_of_two_messages_and_single_tx_overhead = Self::receive_two_messages_proof(); - weight_of_two_messages_and_two_tx_overheads - .saturating_sub(weight_of_two_messages_and_single_tx_overhead) - } - - /// Returns weight that needs to be accounted when receiving given a number of messages with - /// message delivery transaction (`receive_messages_proof`). 
- fn receive_messages_proof_messages_overhead(messages: MessageNonce) -> Weight { - let weight_of_two_messages_and_single_tx_overhead = Self::receive_two_messages_proof(); - let weight_of_single_message_and_single_tx_overhead = Self::receive_single_message_proof(); - weight_of_two_messages_and_single_tx_overhead - .saturating_sub(weight_of_single_message_and_single_tx_overhead) - .saturating_mul(messages as _) - } - - /// Returns weight that needs to be accounted when message delivery transaction - /// (`receive_messages_proof`) is carrying outbound lane state proof. - fn receive_messages_proof_outbound_lane_state_overhead() -> Weight { - let weight_of_single_message_and_lane_state = - Self::receive_single_message_proof_with_outbound_lane_state(); - let weight_of_single_message = Self::receive_single_message_proof(); - weight_of_single_message_and_lane_state.saturating_sub(weight_of_single_message) - } - - /// Returns weight overhead of delivery confirmation transaction - /// (`receive_messages_delivery_proof`). - fn receive_messages_delivery_proof_overhead() -> Weight { - let weight_of_two_messages_and_two_tx_overheads = - Self::receive_delivery_proof_for_single_message().saturating_mul(2); - let weight_of_two_messages_and_single_tx_overhead = - Self::receive_delivery_proof_for_two_messages_by_single_relayer(); - weight_of_two_messages_and_two_tx_overheads - .saturating_sub(weight_of_two_messages_and_single_tx_overhead) - } - - /// Returns weight that needs to be accounted when receiving confirmations for given a number of - /// messages with delivery confirmation transaction (`receive_messages_delivery_proof`). 
- fn receive_messages_delivery_proof_messages_overhead(messages: MessageNonce) -> Weight { - let weight_of_two_messages = - Self::receive_delivery_proof_for_two_messages_by_single_relayer(); - let weight_of_single_message = Self::receive_delivery_proof_for_single_message(); - weight_of_two_messages - .saturating_sub(weight_of_single_message) - .saturating_mul(messages as _) - } - - /// Returns weight that needs to be accounted when receiving confirmations for given a number of - /// relayers entries with delivery confirmation transaction (`receive_messages_delivery_proof`). - fn receive_messages_delivery_proof_relayers_overhead(relayers: MessageNonce) -> Weight { - let weight_of_two_messages_by_two_relayers = - Self::receive_delivery_proof_for_two_messages_by_two_relayers(); - let weight_of_two_messages_by_single_relayer = - Self::receive_delivery_proof_for_two_messages_by_single_relayer(); - weight_of_two_messages_by_two_relayers - .saturating_sub(weight_of_two_messages_by_single_relayer) - .saturating_mul(relayers as _) - } - - /// Returns weight that needs to be accounted when storage proof of given size is received - /// (either in `receive_messages_proof` or `receive_messages_delivery_proof`). - /// - /// **IMPORTANT**: this overhead is already included in the 'base' transaction cost - e.g. proof - /// size depends on messages count or number of entries in the unrewarded relayers set. So this - /// shouldn't be added to cost of transaction, but instead should act as a minimal cost that the - /// relayer must pay when it relays proof of given size (even if cost based on other parameters - /// is less than that cost). - fn storage_proof_size_overhead(proof_size: u32) -> Weight { - let proof_size_in_bytes = proof_size; - let byte_weight = (Self::receive_single_message_proof_16_kb() - - Self::receive_single_message_proof_1_kb()) / - (15 * 1024); - proof_size_in_bytes * byte_weight - } - - // Functions that may be used by runtime developers. 
- - /// Returns dispatch weight of message of given size. - /// - /// This function would return correct value only if your runtime is configured to run - /// `receive_single_message_proof_with_dispatch` benchmark. See its requirements for - /// details. - fn message_dispatch_weight(message_size: u32) -> Weight { - // There may be a tiny overweight/underweight here, because we don't account how message - // size affects all steps before dispatch. But the effect should be small enough and we - // may ignore it. - Self::receive_single_message_proof_with_dispatch(message_size) - .saturating_sub(Self::receive_single_message_proof()) - } -} - -impl WeightInfoExt for () { - fn expected_extra_storage_proof_size() -> u32 { - EXTRA_STORAGE_PROOF_SIZE - } - - fn receive_messages_proof_overhead_from_runtime() -> Weight { - Weight::zero() - } - - fn receive_messages_delivery_proof_overhead_from_runtime() -> Weight { - Weight::zero() - } -} - -impl WeightInfoExt for crate::weights::BridgeWeight { - fn expected_extra_storage_proof_size() -> u32 { - EXTRA_STORAGE_PROOF_SIZE - } - - fn receive_messages_proof_overhead_from_runtime() -> Weight { - Weight::zero() - } - - fn receive_messages_delivery_proof_overhead_from_runtime() -> Weight { - Weight::zero() - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{mock::TestRuntime, weights::BridgeWeight}; - - #[test] - fn ensure_default_weights_are_correct() { - ensure_weights_are_correct::>(); - } -} diff --git a/modules/parachains/Cargo.toml b/modules/parachains/Cargo.toml deleted file mode 100644 index 6352b21b8..000000000 --- a/modules/parachains/Cargo.toml +++ /dev/null @@ -1,71 +0,0 @@ -[package] -name = "pallet-bridge-parachains" -version = "0.7.0" -description = "Module that allows bridged relay chains to exchange information on their parachains' heads." 
-authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } -log = { workspace = true } -scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } - -# Bridge Dependencies - -bp-header-chain = { path = "../../primitives/header-chain", default-features = false } -bp-parachains = { path = "../../primitives/parachains", default-features = false } -bp-polkadot-core = { path = "../../primitives/polkadot-core", default-features = false } -bp-runtime = { path = "../../primitives/runtime", default-features = false } -pallet-bridge-grandpa = { path = "../grandpa", default-features = false } - -# Substrate Dependencies - -frame-benchmarking = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false, optional = true } -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -frame-system = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-trie = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[dev-dependencies] -bp-header-chain = { path = "../../primitives/header-chain" } -bp-test-utils = { path = "../../primitives/test-utils" } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-io = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } - -[features] -default = ["std"] -std = [ - "bp-header-chain/std", - "bp-parachains/std", - "bp-polkadot-core/std", - 
"bp-runtime/std", - "codec/std", - "frame-benchmarking/std", - "frame-support/std", - "frame-system/std", - "log/std", - "pallet-bridge-grandpa/std", - "scale-info/std", - "sp-runtime/std", - "sp-std/std", - "sp-trie/std", -] -runtime-benchmarks = [ - "frame-benchmarking/runtime-benchmarks", - "frame-support/runtime-benchmarks", - "frame-system/runtime-benchmarks", - "pallet-bridge-grandpa/runtime-benchmarks", - "sp-runtime/runtime-benchmarks", -] -try-runtime = [ - "frame-support/try-runtime", - "frame-system/try-runtime", - "pallet-bridge-grandpa/try-runtime", - "sp-runtime/try-runtime", -] diff --git a/modules/parachains/README.md b/modules/parachains/README.md deleted file mode 100644 index 9ca608038..000000000 --- a/modules/parachains/README.md +++ /dev/null @@ -1,90 +0,0 @@ -# Bridge Parachains Pallet - -The bridge parachains pallet is a light client for one or several parachains of the bridged relay chain. -It serves as a source of finalized parachain headers and is used when you need to build a bridge with -a parachain. - -The pallet requires [bridge GRANDPA pallet](../grandpa/) to be deployed at the same chain - it is used -to verify storage proofs, generated at the bridged relay chain. - -## A Brief Introduction into Parachains Finality - -You can find detailed information on parachains finality in the -[Polkadot-SDK](https://github.com/paritytech/polkadot-sdk) repository. This section gives a brief overview of how the -parachain finality works and how to build a light client for a parachain. - -The main thing there is that the parachain generates blocks on its own, but it can't achieve finality without -help of its relay chain. Instead, the parachain collators create a block and hand it over to the relay chain -validators. 
Validators validate the block and register the new parachain head in the -[`Heads` map](https://github.com/paritytech/polkadot-sdk/blob/bc5005217a8c2e7c95b9011c96d7e619879b1200/polkadot/runtime/parachains/src/paras/mod.rs#L683-L686) -of the [`paras`](https://github.com/paritytech/polkadot-sdk/tree/master/polkadot/runtime/parachains/src/paras) pallet, -deployed at the relay chain. Keep in mind that this pallet, deployed at a relay chain, is **NOT** a bridge pallet, -even though the names are similar. - -And what the bridge parachains pallet does, is simply verifying storage proofs of parachain heads within that -`Heads` map. It does that using relay chain header, that has been previously imported by the -[bridge GRANDPA pallet](../grandpa/). Once the proof is verified, the pallet knows that the given parachain -header has been finalized by the relay chain. The parachain header fields may then be used to verify storage -proofs, coming from the parachain. This allows the pallet to be used e.g. as a source of finality for the messages -pallet. - -## Pallet Operations - -The main entrypoint of the pallet is the `submit_parachain_heads` call. It has three arguments: - -- storage proof of parachain heads from the `Heads` map; - -- parachain identifiers and hashes of their heads from the storage proof; - -- the relay block, at which the storage proof has been generated. - -The pallet may track multiple parachains. And the parachains may use different primitives - one may use 128-bit block -numbers, other - 32-bit. To avoid extra decode operations, the pallet is using relay chain block number to order -parachain headers. Any finalized descendant of finalized relay block `RB`, which has parachain block `PB` in -its `Heads` map, is guaranteed to have either `PB`, or its descendant. So parachain block number grows with relay -block number. - -The pallet may reject parachain head if it already knows better (or the same) head. 
In addition, pallet rejects -heads of untracked parachains. - -The pallet doesn't track anything behind parachain heads. So it requires no initialization - it is ready to accept -headers right after deployment. - -## Non-Essential Functionality - -There may be a special account in every runtime where the bridge parachains module is deployed. This -account, named 'module owner', is like a module-level sudo account - he's able to halt and -resume all module operations without requiring runtime upgrade. Calls that are related to this -account are: - -- `fn set_owner()`: current module owner may call it to transfer "ownership" to another account; - -- `fn set_operating_mode()`: the module owner (or sudo account) may call this function to stop all - module operations. After this call, all finality proofs will be rejected until further `set_operating_mode` call'. - This call may be used when something extraordinary happens with the bridge. - -If pallet owner is not defined, the governance may be used to make those calls. - -## Signed Extension to Reject Obsolete Headers - -It'd be better for anyone (for chain and for submitters) to reject all transactions that are submitting -already known parachain heads to the pallet. This way, we leave block space to other useful transactions and -we don't charge concurrent submitters for their honest actions. - -To deal with that, we have a [signed extension](./src/call_ext) that may be added to the runtime. -It does exactly what is required - rejects all transactions with already known heads. The submitter -pays nothing for such transactions - they're simply removed from the transaction pool, when the block -is built. - -The signed extension, however, is a bit limited - it only works with transactions that provide single -parachain head. So it won't work with multiple parachain heads transactions. This fits our needs -for [Kusama <> Polkadot bridge](../../docs/polkadot-kusama-bridge-overview.md). 
If you need to deal -with other transaction formats, you may implement similar extension for your runtime. - -You may also take a look at the [`generate_bridge_reject_obsolete_headers_and_messages`](../../bin/runtime-common/src/lib.rs) -macro that bundles several similar signed extensions in a single one. - -## Parachains Finality Relay - -We have an offchain actor, who is watching for new parachain heads and submits them to the bridged chain. -It is the parachains relay - you may look at the [crate level documentation and the code](../../relays/parachains/). diff --git a/modules/parachains/src/benchmarking.rs b/modules/parachains/src/benchmarking.rs deleted file mode 100644 index 27e06a12a..000000000 --- a/modules/parachains/src/benchmarking.rs +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Parachains finality pallet benchmarking. - -use crate::{ - weights_ext::DEFAULT_PARACHAIN_HEAD_SIZE, Call, RelayBlockHash, RelayBlockHasher, - RelayBlockNumber, -}; - -use bp_polkadot_core::parachains::{ParaHash, ParaHeadsProof, ParaId}; -use bp_runtime::StorageProofSize; -use frame_benchmarking::{account, benchmarks_instance_pallet}; -use frame_system::RawOrigin; -use sp_std::prelude::*; - -/// Pallet we're benchmarking here. 
-pub struct Pallet, I: 'static = ()>(crate::Pallet); - -/// Trait that must be implemented by runtime to benchmark the parachains finality pallet. -pub trait Config: crate::Config { - /// Returns vector of supported parachains. - fn parachains() -> Vec; - /// Generate parachain heads proof and prepare environment for verifying this proof. - fn prepare_parachain_heads_proof( - parachains: &[ParaId], - parachain_head_size: u32, - proof_size: StorageProofSize, - ) -> (RelayBlockNumber, RelayBlockHash, ParaHeadsProof, Vec<(ParaId, ParaHash)>); -} - -benchmarks_instance_pallet! { - where_clause { - where - >::BridgedChain: - bp_runtime::Chain< - BlockNumber = RelayBlockNumber, - Hash = RelayBlockHash, - Hasher = RelayBlockHasher, - >, - } - - // Benchmark `submit_parachain_heads` extrinsic with different number of parachains. - submit_parachain_heads_with_n_parachains { - let p in 1..(T::parachains().len() + 1) as u32; - - let sender = account("sender", 0, 0); - let mut parachains = T::parachains(); - let _ = if p <= parachains.len() as u32 { - parachains.split_off(p as usize) - } else { - Default::default() - }; - log::trace!(target: crate::LOG_TARGET, "=== {:?}", parachains.len()); - let (relay_block_number, relay_block_hash, parachain_heads_proof, parachains_heads) = T::prepare_parachain_heads_proof( - ¶chains, - DEFAULT_PARACHAIN_HEAD_SIZE, - StorageProofSize::Minimal(0), - ); - let at_relay_block = (relay_block_number, relay_block_hash); - }: submit_parachain_heads(RawOrigin::Signed(sender), at_relay_block, parachains_heads, parachain_heads_proof) - verify { - for parachain in parachains { - assert!(crate::Pallet::::best_parachain_head(parachain).is_some()); - } - } - - // Benchmark `submit_parachain_heads` extrinsic with 1kb proof size. 
- submit_parachain_heads_with_1kb_proof { - let sender = account("sender", 0, 0); - let parachains = vec![T::parachains()[0]]; - let (relay_block_number, relay_block_hash, parachain_heads_proof, parachains_heads) = T::prepare_parachain_heads_proof( - ¶chains, - DEFAULT_PARACHAIN_HEAD_SIZE, - StorageProofSize::HasLargeLeaf(1024), - ); - let at_relay_block = (relay_block_number, relay_block_hash); - }: submit_parachain_heads(RawOrigin::Signed(sender), at_relay_block, parachains_heads, parachain_heads_proof) - verify { - for parachain in parachains { - assert!(crate::Pallet::::best_parachain_head(parachain).is_some()); - } - } - - // Benchmark `submit_parachain_heads` extrinsic with 16kb proof size. - submit_parachain_heads_with_16kb_proof { - let sender = account("sender", 0, 0); - let parachains = vec![T::parachains()[0]]; - let (relay_block_number, relay_block_hash, parachain_heads_proof, parachains_heads) = T::prepare_parachain_heads_proof( - ¶chains, - DEFAULT_PARACHAIN_HEAD_SIZE, - StorageProofSize::HasLargeLeaf(16 * 1024), - ); - let at_relay_block = (relay_block_number, relay_block_hash); - }: submit_parachain_heads(RawOrigin::Signed(sender), at_relay_block, parachains_heads, parachain_heads_proof) - verify { - for parachain in parachains { - assert!(crate::Pallet::::best_parachain_head(parachain).is_some()); - } - } - - impl_benchmark_test_suite!(Pallet, crate::mock::new_test_ext(), crate::mock::TestRuntime) -} diff --git a/modules/parachains/src/call_ext.rs b/modules/parachains/src/call_ext.rs deleted file mode 100644 index da91a40a2..000000000 --- a/modules/parachains/src/call_ext.rs +++ /dev/null @@ -1,263 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -use crate::{Config, Pallet, RelayBlockNumber}; -use bp_parachains::BestParaHeadHash; -use bp_polkadot_core::parachains::{ParaHash, ParaId}; -use bp_runtime::OwnedBridgeModule; -use frame_support::{dispatch::CallableCallFor, traits::IsSubType}; -use sp_runtime::{ - transaction_validity::{InvalidTransaction, TransactionValidity, ValidTransaction}, - RuntimeDebug, -}; - -/// Info about a `SubmitParachainHeads` call which tries to update a single parachain. -#[derive(PartialEq, RuntimeDebug)] -pub struct SubmitParachainHeadsInfo { - /// Number of the finalized relay block that has been used to prove parachain finality. - pub at_relay_block_number: RelayBlockNumber, - /// Parachain identifier. - pub para_id: ParaId, - /// Hash of the bundled parachain head. - pub para_head_hash: ParaHash, -} - -/// Helper struct that provides methods for working with the `SubmitParachainHeads` call. -pub struct SubmitParachainHeadsHelper, I: 'static> { - _phantom_data: sp_std::marker::PhantomData<(T, I)>, -} - -impl, I: 'static> SubmitParachainHeadsHelper { - /// Check if the para head provided by the `SubmitParachainHeads` is better than the best one - /// we know. 
- pub fn is_obsolete(update: &SubmitParachainHeadsInfo) -> bool { - let stored_best_head = match crate::ParasInfo::::get(update.para_id) { - Some(stored_best_head) => stored_best_head, - None => return false, - }; - - if stored_best_head.best_head_hash.at_relay_block_number >= update.at_relay_block_number { - log::trace!( - target: crate::LOG_TARGET, - "The parachain head can't be updated. The parachain head for {:?} \ - was already updated at better relay chain block {} >= {}.", - update.para_id, - stored_best_head.best_head_hash.at_relay_block_number, - update.at_relay_block_number - ); - return true - } - - if stored_best_head.best_head_hash.head_hash == update.para_head_hash { - log::trace!( - target: crate::LOG_TARGET, - "The parachain head can't be updated. The parachain head hash for {:?} \ - was already updated to {} at block {} < {}.", - update.para_id, - update.para_head_hash, - stored_best_head.best_head_hash.at_relay_block_number, - update.at_relay_block_number - ); - return true - } - - false - } - - /// Check if the `SubmitParachainHeads` was successfully executed. - pub fn was_successful(update: &SubmitParachainHeadsInfo) -> bool { - match crate::ParasInfo::::get(update.para_id) { - Some(stored_best_head) => - stored_best_head.best_head_hash == - BestParaHeadHash { - at_relay_block_number: update.at_relay_block_number, - head_hash: update.para_head_hash, - }, - None => false, - } - } -} - -/// Trait representing a call that is a sub type of this pallet's call. -pub trait CallSubType, I: 'static>: - IsSubType, T>> -{ - /// Create a new instance of `SubmitParachainHeadsInfo` from a `SubmitParachainHeads` call with - /// one single parachain entry. - fn one_entry_submit_parachain_heads_info(&self) -> Option { - if let Some(crate::Call::::submit_parachain_heads { - ref at_relay_block, - ref parachains, - .. 
- }) = self.is_sub_type() - { - if let &[(para_id, para_head_hash)] = parachains.as_slice() { - return Some(SubmitParachainHeadsInfo { - at_relay_block_number: at_relay_block.0, - para_id, - para_head_hash, - }) - } - } - - None - } - - /// Create a new instance of `SubmitParachainHeadsInfo` from a `SubmitParachainHeads` call with - /// one single parachain entry, if the entry is for the provided parachain id. - fn submit_parachain_heads_info_for(&self, para_id: u32) -> Option { - self.one_entry_submit_parachain_heads_info() - .filter(|update| update.para_id.0 == para_id) - } - - /// Validate parachain heads in order to avoid "mining" transactions that provide - /// outdated bridged parachain heads. Without this validation, even honest relayers - /// may lose their funds if there are multiple relays running and submitting the - /// same information. - /// - /// This validation only works with transactions that are updating single parachain - /// head. We can't use unbounded validation - it may take too long and either break - /// block production, or "eat" significant portion of block production time literally - /// for nothing. In addition, the single-parachain-head-per-transaction is how the - /// pallet will be used in our environment. 
- fn check_obsolete_submit_parachain_heads(&self) -> TransactionValidity - where - Self: Sized, - { - let update = match self.one_entry_submit_parachain_heads_info() { - Some(update) => update, - None => return Ok(ValidTransaction::default()), - }; - - if Pallet::::ensure_not_halted().is_err() { - return InvalidTransaction::Call.into() - } - - if SubmitParachainHeadsHelper::::is_obsolete(&update) { - return InvalidTransaction::Stale.into() - } - - Ok(ValidTransaction::default()) - } -} - -impl CallSubType for T::RuntimeCall -where - T: Config, - T::RuntimeCall: IsSubType, T>>, -{ -} - -#[cfg(test)] -mod tests { - use crate::{ - mock::{run_test, RuntimeCall, TestRuntime}, - CallSubType, PalletOperatingMode, ParaInfo, ParasInfo, RelayBlockNumber, - }; - use bp_parachains::BestParaHeadHash; - use bp_polkadot_core::parachains::{ParaHash, ParaHeadsProof, ParaId}; - use bp_runtime::BasicOperatingMode; - - fn validate_submit_parachain_heads( - num: RelayBlockNumber, - parachains: Vec<(ParaId, ParaHash)>, - ) -> bool { - RuntimeCall::Parachains(crate::Call::::submit_parachain_heads { - at_relay_block: (num, Default::default()), - parachains, - parachain_heads_proof: ParaHeadsProof { storage_proof: Vec::new() }, - }) - .check_obsolete_submit_parachain_heads() - .is_ok() - } - - fn sync_to_relay_header_10() { - ParasInfo::::insert( - ParaId(1), - ParaInfo { - best_head_hash: BestParaHeadHash { - at_relay_block_number: 10, - head_hash: [1u8; 32].into(), - }, - next_imported_hash_position: 0, - }, - ); - } - - #[test] - fn extension_rejects_header_from_the_obsolete_relay_block() { - run_test(|| { - // when current best finalized is #10 and we're trying to import header#5 => tx is - // rejected - sync_to_relay_header_10(); - assert!(!validate_submit_parachain_heads(5, vec![(ParaId(1), [1u8; 32].into())])); - }); - } - - #[test] - fn extension_rejects_header_from_the_same_relay_block() { - run_test(|| { - // when current best finalized is #10 and we're trying to import header#10 
=> tx is - // rejected - sync_to_relay_header_10(); - assert!(!validate_submit_parachain_heads(10, vec![(ParaId(1), [1u8; 32].into())])); - }); - } - - #[test] - fn extension_rejects_header_from_new_relay_block_with_same_hash() { - run_test(|| { - // when current best finalized is #10 and we're trying to import header#10 => tx is - // rejected - sync_to_relay_header_10(); - assert!(!validate_submit_parachain_heads(20, vec![(ParaId(1), [1u8; 32].into())])); - }); - } - - #[test] - fn extension_rejects_header_if_pallet_is_halted() { - run_test(|| { - // when pallet is halted => tx is rejected - sync_to_relay_header_10(); - PalletOperatingMode::::put(BasicOperatingMode::Halted); - - assert!(!validate_submit_parachain_heads(15, vec![(ParaId(1), [2u8; 32].into())])); - }); - } - - #[test] - fn extension_accepts_new_header() { - run_test(|| { - // when current best finalized is #10 and we're trying to import header#15 => tx is - // accepted - sync_to_relay_header_10(); - assert!(validate_submit_parachain_heads(15, vec![(ParaId(1), [2u8; 32].into())])); - }); - } - - #[test] - fn extension_accepts_if_more_than_one_parachain_is_submitted() { - run_test(|| { - // when current best finalized is #10 and we're trying to import header#5, but another - // parachain head is also supplied => tx is accepted - sync_to_relay_header_10(); - assert!(validate_submit_parachain_heads( - 5, - vec![(ParaId(1), [1u8; 32].into()), (ParaId(2), [1u8; 32].into())] - )); - }); - } -} diff --git a/modules/parachains/src/lib.rs b/modules/parachains/src/lib.rs deleted file mode 100644 index 1363a6376..000000000 --- a/modules/parachains/src/lib.rs +++ /dev/null @@ -1,1650 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Parachains finality module. -//! -//! This module needs to be deployed with GRANDPA module, which is syncing relay -//! chain blocks. The main entry point of this module is `submit_parachain_heads`, which -//! accepts storage proof of some parachain `Heads` entries from bridged relay chain. -//! It requires corresponding relay headers to be already synced. - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -pub use weights::WeightInfo; -pub use weights_ext::WeightInfoExt; - -use bp_header_chain::{HeaderChain, HeaderChainError}; -use bp_parachains::{parachain_head_storage_key_at_source, ParaInfo, ParaStoredHeaderData}; -use bp_polkadot_core::parachains::{ParaHash, ParaHead, ParaHeadsProof, ParaId}; -use bp_runtime::{Chain, HashOf, HeaderId, HeaderIdOf, Parachain, StorageProofError}; -use frame_support::{dispatch::PostDispatchInfo, DefaultNoBound}; -use sp_std::{marker::PhantomData, vec::Vec}; - -#[cfg(feature = "runtime-benchmarks")] -use bp_parachains::ParaStoredHeaderDataBuilder; -#[cfg(feature = "runtime-benchmarks")] -use bp_runtime::HeaderOf; -#[cfg(feature = "runtime-benchmarks")] -use codec::Encode; - -// Re-export in crate namespace for `construct_runtime!`. 
-pub use call_ext::*; -pub use pallet::*; - -pub mod weights; -pub mod weights_ext; - -#[cfg(feature = "runtime-benchmarks")] -pub mod benchmarking; - -mod call_ext; -#[cfg(test)] -mod mock; - -/// The target that will be used when publishing logs related to this pallet. -pub const LOG_TARGET: &str = "runtime::bridge-parachains"; - -/// Block hash of the bridged relay chain. -pub type RelayBlockHash = bp_polkadot_core::Hash; -/// Block number of the bridged relay chain. -pub type RelayBlockNumber = bp_polkadot_core::BlockNumber; -/// Hasher of the bridged relay chain. -pub type RelayBlockHasher = bp_polkadot_core::Hasher; - -/// Artifacts of the parachains head update. -struct UpdateParachainHeadArtifacts { - /// New best head of the parachain. - pub best_head: ParaInfo, - /// If `true`, some old parachain head has been pruned during update. - pub prune_happened: bool, -} - -#[frame_support::pallet] -pub mod pallet { - use super::*; - use bp_parachains::{ - BestParaHeadHash, ImportedParaHeadsKeyProvider, ParaStoredHeaderDataBuilder, - ParasInfoKeyProvider, - }; - use bp_runtime::{ - BasicOperatingMode, BoundedStorageValue, OwnedBridgeModule, StorageDoubleMapKeyProvider, - StorageMapKeyProvider, - }; - use frame_support::pallet_prelude::*; - use frame_system::pallet_prelude::*; - - /// Stored parachain head data of given parachains pallet. - pub type StoredParaHeadDataOf = - BoundedStorageValue<>::MaxParaHeadDataSize, ParaStoredHeaderData>; - /// Weight info of the given parachains pallet. - pub type WeightInfoOf = >::WeightInfo; - type GrandpaPalletOf = - pallet_bridge_grandpa::Pallet>::BridgesGrandpaPalletInstance>; - - #[pallet::event] - #[pallet::generate_deposit(pub(super) fn deposit_event)] - pub enum Event, I: 'static = ()> { - /// The caller has provided head of parachain that the pallet is not configured to track. - UntrackedParachainRejected { - /// Identifier of the parachain that is not tracked by the pallet. 
- parachain: ParaId, - }, - /// The caller has declared that he has provided given parachain head, but it is missing - /// from the storage proof. - MissingParachainHead { - /// Identifier of the parachain with missing head. - parachain: ParaId, - }, - /// The caller has provided parachain head hash that is not matching the hash read from the - /// storage proof. - IncorrectParachainHeadHash { - /// Identifier of the parachain with incorrect head hast. - parachain: ParaId, - /// Specified parachain head hash. - parachain_head_hash: ParaHash, - /// Actual parachain head hash. - actual_parachain_head_hash: ParaHash, - }, - /// The caller has provided obsolete parachain head, which is already known to the pallet. - RejectedObsoleteParachainHead { - /// Identifier of the parachain with obsolete head. - parachain: ParaId, - /// Obsolete parachain head hash. - parachain_head_hash: ParaHash, - }, - /// The caller has provided parachain head that exceeds the maximal configured head size. - RejectedLargeParachainHead { - /// Identifier of the parachain with rejected head. - parachain: ParaId, - /// Parachain head hash. - parachain_head_hash: ParaHash, - /// Parachain head size. - parachain_head_size: u32, - }, - /// Parachain head has been updated. - UpdatedParachainHead { - /// Identifier of the parachain that has been updated. - parachain: ParaId, - /// Parachain head hash. - parachain_head_hash: ParaHash, - }, - } - - #[pallet::error] - pub enum Error { - /// Relay chain block hash is unknown to us. - UnknownRelayChainBlock, - /// The number of stored relay block is different from what the relayer has provided. - InvalidRelayChainBlockNumber, - /// Parachain heads storage proof is invalid. - HeaderChainStorageProof(HeaderChainError), - /// Error generated by the `OwnedBridgeModule` trait. - BridgeModule(bp_runtime::OwnedBridgeModuleError), - } - - /// Convenience trait for defining `BridgedChain` bounds. 
- pub trait BoundedBridgeGrandpaConfig: - pallet_bridge_grandpa::Config - { - /// Type of the bridged relay chain. - type BridgedRelayChain: Chain< - BlockNumber = RelayBlockNumber, - Hash = RelayBlockHash, - Hasher = RelayBlockHasher, - >; - } - - impl BoundedBridgeGrandpaConfig for T - where - T: pallet_bridge_grandpa::Config, - T::BridgedChain: - Chain, - { - type BridgedRelayChain = T::BridgedChain; - } - - #[pallet::config] - #[pallet::disable_frame_system_supertrait_check] - pub trait Config: - BoundedBridgeGrandpaConfig - { - /// The overarching event type. - type RuntimeEvent: From> - + IsType<::RuntimeEvent>; - /// Benchmarks results from runtime we're plugged into. - type WeightInfo: WeightInfoExt; - - /// Instance of bridges GRANDPA pallet (within this runtime) that this pallet is linked to. - /// - /// The GRANDPA pallet instance must be configured to import headers of relay chain that - /// we're interested in. - type BridgesGrandpaPalletInstance: 'static; - - /// Name of the original `paras` pallet in the `construct_runtime!()` call at the bridged - /// chain. - /// - /// Please keep in mind that this should be the name of the `runtime_parachains::paras` - /// pallet from polkadot repository, not the `pallet-bridge-parachains`. - #[pallet::constant] - type ParasPalletName: Get<&'static str>; - - /// Parachain head data builder. - /// - /// We never store parachain heads here, since they may be too big (e.g. because of large - /// digest items). Instead we're using the same approach as `pallet-bridge-grandpa` - /// pallet - we are only storing `bp_messages::StoredHeaderData` (number and state root), - /// which is enough for our applications. However, we work with different parachains here - /// and they can use different primitives (for block numbers and hash). So we can't store - /// it directly. Instead, we're storing `bp_messages::StoredHeaderData` in SCALE-encoded - /// form, wrapping it into `bp_parachains::ParaStoredHeaderData`. 
- /// - /// This builder helps to convert from `HeadData` to `bp_parachains::ParaStoredHeaderData`. - type ParaStoredHeaderDataBuilder: ParaStoredHeaderDataBuilder; - - /// Maximal number of single parachain heads to keep in the storage. - /// - /// The setting is there to prevent growing the on-chain state indefinitely. Note - /// the setting does not relate to parachain block numbers - we will simply keep as much - /// items in the storage, so it doesn't guarantee any fixed timeframe for heads. - /// - /// Incautious change of this constant may lead to orphan entries in the runtime storage. - #[pallet::constant] - type HeadsToKeep: Get; - - /// Maximal size (in bytes) of the SCALE-encoded parachain head data - /// (`bp_parachains::ParaStoredHeaderData`). - /// - /// Keep in mind that the size of any tracked parachain header data must not exceed this - /// value. So if you're going to track multiple parachains, one of which is using large - /// hashes, you shall choose this maximal value. - /// - /// There's no mandatory headers in this pallet, so it can't stall if there's some header - /// that exceeds this bound. - #[pallet::constant] - type MaxParaHeadDataSize: Get; - } - - /// Optional pallet owner. - /// - /// Pallet owner has a right to halt all pallet operations and then resume them. If it is - /// `None`, then there are no direct ways to halt/resume pallet operations, but other - /// runtime methods may still be used to do that (i.e. democracy::referendum to update halt - /// flag directly or call the `halt_operations`). - #[pallet::storage] - pub type PalletOwner, I: 'static = ()> = - StorageValue<_, T::AccountId, OptionQuery>; - - /// The current operating mode of the pallet. - /// - /// Depending on the mode either all, or no transactions will be allowed. - #[pallet::storage] - pub type PalletOperatingMode, I: 'static = ()> = - StorageValue<_, BasicOperatingMode, ValueQuery>; - - /// Parachains info. 
- /// - /// Contains the following info: - /// - best parachain head hash - /// - the head of the `ImportedParaHashes` ring buffer - #[pallet::storage] - pub type ParasInfo, I: 'static = ()> = StorageMap< - Hasher = ::Hasher, - Key = ::Key, - Value = ::Value, - QueryKind = OptionQuery, - OnEmpty = GetDefault, - MaxValues = MaybeMaxParachains, - >; - - /// State roots of parachain heads which have been imported into the pallet. - #[pallet::storage] - pub type ImportedParaHeads, I: 'static = ()> = StorageDoubleMap< - Hasher1 = ::Hasher1, - Key1 = ::Key1, - Hasher2 = ::Hasher2, - Key2 = ::Key2, - Value = StoredParaHeadDataOf, - QueryKind = OptionQuery, - OnEmpty = GetDefault, - MaxValues = MaybeMaxTotalParachainHashes, - >; - - /// A ring buffer of imported parachain head hashes. Ordered by the insertion time. - #[pallet::storage] - pub(super) type ImportedParaHashes, I: 'static = ()> = StorageDoubleMap< - Hasher1 = Blake2_128Concat, - Key1 = ParaId, - Hasher2 = Twox64Concat, - Key2 = u32, - Value = ParaHash, - QueryKind = OptionQuery, - OnEmpty = GetDefault, - MaxValues = MaybeMaxTotalParachainHashes, - >; - - #[pallet::pallet] - pub struct Pallet(PhantomData<(T, I)>); - - impl, I: 'static> OwnedBridgeModule for Pallet { - const LOG_TARGET: &'static str = LOG_TARGET; - type OwnerStorage = PalletOwner; - type OperatingMode = BasicOperatingMode; - type OperatingModeStorage = PalletOperatingMode; - } - - #[pallet::call] - impl, I: 'static> Pallet { - /// Submit proof of one or several parachain heads. - /// - /// The proof is supposed to be proof of some `Heads` entries from the - /// `polkadot-runtime-parachains::paras` pallet instance, deployed at the bridged chain. - /// The proof is supposed to be crafted at the `relay_header_hash` that must already be - /// imported by corresponding GRANDPA pallet at this chain. 
- /// - /// The call fails if: - /// - /// - the pallet is halted; - /// - /// - the relay chain block `at_relay_block` is not imported by the associated bridge - /// GRANDPA pallet. - /// - /// The call may succeed, but some heads may not be updated e.g. because pallet knows - /// better head or it isn't tracked by the pallet. - #[pallet::call_index(0)] - #[pallet::weight(WeightInfoOf::::submit_parachain_heads_weight( - T::DbWeight::get(), - parachain_heads_proof, - parachains.len() as _, - ))] - pub fn submit_parachain_heads( - origin: OriginFor, - at_relay_block: (RelayBlockNumber, RelayBlockHash), - parachains: Vec<(ParaId, ParaHash)>, - parachain_heads_proof: ParaHeadsProof, - ) -> DispatchResultWithPostInfo { - Self::ensure_not_halted().map_err(Error::::BridgeModule)?; - ensure_signed(origin)?; - - // we'll need relay chain header to verify that parachains heads are always increasing. - let (relay_block_number, relay_block_hash) = at_relay_block; - let relay_block = pallet_bridge_grandpa::ImportedHeaders::< - T, - T::BridgesGrandpaPalletInstance, - >::get(relay_block_hash) - .ok_or(Error::::UnknownRelayChainBlock)?; - ensure!( - relay_block.number == relay_block_number, - Error::::InvalidRelayChainBlockNumber, - ); - - // now parse storage proof and read parachain heads - let mut actual_weight = WeightInfoOf::::submit_parachain_heads_weight( - T::DbWeight::get(), - ¶chain_heads_proof, - parachains.len() as _, - ); - - let mut storage = GrandpaPalletOf::::storage_proof_checker( - relay_block_hash, - parachain_heads_proof.storage_proof, - ) - .map_err(Error::::HeaderChainStorageProof)?; - - for (parachain, parachain_head_hash) in parachains { - let parachain_head = match Self::read_parachain_head(&mut storage, parachain) { - Ok(Some(parachain_head)) => parachain_head, - Ok(None) => { - log::trace!( - target: LOG_TARGET, - "The head of parachain {:?} is None. 
{}", - parachain, - if ParasInfo::::contains_key(parachain) { - "Looks like it is not yet registered at the source relay chain" - } else { - "Looks like it has been deregistered from the source relay chain" - }, - ); - Self::deposit_event(Event::MissingParachainHead { parachain }); - continue - }, - Err(e) => { - log::trace!( - target: LOG_TARGET, - "The read of head of parachain {:?} has failed: {:?}", - parachain, - e, - ); - Self::deposit_event(Event::MissingParachainHead { parachain }); - continue - }, - }; - - // if relayer has specified invalid parachain head hash, ignore the head - // (this isn't strictly necessary, but better safe than sorry) - let actual_parachain_head_hash = parachain_head.hash(); - if parachain_head_hash != actual_parachain_head_hash { - log::trace!( - target: LOG_TARGET, - "The submitter has specified invalid parachain {:?} head hash: \ - {:?} vs {:?}", - parachain, - parachain_head_hash, - actual_parachain_head_hash, - ); - Self::deposit_event(Event::IncorrectParachainHeadHash { - parachain, - parachain_head_hash, - actual_parachain_head_hash, - }); - continue - } - - // convert from parachain head into stored parachain head data - let parachain_head_data = - match T::ParaStoredHeaderDataBuilder::try_build(parachain, ¶chain_head) { - Some(parachain_head_data) => parachain_head_data, - None => { - log::trace!( - target: LOG_TARGET, - "The head of parachain {:?} has been provided, but it is not tracked by the pallet", - parachain, - ); - Self::deposit_event(Event::UntrackedParachainRejected { parachain }); - continue - }, - }; - - let update_result: Result<_, ()> = - ParasInfo::::try_mutate(parachain, |stored_best_head| { - let artifacts = Pallet::::update_parachain_head( - parachain, - stored_best_head.take(), - relay_block_number, - parachain_head_data, - parachain_head_hash, - )?; - *stored_best_head = Some(artifacts.best_head); - Ok(artifacts.prune_happened) - }); - - // we're refunding weight if update has not happened and if 
pruning has not happened - let is_update_happened = update_result.is_ok(); - if !is_update_happened { - actual_weight = actual_weight.saturating_sub( - WeightInfoOf::::parachain_head_storage_write_weight( - T::DbWeight::get(), - ), - ); - } - let is_prune_happened = matches!(update_result, Ok(true)); - if !is_prune_happened { - actual_weight = actual_weight.saturating_sub( - WeightInfoOf::::parachain_head_pruning_weight(T::DbWeight::get()), - ); - } - } - - // even though we may have accepted some parachain heads, we can't allow relayers to - // submit proof with unused trie nodes - // => treat this as an error - // - // (we can throw error here, because now all our calls are transactional) - storage.ensure_no_unused_nodes().map_err(|e| { - Error::::HeaderChainStorageProof(HeaderChainError::StorageProof(e)) - })?; - - Ok(PostDispatchInfo { actual_weight: Some(actual_weight), pays_fee: Pays::Yes }) - } - - /// Change `PalletOwner`. - /// - /// May only be called either by root, or by `PalletOwner`. - #[pallet::call_index(1)] - #[pallet::weight((T::DbWeight::get().reads_writes(1, 1), DispatchClass::Operational))] - pub fn set_owner(origin: OriginFor, new_owner: Option) -> DispatchResult { - >::set_owner(origin, new_owner) - } - - /// Halt or resume all pallet operations. - /// - /// May only be called either by root, or by `PalletOwner`. - #[pallet::call_index(2)] - #[pallet::weight((T::DbWeight::get().reads_writes(1, 1), DispatchClass::Operational))] - pub fn set_operating_mode( - origin: OriginFor, - operating_mode: BasicOperatingMode, - ) -> DispatchResult { - >::set_operating_mode(origin, operating_mode) - } - } - - impl, I: 'static> Pallet { - /// Get stored parachain info. - pub fn best_parachain_info(parachain: ParaId) -> Option { - ParasInfo::::get(parachain) - } - - /// Get best finalized head data of the given parachain. 
- pub fn best_parachain_head(parachain: ParaId) -> Option { - let best_para_head_hash = ParasInfo::::get(parachain)?.best_head_hash.head_hash; - ImportedParaHeads::::get(parachain, best_para_head_hash).map(|h| h.into_inner()) - } - - /// Get best finalized head hash of the given parachain. - pub fn best_parachain_head_hash(parachain: ParaId) -> Option { - Some(ParasInfo::::get(parachain)?.best_head_hash.head_hash) - } - - /// Get best finalized head id of the given parachain. - pub fn best_parachain_head_id + Parachain>( - ) -> Result>, codec::Error> { - let parachain = ParaId(C::PARACHAIN_ID); - let best_head_hash = match Self::best_parachain_head_hash(parachain) { - Some(best_head_hash) => best_head_hash, - None => return Ok(None), - }; - let encoded_head = match Self::parachain_head(parachain, best_head_hash) { - Some(encoded_head) => encoded_head, - None => return Ok(None), - }; - encoded_head - .decode_parachain_head_data::() - .map(|data| Some(HeaderId(data.number, best_head_hash))) - } - - /// Get parachain head data with given hash. - pub fn parachain_head(parachain: ParaId, hash: ParaHash) -> Option { - ImportedParaHeads::::get(parachain, hash).map(|h| h.into_inner()) - } - - /// Read parachain head from storage proof. - fn read_parachain_head( - storage: &mut bp_runtime::StorageProofChecker, - parachain: ParaId, - ) -> Result, StorageProofError> { - let parachain_head_key = - parachain_head_storage_key_at_source(T::ParasPalletName::get(), parachain); - storage.read_and_decode_value(parachain_head_key.0.as_ref()) - } - - /// Try to update parachain head. - pub(super) fn update_parachain_head( - parachain: ParaId, - stored_best_head: Option, - new_at_relay_block_number: RelayBlockNumber, - new_head_data: ParaStoredHeaderData, - new_head_hash: ParaHash, - ) -> Result { - // check if head has been already updated at better relay chain block. 
Without this - // check, we may import heads in random order - let update = SubmitParachainHeadsInfo { - at_relay_block_number: new_at_relay_block_number, - para_id: parachain, - para_head_hash: new_head_hash, - }; - if SubmitParachainHeadsHelper::::is_obsolete(&update) { - Self::deposit_event(Event::RejectedObsoleteParachainHead { - parachain, - parachain_head_hash: new_head_hash, - }); - return Err(()) - } - - // verify that the parachain head data size is <= `MaxParaHeadDataSize` - let updated_head_data = - match StoredParaHeadDataOf::::try_from_inner(new_head_data) { - Ok(updated_head_data) => updated_head_data, - Err(e) => { - log::trace!( - target: LOG_TARGET, - "The parachain head can't be updated. The parachain head data size \ - for {:?} is {}. It exceeds maximal configured size {}.", - parachain, - e.value_size, - e.maximal_size, - ); - - Self::deposit_event(Event::RejectedLargeParachainHead { - parachain, - parachain_head_hash: new_head_hash, - parachain_head_size: e.value_size as _, - }); - - return Err(()) - }, - }; - - let next_imported_hash_position = stored_best_head - .map_or(0, |stored_best_head| stored_best_head.next_imported_hash_position); - - // insert updated best parachain head - let head_hash_to_prune = - ImportedParaHashes::::try_get(parachain, next_imported_hash_position); - let updated_best_para_head = ParaInfo { - best_head_hash: BestParaHeadHash { - at_relay_block_number: new_at_relay_block_number, - head_hash: new_head_hash, - }, - next_imported_hash_position: (next_imported_hash_position + 1) % - T::HeadsToKeep::get(), - }; - ImportedParaHashes::::insert( - parachain, - next_imported_hash_position, - new_head_hash, - ); - ImportedParaHeads::::insert(parachain, new_head_hash, updated_head_data); - log::trace!( - target: LOG_TARGET, - "Updated head of parachain {:?} to {}", - parachain, - new_head_hash, - ); - - // remove old head - let prune_happened = head_hash_to_prune.is_ok(); - if let Ok(head_hash_to_prune) = head_hash_to_prune { 
- log::trace!( - target: LOG_TARGET, - "Pruning old head of parachain {:?}: {}", - parachain, - head_hash_to_prune, - ); - ImportedParaHeads::::remove(parachain, head_hash_to_prune); - } - Self::deposit_event(Event::UpdatedParachainHead { - parachain, - parachain_head_hash: new_head_hash, - }); - - Ok(UpdateParachainHeadArtifacts { best_head: updated_best_para_head, prune_happened }) - } - } - - #[pallet::genesis_config] - #[derive(DefaultNoBound)] - pub struct GenesisConfig, I: 'static = ()> { - /// Initial pallet operating mode. - pub operating_mode: BasicOperatingMode, - /// Initial pallet owner. - pub owner: Option, - /// Dummy marker. - pub phantom: sp_std::marker::PhantomData, - } - - #[pallet::genesis_build] - impl, I: 'static> BuildGenesisConfig for GenesisConfig { - fn build(&self) { - PalletOperatingMode::::put(self.operating_mode); - if let Some(ref owner) = self.owner { - PalletOwner::::put(owner); - } - } - } - - /// Returns maximal number of parachains, supported by the pallet. - pub struct MaybeMaxParachains(PhantomData<(T, I)>); - - impl, I: 'static> Get> for MaybeMaxParachains { - fn get() -> Option { - Some(T::ParaStoredHeaderDataBuilder::supported_parachains()) - } - } - - /// Returns total number of all parachains hashes/heads, stored by the pallet. - pub struct MaybeMaxTotalParachainHashes(PhantomData<(T, I)>); - - impl, I: 'static> Get> for MaybeMaxTotalParachainHashes { - fn get() -> Option { - Some( - T::ParaStoredHeaderDataBuilder::supported_parachains() - .saturating_mul(T::HeadsToKeep::get()), - ) - } - } -} - -/// Single parachain header chain adapter. 
-pub struct ParachainHeaders(PhantomData<(T, I, C)>); - -impl, I: 'static, C: Parachain> HeaderChain - for ParachainHeaders -{ - fn finalized_header_state_root(hash: HashOf) -> Option> { - Pallet::::parachain_head(ParaId(C::PARACHAIN_ID), hash) - .and_then(|head| head.decode_parachain_head_data::().ok()) - .map(|h| h.state_root) - } -} - -/// (Re)initialize pallet with given header for using it in `pallet-bridge-messages` benchmarks. -#[cfg(feature = "runtime-benchmarks")] -pub fn initialize_for_benchmarks, I: 'static, PC: Parachain>( - header: HeaderOf, -) { - let parachain = ParaId(PC::PARACHAIN_ID); - let parachain_head = ParaHead(header.encode()); - let updated_head_data = T::ParaStoredHeaderDataBuilder::try_build(parachain, ¶chain_head) - .expect("failed to build stored parachain head in benchmarks"); - Pallet::::update_parachain_head( - parachain, - None, - 0, - updated_head_data, - parachain_head.hash(), - ) - .expect("failed to insert parachain head in benchmarks"); -} - -#[cfg(test)] -pub(crate) mod tests { - use super::*; - use crate::mock::{ - run_test, test_relay_header, BigParachainHeader, RegularParachainHasher, - RegularParachainHeader, RelayBlockHeader, RuntimeEvent as TestEvent, RuntimeOrigin, - TestRuntime, UNTRACKED_PARACHAIN_ID, - }; - use bp_test_utils::prepare_parachain_heads_proof; - use codec::Encode; - - use bp_header_chain::{justification::GrandpaJustification, StoredHeaderGrandpaInfo}; - use bp_parachains::{ - BestParaHeadHash, BridgeParachainCall, ImportedParaHeadsKeyProvider, ParasInfoKeyProvider, - }; - use bp_runtime::{ - BasicOperatingMode, OwnedBridgeModuleError, StorageDoubleMapKeyProvider, - StorageMapKeyProvider, - }; - use bp_test_utils::{ - authority_list, generate_owned_bridge_module_tests, make_default_justification, - TEST_GRANDPA_SET_ID, - }; - use frame_support::{ - assert_noop, assert_ok, - dispatch::DispatchResultWithPostInfo, - storage::generator::{StorageDoubleMap, StorageMap}, - traits::{Get, OnInitialize}, - 
weights::Weight, - }; - use frame_system::{EventRecord, Pallet as System, Phase}; - use sp_core::Hasher; - use sp_runtime::{traits::Header as HeaderT, DispatchError}; - - type BridgesGrandpaPalletInstance = pallet_bridge_grandpa::Instance1; - type WeightInfo = ::WeightInfo; - type DbWeight = ::DbWeight; - - pub(crate) fn initialize(state_root: RelayBlockHash) -> RelayBlockHash { - pallet_bridge_grandpa::Pallet::::initialize( - RuntimeOrigin::root(), - bp_header_chain::InitializationData { - header: Box::new(test_relay_header(0, state_root)), - authority_list: authority_list(), - set_id: 1, - operating_mode: BasicOperatingMode::Normal, - }, - ) - .unwrap(); - - System::::set_block_number(1); - System::::reset_events(); - - test_relay_header(0, state_root).hash() - } - - fn proceed( - num: RelayBlockNumber, - state_root: RelayBlockHash, - ) -> (ParaHash, GrandpaJustification) { - pallet_bridge_grandpa::Pallet::::on_initialize( - 0, - ); - - let header = test_relay_header(num, state_root); - let hash = header.hash(); - let justification = make_default_justification(&header); - assert_ok!( - pallet_bridge_grandpa::Pallet::::submit_finality_proof_ex( - RuntimeOrigin::signed(1), - Box::new(header), - justification.clone(), - TEST_GRANDPA_SET_ID, - ) - ); - - (hash, justification) - } - - fn initial_best_head(parachain: u32) -> ParaInfo { - ParaInfo { - best_head_hash: BestParaHeadHash { - at_relay_block_number: 0, - head_hash: head_data(parachain, 0).hash(), - }, - next_imported_hash_position: 1, - } - } - - pub(crate) fn head_data(parachain: u32, head_number: u32) -> ParaHead { - ParaHead( - RegularParachainHeader::new( - head_number as _, - Default::default(), - RegularParachainHasher::hash(&(parachain, head_number).encode()), - Default::default(), - Default::default(), - ) - .encode(), - ) - } - - fn stored_head_data(parachain: u32, head_number: u32) -> ParaStoredHeaderData { - ParaStoredHeaderData( - (head_number as u64, RegularParachainHasher::hash(&(parachain, 
head_number).encode())) - .encode(), - ) - } - - fn big_head_data(parachain: u32, head_number: u32) -> ParaHead { - ParaHead( - BigParachainHeader::new( - head_number as _, - Default::default(), - RegularParachainHasher::hash(&(parachain, head_number).encode()), - Default::default(), - Default::default(), - ) - .encode(), - ) - } - - fn big_stored_head_data(parachain: u32, head_number: u32) -> ParaStoredHeaderData { - ParaStoredHeaderData( - (head_number as u128, RegularParachainHasher::hash(&(parachain, head_number).encode())) - .encode(), - ) - } - - fn head_hash(parachain: u32, head_number: u32) -> ParaHash { - head_data(parachain, head_number).hash() - } - - fn import_parachain_1_head( - relay_chain_block: RelayBlockNumber, - relay_state_root: RelayBlockHash, - parachains: Vec<(ParaId, ParaHash)>, - proof: ParaHeadsProof, - ) -> DispatchResultWithPostInfo { - Pallet::::submit_parachain_heads( - RuntimeOrigin::signed(1), - (relay_chain_block, test_relay_header(relay_chain_block, relay_state_root).hash()), - parachains, - proof, - ) - } - - fn weight_of_import_parachain_1_head(proof: &ParaHeadsProof, prune_expected: bool) -> Weight { - let db_weight = ::DbWeight::get(); - WeightInfoOf::::submit_parachain_heads_weight(db_weight, proof, 1) - .saturating_sub(if prune_expected { - Weight::zero() - } else { - WeightInfoOf::::parachain_head_pruning_weight(db_weight) - }) - } - - #[test] - fn submit_parachain_heads_checks_operating_mode() { - let (state_root, proof, parachains) = - prepare_parachain_heads_proof::(vec![(1, head_data(1, 0))]); - - run_test(|| { - initialize(state_root); - - // `submit_parachain_heads()` should fail when the pallet is halted. 
- PalletOperatingMode::::put(BasicOperatingMode::Halted); - assert_noop!( - Pallet::::submit_parachain_heads( - RuntimeOrigin::signed(1), - (0, test_relay_header(0, state_root).hash()), - parachains.clone(), - proof.clone(), - ), - Error::::BridgeModule(OwnedBridgeModuleError::Halted) - ); - - // `submit_parachain_heads()` should succeed now that the pallet is resumed. - PalletOperatingMode::::put(BasicOperatingMode::Normal); - assert_ok!(Pallet::::submit_parachain_heads( - RuntimeOrigin::signed(1), - (0, test_relay_header(0, state_root).hash()), - parachains, - proof, - ),); - }); - } - - #[test] - fn imports_initial_parachain_heads() { - let (state_root, proof, parachains) = - prepare_parachain_heads_proof::(vec![ - (1, head_data(1, 0)), - (3, head_data(3, 10)), - ]); - run_test(|| { - initialize(state_root); - - // we're trying to update heads of parachains 1, 2 and 3 - let expected_weight = - WeightInfo::submit_parachain_heads_weight(DbWeight::get(), &proof, 2); - let result = Pallet::::submit_parachain_heads( - RuntimeOrigin::signed(1), - (0, test_relay_header(0, state_root).hash()), - parachains, - proof, - ); - assert_ok!(result); - assert_eq!(result.expect("checked above").actual_weight, Some(expected_weight)); - - // but only 1 and 2 are updated, because proof is missing head of parachain#2 - assert_eq!(ParasInfo::::get(ParaId(1)), Some(initial_best_head(1))); - assert_eq!(ParasInfo::::get(ParaId(2)), None); - assert_eq!( - ParasInfo::::get(ParaId(3)), - Some(ParaInfo { - best_head_hash: BestParaHeadHash { - at_relay_block_number: 0, - head_hash: head_data(3, 10).hash() - }, - next_imported_hash_position: 1, - }) - ); - - assert_eq!( - ImportedParaHeads::::get( - ParaId(1), - initial_best_head(1).best_head_hash.head_hash - ) - .map(|h| h.into_inner()), - Some(stored_head_data(1, 0)) - ); - assert_eq!( - ImportedParaHeads::::get( - ParaId(2), - initial_best_head(2).best_head_hash.head_hash - ) - .map(|h| h.into_inner()), - None - ); - assert_eq!( - 
ImportedParaHeads::::get(ParaId(3), head_hash(3, 10)) - .map(|h| h.into_inner()), - Some(stored_head_data(3, 10)) - ); - - assert_eq!( - System::::events(), - vec![ - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::UpdatedParachainHead { - parachain: ParaId(1), - parachain_head_hash: initial_best_head(1).best_head_hash.head_hash, - }), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::UpdatedParachainHead { - parachain: ParaId(3), - parachain_head_hash: head_data(3, 10).hash(), - }), - topics: vec![], - } - ], - ); - }); - } - - #[test] - fn imports_parachain_heads_is_able_to_progress() { - let (state_root_5, proof_5, parachains_5) = - prepare_parachain_heads_proof::(vec![(1, head_data(1, 5))]); - let (state_root_10, proof_10, parachains_10) = - prepare_parachain_heads_proof::(vec![(1, head_data(1, 10))]); - run_test(|| { - // start with relay block #0 and import head#5 of parachain#1 - initialize(state_root_5); - assert_ok!(import_parachain_1_head(0, state_root_5, parachains_5, proof_5)); - assert_eq!( - ParasInfo::::get(ParaId(1)), - Some(ParaInfo { - best_head_hash: BestParaHeadHash { - at_relay_block_number: 0, - head_hash: head_data(1, 5).hash() - }, - next_imported_hash_position: 1, - }) - ); - assert_eq!( - ImportedParaHeads::::get(ParaId(1), head_data(1, 5).hash()) - .map(|h| h.into_inner()), - Some(stored_head_data(1, 5)) - ); - assert_eq!( - ImportedParaHeads::::get(ParaId(1), head_data(1, 10).hash()) - .map(|h| h.into_inner()), - None - ); - assert_eq!( - System::::events(), - vec![EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::UpdatedParachainHead { - parachain: ParaId(1), - parachain_head_hash: head_data(1, 5).hash(), - }), - topics: vec![], - }], - ); - - // import head#10 of parachain#1 at relay block #1 - let (relay_1_hash, justification) = proceed(1, state_root_10); - assert_ok!(import_parachain_1_head(1, 
state_root_10, parachains_10, proof_10)); - assert_eq!( - ParasInfo::::get(ParaId(1)), - Some(ParaInfo { - best_head_hash: BestParaHeadHash { - at_relay_block_number: 1, - head_hash: head_data(1, 10).hash() - }, - next_imported_hash_position: 2, - }) - ); - assert_eq!( - ImportedParaHeads::::get(ParaId(1), head_data(1, 5).hash()) - .map(|h| h.into_inner()), - Some(stored_head_data(1, 5)) - ); - assert_eq!( - ImportedParaHeads::::get(ParaId(1), head_data(1, 10).hash()) - .map(|h| h.into_inner()), - Some(stored_head_data(1, 10)) - ); - assert_eq!( - System::::events(), - vec![ - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::UpdatedParachainHead { - parachain: ParaId(1), - parachain_head_hash: head_data(1, 5).hash(), - }), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Grandpa1( - pallet_bridge_grandpa::Event::UpdatedBestFinalizedHeader { - number: 1, - hash: relay_1_hash, - grandpa_info: StoredHeaderGrandpaInfo { - finality_proof: justification, - new_verification_context: None, - }, - } - ), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::UpdatedParachainHead { - parachain: ParaId(1), - parachain_head_hash: head_data(1, 10).hash(), - }), - topics: vec![], - } - ], - ); - }); - } - - #[test] - fn ignores_untracked_parachain() { - let (state_root, proof, parachains) = - prepare_parachain_heads_proof::(vec![ - (1, head_data(1, 5)), - (UNTRACKED_PARACHAIN_ID, head_data(1, 5)), - (2, head_data(1, 5)), - ]); - run_test(|| { - // start with relay block #0 and try to import head#5 of parachain#1 and untracked - // parachain - let expected_weight = - WeightInfo::submit_parachain_heads_weight(DbWeight::get(), &proof, 3) - .saturating_sub(WeightInfo::parachain_head_storage_write_weight( - DbWeight::get(), - )); - initialize(state_root); - let result = Pallet::::submit_parachain_heads( - RuntimeOrigin::signed(1), - (0, 
test_relay_header(0, state_root).hash()), - parachains, - proof, - ); - assert_ok!(result); - assert_eq!(result.expect("checked above").actual_weight, Some(expected_weight)); - assert_eq!( - ParasInfo::::get(ParaId(1)), - Some(ParaInfo { - best_head_hash: BestParaHeadHash { - at_relay_block_number: 0, - head_hash: head_data(1, 5).hash() - }, - next_imported_hash_position: 1, - }) - ); - assert_eq!(ParasInfo::::get(ParaId(UNTRACKED_PARACHAIN_ID)), None,); - assert_eq!( - ParasInfo::::get(ParaId(2)), - Some(ParaInfo { - best_head_hash: BestParaHeadHash { - at_relay_block_number: 0, - head_hash: head_data(1, 5).hash() - }, - next_imported_hash_position: 1, - }) - ); - assert_eq!( - System::::events(), - vec![ - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::UpdatedParachainHead { - parachain: ParaId(1), - parachain_head_hash: head_data(1, 5).hash(), - }), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::UntrackedParachainRejected { - parachain: ParaId(UNTRACKED_PARACHAIN_ID), - }), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::UpdatedParachainHead { - parachain: ParaId(2), - parachain_head_hash: head_data(1, 5).hash(), - }), - topics: vec![], - } - ], - ); - }); - } - - #[test] - fn does_nothing_when_already_imported_this_head_at_previous_relay_header() { - let (state_root, proof, parachains) = - prepare_parachain_heads_proof::(vec![(1, head_data(1, 0))]); - run_test(|| { - // import head#0 of parachain#1 at relay block#0 - initialize(state_root); - assert_ok!(import_parachain_1_head(0, state_root, parachains.clone(), proof.clone())); - assert_eq!(ParasInfo::::get(ParaId(1)), Some(initial_best_head(1))); - assert_eq!( - System::::events(), - vec![EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::UpdatedParachainHead { - parachain: ParaId(1), - parachain_head_hash: 
initial_best_head(1).best_head_hash.head_hash, - }), - topics: vec![], - }], - ); - - // try to import head#0 of parachain#1 at relay block#1 - // => call succeeds, but nothing is changed - let (relay_1_hash, justification) = proceed(1, state_root); - assert_ok!(import_parachain_1_head(1, state_root, parachains, proof)); - assert_eq!(ParasInfo::::get(ParaId(1)), Some(initial_best_head(1))); - assert_eq!( - System::::events(), - vec![ - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::UpdatedParachainHead { - parachain: ParaId(1), - parachain_head_hash: initial_best_head(1).best_head_hash.head_hash, - }), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Grandpa1( - pallet_bridge_grandpa::Event::UpdatedBestFinalizedHeader { - number: 1, - hash: relay_1_hash, - grandpa_info: StoredHeaderGrandpaInfo { - finality_proof: justification, - new_verification_context: None, - } - } - ), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::RejectedObsoleteParachainHead { - parachain: ParaId(1), - parachain_head_hash: initial_best_head(1).best_head_hash.head_hash, - }), - topics: vec![], - } - ], - ); - }); - } - - #[test] - fn does_nothing_when_already_imported_head_at_better_relay_header() { - let (state_root_5, proof_5, parachains_5) = - prepare_parachain_heads_proof::(vec![(1, head_data(1, 5))]); - let (state_root_10, proof_10, parachains_10) = - prepare_parachain_heads_proof::(vec![(1, head_data(1, 10))]); - run_test(|| { - // start with relay block #0 - initialize(state_root_5); - - // head#10 of parachain#1 at relay block#1 - let (relay_1_hash, justification) = proceed(1, state_root_10); - assert_ok!(import_parachain_1_head(1, state_root_10, parachains_10, proof_10)); - assert_eq!( - ParasInfo::::get(ParaId(1)), - Some(ParaInfo { - best_head_hash: BestParaHeadHash { - at_relay_block_number: 1, - head_hash: head_data(1, 10).hash() - }, - 
next_imported_hash_position: 1, - }) - ); - assert_eq!( - System::::events(), - vec![ - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Grandpa1( - pallet_bridge_grandpa::Event::UpdatedBestFinalizedHeader { - number: 1, - hash: relay_1_hash, - grandpa_info: StoredHeaderGrandpaInfo { - finality_proof: justification.clone(), - new_verification_context: None, - } - } - ), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::UpdatedParachainHead { - parachain: ParaId(1), - parachain_head_hash: head_data(1, 10).hash(), - }), - topics: vec![], - } - ], - ); - - // now try to import head#5 at relay block#0 - // => nothing is changed, because better head has already been imported - assert_ok!(import_parachain_1_head(0, state_root_5, parachains_5, proof_5)); - assert_eq!( - ParasInfo::::get(ParaId(1)), - Some(ParaInfo { - best_head_hash: BestParaHeadHash { - at_relay_block_number: 1, - head_hash: head_data(1, 10).hash() - }, - next_imported_hash_position: 1, - }) - ); - assert_eq!( - System::::events(), - vec![ - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Grandpa1( - pallet_bridge_grandpa::Event::UpdatedBestFinalizedHeader { - number: 1, - hash: relay_1_hash, - grandpa_info: StoredHeaderGrandpaInfo { - finality_proof: justification, - new_verification_context: None, - } - } - ), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::UpdatedParachainHead { - parachain: ParaId(1), - parachain_head_hash: head_data(1, 10).hash(), - }), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::RejectedObsoleteParachainHead { - parachain: ParaId(1), - parachain_head_hash: head_data(1, 5).hash(), - }), - topics: vec![], - } - ], - ); - }); - } - - #[test] - fn does_nothing_when_parachain_head_is_too_large() { - let (state_root, proof, parachains) = - 
prepare_parachain_heads_proof::(vec![ - (1, head_data(1, 5)), - (4, big_head_data(1, 5)), - ]); - run_test(|| { - // start with relay block #0 and try to import head#5 of parachain#1 and big parachain - initialize(state_root); - let result = Pallet::::submit_parachain_heads( - RuntimeOrigin::signed(1), - (0, test_relay_header(0, state_root).hash()), - parachains, - proof, - ); - assert_ok!(result); - assert_eq!( - ParasInfo::::get(ParaId(1)), - Some(ParaInfo { - best_head_hash: BestParaHeadHash { - at_relay_block_number: 0, - head_hash: head_data(1, 5).hash() - }, - next_imported_hash_position: 1, - }) - ); - assert_eq!(ParasInfo::::get(ParaId(4)), None); - assert_eq!( - System::::events(), - vec![ - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::UpdatedParachainHead { - parachain: ParaId(1), - parachain_head_hash: head_data(1, 5).hash(), - }), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::RejectedLargeParachainHead { - parachain: ParaId(4), - parachain_head_hash: big_head_data(1, 5).hash(), - parachain_head_size: big_stored_head_data(1, 5).encoded_size() as u32, - }), - topics: vec![], - }, - ], - ); - }); - } - - #[test] - fn prunes_old_heads() { - run_test(|| { - let heads_to_keep = crate::mock::HeadsToKeep::get(); - - // import exactly `HeadsToKeep` headers - for i in 0..heads_to_keep { - let (state_root, proof, parachains) = prepare_parachain_heads_proof::< - RegularParachainHeader, - >(vec![(1, head_data(1, i))]); - if i == 0 { - initialize(state_root); - } else { - proceed(i, state_root); - } - - let expected_weight = weight_of_import_parachain_1_head(&proof, false); - let result = import_parachain_1_head(i, state_root, parachains, proof); - assert_ok!(result); - assert_eq!(result.expect("checked above").actual_weight, Some(expected_weight)); - } - - // nothing is pruned yet - for i in 0..heads_to_keep { - assert!(ImportedParaHeads::::get(ParaId(1), 
head_data(1, i).hash()) - .is_some()); - } - - // import next relay chain header and next parachain head - let (state_root, proof, parachains) = prepare_parachain_heads_proof::< - RegularParachainHeader, - >(vec![(1, head_data(1, heads_to_keep))]); - proceed(heads_to_keep, state_root); - let expected_weight = weight_of_import_parachain_1_head(&proof, true); - let result = import_parachain_1_head(heads_to_keep, state_root, parachains, proof); - assert_ok!(result); - assert_eq!(result.expect("checked above").actual_weight, Some(expected_weight)); - - // and the head#0 is pruned - assert!( - ImportedParaHeads::::get(ParaId(1), head_data(1, 0).hash()).is_none() - ); - for i in 1..=heads_to_keep { - assert!(ImportedParaHeads::::get(ParaId(1), head_data(1, i).hash()) - .is_some()); - } - }); - } - - #[test] - fn fails_on_unknown_relay_chain_block() { - let (state_root, proof, parachains) = - prepare_parachain_heads_proof::(vec![(1, head_data(1, 5))]); - run_test(|| { - // start with relay block #0 - initialize(state_root); - - // try to import head#5 of parachain#1 at unknown relay chain block #1 - assert_noop!( - import_parachain_1_head(1, state_root, parachains, proof), - Error::::UnknownRelayChainBlock - ); - }); - } - - #[test] - fn fails_on_invalid_storage_proof() { - let (_state_root, proof, parachains) = - prepare_parachain_heads_proof::(vec![(1, head_data(1, 5))]); - run_test(|| { - // start with relay block #0 - initialize(Default::default()); - - // try to import head#5 of parachain#1 at relay chain block #0 - assert_noop!( - import_parachain_1_head(0, Default::default(), parachains, proof), - Error::::HeaderChainStorageProof(HeaderChainError::StorageProof( - StorageProofError::StorageRootMismatch - )) - ); - }); - } - - #[test] - fn is_not_rewriting_existing_head_if_failed_to_read_updated_head() { - let (state_root_5, proof_5, parachains_5) = - prepare_parachain_heads_proof::(vec![(1, head_data(1, 5))]); - let (state_root_10_at_20, proof_10_at_20, 
parachains_10_at_20) = - prepare_parachain_heads_proof::(vec![(2, head_data(2, 10))]); - let (state_root_10_at_30, proof_10_at_30, parachains_10_at_30) = - prepare_parachain_heads_proof::(vec![(1, head_data(1, 10))]); - run_test(|| { - // we've already imported head#5 of parachain#1 at relay block#10 - initialize(state_root_5); - import_parachain_1_head(0, state_root_5, parachains_5, proof_5).expect("ok"); - assert_eq!( - Pallet::::best_parachain_head(ParaId(1)), - Some(stored_head_data(1, 5)) - ); - - // then if someone is pretending to provide updated head#10 of parachain#1 at relay - // block#20, but fails to do that - // - // => we'll leave previous value - proceed(20, state_root_10_at_20); - assert_ok!(Pallet::::submit_parachain_heads( - RuntimeOrigin::signed(1), - (20, test_relay_header(20, state_root_10_at_20).hash()), - parachains_10_at_20, - proof_10_at_20, - ),); - assert_eq!( - Pallet::::best_parachain_head(ParaId(1)), - Some(stored_head_data(1, 5)) - ); - - // then if someone is pretending to provide updated head#10 of parachain#1 at relay - // block#30, and actually provides it - // - // => we'll update value - proceed(30, state_root_10_at_30); - assert_ok!(Pallet::::submit_parachain_heads( - RuntimeOrigin::signed(1), - (30, test_relay_header(30, state_root_10_at_30).hash()), - parachains_10_at_30, - proof_10_at_30, - ),); - assert_eq!( - Pallet::::best_parachain_head(ParaId(1)), - Some(stored_head_data(1, 10)) - ); - }); - } - - #[test] - fn storage_keys_computed_properly() { - assert_eq!( - ParasInfo::::storage_map_final_key(ParaId(42)).to_vec(), - ParasInfoKeyProvider::final_key("Parachains", &ParaId(42)).0 - ); - - assert_eq!( - ImportedParaHeads::::storage_double_map_final_key( - ParaId(42), - ParaHash::from([21u8; 32]) - ) - .to_vec(), - ImportedParaHeadsKeyProvider::final_key( - "Parachains", - &ParaId(42), - &ParaHash::from([21u8; 32]) - ) - .0, - ); - } - - #[test] - fn ignores_parachain_head_if_it_is_missing_from_storage_proof() { - let 
(state_root, proof, _) = - prepare_parachain_heads_proof::(vec![]); - let parachains = vec![(ParaId(2), Default::default())]; - run_test(|| { - initialize(state_root); - assert_ok!(Pallet::::submit_parachain_heads( - RuntimeOrigin::signed(1), - (0, test_relay_header(0, state_root).hash()), - parachains, - proof, - )); - assert_eq!( - System::::events(), - vec![EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::MissingParachainHead { - parachain: ParaId(2), - }), - topics: vec![], - }], - ); - }); - } - - #[test] - fn ignores_parachain_head_if_parachain_head_hash_is_wrong() { - let (state_root, proof, _) = - prepare_parachain_heads_proof::(vec![(1, head_data(1, 0))]); - let parachains = vec![(ParaId(1), head_data(1, 10).hash())]; - run_test(|| { - initialize(state_root); - assert_ok!(Pallet::::submit_parachain_heads( - RuntimeOrigin::signed(1), - (0, test_relay_header(0, state_root).hash()), - parachains, - proof, - )); - assert_eq!( - System::::events(), - vec![EventRecord { - phase: Phase::Initialization, - event: TestEvent::Parachains(Event::IncorrectParachainHeadHash { - parachain: ParaId(1), - parachain_head_hash: head_data(1, 10).hash(), - actual_parachain_head_hash: head_data(1, 0).hash(), - }), - topics: vec![], - }], - ); - }); - } - - #[test] - fn test_bridge_parachain_call_is_correctly_defined() { - let (state_root, proof, _) = - prepare_parachain_heads_proof::(vec![(1, head_data(1, 0))]); - let parachains = vec![(ParaId(2), Default::default())]; - let relay_header_id = (0, test_relay_header(0, state_root).hash()); - - let direct_submit_parachain_heads_call = Call::::submit_parachain_heads { - at_relay_block: relay_header_id, - parachains: parachains.clone(), - parachain_heads_proof: proof.clone(), - }; - let indirect_submit_parachain_heads_call = BridgeParachainCall::submit_parachain_heads { - at_relay_block: relay_header_id, - parachains, - parachain_heads_proof: proof, - }; - assert_eq!( - 
direct_submit_parachain_heads_call.encode(), - indirect_submit_parachain_heads_call.encode() - ); - } - - generate_owned_bridge_module_tests!(BasicOperatingMode::Normal, BasicOperatingMode::Halted); - - #[test] - fn maybe_max_parachains_returns_correct_value() { - assert_eq!(MaybeMaxParachains::::get(), Some(mock::TOTAL_PARACHAINS)); - } - - #[test] - fn maybe_max_total_parachain_hashes_returns_correct_value() { - assert_eq!( - MaybeMaxTotalParachainHashes::::get(), - Some(mock::TOTAL_PARACHAINS * mock::HeadsToKeep::get()), - ); - } - - #[test] - fn submit_finality_proof_requires_signed_origin() { - run_test(|| { - let (state_root, proof, parachains) = - prepare_parachain_heads_proof::(vec![(1, head_data(1, 0))]); - - initialize(state_root); - - // `submit_parachain_heads()` should fail when the pallet is halted. - assert_noop!( - Pallet::::submit_parachain_heads( - RuntimeOrigin::root(), - (0, test_relay_header(0, state_root).hash()), - parachains, - proof, - ), - DispatchError::BadOrigin - ); - }) - } -} diff --git a/modules/parachains/src/mock.rs b/modules/parachains/src/mock.rs deleted file mode 100644 index d9cbabf85..000000000 --- a/modules/parachains/src/mock.rs +++ /dev/null @@ -1,328 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . 
- -use bp_header_chain::ChainWithGrandpa; -use bp_polkadot_core::parachains::ParaId; -use bp_runtime::{Chain, ChainId, Parachain}; -use frame_support::{ - construct_runtime, derive_impl, parameter_types, traits::ConstU32, weights::Weight, -}; -use sp_runtime::{ - testing::H256, - traits::{BlakeTwo256, Header as HeaderT}, - MultiSignature, -}; - -use crate as pallet_bridge_parachains; - -pub type AccountId = u64; - -pub type RelayBlockHeader = - sp_runtime::generic::Header; - -type Block = frame_system::mocking::MockBlock; - -pub const PARAS_PALLET_NAME: &str = "Paras"; -pub const UNTRACKED_PARACHAIN_ID: u32 = 10; -// use exact expected encoded size: `vec_len_size + header_number_size + state_root_hash_size` -pub const MAXIMAL_PARACHAIN_HEAD_DATA_SIZE: u32 = 1 + 8 + 32; -// total parachains that we use in tests -pub const TOTAL_PARACHAINS: u32 = 4; - -pub type RegularParachainHeader = sp_runtime::testing::Header; -pub type RegularParachainHasher = BlakeTwo256; -pub type BigParachainHeader = sp_runtime::generic::Header; - -pub struct Parachain1; - -impl Chain for Parachain1 { - const ID: ChainId = *b"pch1"; - - type BlockNumber = u64; - type Hash = H256; - type Hasher = RegularParachainHasher; - type Header = RegularParachainHeader; - type AccountId = u64; - type Balance = u64; - type Nonce = u64; - type Signature = MultiSignature; - - fn max_extrinsic_size() -> u32 { - 0 - } - fn max_extrinsic_weight() -> Weight { - Weight::zero() - } -} - -impl Parachain for Parachain1 { - const PARACHAIN_ID: u32 = 1; -} - -pub struct Parachain2; - -impl Chain for Parachain2 { - const ID: ChainId = *b"pch2"; - - type BlockNumber = u64; - type Hash = H256; - type Hasher = RegularParachainHasher; - type Header = RegularParachainHeader; - type AccountId = u64; - type Balance = u64; - type Nonce = u64; - type Signature = MultiSignature; - - fn max_extrinsic_size() -> u32 { - 0 - } - fn max_extrinsic_weight() -> Weight { - Weight::zero() - } -} - -impl Parachain for Parachain2 { - const 
PARACHAIN_ID: u32 = 2; -} - -pub struct Parachain3; - -impl Chain for Parachain3 { - const ID: ChainId = *b"pch3"; - - type BlockNumber = u64; - type Hash = H256; - type Hasher = RegularParachainHasher; - type Header = RegularParachainHeader; - type AccountId = u64; - type Balance = u64; - type Nonce = u64; - type Signature = MultiSignature; - - fn max_extrinsic_size() -> u32 { - 0 - } - fn max_extrinsic_weight() -> Weight { - Weight::zero() - } -} - -impl Parachain for Parachain3 { - const PARACHAIN_ID: u32 = 3; -} - -// this parachain is using u128 as block number and stored head data size exceeds limit -pub struct BigParachain; - -impl Chain for BigParachain { - const ID: ChainId = *b"bpch"; - - type BlockNumber = u128; - type Hash = H256; - type Hasher = RegularParachainHasher; - type Header = BigParachainHeader; - type AccountId = u64; - type Balance = u64; - type Nonce = u64; - type Signature = MultiSignature; - - fn max_extrinsic_size() -> u32 { - 0 - } - fn max_extrinsic_weight() -> Weight { - Weight::zero() - } -} - -impl Parachain for BigParachain { - const PARACHAIN_ID: u32 = 4; -} - -construct_runtime! { - pub enum TestRuntime - { - System: frame_system::{Pallet, Call, Config, Storage, Event}, - Grandpa1: pallet_bridge_grandpa::::{Pallet, Event}, - Grandpa2: pallet_bridge_grandpa::::{Pallet, Event}, - Parachains: pallet_bridge_parachains::{Call, Pallet, Event}, - } -} - -#[derive_impl(frame_system::config_preludes::TestDefaultConfig)] -impl frame_system::Config for TestRuntime { - type Block = Block; -} - -parameter_types! 
{ - pub const HeadersToKeep: u32 = 5; -} - -impl pallet_bridge_grandpa::Config for TestRuntime { - type RuntimeEvent = RuntimeEvent; - type BridgedChain = TestBridgedChain; - type MaxFreeMandatoryHeadersPerBlock = ConstU32<2>; - type HeadersToKeep = HeadersToKeep; - type WeightInfo = (); -} - -impl pallet_bridge_grandpa::Config for TestRuntime { - type RuntimeEvent = RuntimeEvent; - type BridgedChain = TestBridgedChain; - type MaxFreeMandatoryHeadersPerBlock = ConstU32<2>; - type HeadersToKeep = HeadersToKeep; - type WeightInfo = (); -} - -parameter_types! { - pub const HeadsToKeep: u32 = 4; - pub const ParasPalletName: &'static str = PARAS_PALLET_NAME; - pub GetTenFirstParachains: Vec = (0..10).map(ParaId).collect(); -} - -impl pallet_bridge_parachains::Config for TestRuntime { - type RuntimeEvent = RuntimeEvent; - type WeightInfo = (); - type BridgesGrandpaPalletInstance = pallet_bridge_grandpa::Instance1; - type ParasPalletName = ParasPalletName; - type ParaStoredHeaderDataBuilder = (Parachain1, Parachain2, Parachain3, BigParachain); - type HeadsToKeep = HeadsToKeep; - type MaxParaHeadDataSize = ConstU32; -} - -#[cfg(feature = "runtime-benchmarks")] -impl pallet_bridge_parachains::benchmarking::Config<()> for TestRuntime { - fn parachains() -> Vec { - vec![ - ParaId(Parachain1::PARACHAIN_ID), - ParaId(Parachain2::PARACHAIN_ID), - ParaId(Parachain3::PARACHAIN_ID), - ] - } - - fn prepare_parachain_heads_proof( - parachains: &[ParaId], - _parachain_head_size: u32, - _proof_size: bp_runtime::StorageProofSize, - ) -> ( - crate::RelayBlockNumber, - crate::RelayBlockHash, - bp_polkadot_core::parachains::ParaHeadsProof, - Vec<(ParaId, bp_polkadot_core::parachains::ParaHash)>, - ) { - // in mock run we only care about benchmarks correctness, not the benchmark results - // => ignore size related arguments - let (state_root, proof, parachains) = - bp_test_utils::prepare_parachain_heads_proof::( - parachains.iter().map(|p| (p.0, crate::tests::head_data(p.0, 1))).collect(), 
- ); - let relay_genesis_hash = crate::tests::initialize(state_root); - (0, relay_genesis_hash, proof, parachains) - } -} - -#[derive(Debug)] -pub struct TestBridgedChain; - -impl Chain for TestBridgedChain { - const ID: ChainId = *b"tbch"; - - type BlockNumber = crate::RelayBlockNumber; - type Hash = crate::RelayBlockHash; - type Hasher = crate::RelayBlockHasher; - type Header = RelayBlockHeader; - - type AccountId = AccountId; - type Balance = u32; - type Nonce = u32; - type Signature = sp_runtime::testing::TestSignature; - - fn max_extrinsic_size() -> u32 { - unreachable!() - } - - fn max_extrinsic_weight() -> Weight { - unreachable!() - } -} - -impl ChainWithGrandpa for TestBridgedChain { - const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = ""; - const MAX_AUTHORITIES_COUNT: u32 = 16; - const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = 8; - const MAX_MANDATORY_HEADER_SIZE: u32 = 256; - const AVERAGE_HEADER_SIZE: u32 = 64; -} - -#[derive(Debug)] -pub struct OtherBridgedChain; - -impl Chain for OtherBridgedChain { - const ID: ChainId = *b"obch"; - - type BlockNumber = u64; - type Hash = crate::RelayBlockHash; - type Hasher = crate::RelayBlockHasher; - type Header = sp_runtime::generic::Header; - - type AccountId = AccountId; - type Balance = u32; - type Nonce = u32; - type Signature = sp_runtime::testing::TestSignature; - - fn max_extrinsic_size() -> u32 { - unreachable!() - } - - fn max_extrinsic_weight() -> Weight { - unreachable!() - } -} - -impl ChainWithGrandpa for OtherBridgedChain { - const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = ""; - const MAX_AUTHORITIES_COUNT: u32 = 16; - const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = 8; - const MAX_MANDATORY_HEADER_SIZE: u32 = 256; - const AVERAGE_HEADER_SIZE: u32 = 64; -} - -/// Return test externalities to use in tests. -pub fn new_test_ext() -> sp_io::TestExternalities { - sp_io::TestExternalities::new(Default::default()) -} - -/// Run pallet test. 
-pub fn run_test(test: impl FnOnce() -> T) -> T { - new_test_ext().execute_with(|| { - System::set_block_number(1); - System::reset_events(); - test() - }) -} - -/// Return test relay chain header with given number. -pub fn test_relay_header( - num: crate::RelayBlockNumber, - state_root: crate::RelayBlockHash, -) -> RelayBlockHeader { - RelayBlockHeader::new( - num, - Default::default(), - state_root, - Default::default(), - Default::default(), - ) -} diff --git a/modules/parachains/src/weights.rs b/modules/parachains/src/weights.rs deleted file mode 100644 index abddc8768..000000000 --- a/modules/parachains/src/weights.rs +++ /dev/null @@ -1,273 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Autogenerated weights for pallet_bridge_parachains -//! -//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2023-03-02, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` -//! WORST CASE MAP SIZE: `1000000` -//! HOSTNAME: `covid`, CPU: `11th Gen Intel(R) Core(TM) i7-11800H @ 2.30GHz` -//! 
EXECUTION: Some(Wasm), WASM-EXECUTION: Compiled, CHAIN: Some("dev"), DB CACHE: 1024 - -// Executed Command: -// target/release/unknown-bridge-node -// benchmark -// pallet -// --chain=dev -// --steps=50 -// --repeat=20 -// --pallet=pallet_bridge_parachains -// --extrinsic=* -// --execution=wasm -// --wasm-execution=Compiled -// --heap-pages=4096 -// --output=./modules/parachains/src/weights.rs -// --template=./.maintain/bridge-weight-template.hbs - -#![allow(clippy::all)] -#![allow(unused_parens)] -#![allow(unused_imports)] -#![allow(missing_docs)] - -use frame_support::{ - traits::Get, - weights::{constants::RocksDbWeight, Weight}, -}; -use sp_std::marker::PhantomData; - -/// Weight functions needed for pallet_bridge_parachains. -pub trait WeightInfo { - fn submit_parachain_heads_with_n_parachains(p: u32) -> Weight; - fn submit_parachain_heads_with_1kb_proof() -> Weight; - fn submit_parachain_heads_with_16kb_proof() -> Weight; -} - -/// Weights for `pallet_bridge_parachains` that are generated using one of the Bridge testnets. -/// -/// Those weights are test only and must never be used in production. 
-pub struct BridgeWeight(PhantomData); -impl WeightInfo for BridgeWeight { - /// Storage: BridgeUnknownParachains PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownParachains PalletOperatingMode (max_values: Some(1), max_size: Some(1), - /// added: 496, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ParasInfo (r:1 w:1) - /// - /// Proof: BridgeUnknownParachains ParasInfo (max_values: Some(1), max_size: Some(60), added: - /// 555, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ImportedParaHashes (r:1 w:1) - /// - /// Proof: BridgeUnknownParachains ImportedParaHashes (max_values: Some(1024), max_size: - /// Some(64), added: 1549, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ImportedParaHeads (r:0 w:1) - /// - /// Proof: BridgeUnknownParachains ImportedParaHeads (max_values: Some(1024), max_size: - /// Some(196), added: 1681, mode: MaxEncodedLen) - /// - /// The range of component `p` is `[1, 2]`. - fn submit_parachain_heads_with_n_parachains(p: u32) -> Weight { - // Proof Size summary in bytes: - // Measured: `366` - // Estimated: `4648` - // Minimum execution time: 36_701 nanoseconds. 
- Weight::from_parts(38_597_828, 4648) - // Standard Error: 190_859 - .saturating_add(Weight::from_parts(60_685, 0).saturating_mul(p.into())) - .saturating_add(T::DbWeight::get().reads(4_u64)) - .saturating_add(T::DbWeight::get().writes(3_u64)) - } - /// Storage: BridgeUnknownParachains PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownParachains PalletOperatingMode (max_values: Some(1), max_size: Some(1), - /// added: 496, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ParasInfo (r:1 w:1) - /// - /// Proof: BridgeUnknownParachains ParasInfo (max_values: Some(1), max_size: Some(60), added: - /// 555, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ImportedParaHashes (r:1 w:1) - /// - /// Proof: BridgeUnknownParachains ImportedParaHashes (max_values: Some(1024), max_size: - /// Some(64), added: 1549, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ImportedParaHeads (r:0 w:1) - /// - /// Proof: BridgeUnknownParachains ImportedParaHeads (max_values: Some(1024), max_size: - /// Some(196), added: 1681, mode: MaxEncodedLen) - fn submit_parachain_heads_with_1kb_proof() -> Weight { - // Proof Size summary in bytes: - // Measured: `366` - // Estimated: `4648` - // Minimum execution time: 38_189 nanoseconds. 
- Weight::from_parts(39_252_000, 4648) - .saturating_add(T::DbWeight::get().reads(4_u64)) - .saturating_add(T::DbWeight::get().writes(3_u64)) - } - /// Storage: BridgeUnknownParachains PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownParachains PalletOperatingMode (max_values: Some(1), max_size: Some(1), - /// added: 496, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ParasInfo (r:1 w:1) - /// - /// Proof: BridgeUnknownParachains ParasInfo (max_values: Some(1), max_size: Some(60), added: - /// 555, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ImportedParaHashes (r:1 w:1) - /// - /// Proof: BridgeUnknownParachains ImportedParaHashes (max_values: Some(1024), max_size: - /// Some(64), added: 1549, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ImportedParaHeads (r:0 w:1) - /// - /// Proof: BridgeUnknownParachains ImportedParaHeads (max_values: Some(1024), max_size: - /// Some(196), added: 1681, mode: MaxEncodedLen) - fn submit_parachain_heads_with_16kb_proof() -> Weight { - // Proof Size summary in bytes: - // Measured: `366` - // Estimated: `4648` - // Minimum execution time: 62_868 nanoseconds. 
- Weight::from_parts(63_581_000, 4648) - .saturating_add(T::DbWeight::get().reads(4_u64)) - .saturating_add(T::DbWeight::get().writes(3_u64)) - } -} - -// For backwards compatibility and tests -impl WeightInfo for () { - /// Storage: BridgeUnknownParachains PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownParachains PalletOperatingMode (max_values: Some(1), max_size: Some(1), - /// added: 496, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ParasInfo (r:1 w:1) - /// - /// Proof: BridgeUnknownParachains ParasInfo (max_values: Some(1), max_size: Some(60), added: - /// 555, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ImportedParaHashes (r:1 w:1) - /// - /// Proof: BridgeUnknownParachains ImportedParaHashes (max_values: Some(1024), max_size: - /// Some(64), added: 1549, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ImportedParaHeads (r:0 w:1) - /// - /// Proof: BridgeUnknownParachains ImportedParaHeads (max_values: Some(1024), max_size: - /// Some(196), added: 1681, mode: MaxEncodedLen) - /// - /// The range of component `p` is `[1, 2]`. - fn submit_parachain_heads_with_n_parachains(p: u32) -> Weight { - // Proof Size summary in bytes: - // Measured: `366` - // Estimated: `4648` - // Minimum execution time: 36_701 nanoseconds. 
- Weight::from_parts(38_597_828, 4648) - // Standard Error: 190_859 - .saturating_add(Weight::from_parts(60_685, 0).saturating_mul(p.into())) - .saturating_add(RocksDbWeight::get().reads(4_u64)) - .saturating_add(RocksDbWeight::get().writes(3_u64)) - } - /// Storage: BridgeUnknownParachains PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownParachains PalletOperatingMode (max_values: Some(1), max_size: Some(1), - /// added: 496, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ParasInfo (r:1 w:1) - /// - /// Proof: BridgeUnknownParachains ParasInfo (max_values: Some(1), max_size: Some(60), added: - /// 555, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ImportedParaHashes (r:1 w:1) - /// - /// Proof: BridgeUnknownParachains ImportedParaHashes (max_values: Some(1024), max_size: - /// Some(64), added: 1549, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ImportedParaHeads (r:0 w:1) - /// - /// Proof: BridgeUnknownParachains ImportedParaHeads (max_values: Some(1024), max_size: - /// Some(196), added: 1681, mode: MaxEncodedLen) - fn submit_parachain_heads_with_1kb_proof() -> Weight { - // Proof Size summary in bytes: - // Measured: `366` - // Estimated: `4648` - // Minimum execution time: 38_189 nanoseconds. 
- Weight::from_parts(39_252_000, 4648) - .saturating_add(RocksDbWeight::get().reads(4_u64)) - .saturating_add(RocksDbWeight::get().writes(3_u64)) - } - /// Storage: BridgeUnknownParachains PalletOperatingMode (r:1 w:0) - /// - /// Proof: BridgeUnknownParachains PalletOperatingMode (max_values: Some(1), max_size: Some(1), - /// added: 496, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownGrandpa ImportedHeaders (r:1 w:0) - /// - /// Proof: BridgeUnknownGrandpa ImportedHeaders (max_values: Some(14400), max_size: Some(68), - /// added: 2048, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ParasInfo (r:1 w:1) - /// - /// Proof: BridgeUnknownParachains ParasInfo (max_values: Some(1), max_size: Some(60), added: - /// 555, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ImportedParaHashes (r:1 w:1) - /// - /// Proof: BridgeUnknownParachains ImportedParaHashes (max_values: Some(1024), max_size: - /// Some(64), added: 1549, mode: MaxEncodedLen) - /// - /// Storage: BridgeUnknownParachains ImportedParaHeads (r:0 w:1) - /// - /// Proof: BridgeUnknownParachains ImportedParaHeads (max_values: Some(1024), max_size: - /// Some(196), added: 1681, mode: MaxEncodedLen) - fn submit_parachain_heads_with_16kb_proof() -> Weight { - // Proof Size summary in bytes: - // Measured: `366` - // Estimated: `4648` - // Minimum execution time: 62_868 nanoseconds. - Weight::from_parts(63_581_000, 4648) - .saturating_add(RocksDbWeight::get().reads(4_u64)) - .saturating_add(RocksDbWeight::get().writes(3_u64)) - } -} diff --git a/modules/parachains/src/weights_ext.rs b/modules/parachains/src/weights_ext.rs deleted file mode 100644 index 393086a85..000000000 --- a/modules/parachains/src/weights_ext.rs +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Weight-related utilities. - -use crate::weights::{BridgeWeight, WeightInfo}; - -use bp_runtime::Size; -use frame_support::weights::{RuntimeDbWeight, Weight}; - -/// Size of the regular parachain head. -/// -/// It's not that we are expecting all parachain heads to share the same size or that we would -/// reject all heads that have larger/lesser size. It is about head size that we use in benchmarks. -/// Relayer would need to pay additional fee for extra bytes. -/// -/// 384 is a bit larger (1.3 times) than the size of the randomly chosen Polkadot block. -pub const DEFAULT_PARACHAIN_HEAD_SIZE: u32 = 384; - -/// Number of extra bytes (excluding size of storage value itself) of storage proof, built at -/// some generic chain. -pub const EXTRA_STORAGE_PROOF_SIZE: u32 = 1024; - -/// Extended weight info. -pub trait WeightInfoExt: WeightInfo { - /// Storage proof overhead, that is included in every storage proof. - /// - /// The relayer would pay some extra fee for additional proof bytes, since they mean - /// more hashing operations. - fn expected_extra_storage_proof_size() -> u32; - - /// Weight of the parachain heads delivery extrinsic. 
- fn submit_parachain_heads_weight( - db_weight: RuntimeDbWeight, - proof: &impl Size, - parachains_count: u32, - ) -> Weight { - // weight of the `submit_parachain_heads` with exactly `parachains_count` parachain - // heads of the default size (`DEFAULT_PARACHAIN_HEAD_SIZE`) - let base_weight = Self::submit_parachain_heads_with_n_parachains(parachains_count); - - // overhead because of extra storage proof bytes - let expected_proof_size = parachains_count - .saturating_mul(DEFAULT_PARACHAIN_HEAD_SIZE) - .saturating_add(Self::expected_extra_storage_proof_size()); - let actual_proof_size = proof.size(); - let proof_size_overhead = Self::storage_proof_size_overhead( - actual_proof_size.saturating_sub(expected_proof_size), - ); - - // potential pruning weight (refunded if hasn't happened) - let pruning_weight = - Self::parachain_head_pruning_weight(db_weight).saturating_mul(parachains_count as u64); - - base_weight.saturating_add(proof_size_overhead).saturating_add(pruning_weight) - } - - /// Returns weight of single parachain head storage update. - /// - /// This weight only includes db write operations that happens if parachain head is actually - /// updated. All extra weights (weight of storage proof validation, additional checks, ...) is - /// not included. - fn parachain_head_storage_write_weight(db_weight: RuntimeDbWeight) -> Weight { - // it's just a couple of operations - we need to write the hash (`ImportedParaHashes`) and - // the head itself (`ImportedParaHeads`. Pruning is not included here - db_weight.writes(2) - } - - /// Returns weight of single parachain head pruning. - fn parachain_head_pruning_weight(db_weight: RuntimeDbWeight) -> Weight { - // it's just one write operation, we don't want any benchmarks for that - db_weight.writes(1) - } - - /// Returns weight that needs to be accounted when storage proof of given size is received. 
- fn storage_proof_size_overhead(extra_proof_bytes: u32) -> Weight { - let extra_byte_weight = (Self::submit_parachain_heads_with_16kb_proof() - - Self::submit_parachain_heads_with_1kb_proof()) / - (15 * 1024); - extra_byte_weight.saturating_mul(extra_proof_bytes as u64) - } -} - -impl WeightInfoExt for () { - fn expected_extra_storage_proof_size() -> u32 { - EXTRA_STORAGE_PROOF_SIZE - } -} - -impl WeightInfoExt for BridgeWeight { - fn expected_extra_storage_proof_size() -> u32 { - EXTRA_STORAGE_PROOF_SIZE - } -} diff --git a/modules/relayers/Cargo.toml b/modules/relayers/Cargo.toml deleted file mode 100644 index ae57e36f7..000000000 --- a/modules/relayers/Cargo.toml +++ /dev/null @@ -1,71 +0,0 @@ -[package] -name = "pallet-bridge-relayers" -description = "Module used to store relayer rewards and coordinate relayers set." -version = "0.7.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } -log = { workspace = true } -scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } - -# Bridge dependencies - -bp-messages = { path = "../../primitives/messages", default-features = false } -bp-relayers = { path = "../../primitives/relayers", default-features = false } -bp-runtime = { path = "../../primitives/runtime", default-features = false } -pallet-bridge-messages = { path = "../messages", default-features = false } - -# Substrate Dependencies - -frame-benchmarking = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false, optional = true } -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -frame-system = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } 
-sp-arithmetic = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[dev-dependencies] -bp-runtime = { path = "../../primitives/runtime" } -pallet-balances = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-io = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } - -[features] -default = ["std"] -std = [ - "bp-messages/std", - "bp-relayers/std", - "bp-runtime/std", - "codec/std", - "frame-benchmarking/std", - "frame-support/std", - "frame-system/std", - "log/std", - "pallet-bridge-messages/std", - "scale-info/std", - "sp-arithmetic/std", - "sp-runtime/std", - "sp-std/std", -] -runtime-benchmarks = [ - "frame-benchmarking/runtime-benchmarks", - "frame-support/runtime-benchmarks", - "frame-system/runtime-benchmarks", - "pallet-balances/runtime-benchmarks", - "pallet-bridge-messages/runtime-benchmarks", - "sp-runtime/runtime-benchmarks", -] -try-runtime = [ - "frame-support/try-runtime", - "frame-system/try-runtime", - "pallet-balances/try-runtime", - "pallet-bridge-messages/try-runtime", - "sp-runtime/try-runtime", -] diff --git a/modules/relayers/README.md b/modules/relayers/README.md deleted file mode 100644 index 656200f44..000000000 --- a/modules/relayers/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Bridge Relayers Pallet - -The pallet serves as a storage for pending bridge relayer rewards. Any runtime component may register reward -to some relayer for doing some useful job at some messages lane. Later, the relayer may claim its rewards -using the `claim_rewards` call. - -The reward payment procedure is abstracted from the pallet code. 
One of possible implementations, is the -[`PayLaneRewardFromAccount`](../../primitives/relayers/src/lib.rs), which just does a `Currency::transfer` -call to relayer account from the relayer-rewards account, determined by the message lane id. - -We have two examples of how this pallet is used in production. Rewards are registered at the target chain to -compensate fees of message delivery transactions (and linked finality delivery calls). At the source chain, rewards -are registered during delivery confirmation transactions. You may find more information about that in the -[Kusama <> Polkadot bridge](../../docs/polkadot-kusama-bridge-overview.md) documentation. diff --git a/modules/relayers/src/benchmarking.rs b/modules/relayers/src/benchmarking.rs deleted file mode 100644 index 00c3814a4..000000000 --- a/modules/relayers/src/benchmarking.rs +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Benchmarks for the relayers Pallet. 
- -#![cfg(feature = "runtime-benchmarks")] - -use crate::*; - -use bp_messages::LaneId; -use bp_relayers::RewardsAccountOwner; -use frame_benchmarking::{benchmarks, whitelisted_caller}; -use frame_system::RawOrigin; -use sp_runtime::traits::One; - -/// Reward amount that is (hopefully) is larger than existential deposit across all chains. -const REWARD_AMOUNT: u32 = u32::MAX; - -/// Pallet we're benchmarking here. -pub struct Pallet(crate::Pallet); - -/// Trait that must be implemented by runtime. -pub trait Config: crate::Config { - /// Prepare environment for paying given reward for serving given lane. - fn prepare_rewards_account(account_params: RewardsAccountParams, reward: Self::Reward); - /// Give enough balance to given account. - fn deposit_account(account: Self::AccountId, balance: Self::Reward); -} - -benchmarks! { - // Benchmark `claim_rewards` call. - claim_rewards { - let lane = LaneId([0, 0, 0, 0]); - let account_params = - RewardsAccountParams::new(lane, *b"test", RewardsAccountOwner::ThisChain); - let relayer: T::AccountId = whitelisted_caller(); - let reward = T::Reward::from(REWARD_AMOUNT); - - T::prepare_rewards_account(account_params, reward); - RelayerRewards::::insert(&relayer, account_params, reward); - }: _(RawOrigin::Signed(relayer), account_params) - verify { - // we can't check anything here, because `PaymentProcedure` is responsible for - // payment logic, so we assume that if call has succeeded, the procedure has - // also completed successfully - } - - // Benchmark `register` call. 
- register { - let relayer: T::AccountId = whitelisted_caller(); - let valid_till = frame_system::Pallet::::block_number() - .saturating_add(crate::Pallet::::required_registration_lease()) - .saturating_add(One::one()) - .saturating_add(One::one()); - - T::deposit_account(relayer.clone(), crate::Pallet::::required_stake()); - }: _(RawOrigin::Signed(relayer.clone()), valid_till) - verify { - assert!(crate::Pallet::::is_registration_active(&relayer)); - } - - // Benchmark `deregister` call. - deregister { - let relayer: T::AccountId = whitelisted_caller(); - let valid_till = frame_system::Pallet::::block_number() - .saturating_add(crate::Pallet::::required_registration_lease()) - .saturating_add(One::one()) - .saturating_add(One::one()); - T::deposit_account(relayer.clone(), crate::Pallet::::required_stake()); - crate::Pallet::::register(RawOrigin::Signed(relayer.clone()).into(), valid_till).unwrap(); - - frame_system::Pallet::::set_block_number(valid_till.saturating_add(One::one())); - }: _(RawOrigin::Signed(relayer.clone())) - verify { - assert!(!crate::Pallet::::is_registration_active(&relayer)); - } - - // Benchmark `slash_and_deregister` method of the pallet. We are adding this weight to - // the weight of message delivery call if `RefundBridgedParachainMessages` signed extension - // is deployed at runtime level. 
- slash_and_deregister { - // prepare and register relayer account - let relayer: T::AccountId = whitelisted_caller(); - let valid_till = frame_system::Pallet::::block_number() - .saturating_add(crate::Pallet::::required_registration_lease()) - .saturating_add(One::one()) - .saturating_add(One::one()); - T::deposit_account(relayer.clone(), crate::Pallet::::required_stake()); - crate::Pallet::::register(RawOrigin::Signed(relayer.clone()).into(), valid_till).unwrap(); - - // create slash destination account - let lane = LaneId([0, 0, 0, 0]); - let slash_destination = RewardsAccountParams::new(lane, *b"test", RewardsAccountOwner::ThisChain); - T::prepare_rewards_account(slash_destination, Zero::zero()); - }: { - crate::Pallet::::slash_and_deregister(&relayer, slash_destination) - } - verify { - assert!(!crate::Pallet::::is_registration_active(&relayer)); - } - - // Benchmark `register_relayer_reward` method of the pallet. We are adding this weight to - // the weight of message delivery call if `RefundBridgedParachainMessages` signed extension - // is deployed at runtime level. - register_relayer_reward { - let lane = LaneId([0, 0, 0, 0]); - let relayer: T::AccountId = whitelisted_caller(); - let account_params = - RewardsAccountParams::new(lane, *b"test", RewardsAccountOwner::ThisChain); - }: { - crate::Pallet::::register_relayer_reward(account_params, &relayer, One::one()); - } - verify { - assert_eq!(RelayerRewards::::get(relayer, &account_params), Some(One::one())); - } - - impl_benchmark_test_suite!(Pallet, crate::mock::new_test_ext(), crate::mock::TestRuntime) -} diff --git a/modules/relayers/src/lib.rs b/modules/relayers/src/lib.rs deleted file mode 100644 index ce66c9df4..000000000 --- a/modules/relayers/src/lib.rs +++ /dev/null @@ -1,922 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Runtime module that is used to store relayer rewards and (in the future) to -//! coordinate relations between relayers. - -#![cfg_attr(not(feature = "std"), no_std)] -#![warn(missing_docs)] - -use bp_relayers::{ - PaymentProcedure, Registration, RelayerRewardsKeyProvider, RewardsAccountParams, StakeAndSlash, -}; -use bp_runtime::StorageDoubleMapKeyProvider; -use frame_support::fail; -use sp_arithmetic::traits::{AtLeast32BitUnsigned, Zero}; -use sp_runtime::{traits::CheckedSub, Saturating}; -use sp_std::marker::PhantomData; - -pub use pallet::*; -pub use payment_adapter::DeliveryConfirmationPaymentsAdapter; -pub use stake_adapter::StakeAndSlashNamed; -pub use weights::WeightInfo; -pub use weights_ext::WeightInfoExt; - -pub mod benchmarking; - -mod mock; -mod payment_adapter; -mod stake_adapter; -mod weights_ext; - -pub mod weights; - -/// The target that will be used when publishing logs related to this pallet. -pub const LOG_TARGET: &str = "runtime::bridge-relayers"; - -#[frame_support::pallet] -pub mod pallet { - use super::*; - use frame_support::pallet_prelude::*; - use frame_system::pallet_prelude::*; - - /// `RelayerRewardsKeyProvider` for given configuration. 
- type RelayerRewardsKeyProviderOf = - RelayerRewardsKeyProvider<::AccountId, ::Reward>; - - #[pallet::config] - pub trait Config: frame_system::Config { - /// The overarching event type. - type RuntimeEvent: From> + IsType<::RuntimeEvent>; - /// Type of relayer reward. - type Reward: AtLeast32BitUnsigned + Copy + Parameter + MaxEncodedLen; - /// Pay rewards scheme. - type PaymentProcedure: PaymentProcedure; - /// Stake and slash scheme. - type StakeAndSlash: StakeAndSlash, Self::Reward>; - /// Pallet call weights. - type WeightInfo: WeightInfoExt; - } - - #[pallet::pallet] - pub struct Pallet(PhantomData); - - #[pallet::call] - impl Pallet { - /// Claim accumulated rewards. - #[pallet::call_index(0)] - #[pallet::weight(T::WeightInfo::claim_rewards())] - pub fn claim_rewards( - origin: OriginFor, - rewards_account_params: RewardsAccountParams, - ) -> DispatchResult { - let relayer = ensure_signed(origin)?; - - RelayerRewards::::try_mutate_exists( - &relayer, - rewards_account_params, - |maybe_reward| -> DispatchResult { - let reward = maybe_reward.take().ok_or(Error::::NoRewardForRelayer)?; - T::PaymentProcedure::pay_reward(&relayer, rewards_account_params, reward) - .map_err(|e| { - log::trace!( - target: LOG_TARGET, - "Failed to pay {:?} rewards to {:?}: {:?}", - rewards_account_params, - relayer, - e, - ); - Error::::FailedToPayReward - })?; - - Self::deposit_event(Event::::RewardPaid { - relayer: relayer.clone(), - rewards_account_params, - reward, - }); - Ok(()) - }, - ) - } - - /// Register relayer or update its registration. - /// - /// Registration allows relayer to get priority boost for its message delivery transactions. 
- #[pallet::call_index(1)] - #[pallet::weight(T::WeightInfo::register())] - pub fn register(origin: OriginFor, valid_till: BlockNumberFor) -> DispatchResult { - let relayer = ensure_signed(origin)?; - - // valid till must be larger than the current block number and the lease must be larger - // than the `RequiredRegistrationLease` - let lease = valid_till.saturating_sub(frame_system::Pallet::::block_number()); - ensure!( - lease > Pallet::::required_registration_lease(), - Error::::InvalidRegistrationLease - ); - - RegisteredRelayers::::try_mutate(&relayer, |maybe_registration| -> DispatchResult { - let mut registration = maybe_registration - .unwrap_or_else(|| Registration { valid_till, stake: Zero::zero() }); - - // new `valid_till` must be larger (or equal) than the old one - ensure!( - valid_till >= registration.valid_till, - Error::::CannotReduceRegistrationLease, - ); - registration.valid_till = valid_till; - - // regarding stake, there are three options: - // - if relayer stake is larger than required stake, we may do unreserve - // - if relayer stake equals to required stake, we do nothing - // - if relayer stake is smaller than required stake, we do additional reserve - let required_stake = Pallet::::required_stake(); - if let Some(to_unreserve) = registration.stake.checked_sub(&required_stake) { - Self::do_unreserve(&relayer, to_unreserve)?; - } else if let Some(to_reserve) = required_stake.checked_sub(®istration.stake) { - T::StakeAndSlash::reserve(&relayer, to_reserve).map_err(|e| { - log::trace!( - target: LOG_TARGET, - "Failed to reserve {:?} on relayer {:?} account: {:?}", - to_reserve, - relayer, - e, - ); - - Error::::FailedToReserve - })?; - } - registration.stake = required_stake; - - log::trace!(target: LOG_TARGET, "Successfully registered relayer: {:?}", relayer); - Self::deposit_event(Event::::RegistrationUpdated { - relayer: relayer.clone(), - registration, - }); - - *maybe_registration = Some(registration); - - Ok(()) - }) - } - - /// 
`Deregister` relayer. - /// - /// After this call, message delivery transactions of the relayer won't get any priority - /// boost. - #[pallet::call_index(2)] - #[pallet::weight(T::WeightInfo::deregister())] - pub fn deregister(origin: OriginFor) -> DispatchResult { - let relayer = ensure_signed(origin)?; - - RegisteredRelayers::::try_mutate(&relayer, |maybe_registration| -> DispatchResult { - let registration = match maybe_registration.take() { - Some(registration) => registration, - None => fail!(Error::::NotRegistered), - }; - - // we can't deregister until `valid_till + 1` - ensure!( - registration.valid_till < frame_system::Pallet::::block_number(), - Error::::RegistrationIsStillActive, - ); - - // if stake is non-zero, we should do unreserve - if !registration.stake.is_zero() { - Self::do_unreserve(&relayer, registration.stake)?; - } - - log::trace!(target: LOG_TARGET, "Successfully deregistered relayer: {:?}", relayer); - Self::deposit_event(Event::::Deregistered { relayer: relayer.clone() }); - - *maybe_registration = None; - - Ok(()) - }) - } - } - - impl Pallet { - /// Returns true if given relayer registration is active at current block. - /// - /// This call respects both `RequiredStake` and `RequiredRegistrationLease`, meaning that - /// it'll return false if registered stake is lower than required or if remaining lease - /// is less than `RequiredRegistrationLease`. 
- pub fn is_registration_active(relayer: &T::AccountId) -> bool { - let registration = match Self::registered_relayer(relayer) { - Some(registration) => registration, - None => return false, - }; - - // registration is inactive if relayer stake is less than required - if registration.stake < Self::required_stake() { - return false - } - - // registration is inactive if it ends soon - let remaining_lease = registration - .valid_till - .saturating_sub(frame_system::Pallet::::block_number()); - if remaining_lease <= Self::required_registration_lease() { - return false - } - - true - } - - /// Slash and `deregister` relayer. This function slashes all staked balance. - /// - /// It may fail inside, but error is swallowed and we only log it. - pub fn slash_and_deregister( - relayer: &T::AccountId, - slash_destination: RewardsAccountParams, - ) { - let registration = match RegisteredRelayers::::take(relayer) { - Some(registration) => registration, - None => { - log::trace!( - target: crate::LOG_TARGET, - "Cannot slash unregistered relayer {:?}", - relayer, - ); - - return - }, - }; - - match T::StakeAndSlash::repatriate_reserved( - relayer, - slash_destination, - registration.stake, - ) { - Ok(failed_to_slash) if failed_to_slash.is_zero() => { - log::trace!( - target: crate::LOG_TARGET, - "Relayer account {:?} has been slashed for {:?}. Funds were deposited to {:?}", - relayer, - registration.stake, - slash_destination, - ); - }, - Ok(failed_to_slash) => { - log::trace!( - target: crate::LOG_TARGET, - "Relayer account {:?} has been partially slashed for {:?}. Funds were deposited to {:?}. \ - Failed to slash: {:?}", - relayer, - registration.stake, - slash_destination, - failed_to_slash, - ); - }, - Err(e) => { - // TODO: document this. Where? - - // it may fail if there's no beneficiary account. For us it means that this - // account must exists before we'll deploy the bridge - log::debug!( - target: crate::LOG_TARGET, - "Failed to slash relayer account {:?}: {:?}. 
Maybe beneficiary account doesn't exist? \ - Beneficiary: {:?}, amount: {:?}, failed to slash: {:?}", - relayer, - e, - slash_destination, - registration.stake, - registration.stake, - ); - }, - } - } - - /// Register reward for given relayer. - pub fn register_relayer_reward( - rewards_account_params: RewardsAccountParams, - relayer: &T::AccountId, - reward: T::Reward, - ) { - if reward.is_zero() { - return - } - - RelayerRewards::::mutate( - relayer, - rewards_account_params, - |old_reward: &mut Option| { - let new_reward = old_reward.unwrap_or_else(Zero::zero).saturating_add(reward); - *old_reward = Some(new_reward); - - log::trace!( - target: crate::LOG_TARGET, - "Relayer {:?} can now claim reward for serving payer {:?}: {:?}", - relayer, - rewards_account_params, - new_reward, - ); - - Self::deposit_event(Event::::RewardRegistered { - relayer: relayer.clone(), - rewards_account_params, - reward, - }); - }, - ); - } - - /// Return required registration lease. - pub(crate) fn required_registration_lease() -> BlockNumberFor { - , - T::Reward, - >>::RequiredRegistrationLease::get() - } - - /// Return required stake. - pub(crate) fn required_stake() -> T::Reward { - , - T::Reward, - >>::RequiredStake::get() - } - - /// `Unreserve` given amount on relayer account. - fn do_unreserve(relayer: &T::AccountId, amount: T::Reward) -> DispatchResult { - let failed_to_unreserve = T::StakeAndSlash::unreserve(relayer, amount); - if !failed_to_unreserve.is_zero() { - log::trace!( - target: LOG_TARGET, - "Failed to unreserve {:?}/{:?} on relayer {:?} account", - failed_to_unreserve, - amount, - relayer, - ); - - fail!(Error::::FailedToUnreserve) - } - - Ok(()) - } - } - - #[pallet::event] - #[pallet::generate_deposit(pub(super) fn deposit_event)] - pub enum Event { - /// Relayer reward has been registered and may be claimed later. - RewardRegistered { - /// Relayer account that can claim reward. - relayer: T::AccountId, - /// Relayer can claim reward from this account. 
- rewards_account_params: RewardsAccountParams, - /// Reward amount. - reward: T::Reward, - }, - /// Reward has been paid to the relayer. - RewardPaid { - /// Relayer account that has been rewarded. - relayer: T::AccountId, - /// Relayer has received reward from this account. - rewards_account_params: RewardsAccountParams, - /// Reward amount. - reward: T::Reward, - }, - /// Relayer registration has been added or updated. - RegistrationUpdated { - /// Relayer account that has been registered. - relayer: T::AccountId, - /// Relayer registration. - registration: Registration, T::Reward>, - }, - /// Relayer has been `deregistered`. - Deregistered { - /// Relayer account that has been `deregistered`. - relayer: T::AccountId, - }, - /// Relayer has been slashed and `deregistered`. - SlashedAndDeregistered { - /// Relayer account that has been `deregistered`. - relayer: T::AccountId, - /// Registration that was removed. - registration: Registration, T::Reward>, - }, - } - - #[pallet::error] - pub enum Error { - /// No reward can be claimed by given relayer. - NoRewardForRelayer, - /// Reward payment procedure has failed. - FailedToPayReward, - /// The relayer has tried to register for past block or registration lease - /// is too short. - InvalidRegistrationLease, - /// New registration lease is less than the previous one. - CannotReduceRegistrationLease, - /// Failed to reserve enough funds on relayer account. - FailedToReserve, - /// Failed to `unreserve` enough funds on relayer account. - FailedToUnreserve, - /// Cannot `deregister` if not registered. - NotRegistered, - /// Failed to `deregister` relayer, because lease is still active. - RegistrationIsStillActive, - } - - /// Map of the relayer => accumulated reward. 
- #[pallet::storage] - #[pallet::getter(fn relayer_reward)] - pub type RelayerRewards = StorageDoubleMap< - _, - as StorageDoubleMapKeyProvider>::Hasher1, - as StorageDoubleMapKeyProvider>::Key1, - as StorageDoubleMapKeyProvider>::Hasher2, - as StorageDoubleMapKeyProvider>::Key2, - as StorageDoubleMapKeyProvider>::Value, - OptionQuery, - >; - - /// Relayers that have reserved some of their balance to get free priority boost - /// for their message delivery transactions. - /// - /// Other relayers may submit transactions as well, but they will have default - /// priority and will be rejected (without significant tip) in case if registered - /// relayer is present. - #[pallet::storage] - #[pallet::getter(fn registered_relayer)] - pub type RegisteredRelayers = StorageMap< - _, - Blake2_128Concat, - T::AccountId, - Registration, T::Reward>, - OptionQuery, - >; -} - -#[cfg(test)] -mod tests { - use super::*; - use mock::{RuntimeEvent as TestEvent, *}; - - use crate::Event::{RewardPaid, RewardRegistered}; - use bp_messages::LaneId; - use bp_relayers::RewardsAccountOwner; - use frame_support::{ - assert_noop, assert_ok, - traits::fungible::{Inspect, Mutate}, - }; - use frame_system::{EventRecord, Pallet as System, Phase}; - use sp_runtime::DispatchError; - - fn get_ready_for_events() { - System::::set_block_number(1); - System::::reset_events(); - } - - #[test] - fn register_relayer_reward_emit_event() { - run_test(|| { - get_ready_for_events(); - - Pallet::::register_relayer_reward( - TEST_REWARDS_ACCOUNT_PARAMS, - ®ULAR_RELAYER, - 100, - ); - - // Check if the `RewardRegistered` event was emitted. 
- assert_eq!( - System::::events().last(), - Some(&EventRecord { - phase: Phase::Initialization, - event: TestEvent::Relayers(RewardRegistered { - relayer: REGULAR_RELAYER, - rewards_account_params: TEST_REWARDS_ACCOUNT_PARAMS, - reward: 100 - }), - topics: vec![], - }), - ); - }); - } - - #[test] - fn root_cant_claim_anything() { - run_test(|| { - assert_noop!( - Pallet::::claim_rewards( - RuntimeOrigin::root(), - TEST_REWARDS_ACCOUNT_PARAMS - ), - DispatchError::BadOrigin, - ); - }); - } - - #[test] - fn relayer_cant_claim_if_no_reward_exists() { - run_test(|| { - assert_noop!( - Pallet::::claim_rewards( - RuntimeOrigin::signed(REGULAR_RELAYER), - TEST_REWARDS_ACCOUNT_PARAMS - ), - Error::::NoRewardForRelayer, - ); - }); - } - - #[test] - fn relayer_cant_claim_if_payment_procedure_fails() { - run_test(|| { - RelayerRewards::::insert( - FAILING_RELAYER, - TEST_REWARDS_ACCOUNT_PARAMS, - 100, - ); - assert_noop!( - Pallet::::claim_rewards( - RuntimeOrigin::signed(FAILING_RELAYER), - TEST_REWARDS_ACCOUNT_PARAMS - ), - Error::::FailedToPayReward, - ); - }); - } - - #[test] - fn relayer_can_claim_reward() { - run_test(|| { - get_ready_for_events(); - - RelayerRewards::::insert( - REGULAR_RELAYER, - TEST_REWARDS_ACCOUNT_PARAMS, - 100, - ); - assert_ok!(Pallet::::claim_rewards( - RuntimeOrigin::signed(REGULAR_RELAYER), - TEST_REWARDS_ACCOUNT_PARAMS - )); - assert_eq!( - RelayerRewards::::get(REGULAR_RELAYER, TEST_REWARDS_ACCOUNT_PARAMS), - None - ); - - // Check if the `RewardPaid` event was emitted. 
- assert_eq!( - System::::events().last(), - Some(&EventRecord { - phase: Phase::Initialization, - event: TestEvent::Relayers(RewardPaid { - relayer: REGULAR_RELAYER, - rewards_account_params: TEST_REWARDS_ACCOUNT_PARAMS, - reward: 100 - }), - topics: vec![], - }), - ); - }); - } - - #[test] - fn pay_reward_from_account_actually_pays_reward() { - type Balances = pallet_balances::Pallet; - type PayLaneRewardFromAccount = bp_relayers::PayRewardFromAccount; - - run_test(|| { - let in_lane_0 = RewardsAccountParams::new( - LaneId([0, 0, 0, 0]), - *b"test", - RewardsAccountOwner::ThisChain, - ); - let out_lane_1 = RewardsAccountParams::new( - LaneId([0, 0, 0, 1]), - *b"test", - RewardsAccountOwner::BridgedChain, - ); - - let in_lane0_rewards_account = PayLaneRewardFromAccount::rewards_account(in_lane_0); - let out_lane1_rewards_account = PayLaneRewardFromAccount::rewards_account(out_lane_1); - - Balances::mint_into(&in_lane0_rewards_account, 100).unwrap(); - Balances::mint_into(&out_lane1_rewards_account, 100).unwrap(); - assert_eq!(Balances::balance(&in_lane0_rewards_account), 100); - assert_eq!(Balances::balance(&out_lane1_rewards_account), 100); - assert_eq!(Balances::balance(&1), 0); - - PayLaneRewardFromAccount::pay_reward(&1, in_lane_0, 100).unwrap(); - assert_eq!(Balances::balance(&in_lane0_rewards_account), 0); - assert_eq!(Balances::balance(&out_lane1_rewards_account), 100); - assert_eq!(Balances::balance(&1), 100); - - PayLaneRewardFromAccount::pay_reward(&1, out_lane_1, 100).unwrap(); - assert_eq!(Balances::balance(&in_lane0_rewards_account), 0); - assert_eq!(Balances::balance(&out_lane1_rewards_account), 0); - assert_eq!(Balances::balance(&1), 200); - }); - } - - #[test] - fn register_fails_if_valid_till_is_a_past_block() { - run_test(|| { - System::::set_block_number(100); - - assert_noop!( - Pallet::::register(RuntimeOrigin::signed(REGISTER_RELAYER), 50), - Error::::InvalidRegistrationLease, - ); - }); - } - - #[test] - fn 
register_fails_if_valid_till_lease_is_less_than_required() { - run_test(|| { - System::::set_block_number(100); - - assert_noop!( - Pallet::::register( - RuntimeOrigin::signed(REGISTER_RELAYER), - 99 + Lease::get() - ), - Error::::InvalidRegistrationLease, - ); - }); - } - - #[test] - fn register_works() { - run_test(|| { - get_ready_for_events(); - - assert_ok!(Pallet::::register( - RuntimeOrigin::signed(REGISTER_RELAYER), - 150 - )); - assert_eq!(Balances::reserved_balance(REGISTER_RELAYER), Stake::get()); - assert_eq!( - Pallet::::registered_relayer(REGISTER_RELAYER), - Some(Registration { valid_till: 150, stake: Stake::get() }), - ); - - assert_eq!( - System::::events().last(), - Some(&EventRecord { - phase: Phase::Initialization, - event: TestEvent::Relayers(Event::RegistrationUpdated { - relayer: REGISTER_RELAYER, - registration: Registration { valid_till: 150, stake: Stake::get() }, - }), - topics: vec![], - }), - ); - }); - } - - #[test] - fn register_fails_if_new_valid_till_is_lesser_than_previous() { - run_test(|| { - assert_ok!(Pallet::::register( - RuntimeOrigin::signed(REGISTER_RELAYER), - 150 - )); - - assert_noop!( - Pallet::::register(RuntimeOrigin::signed(REGISTER_RELAYER), 125), - Error::::CannotReduceRegistrationLease, - ); - }); - } - - #[test] - fn register_fails_if_it_cant_unreserve_some_balance_if_required_stake_decreases() { - run_test(|| { - RegisteredRelayers::::insert( - REGISTER_RELAYER, - Registration { valid_till: 150, stake: Stake::get() + 1 }, - ); - - assert_noop!( - Pallet::::register(RuntimeOrigin::signed(REGISTER_RELAYER), 150), - Error::::FailedToUnreserve, - ); - }); - } - - #[test] - fn register_unreserves_some_balance_if_required_stake_decreases() { - run_test(|| { - get_ready_for_events(); - - RegisteredRelayers::::insert( - REGISTER_RELAYER, - Registration { valid_till: 150, stake: Stake::get() + 1 }, - ); - TestStakeAndSlash::reserve(®ISTER_RELAYER, Stake::get() + 1).unwrap(); - 
assert_eq!(Balances::reserved_balance(REGISTER_RELAYER), Stake::get() + 1); - let free_balance = Balances::free_balance(REGISTER_RELAYER); - - assert_ok!(Pallet::::register( - RuntimeOrigin::signed(REGISTER_RELAYER), - 150 - )); - assert_eq!(Balances::reserved_balance(REGISTER_RELAYER), Stake::get()); - assert_eq!(Balances::free_balance(REGISTER_RELAYER), free_balance + 1); - assert_eq!( - Pallet::::registered_relayer(REGISTER_RELAYER), - Some(Registration { valid_till: 150, stake: Stake::get() }), - ); - - assert_eq!( - System::::events().last(), - Some(&EventRecord { - phase: Phase::Initialization, - event: TestEvent::Relayers(Event::RegistrationUpdated { - relayer: REGISTER_RELAYER, - registration: Registration { valid_till: 150, stake: Stake::get() } - }), - topics: vec![], - }), - ); - }); - } - - #[test] - fn register_fails_if_it_cant_reserve_some_balance() { - run_test(|| { - Balances::set_balance(®ISTER_RELAYER, 0); - assert_noop!( - Pallet::::register(RuntimeOrigin::signed(REGISTER_RELAYER), 150), - Error::::FailedToReserve, - ); - }); - } - - #[test] - fn register_fails_if_it_cant_reserve_some_balance_if_required_stake_increases() { - run_test(|| { - RegisteredRelayers::::insert( - REGISTER_RELAYER, - Registration { valid_till: 150, stake: Stake::get() - 1 }, - ); - Balances::set_balance(®ISTER_RELAYER, 0); - - assert_noop!( - Pallet::::register(RuntimeOrigin::signed(REGISTER_RELAYER), 150), - Error::::FailedToReserve, - ); - }); - } - - #[test] - fn register_reserves_some_balance_if_required_stake_increases() { - run_test(|| { - get_ready_for_events(); - - RegisteredRelayers::::insert( - REGISTER_RELAYER, - Registration { valid_till: 150, stake: Stake::get() - 1 }, - ); - TestStakeAndSlash::reserve(®ISTER_RELAYER, Stake::get() - 1).unwrap(); - - let free_balance = Balances::free_balance(REGISTER_RELAYER); - assert_ok!(Pallet::::register( - RuntimeOrigin::signed(REGISTER_RELAYER), - 150 - )); - assert_eq!(Balances::reserved_balance(REGISTER_RELAYER), 
Stake::get()); - assert_eq!(Balances::free_balance(REGISTER_RELAYER), free_balance - 1); - assert_eq!( - Pallet::::registered_relayer(REGISTER_RELAYER), - Some(Registration { valid_till: 150, stake: Stake::get() }), - ); - - assert_eq!( - System::::events().last(), - Some(&EventRecord { - phase: Phase::Initialization, - event: TestEvent::Relayers(Event::RegistrationUpdated { - relayer: REGISTER_RELAYER, - registration: Registration { valid_till: 150, stake: Stake::get() } - }), - topics: vec![], - }), - ); - }); - } - - #[test] - fn deregister_fails_if_not_registered() { - run_test(|| { - assert_noop!( - Pallet::::deregister(RuntimeOrigin::signed(REGISTER_RELAYER)), - Error::::NotRegistered, - ); - }); - } - - #[test] - fn deregister_fails_if_registration_is_still_active() { - run_test(|| { - assert_ok!(Pallet::::register( - RuntimeOrigin::signed(REGISTER_RELAYER), - 150 - )); - - System::::set_block_number(100); - - assert_noop!( - Pallet::::deregister(RuntimeOrigin::signed(REGISTER_RELAYER)), - Error::::RegistrationIsStillActive, - ); - }); - } - - #[test] - fn deregister_works() { - run_test(|| { - get_ready_for_events(); - - assert_ok!(Pallet::::register( - RuntimeOrigin::signed(REGISTER_RELAYER), - 150 - )); - - System::::set_block_number(151); - - let reserved_balance = Balances::reserved_balance(REGISTER_RELAYER); - let free_balance = Balances::free_balance(REGISTER_RELAYER); - assert_ok!(Pallet::::deregister(RuntimeOrigin::signed(REGISTER_RELAYER))); - assert_eq!( - Balances::reserved_balance(REGISTER_RELAYER), - reserved_balance - Stake::get() - ); - assert_eq!(Balances::free_balance(REGISTER_RELAYER), free_balance + Stake::get()); - - assert_eq!( - System::::events().last(), - Some(&EventRecord { - phase: Phase::Initialization, - event: TestEvent::Relayers(Event::Deregistered { relayer: REGISTER_RELAYER }), - topics: vec![], - }), - ); - }); - } - - #[test] - fn is_registration_active_is_false_for_unregistered_relayer() { - run_test(|| { - 
assert!(!Pallet::::is_registration_active(®ISTER_RELAYER)); - }); - } - - #[test] - fn is_registration_active_is_false_when_stake_is_too_low() { - run_test(|| { - RegisteredRelayers::::insert( - REGISTER_RELAYER, - Registration { valid_till: 150, stake: Stake::get() - 1 }, - ); - assert!(!Pallet::::is_registration_active(®ISTER_RELAYER)); - }); - } - - #[test] - fn is_registration_active_is_false_when_remaining_lease_is_too_low() { - run_test(|| { - System::::set_block_number(150 - Lease::get()); - - RegisteredRelayers::::insert( - REGISTER_RELAYER, - Registration { valid_till: 150, stake: Stake::get() }, - ); - assert!(!Pallet::::is_registration_active(®ISTER_RELAYER)); - }); - } - - #[test] - fn is_registration_active_is_true_when_relayer_is_properly_registeered() { - run_test(|| { - System::::set_block_number(150 - Lease::get()); - - RegisteredRelayers::::insert( - REGISTER_RELAYER, - Registration { valid_till: 151, stake: Stake::get() }, - ); - assert!(Pallet::::is_registration_active(®ISTER_RELAYER)); - }); - } -} diff --git a/modules/relayers/src/mock.rs b/modules/relayers/src/mock.rs deleted file mode 100644 index 312478789..000000000 --- a/modules/relayers/src/mock.rs +++ /dev/null @@ -1,149 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . 
- -#![cfg(test)] - -use crate as pallet_bridge_relayers; - -use bp_messages::LaneId; -use bp_relayers::{ - PayRewardFromAccount, PaymentProcedure, RewardsAccountOwner, RewardsAccountParams, -}; -use frame_support::{ - derive_impl, parameter_types, traits::fungible::Mutate, weights::RuntimeDbWeight, -}; -use sp_runtime::BuildStorage; - -pub type AccountId = u64; -pub type Balance = u64; -pub type BlockNumber = u64; - -pub type TestStakeAndSlash = pallet_bridge_relayers::StakeAndSlashNamed< - AccountId, - BlockNumber, - Balances, - ReserveId, - Stake, - Lease, ->; - -type Block = frame_system::mocking::MockBlock; - -frame_support::construct_runtime! { - pub enum TestRuntime - { - System: frame_system::{Pallet, Call, Config, Storage, Event}, - Balances: pallet_balances::{Pallet, Event}, - Relayers: pallet_bridge_relayers::{Pallet, Call, Event}, - } -} - -parameter_types! { - pub const DbWeight: RuntimeDbWeight = RuntimeDbWeight { read: 1, write: 2 }; - pub const ExistentialDeposit: Balance = 1; - pub const ReserveId: [u8; 8] = *b"brdgrlrs"; - pub const Stake: Balance = 1_000; - pub const Lease: BlockNumber = 8; -} - -#[derive_impl(frame_system::config_preludes::TestDefaultConfig)] -impl frame_system::Config for TestRuntime { - type Block = Block; - type AccountData = pallet_balances::AccountData; - type DbWeight = DbWeight; -} - -#[derive_impl(pallet_balances::config_preludes::TestDefaultConfig)] -impl pallet_balances::Config for TestRuntime { - type ReserveIdentifier = [u8; 8]; - type AccountStore = System; -} - -impl pallet_bridge_relayers::Config for TestRuntime { - type RuntimeEvent = RuntimeEvent; - type Reward = Balance; - type PaymentProcedure = TestPaymentProcedure; - type StakeAndSlash = TestStakeAndSlash; - type WeightInfo = (); -} - -#[cfg(feature = "runtime-benchmarks")] -impl pallet_bridge_relayers::benchmarking::Config for TestRuntime { - fn prepare_rewards_account(account_params: RewardsAccountParams, reward: Balance) { - let rewards_account = - 
bp_relayers::PayRewardFromAccount::::rewards_account( - account_params, - ); - Self::deposit_account(rewards_account, reward); - } - - fn deposit_account(account: Self::AccountId, balance: Self::Reward) { - Balances::mint_into(&account, balance.saturating_add(ExistentialDeposit::get())).unwrap(); - } -} - -/// Message lane that we're using in tests. -pub const TEST_REWARDS_ACCOUNT_PARAMS: RewardsAccountParams = - RewardsAccountParams::new(LaneId([0, 0, 0, 0]), *b"test", RewardsAccountOwner::ThisChain); - -/// Regular relayer that may receive rewards. -pub const REGULAR_RELAYER: AccountId = 1; - -/// Relayer that can't receive rewards. -pub const FAILING_RELAYER: AccountId = 2; - -/// Relayer that is able to register. -pub const REGISTER_RELAYER: AccountId = 42; - -/// Payment procedure that rejects payments to the `FAILING_RELAYER`. -pub struct TestPaymentProcedure; - -impl TestPaymentProcedure { - pub fn rewards_account(params: RewardsAccountParams) -> AccountId { - PayRewardFromAccount::<(), AccountId>::rewards_account(params) - } -} - -impl PaymentProcedure for TestPaymentProcedure { - type Error = (); - - fn pay_reward( - relayer: &AccountId, - _lane_id: RewardsAccountParams, - _reward: Balance, - ) -> Result<(), Self::Error> { - match *relayer { - FAILING_RELAYER => Err(()), - _ => Ok(()), - } - } -} - -/// Return test externalities to use in tests. -pub fn new_test_ext() -> sp_io::TestExternalities { - let t = frame_system::GenesisConfig::::default().build_storage().unwrap(); - sp_io::TestExternalities::new(t) -} - -/// Run pallet test. 
-pub fn run_test(test: impl FnOnce() -> T) -> T { - new_test_ext().execute_with(|| { - Balances::mint_into(®ISTER_RELAYER, ExistentialDeposit::get() + 10 * Stake::get()) - .unwrap(); - - test() - }) -} diff --git a/modules/relayers/src/payment_adapter.rs b/modules/relayers/src/payment_adapter.rs deleted file mode 100644 index b2d9c676b..000000000 --- a/modules/relayers/src/payment_adapter.rs +++ /dev/null @@ -1,158 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Code that allows relayers pallet to be used as a payment mechanism for the messages pallet. - -use crate::{Config, Pallet}; - -use bp_messages::{ - source_chain::{DeliveryConfirmationPayments, RelayersRewards}, - LaneId, MessageNonce, -}; -use bp_relayers::{RewardsAccountOwner, RewardsAccountParams}; -use frame_support::{sp_runtime::SaturatedConversion, traits::Get}; -use sp_arithmetic::traits::{Saturating, Zero}; -use sp_std::{collections::vec_deque::VecDeque, marker::PhantomData, ops::RangeInclusive}; - -/// Adapter that allows relayers pallet to be used as a delivery+dispatch payment mechanism -/// for the messages pallet. 
-pub struct DeliveryConfirmationPaymentsAdapter( - PhantomData<(T, MI, DeliveryReward)>, -); - -impl DeliveryConfirmationPayments - for DeliveryConfirmationPaymentsAdapter -where - T: Config + pallet_bridge_messages::Config, - MI: 'static, - DeliveryReward: Get, -{ - type Error = &'static str; - - fn pay_reward( - lane_id: LaneId, - messages_relayers: VecDeque>, - confirmation_relayer: &T::AccountId, - received_range: &RangeInclusive, - ) -> MessageNonce { - let relayers_rewards = - bp_messages::calc_relayers_rewards::(messages_relayers, received_range); - let rewarded_relayers = relayers_rewards.len(); - - register_relayers_rewards::( - confirmation_relayer, - relayers_rewards, - RewardsAccountParams::new( - lane_id, - T::BridgedChainId::get(), - RewardsAccountOwner::BridgedChain, - ), - DeliveryReward::get(), - ); - - rewarded_relayers as _ - } -} - -// Update rewards to given relayers, optionally rewarding confirmation relayer. -fn register_relayers_rewards( - confirmation_relayer: &T::AccountId, - relayers_rewards: RelayersRewards, - lane_id: RewardsAccountParams, - delivery_fee: T::Reward, -) { - // reward every relayer except `confirmation_relayer` - let mut confirmation_relayer_reward = T::Reward::zero(); - for (relayer, messages) in relayers_rewards { - // sane runtime configurations guarantee that the number of messages will be below - // `u32::MAX` - let relayer_reward = T::Reward::saturated_from(messages).saturating_mul(delivery_fee); - - if relayer != *confirmation_relayer { - Pallet::::register_relayer_reward(lane_id, &relayer, relayer_reward); - } else { - confirmation_relayer_reward = - confirmation_relayer_reward.saturating_add(relayer_reward); - } - } - - // finally - pay reward to confirmation relayer - Pallet::::register_relayer_reward( - lane_id, - confirmation_relayer, - confirmation_relayer_reward, - ); -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{mock::*, RelayerRewards}; - - const RELAYER_1: AccountId = 1; - const RELAYER_2: 
AccountId = 2; - const RELAYER_3: AccountId = 3; - - fn relayers_rewards() -> RelayersRewards { - vec![(RELAYER_1, 2), (RELAYER_2, 3)].into_iter().collect() - } - - #[test] - fn confirmation_relayer_is_rewarded_if_it_has_also_delivered_messages() { - run_test(|| { - register_relayers_rewards::( - &RELAYER_2, - relayers_rewards(), - TEST_REWARDS_ACCOUNT_PARAMS, - 50, - ); - - assert_eq!( - RelayerRewards::::get(RELAYER_1, TEST_REWARDS_ACCOUNT_PARAMS), - Some(100) - ); - assert_eq!( - RelayerRewards::::get(RELAYER_2, TEST_REWARDS_ACCOUNT_PARAMS), - Some(150) - ); - }); - } - - #[test] - fn confirmation_relayer_is_not_rewarded_if_it_has_not_delivered_any_messages() { - run_test(|| { - register_relayers_rewards::( - &RELAYER_3, - relayers_rewards(), - TEST_REWARDS_ACCOUNT_PARAMS, - 50, - ); - - assert_eq!( - RelayerRewards::::get(RELAYER_1, TEST_REWARDS_ACCOUNT_PARAMS), - Some(100) - ); - assert_eq!( - RelayerRewards::::get(RELAYER_2, TEST_REWARDS_ACCOUNT_PARAMS), - Some(150) - ); - assert_eq!( - RelayerRewards::::get(RELAYER_3, TEST_REWARDS_ACCOUNT_PARAMS), - None - ); - }); - } -} diff --git a/modules/relayers/src/stake_adapter.rs b/modules/relayers/src/stake_adapter.rs deleted file mode 100644 index 88af9b187..000000000 --- a/modules/relayers/src/stake_adapter.rs +++ /dev/null @@ -1,186 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Code that allows `NamedReservableCurrency` to be used as a `StakeAndSlash` -//! mechanism of the relayers pallet. - -use bp_relayers::{PayRewardFromAccount, RewardsAccountParams, StakeAndSlash}; -use codec::Codec; -use frame_support::traits::{tokens::BalanceStatus, NamedReservableCurrency}; -use sp_runtime::{traits::Get, DispatchError, DispatchResult}; -use sp_std::{fmt::Debug, marker::PhantomData}; - -/// `StakeAndSlash` that works with `NamedReservableCurrency` and uses named -/// reservations. -/// -/// **WARNING**: this implementation assumes that the relayers pallet is configured to -/// use the [`bp_relayers::PayRewardFromAccount`] as its relayers payment scheme. -pub struct StakeAndSlashNamed( - PhantomData<(AccountId, BlockNumber, Currency, ReserveId, Stake, Lease)>, -); - -impl - StakeAndSlash - for StakeAndSlashNamed -where - AccountId: Codec + Debug, - Currency: NamedReservableCurrency, - ReserveId: Get, - Stake: Get, - Lease: Get, -{ - type RequiredStake = Stake; - type RequiredRegistrationLease = Lease; - - fn reserve(relayer: &AccountId, amount: Currency::Balance) -> DispatchResult { - Currency::reserve_named(&ReserveId::get(), relayer, amount) - } - - fn unreserve(relayer: &AccountId, amount: Currency::Balance) -> Currency::Balance { - Currency::unreserve_named(&ReserveId::get(), relayer, amount) - } - - fn repatriate_reserved( - relayer: &AccountId, - beneficiary: RewardsAccountParams, - amount: Currency::Balance, - ) -> Result { - let beneficiary_account = - PayRewardFromAccount::<(), AccountId>::rewards_account(beneficiary); - Currency::repatriate_reserved_named( - &ReserveId::get(), - relayer, - &beneficiary_account, - amount, - BalanceStatus::Free, - ) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::mock::*; - - use frame_support::traits::fungible::Mutate; - - fn test_stake() -> Balance { - 
Stake::get() - } - - #[test] - fn reserve_works() { - run_test(|| { - assert!(TestStakeAndSlash::reserve(&1, test_stake()).is_err()); - assert_eq!(Balances::free_balance(1), 0); - assert_eq!(Balances::reserved_balance(1), 0); - - Balances::mint_into(&2, test_stake() - 1).unwrap(); - assert!(TestStakeAndSlash::reserve(&2, test_stake()).is_err()); - assert_eq!(Balances::free_balance(2), test_stake() - 1); - assert_eq!(Balances::reserved_balance(2), 0); - - Balances::mint_into(&3, test_stake() * 2).unwrap(); - assert_eq!(TestStakeAndSlash::reserve(&3, test_stake()), Ok(())); - assert_eq!(Balances::free_balance(3), test_stake()); - assert_eq!(Balances::reserved_balance(3), test_stake()); - }) - } - - #[test] - fn unreserve_works() { - run_test(|| { - assert_eq!(TestStakeAndSlash::unreserve(&1, test_stake()), test_stake()); - assert_eq!(Balances::free_balance(1), 0); - assert_eq!(Balances::reserved_balance(1), 0); - - Balances::mint_into(&2, test_stake() * 2).unwrap(); - TestStakeAndSlash::reserve(&2, test_stake() / 3).unwrap(); - assert_eq!( - TestStakeAndSlash::unreserve(&2, test_stake()), - test_stake() - test_stake() / 3 - ); - assert_eq!(Balances::free_balance(2), test_stake() * 2); - assert_eq!(Balances::reserved_balance(2), 0); - - Balances::mint_into(&3, test_stake() * 2).unwrap(); - TestStakeAndSlash::reserve(&3, test_stake()).unwrap(); - assert_eq!(TestStakeAndSlash::unreserve(&3, test_stake()), 0); - assert_eq!(Balances::free_balance(3), test_stake() * 2); - assert_eq!(Balances::reserved_balance(3), 0); - }) - } - - #[test] - fn repatriate_reserved_works() { - run_test(|| { - let beneficiary = TEST_REWARDS_ACCOUNT_PARAMS; - let beneficiary_account = TestPaymentProcedure::rewards_account(beneficiary); - - let mut expected_balance = ExistentialDeposit::get(); - Balances::mint_into(&beneficiary_account, expected_balance).unwrap(); - - assert_eq!( - TestStakeAndSlash::repatriate_reserved(&1, beneficiary, test_stake()), - Ok(test_stake()) - ); - 
assert_eq!(Balances::free_balance(1), 0); - assert_eq!(Balances::reserved_balance(1), 0); - assert_eq!(Balances::free_balance(beneficiary_account), expected_balance); - assert_eq!(Balances::reserved_balance(beneficiary_account), 0); - - expected_balance += test_stake() / 3; - Balances::mint_into(&2, test_stake() * 2).unwrap(); - TestStakeAndSlash::reserve(&2, test_stake() / 3).unwrap(); - assert_eq!( - TestStakeAndSlash::repatriate_reserved(&2, beneficiary, test_stake()), - Ok(test_stake() - test_stake() / 3) - ); - assert_eq!(Balances::free_balance(2), test_stake() * 2 - test_stake() / 3); - assert_eq!(Balances::reserved_balance(2), 0); - assert_eq!(Balances::free_balance(beneficiary_account), expected_balance); - assert_eq!(Balances::reserved_balance(beneficiary_account), 0); - - expected_balance += test_stake(); - Balances::mint_into(&3, test_stake() * 2).unwrap(); - TestStakeAndSlash::reserve(&3, test_stake()).unwrap(); - assert_eq!( - TestStakeAndSlash::repatriate_reserved(&3, beneficiary, test_stake()), - Ok(0) - ); - assert_eq!(Balances::free_balance(3), test_stake()); - assert_eq!(Balances::reserved_balance(3), 0); - assert_eq!(Balances::free_balance(beneficiary_account), expected_balance); - assert_eq!(Balances::reserved_balance(beneficiary_account), 0); - }) - } - - #[test] - fn repatriate_reserved_doesnt_work_when_beneficiary_account_is_missing() { - run_test(|| { - let beneficiary = TEST_REWARDS_ACCOUNT_PARAMS; - let beneficiary_account = TestPaymentProcedure::rewards_account(beneficiary); - - Balances::mint_into(&3, test_stake() * 2).unwrap(); - TestStakeAndSlash::reserve(&3, test_stake()).unwrap(); - assert!(TestStakeAndSlash::repatriate_reserved(&3, beneficiary, test_stake()).is_err()); - assert_eq!(Balances::free_balance(3), test_stake()); - assert_eq!(Balances::reserved_balance(3), test_stake()); - assert_eq!(Balances::free_balance(beneficiary_account), 0); - assert_eq!(Balances::reserved_balance(beneficiary_account), 0); - }); - } -} diff --git 
a/modules/relayers/src/weights.rs b/modules/relayers/src/weights.rs deleted file mode 100644 index c2c065b0c..000000000 --- a/modules/relayers/src/weights.rs +++ /dev/null @@ -1,259 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Autogenerated weights for pallet_bridge_relayers -//! -//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2023-04-28, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` -//! WORST CASE MAP SIZE: `1000000` -//! HOSTNAME: `covid`, CPU: `11th Gen Intel(R) Core(TM) i7-11800H @ 2.30GHz` -//! 
EXECUTION: Some(Wasm), WASM-EXECUTION: Compiled, CHAIN: Some("dev"), DB CACHE: 1024 - -// Executed Command: -// target/release/rip-bridge-node -// benchmark -// pallet -// --chain=dev -// --steps=50 -// --repeat=20 -// --pallet=pallet_bridge_relayers -// --extrinsic=* -// --execution=wasm -// --wasm-execution=Compiled -// --heap-pages=4096 -// --output=./modules/relayers/src/weights.rs -// --template=./.maintain/bridge-weight-template.hbs - -#![allow(clippy::all)] -#![allow(unused_parens)] -#![allow(unused_imports)] -#![allow(missing_docs)] - -use frame_support::{ - traits::Get, - weights::{constants::RocksDbWeight, Weight}, -}; -use sp_std::marker::PhantomData; - -/// Weight functions needed for pallet_bridge_relayers. -pub trait WeightInfo { - fn claim_rewards() -> Weight; - fn register() -> Weight; - fn deregister() -> Weight; - fn slash_and_deregister() -> Weight; - fn register_relayer_reward() -> Weight; -} - -/// Weights for `pallet_bridge_relayers` that are generated using one of the Bridge testnets. -/// -/// Those weights are test only and must never be used in production. -pub struct BridgeWeight(PhantomData); -impl WeightInfo for BridgeWeight { - /// Storage: BridgeRelayers RelayerRewards (r:1 w:1) - /// - /// Proof: BridgeRelayers RelayerRewards (max_values: None, max_size: Some(65), added: 2540, - /// mode: MaxEncodedLen) - /// - /// Storage: Balances TotalIssuance (r:1 w:0) - /// - /// Proof: Balances TotalIssuance (max_values: Some(1), max_size: Some(8), added: 503, mode: - /// MaxEncodedLen) - /// - /// Storage: System Account (r:1 w:1) - /// - /// Proof: System Account (max_values: None, max_size: Some(104), added: 2579, mode: - /// MaxEncodedLen) - fn claim_rewards() -> Weight { - // Proof Size summary in bytes: - // Measured: `294` - // Estimated: `8592` - // Minimum execution time: 77_614 nanoseconds. 
- Weight::from_parts(79_987_000, 8592) - .saturating_add(T::DbWeight::get().reads(3_u64)) - .saturating_add(T::DbWeight::get().writes(2_u64)) - } - /// Storage: BridgeRelayers RegisteredRelayers (r:1 w:1) - /// - /// Proof: BridgeRelayers RegisteredRelayers (max_values: None, max_size: Some(64), added: 2539, - /// mode: MaxEncodedLen) - /// - /// Storage: Balances Reserves (r:1 w:1) - /// - /// Proof: Balances Reserves (max_values: None, max_size: Some(849), added: 3324, mode: - /// MaxEncodedLen) - fn register() -> Weight { - // Proof Size summary in bytes: - // Measured: `87` - // Estimated: `7843` - // Minimum execution time: 39_590 nanoseconds. - Weight::from_parts(40_546_000, 7843) - .saturating_add(T::DbWeight::get().reads(2_u64)) - .saturating_add(T::DbWeight::get().writes(2_u64)) - } - /// Storage: BridgeRelayers RegisteredRelayers (r:1 w:1) - /// - /// Proof: BridgeRelayers RegisteredRelayers (max_values: None, max_size: Some(64), added: 2539, - /// mode: MaxEncodedLen) - /// - /// Storage: Balances Reserves (r:1 w:1) - /// - /// Proof: Balances Reserves (max_values: None, max_size: Some(849), added: 3324, mode: - /// MaxEncodedLen) - fn deregister() -> Weight { - // Proof Size summary in bytes: - // Measured: `264` - // Estimated: `7843` - // Minimum execution time: 43_332 nanoseconds. 
- Weight::from_parts(45_087_000, 7843) - .saturating_add(T::DbWeight::get().reads(2_u64)) - .saturating_add(T::DbWeight::get().writes(2_u64)) - } - /// Storage: BridgeRelayers RegisteredRelayers (r:1 w:1) - /// - /// Proof: BridgeRelayers RegisteredRelayers (max_values: None, max_size: Some(64), added: 2539, - /// mode: MaxEncodedLen) - /// - /// Storage: Balances Reserves (r:1 w:1) - /// - /// Proof: Balances Reserves (max_values: None, max_size: Some(849), added: 3324, mode: - /// MaxEncodedLen) - /// - /// Storage: System Account (r:1 w:1) - /// - /// Proof: System Account (max_values: None, max_size: Some(104), added: 2579, mode: - /// MaxEncodedLen) - fn slash_and_deregister() -> Weight { - // Proof Size summary in bytes: - // Measured: `380` - // Estimated: `11412` - // Minimum execution time: 42_358 nanoseconds. - Weight::from_parts(43_539_000, 11412) - .saturating_add(T::DbWeight::get().reads(3_u64)) - .saturating_add(T::DbWeight::get().writes(3_u64)) - } - /// Storage: BridgeRelayers RelayerRewards (r:1 w:1) - /// - /// Proof: BridgeRelayers RelayerRewards (max_values: None, max_size: Some(65), added: 2540, - /// mode: MaxEncodedLen) - fn register_relayer_reward() -> Weight { - // Proof Size summary in bytes: - // Measured: `12` - // Estimated: `3530` - // Minimum execution time: 6_338 nanoseconds. 
- Weight::from_parts(6_526_000, 3530) - .saturating_add(T::DbWeight::get().reads(1_u64)) - .saturating_add(T::DbWeight::get().writes(1_u64)) - } -} - -// For backwards compatibility and tests -impl WeightInfo for () { - /// Storage: BridgeRelayers RelayerRewards (r:1 w:1) - /// - /// Proof: BridgeRelayers RelayerRewards (max_values: None, max_size: Some(65), added: 2540, - /// mode: MaxEncodedLen) - /// - /// Storage: Balances TotalIssuance (r:1 w:0) - /// - /// Proof: Balances TotalIssuance (max_values: Some(1), max_size: Some(8), added: 503, mode: - /// MaxEncodedLen) - /// - /// Storage: System Account (r:1 w:1) - /// - /// Proof: System Account (max_values: None, max_size: Some(104), added: 2579, mode: - /// MaxEncodedLen) - fn claim_rewards() -> Weight { - // Proof Size summary in bytes: - // Measured: `294` - // Estimated: `8592` - // Minimum execution time: 77_614 nanoseconds. - Weight::from_parts(79_987_000, 8592) - .saturating_add(RocksDbWeight::get().reads(3_u64)) - .saturating_add(RocksDbWeight::get().writes(2_u64)) - } - /// Storage: BridgeRelayers RegisteredRelayers (r:1 w:1) - /// - /// Proof: BridgeRelayers RegisteredRelayers (max_values: None, max_size: Some(64), added: 2539, - /// mode: MaxEncodedLen) - /// - /// Storage: Balances Reserves (r:1 w:1) - /// - /// Proof: Balances Reserves (max_values: None, max_size: Some(849), added: 3324, mode: - /// MaxEncodedLen) - fn register() -> Weight { - // Proof Size summary in bytes: - // Measured: `87` - // Estimated: `7843` - // Minimum execution time: 39_590 nanoseconds. 
- Weight::from_parts(40_546_000, 7843) - .saturating_add(RocksDbWeight::get().reads(2_u64)) - .saturating_add(RocksDbWeight::get().writes(2_u64)) - } - /// Storage: BridgeRelayers RegisteredRelayers (r:1 w:1) - /// - /// Proof: BridgeRelayers RegisteredRelayers (max_values: None, max_size: Some(64), added: 2539, - /// mode: MaxEncodedLen) - /// - /// Storage: Balances Reserves (r:1 w:1) - /// - /// Proof: Balances Reserves (max_values: None, max_size: Some(849), added: 3324, mode: - /// MaxEncodedLen) - fn deregister() -> Weight { - // Proof Size summary in bytes: - // Measured: `264` - // Estimated: `7843` - // Minimum execution time: 43_332 nanoseconds. - Weight::from_parts(45_087_000, 7843) - .saturating_add(RocksDbWeight::get().reads(2_u64)) - .saturating_add(RocksDbWeight::get().writes(2_u64)) - } - /// Storage: BridgeRelayers RegisteredRelayers (r:1 w:1) - /// - /// Proof: BridgeRelayers RegisteredRelayers (max_values: None, max_size: Some(64), added: 2539, - /// mode: MaxEncodedLen) - /// - /// Storage: Balances Reserves (r:1 w:1) - /// - /// Proof: Balances Reserves (max_values: None, max_size: Some(849), added: 3324, mode: - /// MaxEncodedLen) - /// - /// Storage: System Account (r:1 w:1) - /// - /// Proof: System Account (max_values: None, max_size: Some(104), added: 2579, mode: - /// MaxEncodedLen) - fn slash_and_deregister() -> Weight { - // Proof Size summary in bytes: - // Measured: `380` - // Estimated: `11412` - // Minimum execution time: 42_358 nanoseconds. 
- Weight::from_parts(43_539_000, 11412) - .saturating_add(RocksDbWeight::get().reads(3_u64)) - .saturating_add(RocksDbWeight::get().writes(3_u64)) - } - /// Storage: BridgeRelayers RelayerRewards (r:1 w:1) - /// - /// Proof: BridgeRelayers RelayerRewards (max_values: None, max_size: Some(65), added: 2540, - /// mode: MaxEncodedLen) - fn register_relayer_reward() -> Weight { - // Proof Size summary in bytes: - // Measured: `12` - // Estimated: `3530` - // Minimum execution time: 6_338 nanoseconds. - Weight::from_parts(6_526_000, 3530) - .saturating_add(RocksDbWeight::get().reads(1_u64)) - .saturating_add(RocksDbWeight::get().writes(1_u64)) - } -} diff --git a/modules/relayers/src/weights_ext.rs b/modules/relayers/src/weights_ext.rs deleted file mode 100644 index 9cd25c47c..000000000 --- a/modules/relayers/src/weights_ext.rs +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Weight-related utilities. - -use crate::weights::WeightInfo; - -use frame_support::pallet_prelude::Weight; - -/// Extended weight info. -pub trait WeightInfoExt: WeightInfo { - /// Returns weight, that needs to be added to the pre-dispatch weight of message delivery call, - /// if `RefundBridgedParachainMessages` signed extension is deployed at runtime level. 
- fn receive_messages_proof_overhead_from_runtime() -> Weight { - Self::slash_and_deregister().max(Self::register_relayer_reward()) - } - - /// Returns weight, that needs to be added to the pre-dispatch weight of message delivery - /// confirmation call, if `RefundBridgedParachainMessages` signed extension is deployed at - /// runtime level. - fn receive_messages_delivery_proof_overhead_from_runtime() -> Weight { - Self::register_relayer_reward() - } - - /// Returns weight that we need to deduct from the message delivery call weight that has - /// completed successfully. - /// - /// Usually, the weight of `slash_and_deregister` is larger than the weight of the - /// `register_relayer_reward`. So if relayer has been rewarded, we want to deduct the difference - /// to get the actual post-dispatch weight. - fn extra_weight_of_successful_receive_messages_proof_call() -> Weight { - Self::slash_and_deregister().saturating_sub(Self::register_relayer_reward()) - } -} - -impl WeightInfoExt for T {} diff --git a/modules/xcm-bridge-hub-router/Cargo.toml b/modules/xcm-bridge-hub-router/Cargo.toml deleted file mode 100644 index af130c5e7..000000000 --- a/modules/xcm-bridge-hub-router/Cargo.toml +++ /dev/null @@ -1,67 +0,0 @@ -[package] -name = "pallet-xcm-bridge-hub-router" -description = "Bridge hub interface for sibling/parent chains with dynamic fees support." 
-version = "0.5.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } -log = { workspace = true } -scale-info = { version = "2.11.1", default-features = false, features = ["bit-vec", "derive", "serde"] } - -# Bridge dependencies - -bp-xcm-bridge-hub-router = { path = "../../primitives/xcm-bridge-hub-router", default-features = false } - -# Substrate Dependencies - -frame-benchmarking = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false, optional = true } -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -frame-system = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -# Polkadot Dependencies - -xcm = { package = "staging-xcm", git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -xcm-builder = { package = "staging-xcm-builder", git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[dev-dependencies] -sp-io = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } - -[features] -default = ["std"] -std = [ - "bp-xcm-bridge-hub-router/std", - "codec/std", - "frame-benchmarking/std", - "frame-support/std", - "frame-system/std", - "log/std", - "scale-info/std", - 
"sp-core/std", - "sp-runtime/std", - "sp-std/std", - "xcm-builder/std", - "xcm/std", -] -runtime-benchmarks = [ - "frame-benchmarking/runtime-benchmarks", - "frame-support/runtime-benchmarks", - "frame-system/runtime-benchmarks", - "sp-runtime/runtime-benchmarks", - "xcm-builder/runtime-benchmarks", -] -try-runtime = [ - "frame-support/try-runtime", - "frame-system/try-runtime", - "sp-runtime/try-runtime", -] diff --git a/modules/xcm-bridge-hub-router/src/benchmarking.rs b/modules/xcm-bridge-hub-router/src/benchmarking.rs deleted file mode 100644 index c4f9f534c..000000000 --- a/modules/xcm-bridge-hub-router/src/benchmarking.rs +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! XCM bridge hub router pallet benchmarks. - -#![cfg(feature = "runtime-benchmarks")] - -use crate::{Bridge, Call}; - -use bp_xcm_bridge_hub_router::{BridgeState, MINIMAL_DELIVERY_FEE_FACTOR}; -use frame_benchmarking::{benchmarks_instance_pallet, BenchmarkError}; -use frame_support::traits::{EnsureOrigin, Get, Hooks, UnfilteredDispatchable}; -use sp_runtime::traits::Zero; -use xcm::prelude::*; - -/// Pallet we're benchmarking here. -pub struct Pallet, I: 'static = ()>(crate::Pallet); - -/// Trait that must be implemented by runtime to be able to benchmark pallet properly. 
-pub trait Config: crate::Config { - /// Fill up queue so it becomes congested. - fn make_congested(); - - /// Returns destination which is valid for this router instance. - /// (Needs to pass `T::Bridges`) - /// Make sure that `SendXcm` will pass. - fn ensure_bridged_target_destination() -> Result { - Ok(Location::new( - Self::UniversalLocation::get().len() as u8, - [GlobalConsensus(Self::BridgedNetworkId::get().unwrap())], - )) - } -} - -benchmarks_instance_pallet! { - on_initialize_when_non_congested { - Bridge::::put(BridgeState { - is_congested: false, - delivery_fee_factor: MINIMAL_DELIVERY_FEE_FACTOR + MINIMAL_DELIVERY_FEE_FACTOR, - }); - }: { - crate::Pallet::::on_initialize(Zero::zero()) - } - - on_initialize_when_congested { - Bridge::::put(BridgeState { - is_congested: false, - delivery_fee_factor: MINIMAL_DELIVERY_FEE_FACTOR + MINIMAL_DELIVERY_FEE_FACTOR, - }); - - let _ = T::ensure_bridged_target_destination()?; - T::make_congested(); - }: { - crate::Pallet::::on_initialize(Zero::zero()) - } - - report_bridge_status { - Bridge::::put(BridgeState::default()); - - let origin: T::RuntimeOrigin = T::BridgeHubOrigin::try_successful_origin().expect("expected valid BridgeHubOrigin"); - let bridge_id = Default::default(); - let is_congested = true; - - let call = Call::::report_bridge_status { bridge_id, is_congested }; - }: { call.dispatch_bypass_filter(origin)? 
} - verify { - assert!(Bridge::::get().is_congested); - } - - send_message { - let dest = T::ensure_bridged_target_destination()?; - let xcm = sp_std::vec![].into(); - - // make local queue congested, because it means additional db write - T::make_congested(); - }: { - send_xcm::>(dest, xcm).expect("message is sent") - } - verify { - assert!(Bridge::::get().delivery_fee_factor > MINIMAL_DELIVERY_FEE_FACTOR); - } -} diff --git a/modules/xcm-bridge-hub-router/src/lib.rs b/modules/xcm-bridge-hub-router/src/lib.rs deleted file mode 100644 index 5d0be41b1..000000000 --- a/modules/xcm-bridge-hub-router/src/lib.rs +++ /dev/null @@ -1,568 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Pallet that may be used instead of `SovereignPaidRemoteExporter` in the XCM router -//! configuration. The main thing that the pallet offers is the dynamic message fee, -//! that is computed based on the bridge queues state. It starts exponentially increasing -//! if the queue between this chain and the sibling/child bridge hub is congested. -//! -//! All other bridge hub queues offer some backpressure mechanisms. So if at least one -//! of all queues is congested, it will eventually lead to the growth of the queue at -//! this chain. -//! -//! 
**A note on terminology**: when we mention the bridge hub here, we mean the chain that -//! has the messages pallet deployed (`pallet-bridge-grandpa`, `pallet-bridge-messages`, -//! `pallet-xcm-bridge-hub`, ...). It may be the system bridge hub parachain or any other -//! chain. - -#![cfg_attr(not(feature = "std"), no_std)] - -use bp_xcm_bridge_hub_router::{ - BridgeState, XcmChannelStatusProvider, MINIMAL_DELIVERY_FEE_FACTOR, -}; -use codec::Encode; -use frame_support::traits::Get; -use sp_core::H256; -use sp_runtime::{FixedPointNumber, FixedU128, Saturating}; -use xcm::prelude::*; -use xcm_builder::{ExporterFor, SovereignPaidRemoteExporter}; - -pub use pallet::*; -pub use weights::WeightInfo; - -pub mod benchmarking; -pub mod weights; - -mod mock; - -/// The factor that is used to increase current message fee factor when bridge experiencing -/// some lags. -const EXPONENTIAL_FEE_BASE: FixedU128 = FixedU128::from_rational(105, 100); // 1.05 -/// The factor that is used to increase current message fee factor for every sent kilobyte. -const MESSAGE_SIZE_FEE_BASE: FixedU128 = FixedU128::from_rational(1, 1000); // 0.001 - -/// Maximal size of the XCM message that may be sent over bridge. -/// -/// This should be less than the maximal size, allowed by the messages pallet, because -/// the message itself is wrapped in other structs and is double encoded. -pub const HARD_MESSAGE_SIZE_LIMIT: u32 = 32 * 1024; - -/// The target that will be used when publishing logs related to this pallet. -/// -/// This doesn't match the pattern used by other bridge pallets (`runtime::bridge-*`). But this -/// pallet has significant differences with those pallets. The main one is that is intended to -/// be deployed at sending chains. Other bridge pallets are likely to be deployed at the separate -/// bridge hub parachain. 
-pub const LOG_TARGET: &str = "xcm::bridge-hub-router"; - -#[frame_support::pallet] -pub mod pallet { - use super::*; - use frame_support::pallet_prelude::*; - use frame_system::pallet_prelude::*; - - #[pallet::config] - pub trait Config: frame_system::Config { - /// Benchmarks results from runtime we're plugged into. - type WeightInfo: WeightInfo; - - /// Universal location of this runtime. - type UniversalLocation: Get; - /// The bridged network that this config is for if specified. - /// Also used for filtering `Bridges` by `BridgedNetworkId`. - /// If not specified, allows all networks pass through. - type BridgedNetworkId: Get>; - /// Configuration for supported **bridged networks/locations** with **bridge location** and - /// **possible fee**. Allows to externalize better control over allowed **bridged - /// networks/locations**. - type Bridges: ExporterFor; - /// Checks the XCM version for the destination. - type DestinationVersion: GetVersion; - - /// Origin of the sibling bridge hub that is allowed to report bridge status. - type BridgeHubOrigin: EnsureOrigin; - /// Actual message sender (`HRMP` or `DMP`) to the sibling bridge hub location. - type ToBridgeHubSender: SendXcm; - /// Underlying channel with the sibling bridge hub. It must match the channel, used - /// by the `Self::ToBridgeHubSender`. - type WithBridgeHubChannel: XcmChannelStatusProvider; - - /// Additional fee that is paid for every byte of the outbound message. - type ByteFee: Get; - /// Asset that is used to paid bridge fee. - type FeeAsset: Get; - } - - #[pallet::pallet] - pub struct Pallet(PhantomData<(T, I)>); - - #[pallet::hooks] - impl, I: 'static> Hooks> for Pallet { - fn on_initialize(_n: BlockNumberFor) -> Weight { - // TODO: make sure that `WithBridgeHubChannel::is_congested` returns true if either - // of XCM channels (outbound/inbound) is suspended. Because if outbound is suspended - // that is definitely congestion. 
If inbound is suspended, then we are not able to - // receive the "report_bridge_status" signal (that maybe sent by the bridge hub). - - // if the channel with sibling/child bridge hub is suspended, we don't change - // anything - if T::WithBridgeHubChannel::is_congested() { - return T::WeightInfo::on_initialize_when_congested() - } - - // if bridge has reported congestion, we don't change anything - let mut bridge = Self::bridge(); - if bridge.is_congested { - return T::WeightInfo::on_initialize_when_congested() - } - - // if fee factor is already minimal, we don't change anything - if bridge.delivery_fee_factor == MINIMAL_DELIVERY_FEE_FACTOR { - return T::WeightInfo::on_initialize_when_congested() - } - - let previous_factor = bridge.delivery_fee_factor; - bridge.delivery_fee_factor = - MINIMAL_DELIVERY_FEE_FACTOR.max(bridge.delivery_fee_factor / EXPONENTIAL_FEE_BASE); - log::info!( - target: LOG_TARGET, - "Bridge queue is uncongested. Decreased fee factor from {} to {}", - previous_factor, - bridge.delivery_fee_factor, - ); - - Bridge::::put(bridge); - T::WeightInfo::on_initialize_when_non_congested() - } - } - - #[pallet::call] - impl, I: 'static> Pallet { - /// Notification about congested bridge queue. - #[pallet::call_index(0)] - #[pallet::weight(T::WeightInfo::report_bridge_status())] - pub fn report_bridge_status( - origin: OriginFor, - // this argument is not currently used, but to ease future migration, we'll keep it - // here - bridge_id: H256, - is_congested: bool, - ) -> DispatchResult { - let _ = T::BridgeHubOrigin::ensure_origin(origin)?; - - log::info!( - target: LOG_TARGET, - "Received bridge status from {:?}: congested = {}", - bridge_id, - is_congested, - ); - - Bridge::::mutate(|bridge| { - bridge.is_congested = is_congested; - }); - Ok(()) - } - } - - /// Bridge that we are using. - /// - /// **bridges-v1** assumptions: all outbound messages through this router are using single lane - /// and to single remote consensus. 
If there is some other remote consensus that uses the same - /// bridge hub, the separate pallet instance shall be used, In `v2` we'll have all required - /// primitives (lane-id aka bridge-id, derived from XCM locations) to support multiple bridges - /// by the same pallet instance. - #[pallet::storage] - #[pallet::getter(fn bridge)] - pub type Bridge, I: 'static = ()> = StorageValue<_, BridgeState, ValueQuery>; - - impl, I: 'static> Pallet { - /// Called when new message is sent (queued to local outbound XCM queue) over the bridge. - pub(crate) fn on_message_sent_to_bridge(message_size: u32) { - let _ = Bridge::::try_mutate(|bridge| { - let is_channel_with_bridge_hub_congested = T::WithBridgeHubChannel::is_congested(); - let is_bridge_congested = bridge.is_congested; - - // if outbound queue is not congested AND bridge has not reported congestion, do - // nothing - if !is_channel_with_bridge_hub_congested && !is_bridge_congested { - return Err(()) - } - - // ok - we need to increase the fee factor, let's do that - let message_size_factor = FixedU128::from_u32(message_size.saturating_div(1024)) - .saturating_mul(MESSAGE_SIZE_FEE_BASE); - let total_factor = EXPONENTIAL_FEE_BASE.saturating_add(message_size_factor); - let previous_factor = bridge.delivery_fee_factor; - bridge.delivery_fee_factor = - bridge.delivery_fee_factor.saturating_mul(total_factor); - - log::info!( - target: LOG_TARGET, - "Bridge channel is congested. Increased fee factor from {} to {}", - previous_factor, - bridge.delivery_fee_factor, - ); - - Ok(()) - }); - } - } -} - -/// We'll be using `SovereignPaidRemoteExporter` to send remote messages over the sibling/child -/// bridge hub. -type ViaBridgeHubExporter = SovereignPaidRemoteExporter< - Pallet, - >::ToBridgeHubSender, - >::UniversalLocation, ->; - -// This pallet acts as the `ExporterFor` for the `SovereignPaidRemoteExporter` to compute -// message fee using fee factor. 
-impl, I: 'static> ExporterFor for Pallet { - fn exporter_for( - network: &NetworkId, - remote_location: &InteriorLocation, - message: &Xcm<()>, - ) -> Option<(Location, Option)> { - // ensure that the message is sent to the expected bridged network (if specified). - if let Some(bridged_network) = T::BridgedNetworkId::get() { - if *network != bridged_network { - log::trace!( - target: LOG_TARGET, - "Router with bridged_network_id {:?} does not support bridging to network {:?}!", - bridged_network, - network, - ); - return None - } - } - - // ensure that the message is sent to the expected bridged network and location. - let Some((bridge_hub_location, maybe_payment)) = - T::Bridges::exporter_for(network, remote_location, message) - else { - log::trace!( - target: LOG_TARGET, - "Router with bridged_network_id {:?} does not support bridging to network {:?} and remote_location {:?}!", - T::BridgedNetworkId::get(), - network, - remote_location, - ); - return None - }; - - // take `base_fee` from `T::Brides`, but it has to be the same `T::FeeAsset` - let base_fee = match maybe_payment { - Some(payment) => match payment { - Asset { fun: Fungible(amount), id } if id.eq(&T::FeeAsset::get()) => amount, - invalid_asset => { - log::error!( - target: LOG_TARGET, - "Router with bridged_network_id {:?} is configured for `T::FeeAsset` {:?} which is not \ - compatible with {:?} for bridge_hub_location: {:?} for bridging to {:?}/{:?}!", - T::BridgedNetworkId::get(), - T::FeeAsset::get(), - invalid_asset, - bridge_hub_location, - network, - remote_location, - ); - return None - }, - }, - None => 0, - }; - - // compute fee amount. Keep in mind that this is only the bridge fee. 
The fee for sending - // message from this chain to child/sibling bridge hub is determined by the - // `Config::ToBridgeHubSender` - let message_size = message.encoded_size(); - let message_fee = (message_size as u128).saturating_mul(T::ByteFee::get()); - let fee_sum = base_fee.saturating_add(message_fee); - let fee_factor = Self::bridge().delivery_fee_factor; - let fee = fee_factor.saturating_mul_int(fee_sum); - - let fee = if fee > 0 { Some((T::FeeAsset::get(), fee).into()) } else { None }; - - log::info!( - target: LOG_TARGET, - "Going to send message to {:?} ({} bytes) over bridge. Computed bridge fee {:?} using fee factor {}", - (network, remote_location), - message_size, - fee, - fee_factor - ); - - Some((bridge_hub_location, fee)) - } -} - -// This pallet acts as the `SendXcm` to the sibling/child bridge hub instead of regular -// XCMP/DMP transport. This allows injecting dynamic message fees into XCM programs that -// are going to the bridged network. -impl, I: 'static> SendXcm for Pallet { - type Ticket = (u32, ::Ticket); - - fn validate( - dest: &mut Option, - xcm: &mut Option>, - ) -> SendResult { - // `dest` and `xcm` are required here - let dest_ref = dest.as_ref().ok_or(SendError::MissingArgument)?; - let xcm_ref = xcm.as_ref().ok_or(SendError::MissingArgument)?; - - // we won't have an access to `dest` and `xcm` in the `deliver` method, so precompute - // everything required here - let message_size = xcm_ref.encoded_size() as _; - - // bridge doesn't support oversized/overweight messages now. So it is better to drop such - // messages here than at the bridge hub. Let's check the message size. - if message_size > HARD_MESSAGE_SIZE_LIMIT { - return Err(SendError::ExceedsMaxMessageSize) - } - - // We need to ensure that the known `dest`'s XCM version can comprehend the current `xcm` - // program. This may seem like an additional, unnecessary check, but it is not. 
A similar - // check is probably performed by the `ViaBridgeHubExporter`, which attempts to send a - // versioned message to the sibling bridge hub. However, the local bridge hub may have a - // higher XCM version than the remote `dest`. Once again, it is better to discard such - // messages here than at the bridge hub (e.g., to avoid losing funds). - let destination_version = T::DestinationVersion::get_version_for(dest_ref) - .ok_or(SendError::DestinationUnsupported)?; - let _ = VersionedXcm::from(xcm_ref.clone()) - .into_version(destination_version) - .map_err(|()| SendError::DestinationUnsupported)?; - - // just use exporter to validate destination and insert instructions to pay message fee - // at the sibling/child bridge hub - // - // the cost will include both cost of: (1) to-sibling bridge hub delivery (returned by - // the `Config::ToBridgeHubSender`) and (2) to-bridged bridge hub delivery (returned by - // `Self::exporter_for`) - ViaBridgeHubExporter::::validate(dest, xcm) - .map(|(ticket, cost)| ((message_size, ticket), cost)) - } - - fn deliver(ticket: Self::Ticket) -> Result { - // use router to enqueue message to the sibling/child bridge hub. This also should handle - // payment for passing through this queue. 
- let (message_size, ticket) = ticket; - let xcm_hash = ViaBridgeHubExporter::::deliver(ticket)?; - - // increase delivery fee factor if required - Self::on_message_sent_to_bridge(message_size); - - Ok(xcm_hash) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use frame_support::assert_ok; - use mock::*; - - use frame_support::traits::Hooks; - use sp_runtime::traits::One; - - fn congested_bridge(delivery_fee_factor: FixedU128) -> BridgeState { - BridgeState { is_congested: true, delivery_fee_factor } - } - - fn uncongested_bridge(delivery_fee_factor: FixedU128) -> BridgeState { - BridgeState { is_congested: false, delivery_fee_factor } - } - - #[test] - fn initial_fee_factor_is_one() { - run_test(|| { - assert_eq!( - Bridge::::get(), - uncongested_bridge(MINIMAL_DELIVERY_FEE_FACTOR), - ); - }) - } - - #[test] - fn fee_factor_is_not_decreased_from_on_initialize_when_xcm_channel_is_congested() { - run_test(|| { - Bridge::::put(uncongested_bridge(FixedU128::from_rational(125, 100))); - TestWithBridgeHubChannel::make_congested(); - - // it should not decrease, because xcm channel is congested - let old_bridge = XcmBridgeHubRouter::bridge(); - XcmBridgeHubRouter::on_initialize(One::one()); - assert_eq!(XcmBridgeHubRouter::bridge(), old_bridge); - }) - } - - #[test] - fn fee_factor_is_not_decreased_from_on_initialize_when_bridge_has_reported_congestion() { - run_test(|| { - Bridge::::put(congested_bridge(FixedU128::from_rational(125, 100))); - - // it should not decrease, because bridge congested - let old_bridge = XcmBridgeHubRouter::bridge(); - XcmBridgeHubRouter::on_initialize(One::one()); - assert_eq!(XcmBridgeHubRouter::bridge(), old_bridge); - }) - } - - #[test] - fn fee_factor_is_decreased_from_on_initialize_when_xcm_channel_is_uncongested() { - run_test(|| { - Bridge::::put(uncongested_bridge(FixedU128::from_rational(125, 100))); - - // it should eventually decreased to one - while XcmBridgeHubRouter::bridge().delivery_fee_factor > 
MINIMAL_DELIVERY_FEE_FACTOR { - XcmBridgeHubRouter::on_initialize(One::one()); - } - - // verify that it doesn't decreases anymore - XcmBridgeHubRouter::on_initialize(One::one()); - assert_eq!( - XcmBridgeHubRouter::bridge(), - uncongested_bridge(MINIMAL_DELIVERY_FEE_FACTOR) - ); - }) - } - - #[test] - fn not_applicable_if_destination_is_within_other_network() { - run_test(|| { - assert_eq!( - send_xcm::( - Location::new(2, [GlobalConsensus(Rococo), Parachain(1000)]), - vec![].into(), - ), - Err(SendError::NotApplicable), - ); - }); - } - - #[test] - fn exceeds_max_message_size_if_size_is_above_hard_limit() { - run_test(|| { - assert_eq!( - send_xcm::( - Location::new(2, [GlobalConsensus(Rococo), Parachain(1000)]), - vec![ClearOrigin; HARD_MESSAGE_SIZE_LIMIT as usize].into(), - ), - Err(SendError::ExceedsMaxMessageSize), - ); - }); - } - - #[test] - fn destination_unsupported_if_wrap_version_fails() { - run_test(|| { - assert_eq!( - send_xcm::( - UnknownXcmVersionLocation::get(), - vec![ClearOrigin].into(), - ), - Err(SendError::DestinationUnsupported), - ); - }); - } - - #[test] - fn returns_proper_delivery_price() { - run_test(|| { - let dest = Location::new(2, [GlobalConsensus(BridgedNetworkId::get())]); - let xcm: Xcm<()> = vec![ClearOrigin].into(); - let msg_size = xcm.encoded_size(); - - // initially the base fee is used: `BASE_FEE + BYTE_FEE * msg_size + HRMP_FEE` - let expected_fee = BASE_FEE + BYTE_FEE * (msg_size as u128) + HRMP_FEE; - assert_eq!( - XcmBridgeHubRouter::validate(&mut Some(dest.clone()), &mut Some(xcm.clone())) - .unwrap() - .1 - .get(0), - Some(&(BridgeFeeAsset::get(), expected_fee).into()), - ); - - // but when factor is larger than one, it increases the fee, so it becomes: - // `(BASE_FEE + BYTE_FEE * msg_size) * F + HRMP_FEE` - let factor = FixedU128::from_rational(125, 100); - Bridge::::put(uncongested_bridge(factor)); - let expected_fee = - (FixedU128::saturating_from_integer(BASE_FEE + BYTE_FEE * (msg_size as u128)) * - factor) - 
.into_inner() / FixedU128::DIV + - HRMP_FEE; - assert_eq!( - XcmBridgeHubRouter::validate(&mut Some(dest), &mut Some(xcm)).unwrap().1.get(0), - Some(&(BridgeFeeAsset::get(), expected_fee).into()), - ); - }); - } - - #[test] - fn sent_message_doesnt_increase_factor_if_xcm_channel_is_uncongested() { - run_test(|| { - let old_bridge = XcmBridgeHubRouter::bridge(); - assert_ok!(send_xcm::( - Location::new(2, [GlobalConsensus(BridgedNetworkId::get()), Parachain(1000)]), - vec![ClearOrigin].into(), - ) - .map(drop)); - - assert!(TestToBridgeHubSender::is_message_sent()); - assert_eq!(old_bridge, XcmBridgeHubRouter::bridge()); - }); - } - - #[test] - fn sent_message_increases_factor_if_xcm_channel_is_congested() { - run_test(|| { - TestWithBridgeHubChannel::make_congested(); - - let old_bridge = XcmBridgeHubRouter::bridge(); - assert_ok!(send_xcm::( - Location::new(2, [GlobalConsensus(BridgedNetworkId::get()), Parachain(1000)]), - vec![ClearOrigin].into(), - ) - .map(drop)); - - assert!(TestToBridgeHubSender::is_message_sent()); - assert!( - old_bridge.delivery_fee_factor < XcmBridgeHubRouter::bridge().delivery_fee_factor - ); - }); - } - - #[test] - fn sent_message_increases_factor_if_bridge_has_reported_congestion() { - run_test(|| { - Bridge::::put(congested_bridge(MINIMAL_DELIVERY_FEE_FACTOR)); - - let old_bridge = XcmBridgeHubRouter::bridge(); - assert_ok!(send_xcm::( - Location::new(2, [GlobalConsensus(BridgedNetworkId::get()), Parachain(1000)]), - vec![ClearOrigin].into(), - ) - .map(drop)); - - assert!(TestToBridgeHubSender::is_message_sent()); - assert!( - old_bridge.delivery_fee_factor < XcmBridgeHubRouter::bridge().delivery_fee_factor - ); - }); - } -} diff --git a/modules/xcm-bridge-hub-router/src/mock.rs b/modules/xcm-bridge-hub-router/src/mock.rs deleted file mode 100644 index 54e10966d..000000000 --- a/modules/xcm-bridge-hub-router/src/mock.rs +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. 
-// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -#![cfg(test)] - -use crate as pallet_xcm_bridge_hub_router; - -use bp_xcm_bridge_hub_router::XcmChannelStatusProvider; -use frame_support::{ - construct_runtime, derive_impl, parameter_types, - traits::{Contains, Equals}, -}; -use frame_system::EnsureRoot; -use sp_runtime::{traits::ConstU128, BuildStorage}; -use xcm::prelude::*; -use xcm_builder::{NetworkExportTable, NetworkExportTableItem}; - -pub type AccountId = u64; -type Block = frame_system::mocking::MockBlock; - -/// HRMP fee. -pub const HRMP_FEE: u128 = 500; -/// Base bridge fee. -pub const BASE_FEE: u128 = 1_000_000; -/// Byte bridge fee. -pub const BYTE_FEE: u128 = 1_000; - -construct_runtime! { - pub enum TestRuntime - { - System: frame_system::{Pallet, Call, Config, Storage, Event}, - XcmBridgeHubRouter: pallet_xcm_bridge_hub_router::{Pallet, Storage}, - } -} - -parameter_types! 
{ - pub ThisNetworkId: NetworkId = Polkadot; - pub BridgedNetworkId: NetworkId = Kusama; - pub UniversalLocation: InteriorLocation = [GlobalConsensus(ThisNetworkId::get()), Parachain(1000)].into(); - pub SiblingBridgeHubLocation: Location = ParentThen([Parachain(1002)].into()).into(); - pub BridgeFeeAsset: AssetId = Location::parent().into(); - pub BridgeTable: Vec - = vec![ - NetworkExportTableItem::new( - BridgedNetworkId::get(), - None, - SiblingBridgeHubLocation::get(), - Some((BridgeFeeAsset::get(), BASE_FEE).into()) - ) - ]; - pub UnknownXcmVersionLocation: Location = Location::new(2, [GlobalConsensus(BridgedNetworkId::get()), Parachain(9999)]); -} - -#[derive_impl(frame_system::config_preludes::TestDefaultConfig)] -impl frame_system::Config for TestRuntime { - type Block = Block; -} - -impl pallet_xcm_bridge_hub_router::Config<()> for TestRuntime { - type WeightInfo = (); - - type UniversalLocation = UniversalLocation; - type BridgedNetworkId = BridgedNetworkId; - type Bridges = NetworkExportTable; - type DestinationVersion = - LatestOrNoneForLocationVersionChecker>; - - type BridgeHubOrigin = EnsureRoot; - type ToBridgeHubSender = TestToBridgeHubSender; - type WithBridgeHubChannel = TestWithBridgeHubChannel; - - type ByteFee = ConstU128; - type FeeAsset = BridgeFeeAsset; -} - -pub struct LatestOrNoneForLocationVersionChecker(sp_std::marker::PhantomData); -impl> GetVersion - for LatestOrNoneForLocationVersionChecker -{ - fn get_version_for(dest: &Location) -> Option { - if LocationValue::contains(dest) { - return None - } - Some(XCM_VERSION) - } -} - -pub struct TestToBridgeHubSender; - -impl TestToBridgeHubSender { - pub fn is_message_sent() -> bool { - frame_support::storage::unhashed::get_or_default(b"TestToBridgeHubSender.Sent") - } -} - -impl SendXcm for TestToBridgeHubSender { - type Ticket = (); - - fn validate( - _destination: &mut Option, - _message: &mut Option>, - ) -> SendResult { - Ok(((), (BridgeFeeAsset::get(), HRMP_FEE).into())) - } - - fn 
deliver(_ticket: Self::Ticket) -> Result { - frame_support::storage::unhashed::put(b"TestToBridgeHubSender.Sent", &true); - Ok([0u8; 32]) - } -} - -pub struct TestWithBridgeHubChannel; - -impl TestWithBridgeHubChannel { - pub fn make_congested() { - frame_support::storage::unhashed::put(b"TestWithBridgeHubChannel.Congested", &true); - } -} - -impl XcmChannelStatusProvider for TestWithBridgeHubChannel { - fn is_congested() -> bool { - frame_support::storage::unhashed::get_or_default(b"TestWithBridgeHubChannel.Congested") - } -} - -/// Return test externalities to use in tests. -pub fn new_test_ext() -> sp_io::TestExternalities { - let t = frame_system::GenesisConfig::::default().build_storage().unwrap(); - sp_io::TestExternalities::new(t) -} - -/// Run pallet test. -pub fn run_test(test: impl FnOnce() -> T) -> T { - new_test_ext().execute_with(test) -} diff --git a/modules/xcm-bridge-hub-router/src/weights.rs b/modules/xcm-bridge-hub-router/src/weights.rs deleted file mode 100644 index b0c8fc625..000000000 --- a/modules/xcm-bridge-hub-router/src/weights.rs +++ /dev/null @@ -1,208 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Autogenerated weights for pallet_xcm_bridge_hub_router -//! -//! 
THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2023-08-03, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` -//! WORST CASE MAP SIZE: `1000000` -//! HOSTNAME: `covid`, CPU: `11th Gen Intel(R) Core(TM) i7-11800H @ 2.30GHz` -//! EXECUTION: , WASM-EXECUTION: Compiled, CHAIN: Some("dev"), DB CACHE: 1024 - -// Executed Command: -// target/release/rip-bridge-node -// benchmark -// pallet -// --chain=dev -// --steps=50 -// --repeat=20 -// --pallet=pallet_xcm_bridge_hub_router -// --extrinsic=* -// --execution=wasm -// --wasm-execution=Compiled -// --heap-pages=4096 -// --output=./modules/xcm-bridge-hub-router/src/weights.rs -// --template=./.maintain/bridge-weight-template.hbs - -#![allow(clippy::all)] -#![allow(unused_parens)] -#![allow(unused_imports)] -#![allow(missing_docs)] - -use frame_support::{ - traits::Get, - weights::{constants::RocksDbWeight, Weight}, -}; -use sp_std::marker::PhantomData; - -/// Weight functions needed for pallet_xcm_bridge_hub_router. -pub trait WeightInfo { - fn on_initialize_when_non_congested() -> Weight; - fn on_initialize_when_congested() -> Weight; - fn report_bridge_status() -> Weight; - fn send_message() -> Weight; -} - -/// Weights for `pallet_xcm_bridge_hub_router` that are generated using one of the Bridge testnets. -/// -/// Those weights are test only and must never be used in production. 
-pub struct BridgeWeight(PhantomData); -impl WeightInfo for BridgeWeight { - /// Storage: `XcmBridgeHubRouter::Bridge` (r:1 w:1) - /// - /// Proof: `XcmBridgeHubRouter::Bridge` (`max_values`: Some(1), `max_size`: Some(17), added: - /// 512, mode: `MaxEncodedLen`) - /// - /// Storage: UNKNOWN KEY `0x456d756c617465645369626c696e6758636d704368616e6e656c2e436f6e6765` - /// (r:1 w:0) - /// - /// Proof: UNKNOWN KEY `0x456d756c617465645369626c696e6758636d704368616e6e656c2e436f6e6765` (r:1 - /// w:0) - fn on_initialize_when_non_congested() -> Weight { - // Proof Size summary in bytes: - // Measured: `53` - // Estimated: `3518` - // Minimum execution time: 11_934 nanoseconds. - Weight::from_parts(12_201_000, 3518) - .saturating_add(T::DbWeight::get().reads(2_u64)) - .saturating_add(T::DbWeight::get().writes(1_u64)) - } - /// Storage: `XcmBridgeHubRouter::Bridge` (r:1 w:1) - /// - /// Proof: `XcmBridgeHubRouter::Bridge` (`max_values`: Some(1), `max_size`: Some(17), added: - /// 512, mode: `MaxEncodedLen`) - /// - /// Storage: UNKNOWN KEY `0x456d756c617465645369626c696e6758636d704368616e6e656c2e436f6e6765` - /// (r:1 w:0) - /// - /// Proof: UNKNOWN KEY `0x456d756c617465645369626c696e6758636d704368616e6e656c2e436f6e6765` (r:1 - /// w:0) - fn on_initialize_when_congested() -> Weight { - // Proof Size summary in bytes: - // Measured: `94` - // Estimated: `3559` - // Minimum execution time: 9_010 nanoseconds. - Weight::from_parts(9_594_000, 3559) - .saturating_add(T::DbWeight::get().reads(2_u64)) - .saturating_add(T::DbWeight::get().writes(1_u64)) - } - /// Storage: `XcmBridgeHubRouter::Bridge` (r:1 w:1) - /// - /// Proof: `XcmBridgeHubRouter::Bridge` (`max_values`: Some(1), `max_size`: Some(17), added: - /// 512, mode: `MaxEncodedLen`) - fn report_bridge_status() -> Weight { - // Proof Size summary in bytes: - // Measured: `53` - // Estimated: `1502` - // Minimum execution time: 10_427 nanoseconds. 
- Weight::from_parts(10_682_000, 1502) - .saturating_add(T::DbWeight::get().reads(1_u64)) - .saturating_add(T::DbWeight::get().writes(1_u64)) - } - /// Storage: `XcmBridgeHubRouter::Bridge` (r:1 w:1) - /// - /// Proof: `XcmBridgeHubRouter::Bridge` (`max_values`: Some(1), `max_size`: Some(17), added: - /// 512, mode: `MaxEncodedLen`) - /// - /// Storage: UNKNOWN KEY `0x456d756c617465645369626c696e6758636d704368616e6e656c2e436f6e6765` - /// (r:1 w:0) - /// - /// Proof: UNKNOWN KEY `0x456d756c617465645369626c696e6758636d704368616e6e656c2e436f6e6765` (r:1 - /// w:0) - fn send_message() -> Weight { - // Proof Size summary in bytes: - // Measured: `52` - // Estimated: `3517` - // Minimum execution time: 19_709 nanoseconds. - Weight::from_parts(20_110_000, 3517) - .saturating_add(T::DbWeight::get().reads(2_u64)) - .saturating_add(T::DbWeight::get().writes(1_u64)) - } -} - -// For backwards compatibility and tests -impl WeightInfo for () { - /// Storage: `XcmBridgeHubRouter::Bridge` (r:1 w:1) - /// - /// Proof: `XcmBridgeHubRouter::Bridge` (`max_values`: Some(1), `max_size`: Some(17), added: - /// 512, mode: `MaxEncodedLen`) - /// - /// Storage: UNKNOWN KEY `0x456d756c617465645369626c696e6758636d704368616e6e656c2e436f6e6765` - /// (r:1 w:0) - /// - /// Proof: UNKNOWN KEY `0x456d756c617465645369626c696e6758636d704368616e6e656c2e436f6e6765` (r:1 - /// w:0) - fn on_initialize_when_non_congested() -> Weight { - // Proof Size summary in bytes: - // Measured: `53` - // Estimated: `3518` - // Minimum execution time: 11_934 nanoseconds. 
- Weight::from_parts(12_201_000, 3518) - .saturating_add(RocksDbWeight::get().reads(2_u64)) - .saturating_add(RocksDbWeight::get().writes(1_u64)) - } - /// Storage: `XcmBridgeHubRouter::Bridge` (r:1 w:1) - /// - /// Proof: `XcmBridgeHubRouter::Bridge` (`max_values`: Some(1), `max_size`: Some(17), added: - /// 512, mode: `MaxEncodedLen`) - /// - /// Storage: UNKNOWN KEY `0x456d756c617465645369626c696e6758636d704368616e6e656c2e436f6e6765` - /// (r:1 w:0) - /// - /// Proof: UNKNOWN KEY `0x456d756c617465645369626c696e6758636d704368616e6e656c2e436f6e6765` (r:1 - /// w:0) - fn on_initialize_when_congested() -> Weight { - // Proof Size summary in bytes: - // Measured: `94` - // Estimated: `3559` - // Minimum execution time: 9_010 nanoseconds. - Weight::from_parts(9_594_000, 3559) - .saturating_add(RocksDbWeight::get().reads(2_u64)) - .saturating_add(RocksDbWeight::get().writes(1_u64)) - } - /// Storage: `XcmBridgeHubRouter::Bridge` (r:1 w:1) - /// - /// Proof: `XcmBridgeHubRouter::Bridge` (`max_values`: Some(1), `max_size`: Some(17), added: - /// 512, mode: `MaxEncodedLen`) - fn report_bridge_status() -> Weight { - // Proof Size summary in bytes: - // Measured: `53` - // Estimated: `1502` - // Minimum execution time: 10_427 nanoseconds. - Weight::from_parts(10_682_000, 1502) - .saturating_add(RocksDbWeight::get().reads(1_u64)) - .saturating_add(RocksDbWeight::get().writes(1_u64)) - } - /// Storage: `XcmBridgeHubRouter::Bridge` (r:1 w:1) - /// - /// Proof: `XcmBridgeHubRouter::Bridge` (`max_values`: Some(1), `max_size`: Some(17), added: - /// 512, mode: `MaxEncodedLen`) - /// - /// Storage: UNKNOWN KEY `0x456d756c617465645369626c696e6758636d704368616e6e656c2e436f6e6765` - /// (r:1 w:0) - /// - /// Proof: UNKNOWN KEY `0x456d756c617465645369626c696e6758636d704368616e6e656c2e436f6e6765` (r:1 - /// w:0) - fn send_message() -> Weight { - // Proof Size summary in bytes: - // Measured: `52` - // Estimated: `3517` - // Minimum execution time: 19_709 nanoseconds. 
- Weight::from_parts(20_110_000, 3517) - .saturating_add(RocksDbWeight::get().reads(2_u64)) - .saturating_add(RocksDbWeight::get().writes(1_u64)) - } -} diff --git a/modules/xcm-bridge-hub/Cargo.toml b/modules/xcm-bridge-hub/Cargo.toml deleted file mode 100644 index d7e562530..000000000 --- a/modules/xcm-bridge-hub/Cargo.toml +++ /dev/null @@ -1,78 +0,0 @@ -[package] -name = "pallet-xcm-bridge-hub" -description = "Module that adds dynamic bridges/lanes support to XCM infrastructure at the bridge hub." -version = "0.2.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } -log = { workspace = true } -scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } - -# Bridge Dependencies -bp-messages = { path = "../../primitives/messages", default-features = false } -bp-runtime = { path = "../../primitives/runtime", default-features = false } -bp-xcm-bridge-hub = { path = "../../primitives/xcm-bridge-hub", default-features = false } -pallet-bridge-messages = { path = "../messages", default-features = false } -bridge-runtime-common = { path = "../../bin/runtime-common", default-features = false } - -# Substrate Dependencies -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -frame-system = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -# Polkadot Dependencies -xcm = { package = 
"staging-xcm", git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -xcm-builder = { package = "staging-xcm-builder", git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -xcm-executor = { package = "staging-xcm-executor", git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[dev-dependencies] -bp-header-chain = { path = "../../primitives/header-chain" } -pallet-balances = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-io = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } - -[features] -default = ["std"] -std = [ - "bp-messages/std", - "bp-runtime/std", - "bp-xcm-bridge-hub/std", - "bridge-runtime-common/std", - "codec/std", - "frame-support/std", - "frame-system/std", - "log/std", - "pallet-bridge-messages/std", - "scale-info/std", - "sp-core/std", - "sp-runtime/std", - "sp-std/std", - "xcm-builder/std", - "xcm-executor/std", - "xcm/std", -] -runtime-benchmarks = [ - "bridge-runtime-common/runtime-benchmarks", - "frame-support/runtime-benchmarks", - "frame-system/runtime-benchmarks", - "pallet-balances/runtime-benchmarks", - "pallet-bridge-messages/runtime-benchmarks", - "sp-runtime/runtime-benchmarks", - "xcm-builder/runtime-benchmarks", - "xcm-executor/runtime-benchmarks", -] -try-runtime = [ - "frame-support/try-runtime", - "frame-system/try-runtime", - "pallet-balances/try-runtime", - "pallet-bridge-messages/try-runtime", - "sp-runtime/try-runtime", -] diff --git a/modules/xcm-bridge-hub/src/exporter.rs b/modules/xcm-bridge-hub/src/exporter.rs deleted file mode 100644 index 94ec8b5f1..000000000 --- a/modules/xcm-bridge-hub/src/exporter.rs +++ /dev/null @@ -1,206 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! The code that allows to use the pallet (`pallet-xcm-bridge-hub`) as XCM message -//! exporter at the sending bridge hub. Internally, it just enqueues outbound blob -//! in the messages pallet queue. -//! -//! This code is executed at the source bridge hub. - -use crate::{Config, Pallet, LOG_TARGET}; - -use bp_messages::source_chain::MessagesBridge; -use bp_xcm_bridge_hub::XcmAsPlainPayload; -use bridge_runtime_common::messages_xcm_extension::{LocalXcmQueueManager, SenderAndLane}; -use pallet_bridge_messages::{Config as BridgeMessagesConfig, Pallet as BridgeMessagesPallet}; -use xcm::prelude::*; -use xcm_builder::{HaulBlob, HaulBlobError, HaulBlobExporter}; -use xcm_executor::traits::ExportXcm; - -/// An easy way to access `HaulBlobExporter`. -pub type PalletAsHaulBlobExporter = HaulBlobExporter< - DummyHaulBlob, - >::BridgedNetwork, - >::DestinationVersion, - >::MessageExportPrice, ->; -/// An easy way to access associated messages pallet. 
-type MessagesPallet = BridgeMessagesPallet>::BridgeMessagesPalletInstance>; - -impl, I: 'static> ExportXcm for Pallet -where - T: BridgeMessagesConfig, -{ - type Ticket = ( - SenderAndLane, - as MessagesBridge>::SendMessageArgs, - XcmHash, - ); - - fn validate( - network: NetworkId, - channel: u32, - universal_source: &mut Option, - destination: &mut Option, - message: &mut Option>, - ) -> Result<(Self::Ticket, Assets), SendError> { - // Find supported lane_id. - let sender_and_lane = Self::lane_for( - universal_source.as_ref().ok_or(SendError::MissingArgument)?, - (&network, destination.as_ref().ok_or(SendError::MissingArgument)?), - ) - .ok_or(SendError::NotApplicable)?; - - // check if we are able to route the message. We use existing `HaulBlobExporter` for that. - // It will make all required changes and will encode message properly, so that the - // `DispatchBlob` at the bridged bridge hub will be able to decode it - let ((blob, id), price) = PalletAsHaulBlobExporter::::validate( - network, - channel, - universal_source, - destination, - message, - )?; - - let bridge_message = MessagesPallet::::validate_message(sender_and_lane.lane, &blob) - .map_err(|e| { - log::debug!( - target: LOG_TARGET, - "XCM message {:?} cannot be exported because of bridge error {:?} on bridge {:?}", - id, - e, - sender_and_lane.lane, - ); - SendError::Transport("BridgeValidateError") - })?; - - Ok(((sender_and_lane, bridge_message, id), price)) - } - - fn deliver((sender_and_lane, bridge_message, id): Self::Ticket) -> Result { - let lane_id = sender_and_lane.lane; - let artifacts = MessagesPallet::::send_message(bridge_message); - - log::info!( - target: LOG_TARGET, - "XCM message {:?} has been enqueued at bridge {:?} with nonce {}", - id, - lane_id, - artifacts.nonce, - ); - - // notify XCM queue manager about updated lane state - LocalXcmQueueManager::::on_bridge_message_enqueued( - &sender_and_lane, - artifacts.enqueued_messages, - ); - - Ok(id) - } -} - -/// Dummy implementation 
of the `HaulBlob` trait that is never called. -/// -/// We are using `HaulBlobExporter`, which requires `HaulBlob` implementation. It assumes that -/// there's a single channel between two bridge hubs - `HaulBlob` only accepts the blob and nothing -/// else. But bridge messages pallet may have a dedicated channel (lane) for every pair of bridged -/// chains. So we are using our own `ExportXcm` implementation, but to utilize `HaulBlobExporter` we -/// still need this `DummyHaulBlob`. -pub struct DummyHaulBlob; - -impl HaulBlob for DummyHaulBlob { - fn haul_blob(_blob: XcmAsPlainPayload) -> Result<(), HaulBlobError> { - Err(HaulBlobError::Transport("DummyHaulBlob")) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::mock::*; - use frame_support::assert_ok; - use xcm_executor::traits::export_xcm; - - fn universal_source() -> InteriorLocation { - [GlobalConsensus(RelayNetwork::get()), Parachain(SIBLING_ASSET_HUB_ID)].into() - } - - fn universal_destination() -> InteriorLocation { - BridgedDestination::get() - } - - #[test] - fn export_works() { - run_test(|| { - assert_ok!(export_xcm::( - BridgedRelayNetwork::get(), - 0, - universal_source(), - universal_destination(), - vec![Instruction::ClearOrigin].into(), - )); - }) - } - - #[test] - fn export_fails_if_argument_is_missing() { - run_test(|| { - assert_eq!( - XcmOverBridge::validate( - BridgedRelayNetwork::get(), - 0, - &mut None, - &mut Some(universal_destination()), - &mut Some(Vec::new().into()), - ), - Err(SendError::MissingArgument), - ); - - assert_eq!( - XcmOverBridge::validate( - BridgedRelayNetwork::get(), - 0, - &mut Some(universal_source()), - &mut None, - &mut Some(Vec::new().into()), - ), - Err(SendError::MissingArgument), - ); - }) - } - - #[test] - fn exporter_computes_correct_lane_id() { - run_test(|| { - let expected_lane_id = TEST_LANE_ID; - - assert_eq!( - XcmOverBridge::validate( - BridgedRelayNetwork::get(), - 0, - &mut Some(universal_source()), - &mut Some(universal_destination()), 
- &mut Some(Vec::new().into()), - ) - .unwrap() - .0 - .0 - .lane, - expected_lane_id, - ); - }) - } -} diff --git a/modules/xcm-bridge-hub/src/lib.rs b/modules/xcm-bridge-hub/src/lib.rs deleted file mode 100644 index 60b988497..000000000 --- a/modules/xcm-bridge-hub/src/lib.rs +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Module that adds XCM support to bridge pallets. - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -use bridge_runtime_common::messages_xcm_extension::XcmBlobHauler; -use pallet_bridge_messages::Config as BridgeMessagesConfig; -use xcm::prelude::*; - -pub use exporter::PalletAsHaulBlobExporter; -pub use pallet::*; - -mod exporter; -mod mock; - -/// The target that will be used when publishing logs related to this pallet. -pub const LOG_TARGET: &str = "runtime::bridge-xcm"; - -#[frame_support::pallet] -pub mod pallet { - use super::*; - use bridge_runtime_common::messages_xcm_extension::SenderAndLane; - use frame_support::pallet_prelude::*; - use frame_system::pallet_prelude::BlockNumberFor; - - #[pallet::config] - #[pallet::disable_frame_system_supertrait_check] - pub trait Config: - BridgeMessagesConfig - { - /// Runtime's universal location. 
- type UniversalLocation: Get; - // TODO: https://github.com/paritytech/parity-bridges-common/issues/1666 remove `ChainId` and - // replace it with the `NetworkId` - then we'll be able to use - // `T as pallet_bridge_messages::Config::BridgedChain::NetworkId` - /// Bridged network as relative location of bridged `GlobalConsensus`. - #[pallet::constant] - type BridgedNetwork: Get; - /// Associated messages pallet instance that bridges us with the - /// `BridgedNetworkId` consensus. - type BridgeMessagesPalletInstance: 'static; - - /// Price of single message export to the bridged consensus (`Self::BridgedNetworkId`). - type MessageExportPrice: Get; - /// Checks the XCM version for the destination. - type DestinationVersion: GetVersion; - - /// Get point-to-point links with bridged consensus (`Self::BridgedNetworkId`). - /// (this will be replaced with dynamic on-chain bridges - `Bridges V2`) - type Lanes: Get>; - /// Support for point-to-point links - /// (this will be replaced with dynamic on-chain bridges - `Bridges V2`) - type LanesSupport: XcmBlobHauler; - } - - #[pallet::pallet] - pub struct Pallet(PhantomData<(T, I)>); - - #[pallet::hooks] - impl, I: 'static> Hooks> for Pallet { - fn integrity_test() { - assert!( - Self::bridged_network_id().is_some(), - "Configured `T::BridgedNetwork`: {:?} does not contain `GlobalConsensus` junction with `NetworkId`", - T::BridgedNetwork::get() - ) - } - } - - impl, I: 'static> Pallet { - /// Returns dedicated/configured lane identifier. - pub(crate) fn lane_for( - source: &InteriorLocation, - dest: (&NetworkId, &InteriorLocation), - ) -> Option { - let source = source.clone().relative_to(&T::UniversalLocation::get()); - - // Check that we have configured a point-to-point lane for 'source' and `dest`. 
- T::Lanes::get() - .into_iter() - .find_map(|(lane_source, (lane_dest_network, lane_dest))| { - if lane_source.location == source && - &lane_dest_network == dest.0 && - Self::bridged_network_id().as_ref() == Some(dest.0) && - &lane_dest == dest.1 - { - Some(lane_source) - } else { - None - } - }) - } - - /// Returns some `NetworkId` if contains `GlobalConsensus` junction. - fn bridged_network_id() -> Option { - match T::BridgedNetwork::get().take_first_interior() { - Some(GlobalConsensus(network)) => Some(network), - _ => None, - } - } - } -} diff --git a/modules/xcm-bridge-hub/src/mock.rs b/modules/xcm-bridge-hub/src/mock.rs deleted file mode 100644 index 4c09bce56..000000000 --- a/modules/xcm-bridge-hub/src/mock.rs +++ /dev/null @@ -1,317 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . 
- -#![cfg(test)] - -use crate as pallet_xcm_bridge_hub; - -use bp_messages::{ - target_chain::{DispatchMessage, MessageDispatch}, - LaneId, -}; -use bp_runtime::{messages::MessageDispatchResult, Chain, ChainId, UnderlyingChainProvider}; -use bridge_runtime_common::{ - messages::{ - source::TargetHeaderChainAdapter, target::SourceHeaderChainAdapter, - BridgedChainWithMessages, HashOf, MessageBridge, ThisChainWithMessages, - }, - messages_xcm_extension::{SenderAndLane, XcmBlobHauler}, -}; -use codec::Encode; -use frame_support::{derive_impl, parameter_types, traits::ConstU32, weights::RuntimeDbWeight}; -use sp_core::H256; -use sp_runtime::{ - testing::Header as SubstrateHeader, - traits::{BlakeTwo256, IdentityLookup}, - AccountId32, BuildStorage, -}; -use xcm::prelude::*; - -pub type AccountId = AccountId32; -pub type Balance = u64; - -type Block = frame_system::mocking::MockBlock; - -pub const SIBLING_ASSET_HUB_ID: u32 = 2001; -pub const THIS_BRIDGE_HUB_ID: u32 = 2002; -pub const BRIDGED_ASSET_HUB_ID: u32 = 1001; -pub const TEST_LANE_ID: LaneId = LaneId([0, 0, 0, 1]); - -frame_support::construct_runtime! { - pub enum TestRuntime { - System: frame_system::{Pallet, Call, Config, Storage, Event}, - Balances: pallet_balances::{Pallet, Event}, - Messages: pallet_bridge_messages::{Pallet, Call, Event}, - XcmOverBridge: pallet_xcm_bridge_hub::{Pallet}, - } -} - -parameter_types! { - pub const DbWeight: RuntimeDbWeight = RuntimeDbWeight { read: 1, write: 2 }; - pub const ExistentialDeposit: Balance = 1; -} - -#[derive_impl(frame_system::config_preludes::TestDefaultConfig)] -impl frame_system::Config for TestRuntime { - type AccountId = AccountId; - type AccountData = pallet_balances::AccountData; - type Block = Block; - type Lookup = IdentityLookup; -} - -#[derive_impl(pallet_balances::config_preludes::TestDefaultConfig)] -impl pallet_balances::Config for TestRuntime { - type AccountStore = System; -} - -parameter_types! 
{ - pub const ActiveOutboundLanes: &'static [LaneId] = &[TEST_LANE_ID]; -} - -impl pallet_bridge_messages::Config for TestRuntime { - type RuntimeEvent = RuntimeEvent; - type WeightInfo = TestMessagesWeights; - - type BridgedChainId = (); - type ActiveOutboundLanes = ActiveOutboundLanes; - type MaxUnrewardedRelayerEntriesAtInboundLane = (); - type MaxUnconfirmedMessagesAtInboundLane = (); - type MaximalOutboundPayloadSize = ConstU32<2048>; - type OutboundPayload = Vec; - type InboundPayload = Vec; - type InboundRelayer = (); - type DeliveryPayments = (); - type TargetHeaderChain = TargetHeaderChainAdapter; - type DeliveryConfirmationPayments = (); - type OnMessagesDelivered = (); - type SourceHeaderChain = SourceHeaderChainAdapter; - type MessageDispatch = TestMessageDispatch; -} - -pub struct TestMessagesWeights; - -impl pallet_bridge_messages::WeightInfo for TestMessagesWeights { - fn receive_single_message_proof() -> Weight { - Weight::zero() - } - fn receive_single_message_proof_with_outbound_lane_state() -> Weight { - Weight::zero() - } - fn receive_delivery_proof_for_single_message() -> Weight { - Weight::zero() - } - fn receive_delivery_proof_for_two_messages_by_single_relayer() -> Weight { - Weight::zero() - } - fn receive_delivery_proof_for_two_messages_by_two_relayers() -> Weight { - Weight::zero() - } - - fn receive_two_messages_proof() -> Weight { - Weight::zero() - } - - fn receive_single_message_proof_1_kb() -> Weight { - Weight::zero() - } - - fn receive_single_message_proof_16_kb() -> Weight { - Weight::zero() - } - - fn receive_single_message_proof_with_dispatch(_: u32) -> Weight { - Weight::from_parts(1, 0) - } -} - -impl pallet_bridge_messages::WeightInfoExt for TestMessagesWeights { - fn expected_extra_storage_proof_size() -> u32 { - 0 - } - - fn receive_messages_proof_overhead_from_runtime() -> Weight { - Weight::zero() - } - - fn receive_messages_delivery_proof_overhead_from_runtime() -> Weight { - Weight::zero() - } -} - -parameter_types! 
{ - pub const RelayNetwork: NetworkId = NetworkId::Kusama; - pub const BridgedRelayNetwork: NetworkId = NetworkId::Polkadot; - pub BridgedRelayNetworkLocation: Location = (Parent, GlobalConsensus(BridgedRelayNetwork::get())).into(); - pub const NonBridgedRelayNetwork: NetworkId = NetworkId::Rococo; - pub const BridgeReserve: Balance = 100_000; - pub UniversalLocation: InteriorLocation = [ - GlobalConsensus(RelayNetwork::get()), - Parachain(THIS_BRIDGE_HUB_ID), - ].into(); - pub const Penalty: Balance = 1_000; -} - -impl pallet_xcm_bridge_hub::Config for TestRuntime { - type UniversalLocation = UniversalLocation; - type BridgedNetwork = BridgedRelayNetworkLocation; - type BridgeMessagesPalletInstance = (); - - type MessageExportPrice = (); - type DestinationVersion = AlwaysLatest; - - type Lanes = TestLanes; - type LanesSupport = TestXcmBlobHauler; -} - -parameter_types! { - pub TestSenderAndLane: SenderAndLane = SenderAndLane { - location: Location::new(1, [Parachain(SIBLING_ASSET_HUB_ID)]), - lane: TEST_LANE_ID, - }; - pub BridgedDestination: InteriorLocation = [ - Parachain(BRIDGED_ASSET_HUB_ID) - ].into(); - pub TestLanes: sp_std::vec::Vec<(SenderAndLane, (NetworkId, InteriorLocation))> = sp_std::vec![ - (TestSenderAndLane::get(), (BridgedRelayNetwork::get(), BridgedDestination::get())) - ]; -} - -pub struct TestXcmBlobHauler; -impl XcmBlobHauler for TestXcmBlobHauler { - type Runtime = TestRuntime; - type MessagesInstance = (); - type ToSourceChainSender = (); - type CongestedMessage = (); - type UncongestedMessage = (); -} - -pub struct ThisChain; - -impl Chain for ThisChain { - const ID: ChainId = *b"tuch"; - type BlockNumber = u64; - type Hash = H256; - type Hasher = BlakeTwo256; - type Header = SubstrateHeader; - type AccountId = AccountId; - type Balance = Balance; - type Nonce = u64; - type Signature = sp_runtime::MultiSignature; - - fn max_extrinsic_size() -> u32 { - u32::MAX - } - - fn max_extrinsic_weight() -> Weight { - Weight::MAX - } -} - -pub 
struct BridgedChain; -pub type BridgedHeaderHash = H256; -pub type BridgedChainHeader = SubstrateHeader; - -impl Chain for BridgedChain { - const ID: ChainId = *b"tuch"; - type BlockNumber = u64; - type Hash = BridgedHeaderHash; - type Hasher = BlakeTwo256; - type Header = BridgedChainHeader; - type AccountId = AccountId; - type Balance = Balance; - type Nonce = u64; - type Signature = sp_runtime::MultiSignature; - - fn max_extrinsic_size() -> u32 { - 4096 - } - - fn max_extrinsic_weight() -> Weight { - Weight::MAX - } -} - -/// Test message dispatcher. -pub struct TestMessageDispatch; - -impl TestMessageDispatch { - pub fn deactivate(lane: LaneId) { - frame_support::storage::unhashed::put(&(b"inactive", lane).encode()[..], &false); - } -} - -impl MessageDispatch for TestMessageDispatch { - type DispatchPayload = Vec; - type DispatchLevelResult = (); - - fn is_active() -> bool { - frame_support::storage::unhashed::take::(&(b"inactive").encode()[..]) != Some(false) - } - - fn dispatch_weight(_message: &mut DispatchMessage) -> Weight { - Weight::zero() - } - - fn dispatch( - _: DispatchMessage, - ) -> MessageDispatchResult { - MessageDispatchResult { unspent_weight: Weight::zero(), dispatch_level_result: () } - } -} - -pub struct WrappedThisChain; -impl UnderlyingChainProvider for WrappedThisChain { - type Chain = ThisChain; -} -impl ThisChainWithMessages for WrappedThisChain { - type RuntimeOrigin = RuntimeOrigin; -} - -pub struct WrappedBridgedChain; -impl UnderlyingChainProvider for WrappedBridgedChain { - type Chain = BridgedChain; -} -impl BridgedChainWithMessages for WrappedBridgedChain {} - -pub struct BridgedHeaderChain; -impl bp_header_chain::HeaderChain for BridgedHeaderChain { - fn finalized_header_state_root( - _hash: HashOf, - ) -> Option> { - unreachable!() - } -} - -/// Bridge that is deployed on `ThisChain` and allows sending/receiving messages to/from -/// `BridgedChain`. 
-#[derive(Debug, PartialEq, Eq)] -pub struct OnThisChainBridge; - -impl MessageBridge for OnThisChainBridge { - const BRIDGED_MESSAGES_PALLET_NAME: &'static str = ""; - - type ThisChain = WrappedThisChain; - type BridgedChain = WrappedBridgedChain; - type BridgedHeaderChain = BridgedHeaderChain; -} - -/// Run pallet test. -pub fn run_test(test: impl FnOnce() -> T) -> T { - sp_io::TestExternalities::new( - frame_system::GenesisConfig::::default().build_storage().unwrap(), - ) - .execute_with(test) -} diff --git a/primitives/beefy/Cargo.toml b/primitives/beefy/Cargo.toml deleted file mode 100644 index f1992e59b..000000000 --- a/primitives/beefy/Cargo.toml +++ /dev/null @@ -1,47 +0,0 @@ -[package] -name = "bp-beefy" -description = "Primitives of pallet-bridge-beefy module." -version = "0.1.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true -publish = false - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["bit-vec", "derive"] } -scale-info = { version = "2.11.1", default-features = false, features = ["bit-vec", "derive"] } -serde = { default-features = false, features = ["alloc", "derive"], workspace = true } - -# Bridge Dependencies - -bp-runtime = { path = "../runtime", default-features = false } - -# Substrate Dependencies - -binary-merkle-tree = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-consensus-beefy = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -pallet-beefy-mmr = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -pallet-mmr = { git = "https://github.com/paritytech/polkadot-sdk", branch = 
"master", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[features] -default = ["std"] -std = [ - "binary-merkle-tree/std", - "bp-runtime/std", - "codec/std", - "frame-support/std", - "pallet-beefy-mmr/std", - "pallet-mmr/std", - "scale-info/std", - "serde/std", - "sp-consensus-beefy/std", - "sp-runtime/std", - "sp-std/std", -] diff --git a/primitives/beefy/src/lib.rs b/primitives/beefy/src/lib.rs deleted file mode 100644 index 0441781e7..000000000 --- a/primitives/beefy/src/lib.rs +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives that are used to interact with BEEFY bridge pallet. 
- -#![cfg_attr(not(feature = "std"), no_std)] -#![warn(missing_docs)] - -pub use binary_merkle_tree::merkle_root; -pub use pallet_beefy_mmr::BeefyEcdsaToEthereum; -pub use pallet_mmr::{ - primitives::{DataOrHash as MmrDataOrHash, Proof as MmrProof}, - verify_leaves_proof as verify_mmr_leaves_proof, -}; -pub use sp_consensus_beefy::{ - ecdsa_crypto::{ - AuthorityId as EcdsaValidatorId, AuthoritySignature as EcdsaValidatorSignature, - }, - known_payloads::MMR_ROOT_ID as MMR_ROOT_PAYLOAD_ID, - mmr::{BeefyAuthoritySet, MmrLeafVersion}, - BeefyAuthorityId, Commitment, Payload as BeefyPayload, SignedCommitment, ValidatorSet, - ValidatorSetId, BEEFY_ENGINE_ID, -}; - -use bp_runtime::{BasicOperatingMode, BlockNumberOf, Chain, HashOf}; -use codec::{Decode, Encode}; -use frame_support::Parameter; -use scale_info::TypeInfo; -use serde::{Deserialize, Serialize}; -use sp_runtime::{ - traits::{Convert, MaybeSerializeDeserialize}, - RuntimeAppPublic, RuntimeDebug, -}; -use sp_std::prelude::*; - -/// Substrate-based chain with BEEFY && MMR pallets deployed. -/// -/// Both BEEFY and MMR pallets and their clients may be configured to use different -/// primitives. Some of types can be configured in low-level pallets, but are constrained -/// when BEEFY+MMR bundle is used. -pub trait ChainWithBeefy: Chain { - /// The hashing algorithm used to compute the digest of the BEEFY commitment. - /// - /// Corresponds to the hashing algorithm, used by `sc_consensus_beefy::BeefyKeystore`. - type CommitmentHasher: sp_runtime::traits::Hash; - - /// The hashing algorithm used to build the MMR. - /// - /// The same algorithm is also used to compute merkle roots in BEEFY - /// (e.g. validator addresses root in leaf data). - /// - /// Corresponds to the `Hashing` field of the `pallet-mmr` configuration. - type MmrHashing: sp_runtime::traits::Hash; - - /// The output type of the hashing algorithm used to build the MMR. - /// - /// This type is actually stored in the MMR. 
- - /// Corresponds to the `Hash` field of the `pallet-mmr` configuration. - type MmrHash: sp_std::hash::Hash - + Parameter - + Copy - + AsRef<[u8]> - + Default - + MaybeSerializeDeserialize - + PartialOrd; - - /// The type expected for the MMR leaf extra data. - type BeefyMmrLeafExtra: Parameter; - - /// A way to identify a BEEFY validator. - /// - /// Corresponds to the `BeefyId` field of the `pallet-beefy` configuration. - type AuthorityId: BeefyAuthorityId + Parameter; - - /// A way to convert validator id to its raw representation in the BEEFY merkle tree. - /// - /// Corresponds to the `BeefyAuthorityToMerkleLeaf` field of the `pallet-beefy-mmr` - /// configuration. - type AuthorityIdToMerkleLeaf: Convert>; -} - -/// BEEFY validator id used by given Substrate chain. -pub type BeefyAuthorityIdOf = ::AuthorityId; -/// BEEFY validator set, containing both validator identifiers and the numeric set id. -pub type BeefyAuthoritySetOf = ValidatorSet>; -/// BEEFY authority set, containing both validator identifiers and the numeric set id. -pub type BeefyAuthoritySetInfoOf = sp_consensus_beefy::mmr::BeefyAuthoritySet>; -/// BEEFY validator signature used by given Substrate chain. -pub type BeefyValidatorSignatureOf = - <::AuthorityId as RuntimeAppPublic>::Signature; -/// Signed BEEFY commitment used by given Substrate chain. -pub type BeefySignedCommitmentOf = - SignedCommitment, BeefyValidatorSignatureOf>; -/// Hash algorithm, used to compute the digest of the BEEFY commitment before signing it. -pub type BeefyCommitmentHasher = ::CommitmentHasher; -/// Hash algorithm used in Beefy MMR construction by given Substrate chain. -pub type MmrHashingOf = ::MmrHashing; -/// Hash type, used in MMR construction by given Substrate chain. -pub type MmrHashOf = ::MmrHash; -/// BEEFY MMR proof type used by the given Substrate chain. -pub type MmrProofOf = MmrProof>; -/// The type of the MMR leaf extra data used by the given Substrate chain. 
-pub type BeefyMmrLeafExtraOf = ::BeefyMmrLeafExtra; -/// A way to convert a validator id to its raw representation in the BEEFY merkle tree, used by -/// the given Substrate chain. -pub type BeefyAuthorityIdToMerkleLeafOf = ::AuthorityIdToMerkleLeaf; -/// Actual type of leafs in the BEEFY MMR. -pub type BeefyMmrLeafOf = sp_consensus_beefy::mmr::MmrLeaf< - BlockNumberOf, - HashOf, - MmrHashOf, - BeefyMmrLeafExtraOf, ->; - -/// Data required for initializing the BEEFY pallet. -/// -/// Provides the initial context that the bridge needs in order to know -/// where to start the sync process from. -#[derive(Encode, Decode, RuntimeDebug, PartialEq, Clone, TypeInfo, Serialize, Deserialize)] -pub struct InitializationData { - /// Pallet operating mode. - pub operating_mode: BasicOperatingMode, - /// Number of the best block, finalized by BEEFY. - pub best_block_number: BlockNumber, - /// BEEFY authority set that will be finalizing descendants of the `best_beefy_block_number` - /// block. - pub authority_set: BeefyAuthoritySet, -} - -/// Basic data, stored by the pallet for every imported commitment. -#[derive(Encode, Decode, RuntimeDebug, PartialEq, TypeInfo)] -pub struct ImportedCommitment { - /// Block number and hash of the finalized block parent. - pub parent_number_and_hash: (BlockNumber, BlockHash), - /// MMR root at the imported block. - pub mmr_root: MmrHash, -} diff --git a/primitives/header-chain/Cargo.toml b/primitives/header-chain/Cargo.toml deleted file mode 100644 index f38d75454..000000000 --- a/primitives/header-chain/Cargo.toml +++ /dev/null @@ -1,49 +0,0 @@ -[package] -name = "bp-header-chain" -description = "A common interface for describing what a bridge pallet should be able to do." 
-version = "0.7.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } -finality-grandpa = { version = "0.16.2", default-features = false } -scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -serde = { features = ["alloc", "derive"], workspace = true } - -# Bridge dependencies - -bp-runtime = { path = "../runtime", default-features = false } - -# Substrate Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false, features = ["serde"] } -sp-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false, features = ["serde"] } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false, features = ["serde"] } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[dev-dependencies] -bp-test-utils = { path = "../test-utils" } -hex = "0.4" -hex-literal = "0.4" - -[features] -default = ["std"] -std = [ - "bp-runtime/std", - "codec/std", - "finality-grandpa/std", - "frame-support/std", - "scale-info/std", - "serde/std", - "sp-consensus-grandpa/std", - "sp-core/std", - "sp-runtime/std", - "sp-std/std", -] diff --git a/primitives/header-chain/src/justification/mod.rs b/primitives/header-chain/src/justification/mod.rs deleted file mode 100644 index d7c2cbf42..000000000 --- a/primitives/header-chain/src/justification/mod.rs +++ /dev/null @@ -1,132 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Logic for checking GRANDPA Finality Proofs. -//! -//! Adapted copy of substrate/client/finality-grandpa/src/justification.rs. If origin -//! will ever be moved to the sp_consensus_grandpa, we should reuse that implementation. - -mod verification; - -use crate::ChainWithGrandpa; -pub use verification::{ - equivocation::{EquivocationsCollector, GrandpaEquivocationsFinder}, - optimizer::verify_and_optimize_justification, - strict::verify_justification, - AncestryChain, Error as JustificationVerificationError, JustificationVerificationContext, - PrecommitError, -}; - -use bp_runtime::{BlockNumberOf, Chain, HashOf, HeaderId}; -use codec::{Decode, Encode, MaxEncodedLen}; -use frame_support::RuntimeDebugNoBound; -use scale_info::TypeInfo; -use sp_consensus_grandpa::{AuthorityId, AuthoritySignature}; -use sp_runtime::{traits::Header as HeaderT, RuntimeDebug, SaturatedConversion}; -use sp_std::prelude::*; - -/// A GRANDPA Justification is a proof that a given header was finalized -/// at a certain height and with a certain set of authorities. -/// -/// This particular proof is used to prove that headers on a bridged chain -/// (so not our chain) have been finalized correctly. 
-#[derive(Encode, Decode, Clone, PartialEq, Eq, TypeInfo, RuntimeDebugNoBound)] -pub struct GrandpaJustification { - /// The round (voting period) this justification is valid for. - pub round: u64, - /// The set of votes for the chain which is to be finalized. - pub commit: - finality_grandpa::Commit, - /// A proof that the chain of blocks in the commit are related to each other. - pub votes_ancestries: Vec
, -} - -impl GrandpaJustification { - /// Returns reasonable size of justification using constants from the provided chain. - /// - /// An imprecise analogue of `MaxEncodedLen` implementation. We don't use it for - /// any precise calculations - that's just an estimation. - pub fn max_reasonable_size(required_precommits: u32) -> u32 - where - C: Chain + ChainWithGrandpa, - { - // we don't need precise results here - just estimations, so some details - // are removed from computations (e.g. bytes required to encode vector length) - - // structures in `finality_grandpa` crate are not implementing `MaxEncodedLength`, so - // here's our estimation for the `finality_grandpa::Commit` struct size - // - // precommit is: hash + number - // signed precommit is: precommit + signature (64b) + authority id - // commit is: hash + number + vec of signed precommits - let signed_precommit_size: u32 = BlockNumberOf::::max_encoded_len() - .saturating_add(HashOf::::max_encoded_len().saturated_into()) - .saturating_add(64) - .saturating_add(AuthorityId::max_encoded_len().saturated_into()) - .saturated_into(); - let max_expected_signed_commit_size = signed_precommit_size - .saturating_mul(required_precommits) - .saturating_add(BlockNumberOf::::max_encoded_len().saturated_into()) - .saturating_add(HashOf::::max_encoded_len().saturated_into()); - - let max_expected_votes_ancestries_size = - C::REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY.saturating_mul(C::AVERAGE_HEADER_SIZE); - - // justification is round number (u64=8b), a signed GRANDPA commit and the - // `votes_ancestries` vector - 8u32.saturating_add(max_expected_signed_commit_size) - .saturating_add(max_expected_votes_ancestries_size) - } - - /// Return identifier of header that this justification claims to finalize. 
- pub fn commit_target_id(&self) -> HeaderId { - HeaderId(self.commit.target_number, self.commit.target_hash) - } -} - -impl crate::FinalityProof for GrandpaJustification { - fn target_header_hash(&self) -> H::Hash { - self.commit.target_hash - } - - fn target_header_number(&self) -> H::Number { - self.commit.target_number - } -} - -/// Justification verification error. -#[derive(Eq, RuntimeDebug, PartialEq)] -pub enum Error { - /// Failed to decode justification. - JustificationDecode, -} - -/// Given GRANDPA authorities set size, return number of valid authorities votes that the -/// justification must have to be valid. -/// -/// This function assumes that all authorities have the same vote weight. -pub fn required_justification_precommits(authorities_set_length: u32) -> u32 { - authorities_set_length - authorities_set_length.saturating_sub(1) / 3 -} - -/// Decode justification target. -pub fn decode_justification_target( - raw_justification: &[u8], -) -> Result<(Header::Hash, Header::Number), Error> { - GrandpaJustification::
::decode(&mut &*raw_justification) - .map(|justification| (justification.commit.target_hash, justification.commit.target_number)) - .map_err(|_| Error::JustificationDecode) -} diff --git a/primitives/header-chain/src/justification/verification/equivocation.rs b/primitives/header-chain/src/justification/verification/equivocation.rs deleted file mode 100644 index fbad30128..000000000 --- a/primitives/header-chain/src/justification/verification/equivocation.rs +++ /dev/null @@ -1,200 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Logic for extracting equivocations from multiple GRANDPA Finality Proofs. - -use crate::{ - justification::{ - verification::{ - Error as JustificationVerificationError, IterationFlow, - JustificationVerificationContext, JustificationVerifier, PrecommitError, - SignedPrecommit, - }, - GrandpaJustification, - }, - ChainWithGrandpa, FindEquivocations, -}; - -use bp_runtime::{BlockNumberOf, HashOf, HeaderOf}; -use sp_consensus_grandpa::{AuthorityId, AuthoritySignature, EquivocationProof, Precommit}; -use sp_runtime::traits::Header as HeaderT; -use sp_std::{ - collections::{btree_map::BTreeMap, btree_set::BTreeSet}, - prelude::*, -}; - -enum AuthorityVotes { - SingleVote(SignedPrecommit
), - Equivocation( - finality_grandpa::Equivocation, AuthoritySignature>, - ), -} - -/// Structure that can extract equivocations from multiple GRANDPA justifications. -pub struct EquivocationsCollector<'a, Header: HeaderT> { - round: u64, - context: &'a JustificationVerificationContext, - - votes: BTreeMap>, -} - -impl<'a, Header: HeaderT> EquivocationsCollector<'a, Header> { - /// Create a new instance of `EquivocationsCollector`. - pub fn new( - context: &'a JustificationVerificationContext, - base_justification: &GrandpaJustification
, - ) -> Result { - let mut checker = Self { round: base_justification.round, context, votes: BTreeMap::new() }; - - checker.verify_justification( - (base_justification.commit.target_hash, base_justification.commit.target_number), - checker.context, - base_justification, - )?; - - Ok(checker) - } - - /// Parse additional justifications for equivocations. - pub fn parse_justifications(&mut self, justifications: &[GrandpaJustification
]) { - let round = self.round; - for justification in - justifications.iter().filter(|justification| round == justification.round) - { - // We ignore the Errors received here since we don't care if the proofs are valid. - // We only care about collecting equivocations. - let _ = self.verify_justification( - (justification.commit.target_hash, justification.commit.target_number), - self.context, - justification, - ); - } - } - - /// Extract the equivocation proofs that have been collected. - pub fn into_equivocation_proofs(self) -> Vec> { - let mut equivocations = vec![]; - for (_authority, vote) in self.votes { - if let AuthorityVotes::Equivocation(equivocation) = vote { - equivocations.push(EquivocationProof::new( - self.context.authority_set_id, - sp_consensus_grandpa::Equivocation::Precommit(equivocation), - )); - } - } - - equivocations - } -} - -impl<'a, Header: HeaderT> JustificationVerifier
for EquivocationsCollector<'a, Header> { - fn process_duplicate_votes_ancestries( - &mut self, - _duplicate_votes_ancestries: Vec, - ) -> Result<(), JustificationVerificationError> { - Ok(()) - } - - fn process_redundant_vote( - &mut self, - _precommit_idx: usize, - ) -> Result { - Ok(IterationFlow::Run) - } - - fn process_known_authority_vote( - &mut self, - _precommit_idx: usize, - _signed: &SignedPrecommit
, - ) -> Result { - Ok(IterationFlow::Run) - } - - fn process_unknown_authority_vote( - &mut self, - _precommit_idx: usize, - ) -> Result<(), PrecommitError> { - Ok(()) - } - - fn process_unrelated_ancestry_vote( - &mut self, - _precommit_idx: usize, - ) -> Result { - Ok(IterationFlow::Run) - } - - fn process_invalid_signature_vote( - &mut self, - _precommit_idx: usize, - ) -> Result<(), PrecommitError> { - Ok(()) - } - - fn process_valid_vote(&mut self, signed: &SignedPrecommit
) { - match self.votes.get_mut(&signed.id) { - Some(vote) => match vote { - AuthorityVotes::SingleVote(first_vote) => { - if first_vote.precommit != signed.precommit { - *vote = AuthorityVotes::Equivocation(finality_grandpa::Equivocation { - round_number: self.round, - identity: signed.id.clone(), - first: (first_vote.precommit.clone(), first_vote.signature.clone()), - second: (signed.precommit.clone(), signed.signature.clone()), - }); - } - }, - AuthorityVotes::Equivocation(_) => {}, - }, - None => { - self.votes.insert(signed.id.clone(), AuthorityVotes::SingleVote(signed.clone())); - }, - } - } - - fn process_redundant_votes_ancestries( - &mut self, - _redundant_votes_ancestries: BTreeSet, - ) -> Result<(), JustificationVerificationError> { - Ok(()) - } -} - -/// Helper struct for finding equivocations in GRANDPA proofs. -pub struct GrandpaEquivocationsFinder(sp_std::marker::PhantomData); - -impl - FindEquivocations< - GrandpaJustification>, - JustificationVerificationContext, - EquivocationProof, BlockNumberOf>, - > for GrandpaEquivocationsFinder -{ - type Error = JustificationVerificationError; - - fn find_equivocations( - verification_context: &JustificationVerificationContext, - synced_proof: &GrandpaJustification>, - source_proofs: &[GrandpaJustification>], - ) -> Result, BlockNumberOf>>, Self::Error> { - let mut equivocations_collector = - EquivocationsCollector::new(verification_context, synced_proof)?; - - equivocations_collector.parse_justifications(source_proofs); - - Ok(equivocations_collector.into_equivocation_proofs()) - } -} diff --git a/primitives/header-chain/src/justification/verification/mod.rs b/primitives/header-chain/src/justification/verification/mod.rs deleted file mode 100644 index 9df3511e1..000000000 --- a/primitives/header-chain/src/justification/verification/mod.rs +++ /dev/null @@ -1,333 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Logic for checking GRANDPA Finality Proofs. - -pub mod equivocation; -pub mod optimizer; -pub mod strict; - -use crate::{justification::GrandpaJustification, AuthoritySet}; - -use bp_runtime::HeaderId; -use finality_grandpa::voter_set::VoterSet; -use sp_consensus_grandpa::{AuthorityId, AuthoritySignature, SetId}; -use sp_runtime::{traits::Header as HeaderT, RuntimeDebug}; -use sp_std::{ - collections::{ - btree_map::{ - BTreeMap, - Entry::{Occupied, Vacant}, - }, - btree_set::BTreeSet, - }, - prelude::*, -}; - -type SignedPrecommit
= finality_grandpa::SignedPrecommit< -
::Hash, -
::Number, - AuthoritySignature, - AuthorityId, ->; - -/// Votes ancestries with useful methods. -#[derive(RuntimeDebug)] -pub struct AncestryChain { - /// We expect all forks in the ancestry chain to be descendants of base. - base: HeaderId, - /// Header hash => parent header hash mapping. - parents: BTreeMap, - /// Hashes of headers that were not visited by `ancestry()`. - unvisited: BTreeSet, -} - -impl AncestryChain
{ - /// Creates a new instance of `AncestryChain` starting from a `GrandpaJustification`. - /// - /// Returns the `AncestryChain` and a `Vec` containing the `votes_ancestries` entries - /// that were ignored when creating it, because they are duplicates. - pub fn new( - justification: &GrandpaJustification
, - ) -> (AncestryChain
, Vec) { - let mut parents = BTreeMap::new(); - let mut unvisited = BTreeSet::new(); - let mut ignored_idxs = Vec::new(); - for (idx, ancestor) in justification.votes_ancestries.iter().enumerate() { - let hash = ancestor.hash(); - match parents.entry(hash) { - Occupied(_) => { - ignored_idxs.push(idx); - }, - Vacant(entry) => { - entry.insert(*ancestor.parent_hash()); - unvisited.insert(hash); - }, - } - } - (AncestryChain { base: justification.commit_target_id(), parents, unvisited }, ignored_idxs) - } - - /// Returns the hash of a block's parent if the block is present in the ancestry. - pub fn parent_hash_of(&self, hash: &Header::Hash) -> Option<&Header::Hash> { - self.parents.get(hash) - } - - /// Returns a route if the precommit target block is a descendant of the `base` block. - pub fn ancestry( - &self, - precommit_target_hash: &Header::Hash, - precommit_target_number: &Header::Number, - ) -> Option> { - if precommit_target_number < &self.base.number() { - return None - } - - let mut route = vec![]; - let mut current_hash = *precommit_target_hash; - loop { - if current_hash == self.base.hash() { - break - } - - current_hash = match self.parent_hash_of(¤t_hash) { - Some(parent_hash) => { - let is_visited_before = self.unvisited.get(¤t_hash).is_none(); - if is_visited_before { - // If the current header has been visited in a previous call, it is a - // descendent of `base` (we assume that the previous call was successful). - return Some(route) - } - route.push(current_hash); - - *parent_hash - }, - None => return None, - }; - } - - Some(route) - } - - fn mark_route_as_visited(&mut self, route: Vec) { - for hash in route { - self.unvisited.remove(&hash); - } - } - - fn is_fully_visited(&self) -> bool { - self.unvisited.is_empty() - } -} - -/// Justification verification error. -#[derive(Eq, RuntimeDebug, PartialEq)] -pub enum Error { - /// Could not convert `AuthorityList` to `VoterSet`. 
- InvalidAuthorityList, - /// Justification is finalizing unexpected header. - InvalidJustificationTarget, - /// The justification contains duplicate headers in its `votes_ancestries` field. - DuplicateVotesAncestries, - /// Error validating a precommit - Precommit(PrecommitError), - /// The cumulative weight of all votes in the justification is not enough to justify commit - /// header finalization. - TooLowCumulativeWeight, - /// The justification contains extra (unused) headers in its `votes_ancestries` field. - RedundantVotesAncestries, -} - -/// Justification verification error. -#[derive(Eq, RuntimeDebug, PartialEq)] -pub enum PrecommitError { - /// Justification contains redundant votes. - RedundantAuthorityVote, - /// Justification contains unknown authority precommit. - UnknownAuthorityVote, - /// Justification contains duplicate authority precommit. - DuplicateAuthorityVote, - /// The authority has provided an invalid signature. - InvalidAuthoritySignature, - /// The justification contains precommit for header that is not a descendant of the commit - /// header. - UnrelatedAncestryVote, -} - -/// The context needed for validating GRANDPA finality proofs. -#[derive(RuntimeDebug)] -pub struct JustificationVerificationContext { - /// The authority set used to verify the justification. - pub voter_set: VoterSet, - /// The ID of the authority set used to verify the justification. - pub authority_set_id: SetId, -} - -impl TryFrom for JustificationVerificationContext { - type Error = Error; - - fn try_from(authority_set: AuthoritySet) -> Result { - let voter_set = - VoterSet::new(authority_set.authorities).ok_or(Error::InvalidAuthorityList)?; - Ok(JustificationVerificationContext { voter_set, authority_set_id: authority_set.set_id }) - } -} - -enum IterationFlow { - Run, - Skip, -} - -/// Verification callbacks. -trait JustificationVerifier { - /// Called when there are duplicate headers in the votes ancestries. 
- fn process_duplicate_votes_ancestries( - &mut self, - duplicate_votes_ancestries: Vec, - ) -> Result<(), Error>; - - fn process_redundant_vote( - &mut self, - precommit_idx: usize, - ) -> Result; - - fn process_known_authority_vote( - &mut self, - precommit_idx: usize, - signed: &SignedPrecommit
, - ) -> Result; - - fn process_unknown_authority_vote( - &mut self, - precommit_idx: usize, - ) -> Result<(), PrecommitError>; - - fn process_unrelated_ancestry_vote( - &mut self, - precommit_idx: usize, - ) -> Result; - - fn process_invalid_signature_vote( - &mut self, - precommit_idx: usize, - ) -> Result<(), PrecommitError>; - - fn process_valid_vote(&mut self, signed: &SignedPrecommit
); - - /// Called when there are redundant headers in the votes ancestries. - fn process_redundant_votes_ancestries( - &mut self, - redundant_votes_ancestries: BTreeSet, - ) -> Result<(), Error>; - - fn verify_justification( - &mut self, - finalized_target: (Header::Hash, Header::Number), - context: &JustificationVerificationContext, - justification: &GrandpaJustification
, - ) -> Result<(), Error> { - // ensure that it is justification for the expected header - if (justification.commit.target_hash, justification.commit.target_number) != - finalized_target - { - return Err(Error::InvalidJustificationTarget) - } - - let threshold = context.voter_set.threshold().get(); - let (mut chain, ignored_idxs) = AncestryChain::new(justification); - let mut signature_buffer = Vec::new(); - let mut cumulative_weight = 0u64; - - if !ignored_idxs.is_empty() { - self.process_duplicate_votes_ancestries(ignored_idxs)?; - } - - for (precommit_idx, signed) in justification.commit.precommits.iter().enumerate() { - if cumulative_weight >= threshold { - let action = - self.process_redundant_vote(precommit_idx).map_err(Error::Precommit)?; - if matches!(action, IterationFlow::Skip) { - continue - } - } - - // authority must be in the set - let authority_info = match context.voter_set.get(&signed.id) { - Some(authority_info) => { - // The implementer may want to do extra checks here. - // For example to see if the authority has already voted in the same round. 
- let action = self - .process_known_authority_vote(precommit_idx, signed) - .map_err(Error::Precommit)?; - if matches!(action, IterationFlow::Skip) { - continue - } - - authority_info - }, - None => { - self.process_unknown_authority_vote(precommit_idx).map_err(Error::Precommit)?; - continue - }, - }; - - // all precommits must be descendants of the target block - let maybe_route = - chain.ancestry(&signed.precommit.target_hash, &signed.precommit.target_number); - if maybe_route.is_none() { - let action = self - .process_unrelated_ancestry_vote(precommit_idx) - .map_err(Error::Precommit)?; - if matches!(action, IterationFlow::Skip) { - continue - } - } - - // verify authority signature - if !sp_consensus_grandpa::check_message_signature_with_buffer( - &finality_grandpa::Message::Precommit(signed.precommit.clone()), - &signed.id, - &signed.signature, - justification.round, - context.authority_set_id, - &mut signature_buffer, - ) { - self.process_invalid_signature_vote(precommit_idx).map_err(Error::Precommit)?; - continue - } - - // now we can count the vote since we know that it is valid - self.process_valid_vote(signed); - if let Some(route) = maybe_route { - chain.mark_route_as_visited(route); - cumulative_weight = cumulative_weight.saturating_add(authority_info.weight().get()); - } - } - - // check that the cumulative weight of validators that voted for the justification target - // (or one of its descendants) is larger than the required threshold. 
- if cumulative_weight < threshold { - return Err(Error::TooLowCumulativeWeight) - } - - // check that there are no extra headers in the justification - if !chain.is_fully_visited() { - self.process_redundant_votes_ancestries(chain.unvisited)?; - } - - Ok(()) - } -} diff --git a/primitives/header-chain/src/justification/verification/optimizer.rs b/primitives/header-chain/src/justification/verification/optimizer.rs deleted file mode 100644 index 3f1e6ab67..000000000 --- a/primitives/header-chain/src/justification/verification/optimizer.rs +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Logic for optimizing GRANDPA Finality Proofs. - -use crate::justification::{ - verification::{Error, JustificationVerifier, PrecommitError}, - GrandpaJustification, -}; - -use crate::justification::verification::{ - IterationFlow, JustificationVerificationContext, SignedPrecommit, -}; -use sp_consensus_grandpa::AuthorityId; -use sp_runtime::traits::Header as HeaderT; -use sp_std::{collections::btree_set::BTreeSet, prelude::*}; - -// Verification callbacks for justification optimization. 
-struct JustificationOptimizer { - votes: BTreeSet, - - extra_precommits: Vec, - duplicate_votes_ancestries_idxs: Vec, - redundant_votes_ancestries: BTreeSet, -} - -impl JustificationOptimizer
{ - fn optimize(self, justification: &mut GrandpaJustification
) { - for invalid_precommit_idx in self.extra_precommits.into_iter().rev() { - justification.commit.precommits.remove(invalid_precommit_idx); - } - if !self.duplicate_votes_ancestries_idxs.is_empty() { - for idx in self.duplicate_votes_ancestries_idxs.iter().rev() { - justification.votes_ancestries.swap_remove(*idx); - } - } - if !self.redundant_votes_ancestries.is_empty() { - justification - .votes_ancestries - .retain(|header| !self.redundant_votes_ancestries.contains(&header.hash())) - } - } -} - -impl JustificationVerifier
for JustificationOptimizer
{ - fn process_duplicate_votes_ancestries( - &mut self, - duplicate_votes_ancestries: Vec, - ) -> Result<(), Error> { - self.duplicate_votes_ancestries_idxs = duplicate_votes_ancestries.to_vec(); - Ok(()) - } - - fn process_redundant_vote( - &mut self, - precommit_idx: usize, - ) -> Result { - self.extra_precommits.push(precommit_idx); - Ok(IterationFlow::Skip) - } - - fn process_known_authority_vote( - &mut self, - precommit_idx: usize, - signed: &SignedPrecommit
, - ) -> Result { - // Skip duplicate votes - if self.votes.contains(&signed.id) { - self.extra_precommits.push(precommit_idx); - return Ok(IterationFlow::Skip) - } - - Ok(IterationFlow::Run) - } - - fn process_unknown_authority_vote( - &mut self, - precommit_idx: usize, - ) -> Result<(), PrecommitError> { - self.extra_precommits.push(precommit_idx); - Ok(()) - } - - fn process_unrelated_ancestry_vote( - &mut self, - precommit_idx: usize, - ) -> Result { - self.extra_precommits.push(precommit_idx); - Ok(IterationFlow::Skip) - } - - fn process_invalid_signature_vote( - &mut self, - precommit_idx: usize, - ) -> Result<(), PrecommitError> { - self.extra_precommits.push(precommit_idx); - Ok(()) - } - - fn process_valid_vote(&mut self, signed: &SignedPrecommit
) { - self.votes.insert(signed.id.clone()); - } - - fn process_redundant_votes_ancestries( - &mut self, - redundant_votes_ancestries: BTreeSet, - ) -> Result<(), Error> { - self.redundant_votes_ancestries = redundant_votes_ancestries; - Ok(()) - } -} - -/// Verify and optimize given justification by removing unknown and duplicate votes. -pub fn verify_and_optimize_justification( - finalized_target: (Header::Hash, Header::Number), - context: &JustificationVerificationContext, - justification: &mut GrandpaJustification
, -) -> Result<(), Error> { - let mut optimizer = JustificationOptimizer { - votes: BTreeSet::new(), - extra_precommits: vec![], - duplicate_votes_ancestries_idxs: vec![], - redundant_votes_ancestries: Default::default(), - }; - optimizer.verify_justification(finalized_target, context, justification)?; - optimizer.optimize(justification); - - Ok(()) -} diff --git a/primitives/header-chain/src/justification/verification/strict.rs b/primitives/header-chain/src/justification/verification/strict.rs deleted file mode 100644 index 858cf517a..000000000 --- a/primitives/header-chain/src/justification/verification/strict.rs +++ /dev/null @@ -1,108 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Logic for checking if GRANDPA Finality Proofs are valid and optimal. - -use crate::justification::{ - verification::{Error, JustificationVerifier, PrecommitError}, - GrandpaJustification, -}; - -use crate::justification::verification::{ - IterationFlow, JustificationVerificationContext, SignedPrecommit, -}; -use sp_consensus_grandpa::AuthorityId; -use sp_runtime::traits::Header as HeaderT; -use sp_std::{collections::btree_set::BTreeSet, vec::Vec}; - -/// Verification callbacks that reject all unknown, duplicate or redundant votes. 
-struct StrictJustificationVerifier { - votes: BTreeSet, -} - -impl JustificationVerifier
for StrictJustificationVerifier { - fn process_duplicate_votes_ancestries( - &mut self, - _duplicate_votes_ancestries: Vec, - ) -> Result<(), Error> { - Err(Error::DuplicateVotesAncestries) - } - - fn process_redundant_vote( - &mut self, - _precommit_idx: usize, - ) -> Result { - Err(PrecommitError::RedundantAuthorityVote) - } - - fn process_known_authority_vote( - &mut self, - _precommit_idx: usize, - signed: &SignedPrecommit
, - ) -> Result { - if self.votes.contains(&signed.id) { - // There's a lot of code in `validate_commit` and `import_precommit` functions - // inside `finality-grandpa` crate (mostly related to reporting equivocations). - // But the only thing that we care about is that only first vote from the - // authority is accepted - return Err(PrecommitError::DuplicateAuthorityVote) - } - - Ok(IterationFlow::Run) - } - - fn process_unknown_authority_vote( - &mut self, - _precommit_idx: usize, - ) -> Result<(), PrecommitError> { - Err(PrecommitError::UnknownAuthorityVote) - } - - fn process_unrelated_ancestry_vote( - &mut self, - _precommit_idx: usize, - ) -> Result { - Err(PrecommitError::UnrelatedAncestryVote) - } - - fn process_invalid_signature_vote( - &mut self, - _precommit_idx: usize, - ) -> Result<(), PrecommitError> { - Err(PrecommitError::InvalidAuthoritySignature) - } - - fn process_valid_vote(&mut self, signed: &SignedPrecommit
) { - self.votes.insert(signed.id.clone()); - } - - fn process_redundant_votes_ancestries( - &mut self, - _redundant_votes_ancestries: BTreeSet, - ) -> Result<(), Error> { - Err(Error::RedundantVotesAncestries) - } -} - -/// Verify that justification, that is generated by given authority set, finalizes given header. -pub fn verify_justification( - finalized_target: (Header::Hash, Header::Number), - context: &JustificationVerificationContext, - justification: &GrandpaJustification
, -) -> Result<(), Error> { - let mut verifier = StrictJustificationVerifier { votes: BTreeSet::new() }; - verifier.verify_justification(finalized_target, context, justification) -} diff --git a/primitives/header-chain/src/lib.rs b/primitives/header-chain/src/lib.rs deleted file mode 100644 index 98fb9ff83..000000000 --- a/primitives/header-chain/src/lib.rs +++ /dev/null @@ -1,388 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Defines traits which represent a common interface for Substrate pallets which want to -//! incorporate bridge functionality. 
- -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -use crate::justification::{ - GrandpaJustification, JustificationVerificationContext, JustificationVerificationError, -}; -use bp_runtime::{ - BasicOperatingMode, Chain, HashOf, HasherOf, HeaderOf, RawStorageProof, StorageProofChecker, - StorageProofError, UnderlyingChainProvider, -}; -use codec::{Codec, Decode, Encode, EncodeLike, MaxEncodedLen}; -use core::{clone::Clone, cmp::Eq, default::Default, fmt::Debug}; -use frame_support::PalletError; -use scale_info::TypeInfo; -use serde::{Deserialize, Serialize}; -use sp_consensus_grandpa::{AuthorityList, ConsensusLog, SetId, GRANDPA_ENGINE_ID}; -use sp_runtime::{traits::Header as HeaderT, Digest, RuntimeDebug}; -use sp_std::{boxed::Box, vec::Vec}; - -pub mod justification; -pub mod storage_keys; - -/// Header chain error. -#[derive(Clone, Decode, Encode, Eq, PartialEq, PalletError, Debug, TypeInfo)] -pub enum HeaderChainError { - /// Header with given hash is missing from the chain. - UnknownHeader, - /// Storage proof related error. - StorageProof(StorageProofError), -} - -/// Header data that we're storing on-chain. -/// -/// Even though we may store full header, our applications (XCM) only use couple of header -/// fields. Extracting those values makes on-chain storage and PoV smaller, which is good. -#[derive(Clone, Decode, Encode, Eq, MaxEncodedLen, PartialEq, RuntimeDebug, TypeInfo)] -pub struct StoredHeaderData { - /// Header number. - pub number: Number, - /// Header state root. - pub state_root: Hash, -} - -/// Stored header data builder. -pub trait StoredHeaderDataBuilder { - /// Build header data from self. - fn build(&self) -> StoredHeaderData; -} - -impl StoredHeaderDataBuilder for H { - fn build(&self) -> StoredHeaderData { - StoredHeaderData { number: *self.number(), state_root: *self.state_root() } - } -} - -/// Substrate header chain, abstracted from the way it is stored. 
-pub trait HeaderChain { - /// Returns state (storage) root of given finalized header. - fn finalized_header_state_root(header_hash: HashOf) -> Option>; - /// Get storage proof checker using finalized header. - fn storage_proof_checker( - header_hash: HashOf, - storage_proof: RawStorageProof, - ) -> Result>, HeaderChainError> { - let state_root = Self::finalized_header_state_root(header_hash) - .ok_or(HeaderChainError::UnknownHeader)?; - StorageProofChecker::new(state_root, storage_proof).map_err(HeaderChainError::StorageProof) - } -} - -/// A type that can be used as a parameter in a dispatchable function. -/// -/// When using `decl_module` all arguments for call functions must implement this trait. -pub trait Parameter: Codec + EncodeLike + Clone + Eq + Debug + TypeInfo {} -impl Parameter for T where T: Codec + EncodeLike + Clone + Eq + Debug + TypeInfo {} - -/// A GRANDPA Authority List and ID. -#[derive(Default, Encode, Eq, Decode, RuntimeDebug, PartialEq, Clone, TypeInfo)] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -pub struct AuthoritySet { - /// List of GRANDPA authorities for the current round. - pub authorities: AuthorityList, - /// Monotonic identifier of the current GRANDPA authority set. - pub set_id: SetId, -} - -impl AuthoritySet { - /// Create a new GRANDPA Authority Set. - pub fn new(authorities: AuthorityList, set_id: SetId) -> Self { - Self { authorities, set_id } - } -} - -/// Data required for initializing the GRANDPA bridge pallet. -/// -/// The bridge needs to know where to start its sync from, and this provides that initial context. -#[derive( - Default, Encode, Decode, RuntimeDebug, PartialEq, Eq, Clone, TypeInfo, Serialize, Deserialize, -)] -pub struct InitializationData { - /// The header from which we should start syncing. - pub header: Box, - /// The initial authorities of the pallet. - pub authority_list: AuthorityList, - /// The ID of the initial authority set. - pub set_id: SetId, - /// Pallet operating mode. 
- pub operating_mode: BasicOperatingMode, -} - -/// Abstract finality proof that is justifying block finality. -pub trait FinalityProof: Clone + Send + Sync + Debug { - /// Return hash of header that this proof is generated for. - fn target_header_hash(&self) -> Hash; - - /// Return number of header that this proof is generated for. - fn target_header_number(&self) -> Number; -} - -/// A trait that provides helper methods for querying the consensus log. -pub trait ConsensusLogReader { - /// Returns true if digest contains item that schedules authorities set change. - fn schedules_authorities_change(digest: &Digest) -> bool; -} - -/// A struct that provides helper methods for querying the GRANDPA consensus log. -pub struct GrandpaConsensusLogReader(sp_std::marker::PhantomData); - -impl GrandpaConsensusLogReader { - /// Find and return scheduled (regular) change digest item. - pub fn find_scheduled_change( - digest: &Digest, - ) -> Option> { - // find the first consensus digest with the right ID which converts to - // the right kind of consensus log. - digest - .convert_first(|log| log.consensus_try_to(&GRANDPA_ENGINE_ID)) - .and_then(|log| match log { - ConsensusLog::ScheduledChange(change) => Some(change), - _ => None, - }) - } - - /// Find and return forced change digest item. Or light client can't do anything - /// with forced changes, so we can't accept header with the forced change digest. - pub fn find_forced_change( - digest: &Digest, - ) -> Option<(Number, sp_consensus_grandpa::ScheduledChange)> { - // find the first consensus digest with the right ID which converts to - // the right kind of consensus log. 
- digest - .convert_first(|log| log.consensus_try_to(&GRANDPA_ENGINE_ID)) - .and_then(|log| match log { - ConsensusLog::ForcedChange(delay, change) => Some((delay, change)), - _ => None, - }) - } -} - -impl ConsensusLogReader for GrandpaConsensusLogReader { - fn schedules_authorities_change(digest: &Digest) -> bool { - GrandpaConsensusLogReader::::find_scheduled_change(digest).is_some() - } -} - -/// The finality-related info associated to a header. -#[derive(Encode, Decode, Debug, PartialEq, Clone, TypeInfo)] -pub struct HeaderFinalityInfo { - /// The header finality proof. - pub finality_proof: FinalityProof, - /// The new verification context introduced by the header. - pub new_verification_context: Option, -} - -/// Grandpa-related info associated to a header. This info can be saved to events. -pub type StoredHeaderGrandpaInfo
= - HeaderFinalityInfo, AuthoritySet>; - -/// Processed Grandpa-related info associated to a header. -pub type HeaderGrandpaInfo
= - HeaderFinalityInfo, JustificationVerificationContext>; - -impl TryFrom> for HeaderGrandpaInfo
{ - type Error = JustificationVerificationError; - - fn try_from(grandpa_info: StoredHeaderGrandpaInfo
) -> Result { - Ok(Self { - finality_proof: grandpa_info.finality_proof, - new_verification_context: match grandpa_info.new_verification_context { - Some(authority_set) => Some(authority_set.try_into()?), - None => None, - }, - }) - } -} - -/// Helper trait for finding equivocations in finality proofs. -pub trait FindEquivocations { - /// The type returned when encountering an error while looking for equivocations. - type Error: Debug; - - /// Find equivocations. - fn find_equivocations( - verification_context: &FinalityVerificationContext, - synced_proof: &FinalityProof, - source_proofs: &[FinalityProof], - ) -> Result, Self::Error>; -} - -/// A minimized version of `pallet-bridge-grandpa::Call` that can be used without a runtime. -#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, TypeInfo)] -#[allow(non_camel_case_types)] -pub enum BridgeGrandpaCall { - /// `pallet-bridge-grandpa::Call::submit_finality_proof` - #[codec(index = 0)] - submit_finality_proof { - /// The header that we are going to finalize. - finality_target: Box
, - /// Finality justification for the `finality_target`. - justification: justification::GrandpaJustification
, - }, - /// `pallet-bridge-grandpa::Call::initialize` - #[codec(index = 1)] - initialize { - /// All data, required to initialize the pallet. - init_data: InitializationData
, - }, - /// `pallet-bridge-grandpa::Call::submit_finality_proof_ex` - #[codec(index = 4)] - submit_finality_proof_ex { - /// The header that we are going to finalize. - finality_target: Box
, - /// Finality justification for the `finality_target`. - justification: justification::GrandpaJustification
, - /// An identifier of the validators set, that have signed the justification. - current_set_id: SetId, - }, -} - -/// The `BridgeGrandpaCall` used by a chain. -pub type BridgeGrandpaCallOf = BridgeGrandpaCall>; - -/// Substrate-based chain that is using direct GRANDPA finality. -/// -/// Keep in mind that parachains are relying on relay chain GRANDPA, so they should not implement -/// this trait. -pub trait ChainWithGrandpa: Chain { - /// Name of the bridge GRANDPA pallet (used in `construct_runtime` macro call) that is deployed - /// at some other chain to bridge with this `ChainWithGrandpa`. - /// - /// We assume that all chains that are bridging with this `ChainWithGrandpa` are using - /// the same name. - const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str; - - /// Max number of GRANDPA authorities at the chain. - /// - /// This is a strict constant. If bridged chain will have more authorities than that, - /// the GRANDPA bridge pallet may halt. - const MAX_AUTHORITIES_COUNT: u32; - - /// Max reasonable number of headers in `votes_ancestries` vector of the GRANDPA justification. - /// - /// This isn't a strict limit. The relay may submit justifications with more headers in its - /// ancestry and the pallet will accept such justification. The limit is only used to compute - /// maximal refund amount and submitting justifications which exceed the limit, may be costly - /// to submitter. - const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32; - - /// Maximal size of the mandatory chain header. Mandatory header is the header that enacts new - /// GRANDPA authorities set (so it has large digest inside). - /// - /// This isn't a strict limit. The relay may submit larger headers and the pallet will accept - /// the call. The limit is only used to compute maximal refund amount and doing calls which - /// exceed the limit, may be costly to submitter. - const MAX_MANDATORY_HEADER_SIZE: u32; - - /// Average size of the chain header. 
We don't expect to see there headers that change GRANDPA - /// authorities set (GRANDPA will probably be able to finalize at least one additional header - /// per session on non test chains), so this is average size of headers that aren't changing the - /// set. - /// - /// This isn't a strict limit. The relay may submit justifications with larger headers and the - /// pallet will accept the call. However, if the total size of all `submit_finality_proof` - /// arguments exceeds the maximal size, computed using this average size, relayer will only get - /// partial refund. - /// - /// We expect some headers on production chains that are above this size. But they are rare and - /// if rellayer cares about its profitability, we expect it'll select other headers for - /// submission. - const AVERAGE_HEADER_SIZE: u32; -} - -impl ChainWithGrandpa for T -where - T: Chain + UnderlyingChainProvider, - T::Chain: ChainWithGrandpa, -{ - const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = - ::WITH_CHAIN_GRANDPA_PALLET_NAME; - const MAX_AUTHORITIES_COUNT: u32 = ::MAX_AUTHORITIES_COUNT; - const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = - ::REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY; - const MAX_MANDATORY_HEADER_SIZE: u32 = - ::MAX_MANDATORY_HEADER_SIZE; - const AVERAGE_HEADER_SIZE: u32 = ::AVERAGE_HEADER_SIZE; -} - -/// Returns maximal expected size of `submit_finality_proof` call arguments. 
-pub fn max_expected_submit_finality_proof_arguments_size( - is_mandatory_finality_target: bool, - precommits: u32, -) -> u32 { - let max_expected_justification_size = - GrandpaJustification::>::max_reasonable_size::(precommits); - - // call arguments are header and justification - let max_expected_finality_target_size = if is_mandatory_finality_target { - C::MAX_MANDATORY_HEADER_SIZE - } else { - C::AVERAGE_HEADER_SIZE - }; - max_expected_finality_target_size.saturating_add(max_expected_justification_size) -} - -#[cfg(test)] -mod tests { - use super::*; - use bp_runtime::ChainId; - use frame_support::weights::Weight; - use sp_runtime::{testing::H256, traits::BlakeTwo256, MultiSignature}; - - struct TestChain; - - impl Chain for TestChain { - const ID: ChainId = *b"test"; - - type BlockNumber = u32; - type Hash = H256; - type Hasher = BlakeTwo256; - type Header = sp_runtime::generic::Header; - type AccountId = u64; - type Balance = u64; - type Nonce = u64; - type Signature = MultiSignature; - - fn max_extrinsic_size() -> u32 { - 0 - } - fn max_extrinsic_weight() -> Weight { - Weight::zero() - } - } - - impl ChainWithGrandpa for TestChain { - const WITH_CHAIN_GRANDPA_PALLET_NAME: &'static str = "Test"; - const MAX_AUTHORITIES_COUNT: u32 = 128; - const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = 2; - const MAX_MANDATORY_HEADER_SIZE: u32 = 100_000; - const AVERAGE_HEADER_SIZE: u32 = 1_024; - } - - #[test] - fn max_expected_submit_finality_proof_arguments_size_respects_mandatory_argument() { - assert!( - max_expected_submit_finality_proof_arguments_size::(true, 100) > - max_expected_submit_finality_proof_arguments_size::(false, 100), - ); - } -} diff --git a/primitives/header-chain/src/storage_keys.rs b/primitives/header-chain/src/storage_keys.rs deleted file mode 100644 index 55d095afb..000000000 --- a/primitives/header-chain/src/storage_keys.rs +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. 
-// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Storage keys of bridge GRANDPA pallet. - -/// Name of the `IsHalted` storage value. -pub const PALLET_OPERATING_MODE_VALUE_NAME: &str = "PalletOperatingMode"; -/// Name of the `BestFinalized` storage value. -pub const BEST_FINALIZED_VALUE_NAME: &str = "BestFinalized"; -/// Name of the `CurrentAuthoritySet` storage value. -pub const CURRENT_AUTHORITY_SET_VALUE_NAME: &str = "CurrentAuthoritySet"; - -use sp_core::storage::StorageKey; - -/// Storage key of the `PalletOperatingMode` variable in the runtime storage. -pub fn pallet_operating_mode_key(pallet_prefix: &str) -> StorageKey { - StorageKey( - bp_runtime::storage_value_final_key( - pallet_prefix.as_bytes(), - PALLET_OPERATING_MODE_VALUE_NAME.as_bytes(), - ) - .to_vec(), - ) -} - -/// Storage key of the `CurrentAuthoritySet` variable in the runtime storage. -pub fn current_authority_set_key(pallet_prefix: &str) -> StorageKey { - StorageKey( - bp_runtime::storage_value_final_key( - pallet_prefix.as_bytes(), - CURRENT_AUTHORITY_SET_VALUE_NAME.as_bytes(), - ) - .to_vec(), - ) -} - -/// Storage key of the best finalized header number and hash value in the runtime storage. 
-pub fn best_finalized_key(pallet_prefix: &str) -> StorageKey { - StorageKey( - bp_runtime::storage_value_final_key( - pallet_prefix.as_bytes(), - BEST_FINALIZED_VALUE_NAME.as_bytes(), - ) - .to_vec(), - ) -} - -#[cfg(test)] -mod tests { - use super::*; - use hex_literal::hex; - - #[test] - fn pallet_operating_mode_key_computed_properly() { - // If this test fails, then something has been changed in module storage that is breaking - // compatibility with previous pallet. - let storage_key = pallet_operating_mode_key("BridgeGrandpa").0; - assert_eq!( - storage_key, - hex!("0b06f475eddb98cf933a12262e0388de0f4cf0917788d791142ff6c1f216e7b3").to_vec(), - "Unexpected storage key: {}", - hex::encode(&storage_key), - ); - } - - #[test] - fn current_authority_set_key_computed_properly() { - // If this test fails, then something has been changed in module storage that is breaking - // compatibility with previous pallet. - let storage_key = current_authority_set_key("BridgeGrandpa").0; - assert_eq!( - storage_key, - hex!("0b06f475eddb98cf933a12262e0388de24a7b8b5717ea33346fa595a66ccbcb0").to_vec(), - "Unexpected storage key: {}", - hex::encode(&storage_key), - ); - } - - #[test] - fn best_finalized_key_computed_properly() { - // If this test fails, then something has been changed in module storage that is breaking - // compatibility with previous pallet. - let storage_key = best_finalized_key("BridgeGrandpa").0; - assert_eq!( - storage_key, - hex!("0b06f475eddb98cf933a12262e0388dea4ebafdd473c549fdb24c5c991c5591c").to_vec(), - "Unexpected storage key: {}", - hex::encode(&storage_key), - ); - } -} diff --git a/primitives/header-chain/tests/implementation_match.rs b/primitives/header-chain/tests/implementation_match.rs deleted file mode 100644 index 1f61f91ff..000000000 --- a/primitives/header-chain/tests/implementation_match.rs +++ /dev/null @@ -1,411 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Tests inside this module are made to ensure that our custom justification verification -//! implementation works similar to the [`finality_grandpa::validate_commit`] and explicitly -//! show where we behave different. -//! -//! Some of tests in this module may partially duplicate tests from `justification.rs`, -//! but their purpose is different. - -use bp_header_chain::justification::{ - verify_justification, GrandpaJustification, JustificationVerificationContext, - JustificationVerificationError, PrecommitError, -}; -use bp_test_utils::{ - header_id, make_justification_for_header, signed_precommit, test_header, Account, - JustificationGeneratorParams, ALICE, BOB, CHARLIE, DAVE, EVE, FERDIE, TEST_GRANDPA_SET_ID, -}; -use finality_grandpa::voter_set::VoterSet; -use sp_consensus_grandpa::{AuthorityId, AuthorityWeight, SetId}; -use sp_runtime::traits::Header as HeaderT; - -type TestHeader = sp_runtime::testing::Header; -type TestHash = ::Hash; -type TestNumber = ::Number; - -/// Implementation of `finality_grandpa::Chain` that is used in tests. 
-struct AncestryChain(bp_header_chain::justification::AncestryChain); - -impl AncestryChain { - fn new(justification: &GrandpaJustification) -> Self { - Self(bp_header_chain::justification::AncestryChain::new(justification).0) - } -} - -impl finality_grandpa::Chain for AncestryChain { - fn ancestry( - &self, - base: TestHash, - block: TestHash, - ) -> Result, finality_grandpa::Error> { - let mut route = Vec::new(); - let mut current_hash = block; - loop { - if current_hash == base { - break - } - match self.0.parent_hash_of(¤t_hash) { - Some(parent_hash) => { - current_hash = *parent_hash; - route.push(current_hash); - }, - _ => return Err(finality_grandpa::Error::NotDescendent), - } - } - route.pop(); // remove the base - - Ok(route) - } -} - -/// Get a full set of accounts. -fn full_accounts_set() -> Vec<(Account, AuthorityWeight)> { - vec![(ALICE, 1), (BOB, 1), (CHARLIE, 1), (DAVE, 1), (EVE, 1)] -} - -/// Get a full set of GRANDPA authorities. -fn full_voter_set() -> VoterSet { - VoterSet::new(full_accounts_set().iter().map(|(id, w)| (AuthorityId::from(*id), *w))).unwrap() -} - -pub fn full_verification_context(set_id: SetId) -> JustificationVerificationContext { - let voter_set = full_voter_set(); - JustificationVerificationContext { voter_set, authority_set_id: set_id } -} - -/// Get a minimal set of accounts. -fn minimal_accounts_set() -> Vec<(Account, AuthorityWeight)> { - // there are 5 accounts in the full set => we need 2/3 + 1 accounts, which results in 4 accounts - vec![(ALICE, 1), (BOB, 1), (CHARLIE, 1), (DAVE, 1)] -} - -/// Make a valid GRANDPA justification with sensible defaults. 
-pub fn make_default_justification(header: &TestHeader) -> GrandpaJustification { - make_justification_for_header(JustificationGeneratorParams { - header: header.clone(), - authorities: minimal_accounts_set(), - ..Default::default() - }) -} - -// the `finality_grandpa::validate_commit` function has two ways to report an unsuccessful -// commit validation: -// -// 1) to return `Err()` (which only may happen if `finality_grandpa::Chain` implementation returns -// an error); -// 2) to return `Ok(validation_result)` if `validation_result.is_valid()` is false. -// -// Our implementation would just return error in both cases. - -#[test] -fn same_result_when_precommit_target_has_lower_number_than_commit_target() { - let mut justification = make_default_justification(&test_header(1)); - // the number of header in precommit (0) is lower than number of header in commit (1) - justification.commit.precommits[0].precommit.target_number = 0; - - // our implementation returns an error - assert_eq!( - verify_justification::( - header_id::(1), - &full_verification_context(TEST_GRANDPA_SET_ID), - &justification, - ), - Err(JustificationVerificationError::Precommit(PrecommitError::UnrelatedAncestryVote)), - ); - - // original implementation returns `Ok(validation_result)` - // with `validation_result.is_valid() == false`. 
- let result = finality_grandpa::validate_commit( - &justification.commit, - &full_voter_set(), - &AncestryChain::new(&justification), - ) - .unwrap(); - - assert!(!result.is_valid()); -} - -#[test] -fn same_result_when_precommit_target_is_not_descendant_of_commit_target() { - let not_descendant = test_header::(10); - let mut justification = make_default_justification(&test_header(1)); - // the route from header of commit (1) to header of precommit (10) is missing from - // the votes ancestries - justification.commit.precommits[0].precommit.target_number = *not_descendant.number(); - justification.commit.precommits[0].precommit.target_hash = not_descendant.hash(); - justification.votes_ancestries.push(not_descendant); - - // our implementation returns an error - assert_eq!( - verify_justification::( - header_id::(1), - &full_verification_context(TEST_GRANDPA_SET_ID), - &justification, - ), - Err(JustificationVerificationError::Precommit(PrecommitError::UnrelatedAncestryVote)), - ); - - // original implementation returns `Ok(validation_result)` - // with `validation_result.is_valid() == false`. 
- let result = finality_grandpa::validate_commit( - &justification.commit, - &full_voter_set(), - &AncestryChain::new(&justification), - ) - .unwrap(); - - assert!(!result.is_valid()); -} - -#[test] -fn same_result_when_there_are_not_enough_cumulative_weight_to_finalize_commit_target() { - // just remove one authority from the minimal set and we shall not reach the threshold - let mut authorities_set = minimal_accounts_set(); - authorities_set.pop(); - let justification = make_justification_for_header(JustificationGeneratorParams { - header: test_header(1), - authorities: authorities_set, - ..Default::default() - }); - - // our implementation returns an error - assert_eq!( - verify_justification::( - header_id::(1), - &full_verification_context(TEST_GRANDPA_SET_ID), - &justification, - ), - Err(JustificationVerificationError::TooLowCumulativeWeight), - ); - // original implementation returns `Ok(validation_result)` - // with `validation_result.is_valid() == false`. - let result = finality_grandpa::validate_commit( - &justification.commit, - &full_voter_set(), - &AncestryChain::new(&justification), - ) - .unwrap(); - - assert!(!result.is_valid()); -} - -// tests below are our differences with the original implementation - -#[test] -fn different_result_when_justification_contains_duplicate_vote() { - let mut justification = make_justification_for_header(JustificationGeneratorParams { - header: test_header(1), - authorities: minimal_accounts_set(), - ancestors: 0, - ..Default::default() - }); - // the justification may contain exactly the same vote (i.e. 
same precommit and same signature) - // multiple times && it isn't treated as an error by original implementation - let last_precommit = justification.commit.precommits.pop().unwrap(); - justification.commit.precommits.push(justification.commit.precommits[0].clone()); - justification.commit.precommits.push(last_precommit); - - // our implementation fails - assert_eq!( - verify_justification::( - header_id::(1), - &full_verification_context(TEST_GRANDPA_SET_ID), - &justification, - ), - Err(JustificationVerificationError::Precommit(PrecommitError::DuplicateAuthorityVote)), - ); - // original implementation returns `Ok(validation_result)` - // with `validation_result.is_valid() == true`. - let result = finality_grandpa::validate_commit( - &justification.commit, - &full_voter_set(), - &AncestryChain::new(&justification), - ) - .unwrap(); - - assert!(result.is_valid()); -} - -#[test] -fn different_results_when_authority_equivocates_once_in_a_round() { - let mut justification = make_justification_for_header(JustificationGeneratorParams { - header: test_header(1), - authorities: minimal_accounts_set(), - ancestors: 0, - ..Default::default() - }); - // the justification original implementation allows authority to submit two different - // votes in a single round, of which only first is 'accepted' - let last_precommit = justification.commit.precommits.pop().unwrap(); - justification.commit.precommits.push(signed_precommit::( - &ALICE, - header_id::(1), - justification.round, - TEST_GRANDPA_SET_ID, - )); - justification.commit.precommits.push(last_precommit); - - // our implementation fails - assert_eq!( - verify_justification::( - header_id::(1), - &full_verification_context(TEST_GRANDPA_SET_ID), - &justification, - ), - Err(JustificationVerificationError::Precommit(PrecommitError::DuplicateAuthorityVote)), - ); - // original implementation returns `Ok(validation_result)` - // with `validation_result.is_valid() == true`. 
- let result = finality_grandpa::validate_commit( - &justification.commit, - &full_voter_set(), - &AncestryChain::new(&justification), - ) - .unwrap(); - - assert!(result.is_valid()); -} - -#[test] -fn different_results_when_authority_equivocates_twice_in_a_round() { - let mut justification = make_justification_for_header(JustificationGeneratorParams { - header: test_header(1), - authorities: minimal_accounts_set(), - ancestors: 0, - ..Default::default() - }); - // there's some code in the original implementation that should return an error when - // same authority submits more than two different votes in a single round: - // https://github.com/paritytech/finality-grandpa/blob/6aeea2d1159d0f418f0b86e70739f2130629ca09/src/lib.rs#L473 - // but there's also a code that prevents this from happening: - // https://github.com/paritytech/finality-grandpa/blob/6aeea2d1159d0f418f0b86e70739f2130629ca09/src/round.rs#L287 - // => so now we are also just ignoring all votes from the same authority, except the first one - let last_precommit = justification.commit.precommits.pop().unwrap(); - let prev_last_precommit = justification.commit.precommits.pop().unwrap(); - justification.commit.precommits.push(signed_precommit::( - &ALICE, - header_id::(1), - justification.round, - TEST_GRANDPA_SET_ID, - )); - justification.commit.precommits.push(signed_precommit::( - &ALICE, - header_id::(1), - justification.round, - TEST_GRANDPA_SET_ID, - )); - justification.commit.precommits.push(last_precommit); - justification.commit.precommits.push(prev_last_precommit); - - // our implementation fails - assert_eq!( - verify_justification::( - header_id::(1), - &full_verification_context(TEST_GRANDPA_SET_ID), - &justification, - ), - Err(JustificationVerificationError::Precommit(PrecommitError::DuplicateAuthorityVote)), - ); - // original implementation returns `Ok(validation_result)` - // with `validation_result.is_valid() == true`. 
- let result = finality_grandpa::validate_commit( - &justification.commit, - &full_voter_set(), - &AncestryChain::new(&justification), - ) - .unwrap(); - - assert!(result.is_valid()); -} - -#[test] -fn different_results_when_there_are_more_than_enough_votes() { - let mut justification = make_justification_for_header(JustificationGeneratorParams { - header: test_header(1), - authorities: minimal_accounts_set(), - ancestors: 0, - ..Default::default() - }); - // the reference implementation just keep verifying signatures even if we have - // collected enough votes. We are not - justification.commit.precommits.push(signed_precommit::( - &EVE, - header_id::(1), - justification.round, - TEST_GRANDPA_SET_ID, - )); - - // our implementation fails - assert_eq!( - verify_justification::( - header_id::(1), - &full_verification_context(TEST_GRANDPA_SET_ID), - &justification, - ), - Err(JustificationVerificationError::Precommit(PrecommitError::RedundantAuthorityVote)), - ); - // original implementation returns `Ok(validation_result)` - // with `validation_result.is_valid() == true`. - let result = finality_grandpa::validate_commit( - &justification.commit, - &full_voter_set(), - &AncestryChain::new(&justification), - ) - .unwrap(); - - assert!(result.is_valid()); -} - -#[test] -fn different_results_when_there_is_a_vote_of_unknown_authority() { - let mut justification = make_justification_for_header(JustificationGeneratorParams { - header: test_header(1), - authorities: minimal_accounts_set(), - ancestors: 0, - ..Default::default() - }); - // the reference implementation just keep verifying signatures even if we have - // collected enough votes. 
We are not - let last_precommit = justification.commit.precommits.pop().unwrap(); - justification.commit.precommits.push(signed_precommit::( - &FERDIE, - header_id::(1), - justification.round, - TEST_GRANDPA_SET_ID, - )); - justification.commit.precommits.push(last_precommit); - - // our implementation fails - assert_eq!( - verify_justification::( - header_id::(1), - &full_verification_context(TEST_GRANDPA_SET_ID), - &justification, - ), - Err(JustificationVerificationError::Precommit(PrecommitError::UnknownAuthorityVote)), - ); - // original implementation returns `Ok(validation_result)` - // with `validation_result.is_valid() == true`. - let result = finality_grandpa::validate_commit( - &justification.commit, - &full_voter_set(), - &AncestryChain::new(&justification), - ) - .unwrap(); - - assert!(result.is_valid()); -} diff --git a/primitives/header-chain/tests/justification/equivocation.rs b/primitives/header-chain/tests/justification/equivocation.rs deleted file mode 100644 index 0bc084cc1..000000000 --- a/primitives/header-chain/tests/justification/equivocation.rs +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Tests for Grandpa equivocations collector code. 
- -use bp_header_chain::justification::EquivocationsCollector; -use bp_test_utils::*; -use finality_grandpa::Precommit; -use sp_consensus_grandpa::EquivocationProof; - -type TestHeader = sp_runtime::testing::Header; - -#[test] -fn duplicate_votes_are_not_considered_equivocations() { - let verification_context = verification_context(TEST_GRANDPA_SET_ID); - let base_justification = make_default_justification::(&test_header(1)); - - let mut collector = - EquivocationsCollector::new(&verification_context, &base_justification).unwrap(); - collector.parse_justifications(&[base_justification.clone()]); - - assert_eq!(collector.into_equivocation_proofs().len(), 0); -} - -#[test] -fn equivocations_are_detected_in_base_justification_redundant_votes() { - let mut base_justification = make_default_justification::(&test_header(1)); - - let first_vote = base_justification.commit.precommits[0].clone(); - let equivocation = signed_precommit::( - &ALICE, - header_id::(1), - base_justification.round, - TEST_GRANDPA_SET_ID, - ); - base_justification.commit.precommits.push(equivocation.clone()); - - let verification_context = verification_context(TEST_GRANDPA_SET_ID); - let collector = - EquivocationsCollector::new(&verification_context, &base_justification).unwrap(); - - assert_eq!( - collector.into_equivocation_proofs(), - vec![EquivocationProof::new( - 1, - sp_consensus_grandpa::Equivocation::Precommit(finality_grandpa::Equivocation { - round_number: 1, - identity: ALICE.into(), - first: ( - Precommit { - target_hash: first_vote.precommit.target_hash, - target_number: first_vote.precommit.target_number - }, - first_vote.signature - ), - second: ( - Precommit { - target_hash: equivocation.precommit.target_hash, - target_number: equivocation.precommit.target_number - }, - equivocation.signature - ) - }) - )] - ); -} - -#[test] -fn equivocations_are_detected_in_extra_justification_redundant_votes() { - let base_justification = make_default_justification::(&test_header(1)); - let 
first_vote = base_justification.commit.precommits[0].clone(); - - let mut extra_justification = base_justification.clone(); - let equivocation = signed_precommit::( - &ALICE, - header_id::(1), - base_justification.round, - TEST_GRANDPA_SET_ID, - ); - extra_justification.commit.precommits.push(equivocation.clone()); - - let verification_context = verification_context(TEST_GRANDPA_SET_ID); - let mut collector = - EquivocationsCollector::new(&verification_context, &base_justification).unwrap(); - collector.parse_justifications(&[extra_justification]); - - assert_eq!( - collector.into_equivocation_proofs(), - vec![EquivocationProof::new( - 1, - sp_consensus_grandpa::Equivocation::Precommit(finality_grandpa::Equivocation { - round_number: 1, - identity: ALICE.into(), - first: ( - Precommit { - target_hash: first_vote.precommit.target_hash, - target_number: first_vote.precommit.target_number - }, - first_vote.signature - ), - second: ( - Precommit { - target_hash: equivocation.precommit.target_hash, - target_number: equivocation.precommit.target_number - }, - equivocation.signature - ) - }) - )] - ); -} diff --git a/primitives/header-chain/tests/justification/optimizer.rs b/primitives/header-chain/tests/justification/optimizer.rs deleted file mode 100644 index 8d7e2d650..000000000 --- a/primitives/header-chain/tests/justification/optimizer.rs +++ /dev/null @@ -1,196 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Tests for Grandpa Justification optimizer code. - -use bp_header_chain::justification::verify_and_optimize_justification; -use bp_test_utils::*; -use finality_grandpa::SignedPrecommit; -use sp_consensus_grandpa::AuthoritySignature; - -type TestHeader = sp_runtime::testing::Header; - -#[test] -fn optimizer_does_noting_with_minimal_justification() { - let mut justification = make_default_justification::(&test_header(1)); - - let num_precommits_before = justification.commit.precommits.len(); - verify_and_optimize_justification::( - header_id::(1), - &verification_context(TEST_GRANDPA_SET_ID), - &mut justification, - ) - .unwrap(); - let num_precommits_after = justification.commit.precommits.len(); - - assert_eq!(num_precommits_before, num_precommits_after); -} - -#[test] -fn unknown_authority_votes_are_removed_by_optimizer() { - let mut justification = make_default_justification::(&test_header(1)); - justification.commit.precommits.push(signed_precommit::( - &bp_test_utils::Account(42), - header_id::(1), - justification.round, - TEST_GRANDPA_SET_ID, - )); - - let num_precommits_before = justification.commit.precommits.len(); - verify_and_optimize_justification::( - header_id::(1), - &verification_context(TEST_GRANDPA_SET_ID), - &mut justification, - ) - .unwrap(); - let num_precommits_after = justification.commit.precommits.len(); - - assert_eq!(num_precommits_before - 1, num_precommits_after); -} - -#[test] -fn duplicate_authority_votes_are_removed_by_optimizer() { - let mut justification = make_default_justification::(&test_header(1)); - justification - .commit - .precommits - .push(justification.commit.precommits.first().cloned().unwrap()); - - let num_precommits_before = justification.commit.precommits.len(); - verify_and_optimize_justification::( - header_id::(1), - 
&verification_context(TEST_GRANDPA_SET_ID), - &mut justification, - ) - .unwrap(); - let num_precommits_after = justification.commit.precommits.len(); - - assert_eq!(num_precommits_before - 1, num_precommits_after); -} - -#[test] -fn invalid_authority_signatures_are_removed_by_optimizer() { - let mut justification = make_default_justification::(&test_header(1)); - - let target = header_id::(1); - let invalid_raw_signature: Vec = ALICE.sign(b"").to_bytes().into(); - justification.commit.precommits.insert( - 0, - SignedPrecommit { - precommit: finality_grandpa::Precommit { - target_hash: target.0, - target_number: target.1, - }, - signature: AuthoritySignature::try_from(invalid_raw_signature).unwrap(), - id: ALICE.into(), - }, - ); - - let num_precommits_before = justification.commit.precommits.len(); - verify_and_optimize_justification::( - header_id::(1), - &verification_context(TEST_GRANDPA_SET_ID), - &mut justification, - ) - .unwrap(); - let num_precommits_after = justification.commit.precommits.len(); - - assert_eq!(num_precommits_before - 1, num_precommits_after); -} - -#[test] -fn redundant_authority_votes_are_removed_by_optimizer() { - let mut justification = make_default_justification::(&test_header(1)); - justification.commit.precommits.push(signed_precommit::( - &EVE, - header_id::(1), - justification.round, - TEST_GRANDPA_SET_ID, - )); - - let num_precommits_before = justification.commit.precommits.len(); - verify_and_optimize_justification::( - header_id::(1), - &verification_context(TEST_GRANDPA_SET_ID), - &mut justification, - ) - .unwrap(); - let num_precommits_after = justification.commit.precommits.len(); - - assert_eq!(num_precommits_before - 1, num_precommits_after); -} - -#[test] -fn unrelated_ancestry_votes_are_removed_by_optimizer() { - let mut justification = make_default_justification::(&test_header(2)); - justification.commit.precommits.insert( - 0, - signed_precommit::( - &ALICE, - header_id::(1), - justification.round, - 
TEST_GRANDPA_SET_ID, - ), - ); - - let num_precommits_before = justification.commit.precommits.len(); - verify_and_optimize_justification::( - header_id::(2), - &verification_context(TEST_GRANDPA_SET_ID), - &mut justification, - ) - .unwrap(); - let num_precommits_after = justification.commit.precommits.len(); - - assert_eq!(num_precommits_before - 1, num_precommits_after); -} - -#[test] -fn duplicate_votes_ancestries_are_removed_by_optimizer() { - let mut justification = make_default_justification::(&test_header(1)); - let optimized_votes_ancestries = justification.votes_ancestries.clone(); - justification.votes_ancestries = justification - .votes_ancestries - .into_iter() - .flat_map(|item| std::iter::repeat(item).take(3)) - .collect(); - - verify_and_optimize_justification::( - header_id::(1), - &verification_context(TEST_GRANDPA_SET_ID), - &mut justification, - ) - .unwrap(); - - assert_eq!(justification.votes_ancestries, optimized_votes_ancestries); -} - -#[test] -fn redundant_votes_ancestries_are_removed_by_optimizer() { - let mut justification = make_default_justification::(&test_header(1)); - justification.votes_ancestries.push(test_header(100)); - - let num_votes_ancestries_before = justification.votes_ancestries.len(); - verify_and_optimize_justification::( - header_id::(1), - &verification_context(TEST_GRANDPA_SET_ID), - &mut justification, - ) - .unwrap(); - let num_votes_ancestries_after = justification.votes_ancestries.len(); - - assert_eq!(num_votes_ancestries_before - 1, num_votes_ancestries_after); -} diff --git a/primitives/header-chain/tests/justification/strict.rs b/primitives/header-chain/tests/justification/strict.rs deleted file mode 100644 index 639a66957..000000000 --- a/primitives/header-chain/tests/justification/strict.rs +++ /dev/null @@ -1,202 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Tests for Grandpa strict justification verifier code. - -use bp_header_chain::justification::{ - required_justification_precommits, verify_justification, JustificationVerificationContext, - JustificationVerificationError, PrecommitError, -}; -use bp_test_utils::*; - -type TestHeader = sp_runtime::testing::Header; - -#[test] -fn valid_justification_accepted() { - let authorities = vec![(ALICE, 1), (BOB, 1), (CHARLIE, 1)]; - let params = JustificationGeneratorParams { - header: test_header(1), - round: TEST_GRANDPA_ROUND, - set_id: TEST_GRANDPA_SET_ID, - authorities: authorities.clone(), - ancestors: 7, - forks: 3, - }; - - let justification = make_justification_for_header::(params.clone()); - assert_eq!( - verify_justification::( - header_id::(1), - &verification_context(TEST_GRANDPA_SET_ID), - &justification, - ), - Ok(()), - ); - - assert_eq!(justification.commit.precommits.len(), authorities.len()); - assert_eq!(justification.votes_ancestries.len(), params.ancestors as usize); -} - -#[test] -fn valid_justification_accepted_with_single_fork() { - let params = JustificationGeneratorParams { - header: test_header(1), - round: TEST_GRANDPA_ROUND, - set_id: TEST_GRANDPA_SET_ID, - authorities: vec![(ALICE, 1), (BOB, 1), (CHARLIE, 1)], - ancestors: 5, - forks: 1, - }; - - assert_eq!( - verify_justification::( - 
header_id::(1), - &verification_context(TEST_GRANDPA_SET_ID), - &make_justification_for_header::(params) - ), - Ok(()), - ); -} - -#[test] -fn valid_justification_accepted_with_arbitrary_number_of_authorities() { - use finality_grandpa::voter_set::VoterSet; - use sp_consensus_grandpa::AuthorityId; - - let n = 15; - let required_signatures = required_justification_precommits(n as _); - let authorities = accounts(n).iter().map(|k| (*k, 1)).collect::>(); - - let params = JustificationGeneratorParams { - header: test_header(1), - round: TEST_GRANDPA_ROUND, - set_id: TEST_GRANDPA_SET_ID, - authorities: authorities.clone().into_iter().take(required_signatures as _).collect(), - ancestors: n.into(), - forks: required_signatures, - }; - - let authorities = authorities - .iter() - .map(|(id, w)| (AuthorityId::from(*id), *w)) - .collect::>(); - let voter_set = VoterSet::new(authorities).unwrap(); - - assert_eq!( - verify_justification::( - header_id::(1), - &JustificationVerificationContext { voter_set, authority_set_id: TEST_GRANDPA_SET_ID }, - &make_justification_for_header::(params) - ), - Ok(()), - ); -} - -#[test] -fn justification_with_invalid_target_rejected() { - assert_eq!( - verify_justification::( - header_id::(2), - &verification_context(TEST_GRANDPA_SET_ID), - &make_default_justification::(&test_header(1)), - ), - Err(JustificationVerificationError::InvalidJustificationTarget), - ); -} - -#[test] -fn justification_with_invalid_commit_rejected() { - let mut justification = make_default_justification::(&test_header(1)); - justification.commit.precommits.clear(); - - assert_eq!( - verify_justification::( - header_id::(1), - &verification_context(TEST_GRANDPA_SET_ID), - &justification, - ), - Err(JustificationVerificationError::TooLowCumulativeWeight), - ); -} - -#[test] -fn justification_with_invalid_authority_signature_rejected() { - let mut justification = make_default_justification::(&test_header(1)); - justification.commit.precommits[0].signature = - 
sp_core::crypto::UncheckedFrom::unchecked_from([1u8; 64]); - - assert_eq!( - verify_justification::( - header_id::(1), - &verification_context(TEST_GRANDPA_SET_ID), - &justification, - ), - Err(JustificationVerificationError::Precommit(PrecommitError::InvalidAuthoritySignature)), - ); -} - -#[test] -fn justification_with_duplicate_votes_ancestry() { - let mut justification = make_default_justification::(&test_header(1)); - justification.votes_ancestries.push(justification.votes_ancestries[0].clone()); - - assert_eq!( - verify_justification::( - header_id::(1), - &verification_context(TEST_GRANDPA_SET_ID), - &justification, - ), - Err(JustificationVerificationError::DuplicateVotesAncestries), - ); -} -#[test] -fn justification_with_redundant_votes_ancestry() { - let mut justification = make_default_justification::(&test_header(1)); - justification.votes_ancestries.push(test_header(10)); - - assert_eq!( - verify_justification::( - header_id::(1), - &verification_context(TEST_GRANDPA_SET_ID), - &justification, - ), - Err(JustificationVerificationError::RedundantVotesAncestries), - ); -} - -#[test] -fn justification_is_invalid_if_we_dont_meet_threshold() { - // Need at least three authorities to sign off or else the voter set threshold can't be reached - let authorities = vec![(ALICE, 1), (BOB, 1)]; - - let params = JustificationGeneratorParams { - header: test_header(1), - round: TEST_GRANDPA_ROUND, - set_id: TEST_GRANDPA_SET_ID, - authorities: authorities.clone(), - ancestors: 2 * authorities.len() as u32, - forks: 2, - }; - - assert_eq!( - verify_justification::( - header_id::(1), - &verification_context(TEST_GRANDPA_SET_ID), - &make_justification_for_header::(params) - ), - Err(JustificationVerificationError::TooLowCumulativeWeight), - ); -} diff --git a/primitives/header-chain/tests/tests.rs b/primitives/header-chain/tests/tests.rs deleted file mode 100644 index 269fde09b..000000000 --- a/primitives/header-chain/tests/tests.rs +++ /dev/null @@ -1,23 +0,0 @@ -// 
Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -mod justification { - mod equivocation; - mod optimizer; - mod strict; -} - -mod implementation_match; diff --git a/primitives/messages/Cargo.toml b/primitives/messages/Cargo.toml deleted file mode 100644 index 8bacff709..000000000 --- a/primitives/messages/Cargo.toml +++ /dev/null @@ -1,44 +0,0 @@ -[package] -name = "bp-messages" -description = "Primitives of messages module." 
-version = "0.7.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["bit-vec", "derive"] } -scale-info = { version = "2.11.1", default-features = false, features = ["bit-vec", "derive"] } -serde = { features = ["alloc", "derive"], workspace = true } - -# Bridge dependencies - -bp-runtime = { path = "../runtime", default-features = false } -bp-header-chain = { path = "../header-chain", default-features = false } - -# Substrate Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[dev-dependencies] -hex = "0.4" -hex-literal = "0.4" - -[features] -default = ["std"] -std = [ - "bp-header-chain/std", - "bp-runtime/std", - "codec/std", - "frame-support/std", - "scale-info/std", - "serde/std", - "sp-core/std", - "sp-std/std", -] diff --git a/primitives/messages/src/lib.rs b/primitives/messages/src/lib.rs deleted file mode 100644 index c3f79b3ee..000000000 --- a/primitives/messages/src/lib.rs +++ /dev/null @@ -1,567 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives of messages module. - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -use bp_header_chain::HeaderChainError; -use bp_runtime::{ - messages::MessageDispatchResult, BasicOperatingMode, Chain, OperatingMode, RangeInclusiveExt, - StorageProofError, UnderlyingChainOf, UnderlyingChainProvider, -}; -use codec::{Decode, Encode, MaxEncodedLen}; -use frame_support::PalletError; -// Weight is reexported to avoid additional frame-support dependencies in related crates. -pub use frame_support::weights::Weight; -use scale_info::TypeInfo; -use serde::{Deserialize, Serialize}; -use source_chain::RelayersRewards; -use sp_core::{RuntimeDebug, TypeId}; -use sp_std::{collections::vec_deque::VecDeque, ops::RangeInclusive, prelude::*}; - -pub mod source_chain; -pub mod storage_keys; -pub mod target_chain; - -/// Substrate-based chain with messaging support. -pub trait ChainWithMessages: Chain { - /// Name of the bridge messages pallet (used in `construct_runtime` macro call) that is - /// deployed at some other chain to bridge with this `ChainWithMessages`. - /// - /// We assume that all chains that are bridging with this `ChainWithMessages` are using - /// the same name. - const WITH_CHAIN_MESSAGES_PALLET_NAME: &'static str; - - /// Maximal number of unrewarded relayers in a single confirmation transaction at this - /// `ChainWithMessages`. - const MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX: MessageNonce; - /// Maximal number of unconfirmed messages in a single confirmation transaction at this - /// `ChainWithMessages`. 
- const MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX: MessageNonce; -} - -impl ChainWithMessages for T -where - T: Chain + UnderlyingChainProvider, - UnderlyingChainOf: ChainWithMessages, -{ - const WITH_CHAIN_MESSAGES_PALLET_NAME: &'static str = - UnderlyingChainOf::::WITH_CHAIN_MESSAGES_PALLET_NAME; - const MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX: MessageNonce = - UnderlyingChainOf::::MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX; - const MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX: MessageNonce = - UnderlyingChainOf::::MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX; -} - -/// Messages pallet operating mode. -#[derive( - Encode, - Decode, - Clone, - Copy, - PartialEq, - Eq, - RuntimeDebug, - TypeInfo, - MaxEncodedLen, - Serialize, - Deserialize, -)] -pub enum MessagesOperatingMode { - /// Basic operating mode (Normal/Halted) - Basic(BasicOperatingMode), - /// The pallet is not accepting outbound messages. Inbound messages and receiving proofs - /// are still accepted. - /// - /// This mode may be used e.g. when bridged chain expects upgrade. Then to avoid dispatch - /// failures, the pallet owner may stop accepting new messages, while continuing to deliver - /// queued messages to the bridged chain. Once upgrade is completed, the mode may be switched - /// back to `Normal`. - RejectingOutboundMessages, -} - -impl Default for MessagesOperatingMode { - fn default() -> Self { - MessagesOperatingMode::Basic(BasicOperatingMode::Normal) - } -} - -impl OperatingMode for MessagesOperatingMode { - fn is_halted(&self) -> bool { - match self { - Self::Basic(operating_mode) => operating_mode.is_halted(), - _ => false, - } - } -} - -/// Lane id which implements `TypeId`. 
-#[derive( - Clone, Copy, Decode, Default, Encode, Eq, Ord, PartialOrd, PartialEq, TypeInfo, MaxEncodedLen, -)] -pub struct LaneId(pub [u8; 4]); - -impl core::fmt::Debug for LaneId { - fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result { - self.0.fmt(fmt) - } -} - -impl AsRef<[u8]> for LaneId { - fn as_ref(&self) -> &[u8] { - &self.0 - } -} - -impl TypeId for LaneId { - const TYPE_ID: [u8; 4] = *b"blan"; -} - -/// Message nonce. Valid messages will never have 0 nonce. -pub type MessageNonce = u64; - -/// Message id as a tuple. -pub type BridgeMessageId = (LaneId, MessageNonce); - -/// Opaque message payload. We only decode this payload when it is dispatched. -pub type MessagePayload = Vec; - -/// Message key (unique message identifier) as it is stored in the storage. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo, MaxEncodedLen)] -pub struct MessageKey { - /// ID of the message lane. - pub lane_id: LaneId, - /// Message nonce. - pub nonce: MessageNonce, -} - -/// Message as it is stored in the storage. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] -pub struct Message { - /// Message key. - pub key: MessageKey, - /// Message payload. - pub payload: MessagePayload, -} - -/// Inbound lane data. -#[derive(Encode, Decode, Clone, RuntimeDebug, PartialEq, Eq, TypeInfo)] -pub struct InboundLaneData { - /// Identifiers of relayers and messages that they have delivered to this lane (ordered by - /// message nonce). - /// - /// This serves as a helper storage item, to allow the source chain to easily pay rewards - /// to the relayers who successfully delivered messages to the target chain (inbound lane). - /// - /// It is guaranteed to have at most N entries, where N is configured at the module level. 
- /// If there are N entries in this vec, then: - /// 1) all incoming messages are rejected if they're missing corresponding - /// `proof-of(outbound-lane.state)`; 2) all incoming messages are rejected if - /// `proof-of(outbound-lane.state).last_delivered_nonce` is equal to - /// `self.last_confirmed_nonce`. Given what is said above, all nonces in this queue are in - /// range: `(self.last_confirmed_nonce; self.last_delivered_nonce()]`. - /// - /// When a relayer sends a single message, both of MessageNonces are the same. - /// When relayer sends messages in a batch, the first arg is the lowest nonce, second arg the - /// highest nonce. Multiple dispatches from the same relayer are allowed. - pub relayers: VecDeque>, - - /// Nonce of the last message that - /// a) has been delivered to the target (this) chain and - /// b) the delivery has been confirmed on the source chain - /// - /// that the target chain knows of. - /// - /// This value is updated indirectly when an `OutboundLane` state of the source - /// chain is received alongside with new messages delivery. - pub last_confirmed_nonce: MessageNonce, -} - -impl Default for InboundLaneData { - fn default() -> Self { - InboundLaneData { relayers: VecDeque::new(), last_confirmed_nonce: 0 } - } -} - -impl InboundLaneData { - /// Returns approximate size of the struct, given a number of entries in the `relayers` set and - /// size of each entry. - /// - /// Returns `None` if size overflows `usize` limits. - pub fn encoded_size_hint(relayers_entries: usize) -> Option - where - RelayerId: MaxEncodedLen, - { - relayers_entries - .checked_mul(UnrewardedRelayer::::max_encoded_len())? - .checked_add(MessageNonce::max_encoded_len()) - } - - /// Returns the approximate size of the struct as u32, given a number of entries in the - /// `relayers` set and the size of each entry. - /// - /// Returns `u32::MAX` if size overflows `u32` limits. 
- pub fn encoded_size_hint_u32(relayers_entries: usize) -> u32 - where - RelayerId: MaxEncodedLen, - { - Self::encoded_size_hint(relayers_entries) - .and_then(|x| u32::try_from(x).ok()) - .unwrap_or(u32::MAX) - } - - /// Nonce of the last message that has been delivered to this (target) chain. - pub fn last_delivered_nonce(&self) -> MessageNonce { - self.relayers - .back() - .map(|entry| entry.messages.end) - .unwrap_or(self.last_confirmed_nonce) - } - - /// Returns the total number of messages in the `relayers` vector, - /// saturating in case of underflow or overflow. - pub fn total_unrewarded_messages(&self) -> MessageNonce { - let relayers = &self.relayers; - match (relayers.front(), relayers.back()) { - (Some(front), Some(back)) => - (front.messages.begin..=back.messages.end).saturating_len(), - _ => 0, - } - } -} - -/// Outbound message details, returned by runtime APIs. -#[derive(Clone, Encode, Decode, RuntimeDebug, PartialEq, Eq, TypeInfo)] -pub struct OutboundMessageDetails { - /// Nonce assigned to the message. - pub nonce: MessageNonce, - /// Message dispatch weight. - /// - /// Depending on messages pallet configuration, it may be declared by the message submitter, - /// computed automatically or just be zero if dispatch fee is paid at the target chain. - pub dispatch_weight: Weight, - /// Size of the encoded message. - pub size: u32, -} - -/// Inbound message details, returned by runtime APIs. -#[derive(Clone, Encode, Decode, RuntimeDebug, PartialEq, Eq, TypeInfo)] -pub struct InboundMessageDetails { - /// Computed message dispatch weight. - /// - /// Runtime API guarantees that it will match the value, returned by - /// `target_chain::MessageDispatch::dispatch_weight`. This means that if the runtime - /// has failed to decode the message, it will be zero - that's because `undecodable` - /// message cannot be dispatched. - pub dispatch_weight: Weight, -} - -/// Unrewarded relayer entry stored in the inbound lane data. 
-/// -/// This struct represents a continuous range of messages that have been delivered by the same -/// relayer and whose confirmations are still pending. -#[derive(Encode, Decode, Clone, RuntimeDebug, PartialEq, Eq, TypeInfo, MaxEncodedLen)] -pub struct UnrewardedRelayer { - /// Identifier of the relayer. - pub relayer: RelayerId, - /// Messages range, delivered by this relayer. - pub messages: DeliveredMessages, -} - -/// Received messages with their dispatch result. -#[derive(Clone, Default, Encode, Decode, RuntimeDebug, PartialEq, Eq, TypeInfo)] -pub struct ReceivedMessages { - /// Id of the lane which is receiving messages. - pub lane: LaneId, - /// Result of messages which we tried to dispatch - pub receive_results: Vec<(MessageNonce, ReceptionResult)>, -} - -impl ReceivedMessages { - /// Creates new `ReceivedMessages` structure from given results. - pub fn new( - lane: LaneId, - receive_results: Vec<(MessageNonce, ReceptionResult)>, - ) -> Self { - ReceivedMessages { lane, receive_results } - } - - /// Push `result` of the `message` delivery onto `receive_results` vector. - pub fn push(&mut self, message: MessageNonce, result: ReceptionResult) { - self.receive_results.push((message, result)); - } -} - -/// Result of single message receival. -#[derive(RuntimeDebug, Encode, Decode, PartialEq, Eq, Clone, TypeInfo)] -pub enum ReceptionResult { - /// Message has been received and dispatched. Note that we don't care whether dispatch has - /// been successful or not - in both case message falls into this category. - /// - /// The message dispatch result is also returned. - Dispatched(MessageDispatchResult), - /// Message has invalid nonce and lane has rejected to accept this message. - InvalidNonce, - /// There are too many unrewarded relayer entries at the lane. - TooManyUnrewardedRelayers, - /// There are too many unconfirmed messages at the lane. - TooManyUnconfirmedMessages, -} - -/// Delivered messages with their dispatch result. 
-#[derive(Clone, Default, Encode, Decode, RuntimeDebug, PartialEq, Eq, TypeInfo, MaxEncodedLen)] -pub struct DeliveredMessages { - /// Nonce of the first message that has been delivered (inclusive). - pub begin: MessageNonce, - /// Nonce of the last message that has been delivered (inclusive). - pub end: MessageNonce, -} - -impl DeliveredMessages { - /// Create new `DeliveredMessages` struct that confirms delivery of single nonce with given - /// dispatch result. - pub fn new(nonce: MessageNonce) -> Self { - DeliveredMessages { begin: nonce, end: nonce } - } - - /// Return total count of delivered messages. - pub fn total_messages(&self) -> MessageNonce { - (self.begin..=self.end).saturating_len() - } - - /// Note new dispatched message. - pub fn note_dispatched_message(&mut self) { - self.end += 1; - } - - /// Returns true if delivered messages contain message with given nonce. - pub fn contains_message(&self, nonce: MessageNonce) -> bool { - (self.begin..=self.end).contains(&nonce) - } -} - -/// Gist of `InboundLaneData::relayers` field used by runtime APIs. -#[derive(Clone, Default, Encode, Decode, RuntimeDebug, PartialEq, Eq, TypeInfo)] -pub struct UnrewardedRelayersState { - /// Number of entries in the `InboundLaneData::relayers` set. - pub unrewarded_relayer_entries: MessageNonce, - /// Number of messages in the oldest entry of `InboundLaneData::relayers`. This is the - /// minimal number of reward proofs required to push out this entry from the set. - pub messages_in_oldest_entry: MessageNonce, - /// Total number of messages in the relayers vector. - pub total_messages: MessageNonce, - /// Nonce of the latest message that has been delivered to the target chain. - /// - /// This corresponds to the result of the `InboundLaneData::last_delivered_nonce` call - /// at the bridged chain. - pub last_delivered_nonce: MessageNonce, -} - -impl UnrewardedRelayersState { - /// Verify that the relayers state corresponds with the `InboundLaneData`. 
- pub fn is_valid(&self, lane_data: &InboundLaneData) -> bool { - self == &lane_data.into() - } -} - -impl From<&InboundLaneData> for UnrewardedRelayersState { - fn from(lane: &InboundLaneData) -> UnrewardedRelayersState { - UnrewardedRelayersState { - unrewarded_relayer_entries: lane.relayers.len() as _, - messages_in_oldest_entry: lane - .relayers - .front() - .map(|entry| entry.messages.total_messages()) - .unwrap_or(0), - total_messages: lane.total_unrewarded_messages(), - last_delivered_nonce: lane.last_delivered_nonce(), - } - } -} - -/// Outbound lane data. -#[derive(Encode, Decode, Clone, RuntimeDebug, PartialEq, Eq, TypeInfo, MaxEncodedLen)] -pub struct OutboundLaneData { - /// Nonce of the oldest message that we haven't yet pruned. May point to not-yet-generated - /// message if all sent messages are already pruned. - pub oldest_unpruned_nonce: MessageNonce, - /// Nonce of the latest message, received by bridged chain. - pub latest_received_nonce: MessageNonce, - /// Nonce of the latest message, generated by us. - pub latest_generated_nonce: MessageNonce, -} - -impl Default for OutboundLaneData { - fn default() -> Self { - OutboundLaneData { - // it is 1 because we're pruning everything in [oldest_unpruned_nonce; - // latest_received_nonce] - oldest_unpruned_nonce: 1, - latest_received_nonce: 0, - latest_generated_nonce: 0, - } - } -} - -impl OutboundLaneData { - /// Return nonces of all currently queued messages (i.e. messages that we believe - /// are not delivered yet). - pub fn queued_messages(&self) -> RangeInclusive { - (self.latest_received_nonce + 1)..=self.latest_generated_nonce - } -} - -/// Calculate the number of messages that the relayers have delivered. 
-pub fn calc_relayers_rewards( - messages_relayers: VecDeque>, - received_range: &RangeInclusive, -) -> RelayersRewards -where - AccountId: sp_std::cmp::Ord, -{ - // remember to reward relayers that have delivered messages - // this loop is bounded by `T::MaxUnrewardedRelayerEntriesAtInboundLane` on the bridged chain - let mut relayers_rewards = RelayersRewards::new(); - for entry in messages_relayers { - let nonce_begin = sp_std::cmp::max(entry.messages.begin, *received_range.start()); - let nonce_end = sp_std::cmp::min(entry.messages.end, *received_range.end()); - if nonce_end >= nonce_begin { - *relayers_rewards.entry(entry.relayer).or_default() += nonce_end - nonce_begin + 1; - } - } - relayers_rewards -} - -/// A minimized version of `pallet-bridge-messages::Call` that can be used without a runtime. -#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, TypeInfo)] -#[allow(non_camel_case_types)] -pub enum BridgeMessagesCall { - /// `pallet-bridge-messages::Call::receive_messages_proof` - #[codec(index = 2)] - receive_messages_proof { - /// Account id of relayer at the **bridged** chain. - relayer_id_at_bridged_chain: AccountId, - /// Messages proof. - proof: MessagesProof, - /// A number of messages in the proof. - messages_count: u32, - /// Total dispatch weight of messages in the proof. - dispatch_weight: Weight, - }, - /// `pallet-bridge-messages::Call::receive_messages_delivery_proof` - #[codec(index = 3)] - receive_messages_delivery_proof { - /// Messages delivery proof. - proof: MessagesDeliveryProof, - /// "Digest" of unrewarded relayers state at the bridged chain. - relayers_state: UnrewardedRelayersState, - }, -} - -/// Error that happens during message verification. -#[derive(Encode, Decode, RuntimeDebug, PartialEq, Eq, PalletError, TypeInfo)] -pub enum VerificationError { - /// The message proof is empty. - EmptyMessageProof, - /// Error returned by the bridged header chain. 
- HeaderChain(HeaderChainError), - /// Error returned while reading/decoding inbound lane data from the storage proof. - InboundLaneStorage(StorageProofError), - /// The declared message weight is incorrect. - InvalidMessageWeight, - /// Declared messages count doesn't match actual value. - MessagesCountMismatch, - /// Error returned while reading/decoding message data from the storage proof. - MessageStorage(StorageProofError), - /// The message is too large. - MessageTooLarge, - /// Error returned while reading/decoding outbound lane data from the storage proof. - OutboundLaneStorage(StorageProofError), - /// Storage proof related error. - StorageProof(StorageProofError), - /// Custom error - Other(#[codec(skip)] &'static str), -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn total_unrewarded_messages_does_not_overflow() { - let lane_data = InboundLaneData { - relayers: vec![ - UnrewardedRelayer { relayer: 1, messages: DeliveredMessages::new(0) }, - UnrewardedRelayer { - relayer: 2, - messages: DeliveredMessages::new(MessageNonce::MAX), - }, - ] - .into_iter() - .collect(), - last_confirmed_nonce: 0, - }; - assert_eq!(lane_data.total_unrewarded_messages(), MessageNonce::MAX); - } - - #[test] - fn inbound_lane_data_returns_correct_hint() { - let test_cases = vec![ - // single relayer, multiple messages - (1, 128u8), - // multiple relayers, single message per relayer - (128u8, 128u8), - // several messages per relayer - (13u8, 128u8), - ]; - for (relayer_entries, messages_count) in test_cases { - let expected_size = InboundLaneData::::encoded_size_hint(relayer_entries as _); - let actual_size = InboundLaneData { - relayers: (1u8..=relayer_entries) - .map(|i| UnrewardedRelayer { - relayer: i, - messages: DeliveredMessages::new(i as _), - }) - .collect(), - last_confirmed_nonce: messages_count as _, - } - .encode() - .len(); - let difference = (expected_size.unwrap() as f64 - actual_size as f64).abs(); - assert!( - difference / 
(std::cmp::min(actual_size, expected_size.unwrap()) as f64) < 0.1, - "Too large difference between actual ({actual_size}) and expected ({expected_size:?}) inbound lane data size. Test case: {relayer_entries}+{messages_count}", - ); - } - } - - #[test] - fn contains_result_works() { - let delivered_messages = DeliveredMessages { begin: 100, end: 150 }; - - assert!(!delivered_messages.contains_message(99)); - assert!(delivered_messages.contains_message(100)); - assert!(delivered_messages.contains_message(150)); - assert!(!delivered_messages.contains_message(151)); - } - - #[test] - fn lane_id_debug_format_matches_inner_array_format() { - assert_eq!(format!("{:?}", LaneId([0, 0, 0, 0])), format!("{:?}", [0, 0, 0, 0]),); - } -} diff --git a/primitives/messages/src/source_chain.rs b/primitives/messages/src/source_chain.rs deleted file mode 100644 index f4aefd973..000000000 --- a/primitives/messages/src/source_chain.rs +++ /dev/null @@ -1,179 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives of messages module, that are used on the source chain. 
- -use crate::{InboundLaneData, LaneId, MessageNonce, VerificationError}; - -use crate::UnrewardedRelayer; -use bp_runtime::Size; -use frame_support::Parameter; -use sp_core::RuntimeDebug; -use sp_std::{ - collections::{btree_map::BTreeMap, vec_deque::VecDeque}, - fmt::Debug, - ops::RangeInclusive, -}; - -/// Number of messages, delivered by relayers. -pub type RelayersRewards = BTreeMap; - -/// Target chain API. Used by source chain to verify target chain proofs. -/// -/// All implementations of this trait should only work with finalized data that -/// can't change. Wrong implementation may lead to invalid lane states (i.e. lane -/// that's stuck) and/or processing messages without paying fees. -/// -/// The `Payload` type here means the payload of the message that is sent from the -/// source chain to the target chain. The `AccountId` type here means the account -/// type used by the source chain. -pub trait TargetHeaderChain { - /// Proof that messages have been received by target chain. - type MessagesDeliveryProof: Parameter + Size; - - /// Verify message payload before we accept it. - /// - /// **CAUTION**: this is very important function. Incorrect implementation may lead - /// to stuck lanes and/or relayers loses. - /// - /// The proper implementation must ensure that the delivery-transaction with this - /// payload would (at least) be accepted into target chain transaction pool AND - /// eventually will be successfully mined. The most obvious incorrect implementation - /// example would be implementation for BTC chain that accepts payloads larger than - /// 1MB. BTC nodes aren't accepting transactions that are larger than 1MB, so relayer - /// will be unable to craft valid transaction => this (and all subsequent) messages will - /// never be delivered. - fn verify_message(payload: &Payload) -> Result<(), VerificationError>; - - /// Verify messages delivery proof and return lane && nonce of the latest received message. 
- fn verify_messages_delivery_proof( - proof: Self::MessagesDeliveryProof, - ) -> Result<(LaneId, InboundLaneData), VerificationError>; -} - -/// Manages payments that are happening at the source chain during delivery confirmation -/// transaction. -pub trait DeliveryConfirmationPayments { - /// Error type. - type Error: Debug + Into<&'static str>; - - /// Pay rewards for delivering messages to the given relayers. - /// - /// The implementation may also choose to pay reward to the `confirmation_relayer`, which is - /// a relayer that has submitted delivery confirmation transaction. - /// - /// Returns number of actually rewarded relayers. - fn pay_reward( - lane_id: LaneId, - messages_relayers: VecDeque>, - confirmation_relayer: &AccountId, - received_range: &RangeInclusive, - ) -> MessageNonce; -} - -impl DeliveryConfirmationPayments for () { - type Error = &'static str; - - fn pay_reward( - _lane_id: LaneId, - _messages_relayers: VecDeque>, - _confirmation_relayer: &AccountId, - _received_range: &RangeInclusive, - ) -> MessageNonce { - // this implementation is not rewarding relayers at all - 0 - } -} - -/// Callback that is called at the source chain (bridge hub) when we get delivery confirmation -/// for new messages. -pub trait OnMessagesDelivered { - /// New messages delivery has been confirmed. - /// - /// The only argument of the function is the number of yet undelivered messages - fn on_messages_delivered(lane: LaneId, enqueued_messages: MessageNonce); -} - -impl OnMessagesDelivered for () { - fn on_messages_delivered(_lane: LaneId, _enqueued_messages: MessageNonce) {} -} - -/// Send message artifacts. -#[derive(Eq, RuntimeDebug, PartialEq)] -pub struct SendMessageArtifacts { - /// Nonce of the message. - pub nonce: MessageNonce, - /// Number of enqueued messages at the lane, after the message is sent. - pub enqueued_messages: MessageNonce, -} - -/// Messages bridge API to be used from other pallets. -pub trait MessagesBridge { - /// Error type. 
- type Error: Debug; - - /// Intermediary structure returned by `validate_message()`. - /// - /// It can than be passed to `send_message()` in order to actually send the message - /// on the bridge. - type SendMessageArgs; - - /// Check if the message can be sent over the bridge. - fn validate_message( - lane: LaneId, - message: &Payload, - ) -> Result; - - /// Send message over the bridge. - /// - /// Returns unique message nonce or error if send has failed. - fn send_message(message: Self::SendMessageArgs) -> SendMessageArtifacts; -} - -/// Structure that may be used in place of `TargetHeaderChain` and -/// `MessageDeliveryAndDispatchPayment` on chains, where outbound messages are forbidden. -pub struct ForbidOutboundMessages; - -/// Error message that is used in `ForbidOutboundMessages` implementation. -const ALL_OUTBOUND_MESSAGES_REJECTED: &str = - "This chain is configured to reject all outbound messages"; - -impl TargetHeaderChain for ForbidOutboundMessages { - type MessagesDeliveryProof = (); - - fn verify_message(_payload: &Payload) -> Result<(), VerificationError> { - Err(VerificationError::Other(ALL_OUTBOUND_MESSAGES_REJECTED)) - } - - fn verify_messages_delivery_proof( - _proof: Self::MessagesDeliveryProof, - ) -> Result<(LaneId, InboundLaneData), VerificationError> { - Err(VerificationError::Other(ALL_OUTBOUND_MESSAGES_REJECTED)) - } -} - -impl DeliveryConfirmationPayments for ForbidOutboundMessages { - type Error = &'static str; - - fn pay_reward( - _lane_id: LaneId, - _messages_relayers: VecDeque>, - _confirmation_relayer: &AccountId, - _received_range: &RangeInclusive, - ) -> MessageNonce { - 0 - } -} diff --git a/primitives/messages/src/storage_keys.rs b/primitives/messages/src/storage_keys.rs deleted file mode 100644 index 8eedf8fcc..000000000 --- a/primitives/messages/src/storage_keys.rs +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Storage keys of bridge messages pallet. - -/// Name of the `OPERATING_MODE_VALUE_NAME` storage value. -pub const OPERATING_MODE_VALUE_NAME: &str = "PalletOperatingMode"; -/// Name of the `OutboundMessages` storage map. -pub const OUTBOUND_MESSAGES_MAP_NAME: &str = "OutboundMessages"; -/// Name of the `OutboundLanes` storage map. -pub const OUTBOUND_LANES_MAP_NAME: &str = "OutboundLanes"; -/// Name of the `InboundLanes` storage map. -pub const INBOUND_LANES_MAP_NAME: &str = "InboundLanes"; - -use crate::{LaneId, MessageKey, MessageNonce}; - -use codec::Encode; -use frame_support::Blake2_128Concat; -use sp_core::storage::StorageKey; - -/// Storage key of the `PalletOperatingMode` value in the runtime storage. -pub fn operating_mode_key(pallet_prefix: &str) -> StorageKey { - StorageKey( - bp_runtime::storage_value_final_key( - pallet_prefix.as_bytes(), - OPERATING_MODE_VALUE_NAME.as_bytes(), - ) - .to_vec(), - ) -} - -/// Storage key of the outbound message in the runtime storage. -pub fn message_key(pallet_prefix: &str, lane: &LaneId, nonce: MessageNonce) -> StorageKey { - bp_runtime::storage_map_final_key::( - pallet_prefix, - OUTBOUND_MESSAGES_MAP_NAME, - &MessageKey { lane_id: *lane, nonce }.encode(), - ) -} - -/// Storage key of the outbound message lane state in the runtime storage. 
-pub fn outbound_lane_data_key(pallet_prefix: &str, lane: &LaneId) -> StorageKey { - bp_runtime::storage_map_final_key::( - pallet_prefix, - OUTBOUND_LANES_MAP_NAME, - &lane.encode(), - ) -} - -/// Storage key of the inbound message lane state in the runtime storage. -pub fn inbound_lane_data_key(pallet_prefix: &str, lane: &LaneId) -> StorageKey { - bp_runtime::storage_map_final_key::( - pallet_prefix, - INBOUND_LANES_MAP_NAME, - &lane.encode(), - ) -} - -#[cfg(test)] -mod tests { - use super::*; - use hex_literal::hex; - - #[test] - fn operating_mode_key_computed_properly() { - // If this test fails, then something has been changed in module storage that is possibly - // breaking all existing message relays. - let storage_key = operating_mode_key("BridgeMessages").0; - assert_eq!( - storage_key, - hex!("dd16c784ebd3390a9bc0357c7511ed010f4cf0917788d791142ff6c1f216e7b3").to_vec(), - "Unexpected storage key: {}", - hex::encode(&storage_key), - ); - } - - #[test] - fn storage_message_key_computed_properly() { - // If this test fails, then something has been changed in module storage that is breaking - // all previously crafted messages proofs. - let storage_key = message_key("BridgeMessages", &LaneId(*b"test"), 42).0; - assert_eq!( - storage_key, - hex!("dd16c784ebd3390a9bc0357c7511ed018a395e6242c6813b196ca31ed0547ea79446af0e09063bd4a7874aef8a997cec746573742a00000000000000").to_vec(), - "Unexpected storage key: {}", - hex::encode(&storage_key), - ); - } - - #[test] - fn outbound_lane_data_key_computed_properly() { - // If this test fails, then something has been changed in module storage that is breaking - // all previously crafted outbound lane state proofs. 
- let storage_key = outbound_lane_data_key("BridgeMessages", &LaneId(*b"test")).0; - assert_eq!( - storage_key, - hex!("dd16c784ebd3390a9bc0357c7511ed0196c246acb9b55077390e3ca723a0ca1f44a8995dd50b6657a037a7839304535b74657374").to_vec(), - "Unexpected storage key: {}", - hex::encode(&storage_key), - ); - } - - #[test] - fn inbound_lane_data_key_computed_properly() { - // If this test fails, then something has been changed in module storage that is breaking - // all previously crafted inbound lane state proofs. - let storage_key = inbound_lane_data_key("BridgeMessages", &LaneId(*b"test")).0; - assert_eq!( - storage_key, - hex!("dd16c784ebd3390a9bc0357c7511ed01e5f83cf83f2127eb47afdc35d6e43fab44a8995dd50b6657a037a7839304535b74657374").to_vec(), - "Unexpected storage key: {}", - hex::encode(&storage_key), - ); - } -} diff --git a/primitives/messages/src/target_chain.rs b/primitives/messages/src/target_chain.rs deleted file mode 100644 index 388ce16cc..000000000 --- a/primitives/messages/src/target_chain.rs +++ /dev/null @@ -1,212 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives of messages module, that are used on the target chain. 
- -use crate::{ - LaneId, Message, MessageKey, MessageNonce, MessagePayload, OutboundLaneData, VerificationError, -}; - -use bp_runtime::{messages::MessageDispatchResult, Size}; -use codec::{Decode, Encode, Error as CodecError}; -use frame_support::{weights::Weight, Parameter}; -use scale_info::TypeInfo; -use sp_core::RuntimeDebug; -use sp_std::{collections::btree_map::BTreeMap, fmt::Debug, marker::PhantomData, prelude::*}; - -/// Proved messages from the source chain. -pub type ProvedMessages = BTreeMap>; - -/// Proved messages from single lane of the source chain. -#[derive(RuntimeDebug, Encode, Decode, Clone, PartialEq, Eq, TypeInfo)] -pub struct ProvedLaneMessages { - /// Optional outbound lane state. - pub lane_state: Option, - /// Messages sent through this lane. - pub messages: Vec, -} - -/// Message data with decoded dispatch payload. -#[derive(RuntimeDebug)] -pub struct DispatchMessageData { - /// Result of dispatch payload decoding. - pub payload: Result, -} - -/// Message with decoded dispatch payload. -#[derive(RuntimeDebug)] -pub struct DispatchMessage { - /// Message key. - pub key: MessageKey, - /// Message data with decoded dispatch payload. - pub data: DispatchMessageData, -} - -/// Source chain API. Used by target chain, to verify source chain proofs. -/// -/// All implementations of this trait should only work with finalized data that -/// can't change. Wrong implementation may lead to invalid lane states (i.e. lane -/// that's stuck) and/or processing messages without paying fees. -pub trait SourceHeaderChain { - /// Proof that messages are sent from source chain. This may also include proof - /// of corresponding outbound lane states. - type MessagesProof: Parameter + Size; - - /// Verify messages proof and return proved messages. - /// - /// Returns error if either proof is incorrect, or the number of messages in the proof - /// is not matching the `messages_count`. 
- /// - /// Messages vector is required to be sorted by nonce within each lane. Out-of-order - /// messages will be rejected. - /// - /// The `messages_count` argument verification (sane limits) is supposed to be made - /// outside this function. This function only verifies that the proof declares exactly - /// `messages_count` messages. - fn verify_messages_proof( - proof: Self::MessagesProof, - messages_count: u32, - ) -> Result, VerificationError>; -} - -/// Called when inbound message is received. -pub trait MessageDispatch { - /// Decoded message payload type. Valid message may contain invalid payload. In this case - /// message is delivered, but dispatch fails. Therefore, two separate types of payload - /// (opaque `MessagePayload` used in delivery and this `DispatchPayload` used in dispatch). - type DispatchPayload: Decode; - - /// Fine-grained result of single message dispatch (for better diagnostic purposes) - type DispatchLevelResult: Clone + sp_std::fmt::Debug + Eq; - - /// Returns `true` if dispatcher is ready to accept additional messages. The `false` should - /// be treated as a hint by both dispatcher and its consumers - i.e. dispatcher shall not - /// simply drop messages if it returns `false`. The consumer may still call the `dispatch` - /// if dispatcher has returned `false`. - /// - /// We check it in the messages delivery transaction prologue. So if it becomes `false` - /// after some portion of messages is already dispatched, it doesn't fail the whole transaction. - fn is_active() -> bool; - - /// Estimate dispatch weight. - /// - /// This function must return correct upper bound of dispatch weight. The return value - /// of this function is expected to match return value of the corresponding - /// `FromInboundLaneApi::message_details().dispatch_weight` call. - fn dispatch_weight(message: &mut DispatchMessage) -> Weight; - - /// Called when inbound message is received. 
- /// - /// It is up to the implementers of this trait to determine whether the message - /// is invalid (i.e. improperly encoded, has too large weight, ...) or not. - fn dispatch( - message: DispatchMessage, - ) -> MessageDispatchResult; -} - -/// Manages payments that are happening at the target chain during message delivery transaction. -pub trait DeliveryPayments { - /// Error type. - type Error: Debug + Into<&'static str>; - - /// Pay rewards for delivering messages to the given relayer. - /// - /// This method is called during message delivery transaction which has been submitted - /// by the `relayer`. The transaction brings `total_messages` messages but only - /// `valid_messages` have been accepted. The post-dispatch transaction weight is the - /// `actual_weight`. - fn pay_reward( - relayer: AccountId, - total_messages: MessageNonce, - valid_messages: MessageNonce, - actual_weight: Weight, - ); -} - -impl Default for ProvedLaneMessages { - fn default() -> Self { - ProvedLaneMessages { lane_state: None, messages: Vec::new() } - } -} - -impl From for DispatchMessage { - fn from(message: Message) -> Self { - DispatchMessage { key: message.key, data: message.payload.into() } - } -} - -impl From for DispatchMessageData { - fn from(payload: MessagePayload) -> Self { - DispatchMessageData { payload: DispatchPayload::decode(&mut &payload[..]) } - } -} - -impl DeliveryPayments for () { - type Error = &'static str; - - fn pay_reward( - _relayer: AccountId, - _total_messages: MessageNonce, - _valid_messages: MessageNonce, - _actual_weight: Weight, - ) { - // this implementation is not rewarding relayer at all - } -} - -/// Structure that may be used in place of `SourceHeaderChain` and `MessageDispatch` on chains, -/// where inbound messages are forbidden. -pub struct ForbidInboundMessages( - PhantomData<(MessagesProof, DispatchPayload)>, -); - -/// Error message that is used in `ForbidInboundMessages` implementation. 
-const ALL_INBOUND_MESSAGES_REJECTED: &str = - "This chain is configured to reject all inbound messages"; - -impl SourceHeaderChain - for ForbidInboundMessages -{ - type MessagesProof = MessagesProof; - - fn verify_messages_proof( - _proof: Self::MessagesProof, - _messages_count: u32, - ) -> Result, VerificationError> { - Err(VerificationError::Other(ALL_INBOUND_MESSAGES_REJECTED)) - } -} - -impl MessageDispatch - for ForbidInboundMessages -{ - type DispatchPayload = DispatchPayload; - type DispatchLevelResult = (); - - fn is_active() -> bool { - false - } - - fn dispatch_weight(_message: &mut DispatchMessage) -> Weight { - Weight::MAX - } - - fn dispatch( - _: DispatchMessage, - ) -> MessageDispatchResult { - MessageDispatchResult { unspent_weight: Weight::zero(), dispatch_level_result: () } - } -} diff --git a/primitives/parachains/Cargo.toml b/primitives/parachains/Cargo.toml deleted file mode 100644 index 1606dbfcd..000000000 --- a/primitives/parachains/Cargo.toml +++ /dev/null @@ -1,43 +0,0 @@ -[package] -name = "bp-parachains" -description = "Primitives of parachains module." 
-version = "0.7.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } -impl-trait-for-tuples = "0.2" -scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } - -# Bridge dependencies - -bp-header-chain = { path = "../header-chain", default-features = false } -bp-polkadot-core = { path = "../polkadot-core", default-features = false } -bp-runtime = { path = "../runtime", default-features = false } - -# Substrate dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[features] -default = ["std"] -std = [ - "bp-header-chain/std", - "bp-polkadot-core/std", - "bp-runtime/std", - "codec/std", - "frame-support/std", - "scale-info/std", - "sp-core/std", - "sp-runtime/std", - "sp-std/std", -] diff --git a/primitives/parachains/src/lib.rs b/primitives/parachains/src/lib.rs deleted file mode 100644 index 692bbd99e..000000000 --- a/primitives/parachains/src/lib.rs +++ /dev/null @@ -1,184 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives of parachains module. - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -pub use bp_header_chain::StoredHeaderData; - -use bp_polkadot_core::{ - parachains::{ParaHash, ParaHead, ParaHeadsProof, ParaId}, - BlockNumber as RelayBlockNumber, Hash as RelayBlockHash, -}; -use bp_runtime::{ - BlockNumberOf, Chain, HashOf, HeaderOf, Parachain, StorageDoubleMapKeyProvider, - StorageMapKeyProvider, -}; -use codec::{Decode, Encode, MaxEncodedLen}; -use frame_support::{Blake2_128Concat, Twox64Concat}; -use scale_info::TypeInfo; -use sp_core::storage::StorageKey; -use sp_runtime::{traits::Header as HeaderT, RuntimeDebug}; -use sp_std::{marker::PhantomData, prelude::*}; - -/// Best known parachain head hash. -#[derive(Clone, Decode, Encode, MaxEncodedLen, PartialEq, RuntimeDebug, TypeInfo)] -pub struct BestParaHeadHash { - /// Number of relay block where this head has been read. - /// - /// Parachain head is opaque to relay chain. So we can't simply decode it as a header of - /// parachains and call `block_number()` on it. Instead, we're using the fact that parachain - /// head is always built on top of previous head (because it is blockchain) and relay chain - /// always imports parachain heads in order. What it means for us is that at any given - /// **finalized** relay block `B`, head of parachain will be ancestor (or the same) of all - /// parachain heads available at descendants of `B`. - pub at_relay_block_number: RelayBlockNumber, - /// Hash of parachain head. 
- pub head_hash: ParaHash, -} - -/// Best known parachain head as it is stored in the runtime storage. -#[derive(Decode, Encode, MaxEncodedLen, PartialEq, RuntimeDebug, TypeInfo)] -pub struct ParaInfo { - /// Best known parachain head hash. - pub best_head_hash: BestParaHeadHash, - /// Current ring buffer position for this parachain. - pub next_imported_hash_position: u32, -} - -/// Returns runtime storage key of given parachain head at the source chain. -/// -/// The head is stored by the `paras` pallet in the `Heads` map. -pub fn parachain_head_storage_key_at_source( - paras_pallet_name: &str, - para_id: ParaId, -) -> StorageKey { - bp_runtime::storage_map_final_key::(paras_pallet_name, "Heads", ¶_id.encode()) -} - -/// Can be use to access the runtime storage key of the parachains info at the target chain. -/// -/// The info is stored by the `pallet-bridge-parachains` pallet in the `ParasInfo` map. -pub struct ParasInfoKeyProvider; -impl StorageMapKeyProvider for ParasInfoKeyProvider { - const MAP_NAME: &'static str = "ParasInfo"; - - type Hasher = Blake2_128Concat; - type Key = ParaId; - type Value = ParaInfo; -} - -/// Can be use to access the runtime storage key of the parachain head at the target chain. -/// -/// The head is stored by the `pallet-bridge-parachains` pallet in the `ImportedParaHeads` map. -pub struct ImportedParaHeadsKeyProvider; -impl StorageDoubleMapKeyProvider for ImportedParaHeadsKeyProvider { - const MAP_NAME: &'static str = "ImportedParaHeads"; - - type Hasher1 = Blake2_128Concat; - type Key1 = ParaId; - type Hasher2 = Blake2_128Concat; - type Key2 = ParaHash; - type Value = ParaStoredHeaderData; -} - -/// Stored data of the parachain head. It is encoded version of the -/// `bp_runtime::StoredHeaderData` structure. -/// -/// We do not know exact structure of the parachain head, so we always store encoded version -/// of the `bp_runtime::StoredHeaderData`. It is only decoded when we talk about specific parachain. 
-#[derive(Clone, Decode, Encode, PartialEq, RuntimeDebug, TypeInfo)] -pub struct ParaStoredHeaderData(pub Vec); - -impl ParaStoredHeaderData { - /// Decode stored parachain head data. - pub fn decode_parachain_head_data( - &self, - ) -> Result, HashOf>, codec::Error> { - StoredHeaderData::, HashOf>::decode(&mut &self.0[..]) - } -} - -/// Stored parachain head data builder. -pub trait ParaStoredHeaderDataBuilder { - /// Return number of parachains that are supported by this builder. - fn supported_parachains() -> u32; - - /// Try to build head data from encoded head of parachain with given id. - fn try_build(para_id: ParaId, para_head: &ParaHead) -> Option; -} - -/// Helper for using single parachain as `ParaStoredHeaderDataBuilder`. -pub struct SingleParaStoredHeaderDataBuilder(PhantomData); - -impl ParaStoredHeaderDataBuilder for SingleParaStoredHeaderDataBuilder { - fn supported_parachains() -> u32 { - 1 - } - - fn try_build(para_id: ParaId, para_head: &ParaHead) -> Option { - if para_id == ParaId(C::PARACHAIN_ID) { - let header = HeaderOf::::decode(&mut ¶_head.0[..]).ok()?; - return Some(ParaStoredHeaderData( - StoredHeaderData { number: *header.number(), state_root: *header.state_root() } - .encode(), - )) - } - None - } -} - -// Tries to build header data from each tuple member, short-circuiting on first successful one. 
-#[impl_trait_for_tuples::impl_for_tuples(1, 30)] -#[tuple_types_custom_trait_bound(Parachain)] -impl ParaStoredHeaderDataBuilder for C { - fn supported_parachains() -> u32 { - let mut result = 0; - for_tuples!( #( - result += SingleParaStoredHeaderDataBuilder::::supported_parachains(); - )* ); - result - } - - fn try_build(para_id: ParaId, para_head: &ParaHead) -> Option { - for_tuples!( #( - let maybe_para_head = SingleParaStoredHeaderDataBuilder::::try_build(para_id, para_head); - if let Some(maybe_para_head) = maybe_para_head { - return Some(maybe_para_head); - } - )* ); - - None - } -} - -/// A minimized version of `pallet-bridge-parachains::Call` that can be used without a runtime. -#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, TypeInfo)] -#[allow(non_camel_case_types)] -pub enum BridgeParachainCall { - /// `pallet-bridge-parachains::Call::submit_parachain_heads` - #[codec(index = 0)] - submit_parachain_heads { - /// Relay chain block, for which we have submitted the `parachain_heads_proof`. - at_relay_block: (RelayBlockNumber, RelayBlockHash), - /// Parachain identifiers and their head hashes. - parachains: Vec<(ParaId, ParaHash)>, - /// Parachain heads proof. - parachain_heads_proof: ParaHeadsProof, - }, -} diff --git a/primitives/polkadot-core/Cargo.toml b/primitives/polkadot-core/Cargo.toml deleted file mode 100644 index b85586405..000000000 --- a/primitives/polkadot-core/Cargo.toml +++ /dev/null @@ -1,49 +0,0 @@ -[package] -name = "bp-polkadot-core" -description = "Primitives of Polkadot-like runtime." 
-version = "0.7.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } -parity-util-mem = { version = "0.12.0", optional = true } -scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -serde = { optional = true, features = ["derive"], workspace = true, default-features = true } - -# Bridge Dependencies - -bp-messages = { path = "../messages", default-features = false } -bp-runtime = { path = "../runtime", default-features = false } - -# Substrate Based Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -frame-system = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[dev-dependencies] -hex = "0.4" - -[features] -default = ["std"] -std = [ - "bp-messages/std", - "bp-runtime/std", - "codec/std", - "frame-support/std", - "frame-system/std", - "parity-util-mem", - "scale-info/std", - "serde", - "sp-core/std", - "sp-runtime/std", - "sp-std/std", -] diff --git a/primitives/polkadot-core/src/lib.rs b/primitives/polkadot-core/src/lib.rs deleted file mode 100644 index e83be59b2..000000000 --- a/primitives/polkadot-core/src/lib.rs +++ /dev/null @@ -1,384 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives of the Polkadot-like chains. - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -use bp_messages::MessageNonce; -use bp_runtime::{ - self, - extensions::{ - ChargeTransactionPayment, CheckEra, CheckGenesis, CheckNonZeroSender, CheckNonce, - CheckSpecVersion, CheckTxVersion, CheckWeight, GenericSignedExtension, - SignedExtensionSchema, - }, - EncodedOrDecodedCall, StorageMapKeyProvider, TransactionEra, -}; -use frame_support::{ - dispatch::DispatchClass, - parameter_types, - weights::{ - constants::{BlockExecutionWeight, WEIGHT_REF_TIME_PER_SECOND}, - Weight, - }, - Blake2_128Concat, -}; -use frame_system::limits; -use sp_core::{storage::StorageKey, Hasher as HasherT}; -use sp_runtime::{ - generic, - traits::{BlakeTwo256, IdentifyAccount, Verify}, - MultiAddress, MultiSignature, OpaqueExtrinsic, -}; -use sp_std::prelude::Vec; - -// Re-export's to avoid extra substrate dependencies in chain-specific crates. -pub use frame_support::{weights::constants::ExtrinsicBaseWeight, Parameter}; -pub use sp_runtime::{traits::Convert, Perbill}; - -pub mod parachains; - -/// Maximal number of GRANDPA authorities at Polkadot-like chains. -/// -/// Ideally, we would set it to the value of `MaxAuthorities` constant from bridged runtime -/// configurations. 
But right now it is set to the `100_000`, which makes PoV size for -/// our bridge hub parachains huge. So let's stick to the real-world value here. -/// -/// Right now both Kusama and Polkadot aim to have around 1000 validators. Let's be safe here and -/// take a bit more here. -pub const MAX_AUTHORITIES_COUNT: u32 = 1_256; - -/// Reasonable number of headers in the `votes_ancestries` on Polkadot-like chains. -/// -/// See [`bp-header-chain::ChainWithGrandpa`] for more details. -/// -/// This value comes from recent (December, 2023) Kusama and Polkadot headers. There are no -/// justifications with any additional headers in votes ancestry, so reasonable headers may -/// be set to zero. But we assume that there may be small GRANDPA lags, so we're leaving some -/// reserve here. -pub const REASONABLE_HEADERS_IN_JUSTIFICATION_ANCESTRY: u32 = 2; - -/// Average header size in `votes_ancestries` field of justification on Polkadot-like -/// chains. -/// -/// See [`bp-header-chain::ChainWithGrandpa`] for more details. -/// -/// This value comes from recent (December, 2023) Kusama headers. Most of headers are `327` bytes -/// there, but let's have some reserve and make it 1024. -pub const AVERAGE_HEADER_SIZE: u32 = 1024; - -/// Approximate maximal header size on Polkadot-like chains. -/// -/// See [`bp-header-chain::ChainWithGrandpa`] for more details. -/// -/// This value comes from recent (December, 2023) Kusama headers. Maximal header is a mandatory -/// header. In its SCALE-encoded form it is `113407` bytes. Let's have some reserve here. -pub const MAX_MANDATORY_HEADER_SIZE: u32 = 120 * 1024; - -/// Number of extra bytes (excluding size of storage value itself) of storage proof, built at -/// Polkadot-like chain. This mostly depends on number of entries in the storage trie. -/// Some reserve is reserved to account future chain growth. 
-/// -/// To compute this value, we've synced Kusama chain blocks [0; 6545733] to see if there were -/// any significant changes of the storage proof size (NO): -/// -/// - at block 3072 the storage proof size overhead was 579 bytes; -/// - at block 2479616 it was 578 bytes; -/// - at block 4118528 it was 711 bytes; -/// - at block 6540800 it was 779 bytes. -/// -/// The number of storage entries at the block 6546170 was 351207 and number of trie nodes in -/// the storage proof was 5 (log(16, 351207) ~ 4.6). -/// -/// So the assumption is that the storage proof size overhead won't be larger than 1024 in the -/// nearest future. If it'll ever break this barrier, then we'll need to update this constant -/// at next runtime upgrade. -pub const EXTRA_STORAGE_PROOF_SIZE: u32 = 1024; - -/// All Polkadot-like chains allow normal extrinsics to fill block up to 75 percent. -/// -/// This is a copy-paste from the Polkadot repo's `polkadot-runtime-common` crate. -const NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(75); - -/// All Polkadot-like chains allow 2 seconds of compute with a 6-second average block time. -/// -/// This is a copy-paste from the Polkadot repo's `polkadot-runtime-common` crate. -pub const MAXIMUM_BLOCK_WEIGHT: Weight = - Weight::from_parts(WEIGHT_REF_TIME_PER_SECOND.saturating_mul(2), u64::MAX); - -/// All Polkadot-like chains assume that an on-initialize consumes 1 percent of the weight on -/// average, hence a single extrinsic will not be allowed to consume more than -/// `AvailableBlockRatio - 1 percent`. -/// -/// This is a copy-paste from the Polkadot repo's `polkadot-runtime-common` crate. -pub const AVERAGE_ON_INITIALIZE_RATIO: Perbill = Perbill::from_percent(1); - -parameter_types! { - /// All Polkadot-like chains have maximal block size set to 5MB. - /// - /// This is a copy-paste from the Polkadot repo's `polkadot-runtime-common` crate. 
- pub BlockLength: limits::BlockLength = limits::BlockLength::max_with_normal_ratio( - 5 * 1024 * 1024, - NORMAL_DISPATCH_RATIO, - ); - /// All Polkadot-like chains have the same block weights. - /// - /// This is a copy-paste from the Polkadot repo's `polkadot-runtime-common` crate. - pub BlockWeights: limits::BlockWeights = limits::BlockWeights::builder() - .base_block(BlockExecutionWeight::get()) - .for_class(DispatchClass::all(), |weights| { - weights.base_extrinsic = ExtrinsicBaseWeight::get(); - }) - .for_class(DispatchClass::Normal, |weights| { - weights.max_total = Some(NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT); - }) - .for_class(DispatchClass::Operational, |weights| { - weights.max_total = Some(MAXIMUM_BLOCK_WEIGHT); - // Operational transactions have an extra reserved space, so that they - // are included even if block reached `MAXIMUM_BLOCK_WEIGHT`. - weights.reserved = Some( - MAXIMUM_BLOCK_WEIGHT - NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT, - ); - }) - .avg_block_initialization(AVERAGE_ON_INITIALIZE_RATIO) - .build_or_panic(); -} - -// TODO [#78] may need to be updated after https://github.com/paritytech/parity-bridges-common/issues/78 -/// Maximal number of messages in single delivery transaction. -pub const MAX_MESSAGES_IN_DELIVERY_TRANSACTION: MessageNonce = 128; - -/// Maximal number of bytes, included in the signed Polkadot-like transaction apart from the encoded -/// call itself. -/// -/// Can be computed by subtracting encoded call size from raw transaction size. -pub const TX_EXTRA_BYTES: u32 = 256; - -/// Re-export `time_units` to make usage easier. -pub use time_units::*; - -/// Human readable time units defined in terms of number of blocks. -pub mod time_units { - use super::BlockNumber; - - /// Milliseconds between Polkadot-like chain blocks. - pub const MILLISECS_PER_BLOCK: u64 = 6000; - /// Slot duration in Polkadot-like chain consensus algorithms. 
- pub const SLOT_DURATION: u64 = MILLISECS_PER_BLOCK; - - /// A minute, expressed in Polkadot-like chain blocks. - pub const MINUTES: BlockNumber = 60_000 / (MILLISECS_PER_BLOCK as BlockNumber); - /// A hour, expressed in Polkadot-like chain blocks. - pub const HOURS: BlockNumber = MINUTES * 60; - /// A day, expressed in Polkadot-like chain blocks. - pub const DAYS: BlockNumber = HOURS * 24; -} - -/// Block number type used in Polkadot-like chains. -pub type BlockNumber = u32; - -/// Hash type used in Polkadot-like chains. -pub type Hash = ::Out; - -/// Hashing type. -pub type Hashing = BlakeTwo256; - -/// The type of object that can produce hashes on Polkadot-like chains. -pub type Hasher = BlakeTwo256; - -/// The header type used by Polkadot-like chains. -pub type Header = generic::Header; - -/// Signature type used by Polkadot-like chains. -pub type Signature = MultiSignature; - -/// Public key of account on Polkadot-like chains. -pub type AccountPublic = ::Signer; - -/// Id of account on Polkadot-like chains. -pub type AccountId = ::AccountId; - -/// Address of account on Polkadot-like chains. -pub type AccountAddress = MultiAddress; - -/// Nonce of a transaction on the Polkadot-like chains. -pub type Nonce = u32; - -/// Block type of Polkadot-like chains. -pub type Block = generic::Block; - -/// Polkadot-like block signed with a Justification. -pub type SignedBlock = generic::SignedBlock; - -/// The balance of an account on Polkadot-like chain. -pub type Balance = u128; - -/// Unchecked Extrinsic type. -pub type UncheckedExtrinsic = - generic::UncheckedExtrinsic, Signature, SignedExt>; - -/// Account address, used by the Polkadot-like chain. -pub type Address = MultiAddress; - -/// Returns maximal extrinsic size on all Polkadot-like chains. -pub fn max_extrinsic_size() -> u32 { - *BlockLength::get().max.get(DispatchClass::Normal) -} - -/// Returns maximal extrinsic weight on all Polkadot-like chains. 
-pub fn max_extrinsic_weight() -> Weight { - BlockWeights::get() - .get(DispatchClass::Normal) - .max_extrinsic - .unwrap_or(Weight::MAX) -} - -/// Provides a storage key for account data. -/// -/// We need to use this approach when we don't have access to the runtime. -/// The equivalent command to invoke in case full `Runtime` is known is this: -/// `let key = frame_system::Account::::storage_map_final_key(&account_id);` -pub struct AccountInfoStorageMapKeyProvider; - -impl StorageMapKeyProvider for AccountInfoStorageMapKeyProvider { - const MAP_NAME: &'static str = "Account"; - type Hasher = Blake2_128Concat; - type Key = AccountId; - // This should actually be `AccountInfo`, but we don't use this property in order to decode the - // data. So we use `Vec` as if we would work with encoded data. - type Value = Vec; -} - -impl AccountInfoStorageMapKeyProvider { - /// Name of the system pallet. - const PALLET_NAME: &'static str = "System"; - - /// Return storage key for given account data. - pub fn final_key(id: &AccountId) -> StorageKey { - ::final_key(Self::PALLET_NAME, id) - } -} - -/// Extra signed extension data that is used by most chains. -pub type CommonSignedExtra = ( - CheckNonZeroSender, - CheckSpecVersion, - CheckTxVersion, - CheckGenesis, - CheckEra, - CheckNonce, - CheckWeight, - ChargeTransactionPayment, -); - -/// Extra signed extension data that starts with `CommonSignedExtra`. -pub type SuffixedCommonSignedExtension = - GenericSignedExtension<(CommonSignedExtra, Suffix)>; - -/// Helper trait to define some extra methods on `SuffixedCommonSignedExtension`. -pub trait SuffixedCommonSignedExtensionExt { - /// Create signed extension from its components. - fn from_params( - spec_version: u32, - transaction_version: u32, - era: TransactionEra, - genesis_hash: Hash, - nonce: Nonce, - tip: Balance, - extra: (Suffix::Payload, Suffix::AdditionalSigned), - ) -> Self; - - /// Return transaction nonce. 
- fn nonce(&self) -> Nonce; - - /// Return transaction tip. - fn tip(&self) -> Balance; -} - -impl SuffixedCommonSignedExtensionExt for SuffixedCommonSignedExtension -where - Suffix: SignedExtensionSchema, -{ - fn from_params( - spec_version: u32, - transaction_version: u32, - era: TransactionEra, - genesis_hash: Hash, - nonce: Nonce, - tip: Balance, - extra: (Suffix::Payload, Suffix::AdditionalSigned), - ) -> Self { - GenericSignedExtension::new( - ( - ( - (), // non-zero sender - (), // spec version - (), // tx version - (), // genesis - era.frame_era(), // era - nonce.into(), // nonce (compact encoding) - (), // Check weight - tip.into(), // transaction payment / tip (compact encoding) - ), - extra.0, - ), - Some(( - ( - (), - spec_version, - transaction_version, - genesis_hash, - era.signed_payload(genesis_hash), - (), - (), - (), - ), - extra.1, - )), - ) - } - - fn nonce(&self) -> Nonce { - let common_payload = self.payload.0; - common_payload.5 .0 - } - - fn tip(&self) -> Balance { - let common_payload = self.payload.0; - common_payload.7 .0 - } -} - -/// Signed extension that is used by most chains. -pub type CommonSignedExtension = SuffixedCommonSignedExtension<()>; - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn should_generate_storage_key() { - let acc = [ - 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, - 25, 26, 27, 28, 29, 30, 31, 32, - ] - .into(); - let key = AccountInfoStorageMapKeyProvider::final_key(&acc); - assert_eq!(hex::encode(key), "26aa394eea5630e07c48ae0c9558cef7b99d880ec681799c0cf30e8886371da92dccd599abfe1920a1cff8a7358231430102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"); - } -} diff --git a/primitives/polkadot-core/src/parachains.rs b/primitives/polkadot-core/src/parachains.rs deleted file mode 100644 index 433cd2845..000000000 --- a/primitives/polkadot-core/src/parachains.rs +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. 
-// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives of polkadot-like chains, that are related to parachains functionality. -//! -//! Even though this (bridges) repository references polkadot repository, we can't -//! reference polkadot crates from pallets. That's because bridges repository is -//! included in the Cumulus repository and included pallets are used by Cumulus -//! parachains. Having pallets that are referencing polkadot, would mean that there may -//! be two versions of polkadot crates included in the runtime. Which is bad. - -use bp_runtime::{RawStorageProof, Size}; -use codec::{CompactAs, Decode, Encode, MaxEncodedLen}; -use scale_info::TypeInfo; -use sp_core::Hasher; -use sp_runtime::RuntimeDebug; -use sp_std::vec::Vec; - -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; - -#[cfg(feature = "std")] -use parity_util_mem::MallocSizeOf; - -/// Parachain id. -/// -/// This is an equivalent of the `polkadot_parachain_primitives::Id`, which is a compact-encoded -/// `u32`. -#[derive( - Clone, - CompactAs, - Copy, - Decode, - Default, - Encode, - Eq, - Hash, - MaxEncodedLen, - Ord, - PartialEq, - PartialOrd, - RuntimeDebug, - TypeInfo, -)] -pub struct ParaId(pub u32); - -impl From for ParaId { - fn from(id: u32) -> Self { - ParaId(id) - } -} - -/// Parachain head. 
-/// -/// This is an equivalent of the `polkadot_parachain_primitives::HeadData`. -/// -/// The parachain head means (at least in Cumulus) a SCALE-encoded parachain header. -#[derive( - PartialEq, Eq, Clone, PartialOrd, Ord, Encode, Decode, RuntimeDebug, TypeInfo, Default, -)] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Hash, MallocSizeOf))] -pub struct ParaHead(pub Vec); - -impl ParaHead { - /// Returns the hash of this head data. - pub fn hash(&self) -> crate::Hash { - sp_runtime::traits::BlakeTwo256::hash(&self.0) - } -} - -/// Parachain head hash. -pub type ParaHash = crate::Hash; - -/// Parachain head hasher. -pub type ParaHasher = crate::Hasher; - -/// Raw storage proof of parachain heads, stored in polkadot-like chain runtime. -#[derive(Clone, Decode, Encode, Eq, PartialEq, RuntimeDebug, TypeInfo)] -pub struct ParaHeadsProof { - /// Unverified storage proof of finalized parachain heads. - pub storage_proof: RawStorageProof, -} - -impl Size for ParaHeadsProof { - fn size(&self) -> u32 { - u32::try_from( - self.storage_proof - .iter() - .fold(0usize, |sum, node| sum.saturating_add(node.len())), - ) - .unwrap_or(u32::MAX) - } -} diff --git a/primitives/relayers/Cargo.toml b/primitives/relayers/Cargo.toml deleted file mode 100644 index 46bc034ef..000000000 --- a/primitives/relayers/Cargo.toml +++ /dev/null @@ -1,42 +0,0 @@ -[package] -name = "bp-relayers" -description = "Primitives of relayers module." 
-version = "0.7.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["bit-vec", "derive"] } -scale-info = { version = "2.11.1", default-features = false, features = ["bit-vec", "derive"] } - -# Bridge Dependencies - -bp-messages = { path = "../messages", default-features = false } -bp-runtime = { path = "../runtime", default-features = false } - -# Substrate Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[dev-dependencies] -hex = "0.4" -hex-literal = "0.4" - -[features] -default = ["std"] -std = [ - "bp-messages/std", - "bp-runtime/std", - "codec/std", - "frame-support/std", - "scale-info/std", - "sp-runtime/std", - "sp-std/std", -] diff --git a/primitives/relayers/src/lib.rs b/primitives/relayers/src/lib.rs deleted file mode 100644 index c808c437b..000000000 --- a/primitives/relayers/src/lib.rs +++ /dev/null @@ -1,206 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives of messages module. - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -pub use registration::{Registration, StakeAndSlash}; - -use bp_messages::LaneId; -use bp_runtime::{ChainId, StorageDoubleMapKeyProvider}; -use frame_support::{traits::tokens::Preservation, Blake2_128Concat, Identity}; -use scale_info::TypeInfo; -use sp_runtime::{ - codec::{Codec, Decode, Encode, EncodeLike, MaxEncodedLen}, - traits::AccountIdConversion, - TypeId, -}; -use sp_std::{fmt::Debug, marker::PhantomData}; - -mod registration; - -/// The owner of the sovereign account that should pay the rewards. -/// -/// Each of the 2 final points connected by a bridge owns a sovereign account at each end of the -/// bridge. So here, at this end of the bridge there can be 2 sovereign accounts that pay rewards. -#[derive(Copy, Clone, Debug, Decode, Encode, Eq, PartialEq, TypeInfo, MaxEncodedLen)] -pub enum RewardsAccountOwner { - /// The sovereign account of the final chain on this end of the bridge. - ThisChain, - /// The sovereign account of the final chain on the other end of the bridge. - BridgedChain, -} - -/// Structure used to identify the account that pays a reward to the relayer. -/// -/// A bridge connects 2 bridge ends. Each one is located on a separate relay chain. The bridge ends -/// can be the final destinations of the bridge, or they can be intermediary points -/// (e.g. a bridge hub) used to forward messages between pairs of parachains on the bridged relay -/// chains. A pair of such parachains is connected using a bridge lane. Each of the 2 final -/// destinations of a bridge lane must have a sovereign account at each end of the bridge and each -/// of the sovereign accounts will pay rewards for different operations. 
So we need multiple -/// parameters to identify the account that pays a reward to the relayer. -#[derive(Copy, Clone, Debug, Decode, Encode, Eq, PartialEq, TypeInfo, MaxEncodedLen)] -pub struct RewardsAccountParams { - lane_id: LaneId, - bridged_chain_id: ChainId, - owner: RewardsAccountOwner, -} - -impl RewardsAccountParams { - /// Create a new instance of `RewardsAccountParams`. - pub const fn new( - lane_id: LaneId, - bridged_chain_id: ChainId, - owner: RewardsAccountOwner, - ) -> Self { - Self { lane_id, bridged_chain_id, owner } - } -} - -impl TypeId for RewardsAccountParams { - const TYPE_ID: [u8; 4] = *b"brap"; -} - -/// Reward payment procedure. -pub trait PaymentProcedure { - /// Error that may be returned by the procedure. - type Error: Debug; - - /// Pay reward to the relayer from the account with provided params. - fn pay_reward( - relayer: &Relayer, - rewards_account_params: RewardsAccountParams, - reward: Reward, - ) -> Result<(), Self::Error>; -} - -impl PaymentProcedure for () { - type Error = &'static str; - - fn pay_reward(_: &Relayer, _: RewardsAccountParams, _: Reward) -> Result<(), Self::Error> { - Ok(()) - } -} - -/// Reward payment procedure that does `balances::transfer` call from the account, derived from -/// given params. -pub struct PayRewardFromAccount(PhantomData<(T, Relayer)>); - -impl PayRewardFromAccount -where - Relayer: Decode + Encode, -{ - /// Return account that pays rewards based on the provided parameters. 
- pub fn rewards_account(params: RewardsAccountParams) -> Relayer { - params.into_sub_account_truncating(b"rewards-account") - } -} - -impl PaymentProcedure for PayRewardFromAccount -where - T: frame_support::traits::fungible::Mutate, - Relayer: Decode + Encode + Eq, -{ - type Error = sp_runtime::DispatchError; - - fn pay_reward( - relayer: &Relayer, - rewards_account_params: RewardsAccountParams, - reward: T::Balance, - ) -> Result<(), Self::Error> { - T::transfer( - &Self::rewards_account(rewards_account_params), - relayer, - reward, - Preservation::Expendable, - ) - .map(drop) - } -} - -/// Can be use to access the runtime storage key within the `RelayerRewards` map of the relayers -/// pallet. -pub struct RelayerRewardsKeyProvider(PhantomData<(AccountId, Reward)>); - -impl StorageDoubleMapKeyProvider for RelayerRewardsKeyProvider -where - AccountId: Codec + EncodeLike, - Reward: Codec + EncodeLike, -{ - const MAP_NAME: &'static str = "RelayerRewards"; - - type Hasher1 = Blake2_128Concat; - type Key1 = AccountId; - type Hasher2 = Identity; - type Key2 = RewardsAccountParams; - type Value = Reward; -} - -#[cfg(test)] -mod tests { - use super::*; - use bp_messages::LaneId; - use sp_runtime::testing::H256; - - #[test] - fn different_lanes_are_using_different_accounts() { - assert_eq!( - PayRewardFromAccount::<(), H256>::rewards_account(RewardsAccountParams::new( - LaneId([0, 0, 0, 0]), - *b"test", - RewardsAccountOwner::ThisChain - )), - hex_literal::hex!("62726170000000007465737400726577617264732d6163636f756e7400000000") - .into(), - ); - - assert_eq!( - PayRewardFromAccount::<(), H256>::rewards_account(RewardsAccountParams::new( - LaneId([0, 0, 0, 1]), - *b"test", - RewardsAccountOwner::ThisChain - )), - hex_literal::hex!("62726170000000017465737400726577617264732d6163636f756e7400000000") - .into(), - ); - } - - #[test] - fn different_directions_are_using_different_accounts() { - assert_eq!( - PayRewardFromAccount::<(), 
H256>::rewards_account(RewardsAccountParams::new( - LaneId([0, 0, 0, 0]), - *b"test", - RewardsAccountOwner::ThisChain - )), - hex_literal::hex!("62726170000000007465737400726577617264732d6163636f756e7400000000") - .into(), - ); - - assert_eq!( - PayRewardFromAccount::<(), H256>::rewards_account(RewardsAccountParams::new( - LaneId([0, 0, 0, 0]), - *b"test", - RewardsAccountOwner::BridgedChain - )), - hex_literal::hex!("62726170000000007465737401726577617264732d6163636f756e7400000000") - .into(), - ); - } -} diff --git a/primitives/relayers/src/registration.rs b/primitives/relayers/src/registration.rs deleted file mode 100644 index bc2d0d127..000000000 --- a/primitives/relayers/src/registration.rs +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Bridge relayers registration and slashing scheme. -//! -//! There is an option to add a refund-relayer signed extension that will compensate -//! relayer costs of the message delivery and confirmation transactions (as well as -//! required finality proofs). This extension boosts priority of message delivery -//! transactions, based on the number of bundled messages. So transaction with more -//! messages has larger priority than the transaction with less messages. -//! 
See `bridge_runtime_common::priority_calculator` for details; -//! -//! This encourages relayers to include more messages to their delivery transactions. -//! At the same time, we are not verifying storage proofs before boosting -//! priority. Instead, we simply trust relayer, when it says that transaction delivers -//! `N` messages. -//! -//! This allows relayers to submit transactions which declare large number of bundled -//! transactions to receive priority boost for free, potentially pushing actual delivery -//! transactions from the block (or even transaction queue). Such transactions are -//! not free, but their cost is relatively small. -//! -//! To alleviate that, we only boost transactions of relayers that have some stake -//! that guarantees that their transactions are valid. Such relayers get priority -//! for free, but they risk to lose their stake. - -use crate::RewardsAccountParams; - -use codec::{Decode, Encode, MaxEncodedLen}; -use scale_info::TypeInfo; -use sp_runtime::{ - traits::{Get, Zero}, - DispatchError, DispatchResult, -}; - -/// Relayer registration. -#[derive(Copy, Clone, Debug, Decode, Encode, Eq, PartialEq, TypeInfo, MaxEncodedLen)] -pub struct Registration { - /// The last block number, where this registration is considered active. - /// - /// Relayer has an option to renew his registration (this may be done before it - /// is spoiled as well). Starting from block `valid_till + 1`, relayer may `deregister` - /// himself and get his stake back. - /// - /// Please keep in mind that priority boost stops working some blocks before the - /// registration ends (see [`StakeAndSlash::RequiredRegistrationLease`]). - pub valid_till: BlockNumber, - /// Active relayer stake, which is mapped to the relayer reserved balance. - /// - /// If `stake` is less than the [`StakeAndSlash::RequiredStake`], the registration - /// is considered inactive even if `valid_till + 1` is not yet reached. 
- pub stake: Balance, -} - -/// Relayer stake-and-slash mechanism. -pub trait StakeAndSlash { - /// The stake that the relayer must have to have its transactions boosted. - type RequiredStake: Get; - /// Required **remaining** registration lease to be able to get transaction priority boost. - /// - /// If the difference between registration's `valid_till` and the current block number - /// is less than the `RequiredRegistrationLease`, it becomes inactive and relayer transaction - /// won't get priority boost. This period exists, because priority is calculated when - /// transaction is placed to the queue (and it is reevaluated periodically) and then some time - /// may pass before transaction will be included into the block. - type RequiredRegistrationLease: Get; - - /// Reserve the given amount at relayer account. - fn reserve(relayer: &AccountId, amount: Balance) -> DispatchResult; - /// `Unreserve` the given amount from relayer account. - /// - /// Returns amount that we have failed to `unreserve`. - fn unreserve(relayer: &AccountId, amount: Balance) -> Balance; - /// Slash up to `amount` from reserved balance of account `relayer` and send funds to given - /// `beneficiary`. - /// - /// Returns `Ok(_)` with non-zero balance if we have failed to repatriate some portion of stake. 
- fn repatriate_reserved( - relayer: &AccountId, - beneficiary: RewardsAccountParams, - amount: Balance, - ) -> Result; -} - -impl StakeAndSlash for () -where - Balance: Default + Zero, - BlockNumber: Default, -{ - type RequiredStake = (); - type RequiredRegistrationLease = (); - - fn reserve(_relayer: &AccountId, _amount: Balance) -> DispatchResult { - Ok(()) - } - - fn unreserve(_relayer: &AccountId, _amount: Balance) -> Balance { - Zero::zero() - } - - fn repatriate_reserved( - _relayer: &AccountId, - _beneficiary: RewardsAccountParams, - _amount: Balance, - ) -> Result { - Ok(Zero::zero()) - } -} diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml deleted file mode 100644 index d8e293cb6..000000000 --- a/primitives/runtime/Cargo.toml +++ /dev/null @@ -1,55 +0,0 @@ -[package] -name = "bp-runtime" -description = "Primitives that may be used at (bridges) runtime level." -version = "0.7.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } -hash-db = { version = "0.16.0", default-features = false } -impl-trait-for-tuples = "0.2.2" -log = { workspace = true } -num-traits = { version = "0.2", default-features = false } -scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -serde = { features = ["alloc", "derive"], workspace = true } - -# Substrate Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -frame-system = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-io = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", 
default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false, features = ["serde"] } -sp-state-machine = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-trie = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -trie-db = { version = "0.29.0", default-features = false } - -[dev-dependencies] -hex-literal = "0.4" - -[features] -default = ["std"] -std = [ - "codec/std", - "frame-support/std", - "frame-system/std", - "hash-db/std", - "log/std", - "num-traits/std", - "scale-info/std", - "serde/std", - "sp-core/std", - "sp-io/std", - "sp-runtime/std", - "sp-state-machine/std", - "sp-std/std", - "sp-trie/std", - "trie-db/std", -] diff --git a/primitives/runtime/src/chain.rs b/primitives/runtime/src/chain.rs deleted file mode 100644 index 4ec5a001a..000000000 --- a/primitives/runtime/src/chain.rs +++ /dev/null @@ -1,414 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . 
- -use crate::{ChainId, HeaderIdProvider}; - -use codec::{Codec, Decode, Encode, MaxEncodedLen}; -use frame_support::{weights::Weight, Parameter}; -use num_traits::{AsPrimitive, Bounded, CheckedSub, Saturating, SaturatingAdd, Zero}; -use sp_runtime::{ - traits::{ - AtLeast32Bit, AtLeast32BitUnsigned, Hash as HashT, Header as HeaderT, MaybeDisplay, - MaybeSerialize, MaybeSerializeDeserialize, Member, SimpleBitOps, Verify, - }, - FixedPointOperand, -}; -use sp_std::{convert::TryFrom, fmt::Debug, hash::Hash, str::FromStr, vec, vec::Vec}; - -/// Chain call, that is either SCALE-encoded, or decoded. -#[derive(Debug, Clone, PartialEq)] -pub enum EncodedOrDecodedCall { - /// The call that is SCALE-encoded. - /// - /// This variant is used when we the chain runtime is not bundled with the relay, but - /// we still need the represent call in some RPC calls or transactions. - Encoded(Vec), - /// The decoded call. - Decoded(ChainCall), -} - -impl EncodedOrDecodedCall { - /// Returns decoded call. - pub fn to_decoded(&self) -> Result { - match self { - Self::Encoded(ref encoded_call) => - ChainCall::decode(&mut &encoded_call[..]).map_err(Into::into), - Self::Decoded(ref decoded_call) => Ok(decoded_call.clone()), - } - } - - /// Converts self to decoded call. - pub fn into_decoded(self) -> Result { - match self { - Self::Encoded(encoded_call) => - ChainCall::decode(&mut &encoded_call[..]).map_err(Into::into), - Self::Decoded(decoded_call) => Ok(decoded_call), - } - } - - /// Converts self to encoded call. 
- pub fn into_encoded(self) -> Vec { - match self { - Self::Encoded(encoded_call) => encoded_call, - Self::Decoded(decoded_call) => decoded_call.encode(), - } - } -} - -impl From for EncodedOrDecodedCall { - fn from(call: ChainCall) -> EncodedOrDecodedCall { - EncodedOrDecodedCall::Decoded(call) - } -} - -impl Decode for EncodedOrDecodedCall { - fn decode(input: &mut I) -> Result { - // having encoded version is better than decoded, because decoding isn't required - // everywhere and for mocked calls it may lead to **unneeded** errors - match input.remaining_len()? { - Some(remaining_len) => { - let mut encoded_call = vec![0u8; remaining_len]; - input.read(&mut encoded_call)?; - Ok(EncodedOrDecodedCall::Encoded(encoded_call)) - }, - None => Ok(EncodedOrDecodedCall::Decoded(ChainCall::decode(input)?)), - } - } -} - -impl Encode for EncodedOrDecodedCall { - fn encode(&self) -> Vec { - match *self { - Self::Encoded(ref encoded_call) => encoded_call.clone(), - Self::Decoded(ref decoded_call) => decoded_call.encode(), - } - } -} - -/// Minimal Substrate-based chain representation that may be used from no_std environment. -pub trait Chain: Send + Sync + 'static { - /// Chain id. - const ID: ChainId; - - /// A type that fulfills the abstract idea of what a Substrate block number is. - // Constraints come from the associated Number type of `sp_runtime::traits::Header` - // See here for more info: - // https://crates.parity.io/sp_runtime/traits/trait.Header.html#associatedtype.Number - // - // Note that the `AsPrimitive` trait is required by the GRANDPA justification - // verifier, and is not usually part of a Substrate Header's Number type. - type BlockNumber: Parameter - + Member - + MaybeSerializeDeserialize - + Hash - + Copy - + Default - + MaybeDisplay - + AtLeast32BitUnsigned - + FromStr - + AsPrimitive - + Default - + Saturating - + MaxEncodedLen; - - /// A type that fulfills the abstract idea of what a Substrate hash is. 
- // Constraints come from the associated Hash type of `sp_runtime::traits::Header` - // See here for more info: - // https://crates.parity.io/sp_runtime/traits/trait.Header.html#associatedtype.Hash - type Hash: Parameter - + Member - + MaybeSerializeDeserialize - + Hash - + Ord - + Copy - + MaybeDisplay - + Default - + SimpleBitOps - + AsRef<[u8]> - + AsMut<[u8]> - + MaxEncodedLen; - - /// A type that fulfills the abstract idea of what a Substrate hasher (a type - /// that produces hashes) is. - // Constraints come from the associated Hashing type of `sp_runtime::traits::Header` - // See here for more info: - // https://crates.parity.io/sp_runtime/traits/trait.Header.html#associatedtype.Hashing - type Hasher: HashT; - - /// A type that fulfills the abstract idea of what a Substrate header is. - // See here for more info: - // https://crates.parity.io/sp_runtime/traits/trait.Header.html - type Header: Parameter - + HeaderT - + HeaderIdProvider - + MaybeSerializeDeserialize; - - /// The user account identifier type for the runtime. - type AccountId: Parameter - + Member - + MaybeSerializeDeserialize - + Debug - + MaybeDisplay - + Ord - + MaxEncodedLen; - /// Balance of an account in native tokens. - /// - /// The chain may support multiple tokens, but this particular type is for token that is used - /// to pay for transaction dispatch, to reward different relayers (headers, messages), etc. - type Balance: AtLeast32BitUnsigned - + FixedPointOperand - + Parameter - + Member - + MaybeSerializeDeserialize - + Clone - + Copy - + Bounded - + CheckedSub - + PartialOrd - + SaturatingAdd - + Zero - + TryFrom - + MaxEncodedLen; - /// Nonce of a transaction used by the chain. - type Nonce: Parameter - + Member - + MaybeSerialize - + Debug - + Default - + MaybeDisplay - + MaybeSerializeDeserialize - + AtLeast32Bit - + Copy - + MaxEncodedLen; - /// Signature type, used on this chain. 
- type Signature: Parameter + Verify; - - /// Get the maximum size (in bytes) of a Normal extrinsic at this chain. - fn max_extrinsic_size() -> u32; - /// Get the maximum weight (compute time) that a Normal extrinsic at this chain can use. - fn max_extrinsic_weight() -> Weight; -} - -/// A trait that provides the type of the underlying chain. -pub trait UnderlyingChainProvider: Send + Sync + 'static { - /// Underlying chain type. - type Chain: Chain; -} - -impl Chain for T -where - T: Send + Sync + 'static + UnderlyingChainProvider, -{ - const ID: ChainId = ::ID; - - type BlockNumber = ::BlockNumber; - type Hash = ::Hash; - type Hasher = ::Hasher; - type Header = ::Header; - type AccountId = ::AccountId; - type Balance = ::Balance; - type Nonce = ::Nonce; - type Signature = ::Signature; - - fn max_extrinsic_size() -> u32 { - ::max_extrinsic_size() - } - - fn max_extrinsic_weight() -> Weight { - ::max_extrinsic_weight() - } -} - -/// Minimal parachain representation that may be used from no_std environment. -pub trait Parachain: Chain { - /// Parachain identifier. - const PARACHAIN_ID: u32; -} - -impl Parachain for T -where - T: Chain + UnderlyingChainProvider, - ::Chain: Parachain, -{ - const PARACHAIN_ID: u32 = <::Chain as Parachain>::PARACHAIN_ID; -} - -/// Adapter for `Get` to access `PARACHAIN_ID` from `trait Parachain` -pub struct ParachainIdOf(sp_std::marker::PhantomData); -impl frame_support::traits::Get for ParachainIdOf { - fn get() -> u32 { - Para::PARACHAIN_ID - } -} - -/// Underlying chain type. -pub type UnderlyingChainOf = ::Chain; - -/// Block number used by the chain. -pub type BlockNumberOf = ::BlockNumber; - -/// Hash type used by the chain. -pub type HashOf = ::Hash; - -/// Hasher type used by the chain. -pub type HasherOf = ::Hasher; - -/// Header type used by the chain. -pub type HeaderOf = ::Header; - -/// Account id type used by the chain. -pub type AccountIdOf = ::AccountId; - -/// Balance type used by the chain. 
-pub type BalanceOf = ::Balance; - -/// Transaction nonce type used by the chain. -pub type NonceOf = ::Nonce; - -/// Signature type used by the chain. -pub type SignatureOf = ::Signature; - -/// Account public type used by the chain. -pub type AccountPublicOf = as Verify>::Signer; - -/// Transaction era used by the chain. -pub type TransactionEraOf = crate::TransactionEra, HashOf>; - -/// Convenience macro that declares bridge finality runtime apis and related constants for a chain. -/// This includes: -/// - chain-specific bridge runtime APIs: -/// - `FinalityApi` -/// - constants that are stringified names of runtime API methods: -/// - `BEST_FINALIZED__HEADER_METHOD` -/// - `_ACCEPTED__FINALITY_PROOFS_METHOD` -/// The name of the chain has to be specified in snake case (e.g. `bridge_hub_polkadot`). -#[macro_export] -macro_rules! decl_bridge_finality_runtime_apis { - ($chain: ident $(, $consensus: ident => $justification_type: ty)?) => { - bp_runtime::paste::item! { - mod [<$chain _finality_api>] { - use super::*; - - /// Name of the `FinalityApi::best_finalized` runtime method. - pub const []: &str = - stringify!([<$chain:camel FinalityApi_best_finalized>]); - - $( - /// Name of the `FinalityApi::accepted__finality_proofs` - /// runtime method. - pub const [<$chain:upper _SYNCED_HEADERS_ $consensus:upper _INFO_METHOD>]: &str = - stringify!([<$chain:camel FinalityApi_synced_headers_ $consensus:lower _info>]); - )? - - sp_api::decl_runtime_apis! { - /// API for querying information about the finalized chain headers. - /// - /// This API is implemented by runtimes that are receiving messages from this chain, not by this - /// chain's runtime itself. - pub trait [<$chain:camel FinalityApi>] { - /// Returns number and hash of the best finalized header known to the bridge module. - fn best_finalized() -> Option>; - - $( - /// Returns the justifications accepted in the current block. - fn []( - ) -> sp_std::vec::Vec<$justification_type>; - )? 
- } - } - } - - pub use [<$chain _finality_api>]::*; - } - }; - ($chain: ident, grandpa) => { - decl_bridge_finality_runtime_apis!($chain, grandpa => bp_header_chain::StoredHeaderGrandpaInfo
); - }; -} - -/// Convenience macro that declares bridge messages runtime apis and related constants for a chain. -/// This includes: -/// - chain-specific bridge runtime APIs: -/// - `ToOutboundLaneApi` -/// - `FromInboundLaneApi` -/// - constants that are stringified names of runtime API methods: -/// - `FROM__MESSAGE_DETAILS_METHOD`, -/// The name of the chain has to be specified in snake case (e.g. `bridge_hub_polkadot`). -#[macro_export] -macro_rules! decl_bridge_messages_runtime_apis { - ($chain: ident) => { - bp_runtime::paste::item! { - mod [<$chain _messages_api>] { - use super::*; - - /// Name of the `ToOutboundLaneApi::message_details` runtime method. - pub const []: &str = - stringify!([]); - - /// Name of the `FromInboundLaneApi::message_details` runtime method. - pub const []: &str = - stringify!([]); - - sp_api::decl_runtime_apis! { - /// Outbound message lane API for messages that are sent to this chain. - /// - /// This API is implemented by runtimes that are receiving messages from this chain, not by this - /// chain's runtime itself. - pub trait [] { - /// Returns dispatch weight, encoded payload size and delivery+dispatch fee of all - /// messages in given inclusive range. - /// - /// If some (or all) messages are missing from the storage, they'll also will - /// be missing from the resulting vector. The vector is ordered by the nonce. - fn message_details( - lane: bp_messages::LaneId, - begin: bp_messages::MessageNonce, - end: bp_messages::MessageNonce, - ) -> sp_std::vec::Vec; - } - - /// Inbound message lane API for messages sent by this chain. - /// - /// This API is implemented by runtimes that are receiving messages from this chain, not by this - /// chain's runtime itself. - /// - /// Entries of the resulting vector are matching entries of the `messages` vector. Entries of the - /// `messages` vector may (and need to) be read using `ToOutboundLaneApi::message_details`. - pub trait [] { - /// Return details of given inbound messages. 
- fn message_details( - lane: bp_messages::LaneId, - messages: sp_std::vec::Vec<(bp_messages::MessagePayload, bp_messages::OutboundMessageDetails)>, - ) -> sp_std::vec::Vec; - } - } - } - - pub use [<$chain _messages_api>]::*; - } - }; -} - -/// Convenience macro that declares bridge finality runtime apis, bridge messages runtime apis -/// and related constants for a chain. -/// The name of the chain has to be specified in snake case (e.g. `bridge_hub_polkadot`). -#[macro_export] -macro_rules! decl_bridge_runtime_apis { - ($chain: ident $(, $consensus: ident)?) => { - bp_runtime::decl_bridge_finality_runtime_apis!($chain $(, $consensus)?); - bp_runtime::decl_bridge_messages_runtime_apis!($chain); - }; -} diff --git a/primitives/runtime/src/extensions.rs b/primitives/runtime/src/extensions.rs deleted file mode 100644 index d896bc92e..000000000 --- a/primitives/runtime/src/extensions.rs +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives that may be used for creating signed extensions for indirect runtimes. 
- -use codec::{Compact, Decode, Encode}; -use impl_trait_for_tuples::impl_for_tuples; -use scale_info::{StaticTypeInfo, TypeInfo}; -use sp_runtime::{ - traits::{DispatchInfoOf, SignedExtension}, - transaction_validity::TransactionValidityError, -}; -use sp_std::{fmt::Debug, marker::PhantomData}; - -/// Trait that describes some properties of a `SignedExtension` that are needed in order to send a -/// transaction to the chain. -pub trait SignedExtensionSchema: Encode + Decode + Debug + Eq + Clone + StaticTypeInfo { - /// A type of the data encoded as part of the transaction. - type Payload: Encode + Decode + Debug + Eq + Clone + StaticTypeInfo; - /// Parameters which are part of the payload used to produce transaction signature, - /// but don't end up in the transaction itself (i.e. inherent part of the runtime). - type AdditionalSigned: Encode + Debug + Eq + Clone + StaticTypeInfo; -} - -impl SignedExtensionSchema for () { - type Payload = (); - type AdditionalSigned = (); -} - -/// An implementation of `SignedExtensionSchema` using generic params. -#[derive(Encode, Decode, Clone, Debug, PartialEq, Eq, TypeInfo)] -pub struct GenericSignedExtensionSchema(PhantomData<(P, S)>); - -impl SignedExtensionSchema for GenericSignedExtensionSchema -where - P: Encode + Decode + Debug + Eq + Clone + StaticTypeInfo, - S: Encode + Debug + Eq + Clone + StaticTypeInfo, -{ - type Payload = P; - type AdditionalSigned = S; -} - -/// The `SignedExtensionSchema` for `frame_system::CheckNonZeroSender`. -pub type CheckNonZeroSender = GenericSignedExtensionSchema<(), ()>; - -/// The `SignedExtensionSchema` for `frame_system::CheckSpecVersion`. -pub type CheckSpecVersion = GenericSignedExtensionSchema<(), u32>; - -/// The `SignedExtensionSchema` for `frame_system::CheckTxVersion`. -pub type CheckTxVersion = GenericSignedExtensionSchema<(), u32>; - -/// The `SignedExtensionSchema` for `frame_system::CheckGenesis`. 
-pub type CheckGenesis = GenericSignedExtensionSchema<(), Hash>; - -/// The `SignedExtensionSchema` for `frame_system::CheckEra`. -pub type CheckEra = GenericSignedExtensionSchema; - -/// The `SignedExtensionSchema` for `frame_system::CheckNonce`. -pub type CheckNonce = GenericSignedExtensionSchema, ()>; - -/// The `SignedExtensionSchema` for `frame_system::CheckWeight`. -pub type CheckWeight = GenericSignedExtensionSchema<(), ()>; - -/// The `SignedExtensionSchema` for `pallet_transaction_payment::ChargeTransactionPayment`. -pub type ChargeTransactionPayment = GenericSignedExtensionSchema, ()>; - -/// The `SignedExtensionSchema` for `polkadot-runtime-common::PrevalidateAttests`. -pub type PrevalidateAttests = GenericSignedExtensionSchema<(), ()>; - -/// The `SignedExtensionSchema` for `BridgeRejectObsoleteHeadersAndMessages`. -pub type BridgeRejectObsoleteHeadersAndMessages = GenericSignedExtensionSchema<(), ()>; - -/// The `SignedExtensionSchema` for `RefundBridgedParachainMessages`. -/// This schema is dedicated for `RefundBridgedParachainMessages` signed extension as -/// wildcard/placeholder, which relies on the scale encoding for `()` or `((), ())`, or `((), (), -/// ())` is the same. So runtime can contains any kind of tuple: -/// `(BridgeRefundBridgeHubRococoMessages)` -/// `(BridgeRefundBridgeHubRococoMessages, BridgeRefundBridgeHubWestendMessages)` -/// `(BridgeRefundParachainMessages1, ..., BridgeRefundParachainMessagesN)` -pub type RefundBridgedParachainMessagesSchema = GenericSignedExtensionSchema<(), ()>; - -#[impl_for_tuples(1, 12)] -impl SignedExtensionSchema for Tuple { - for_tuples!( type Payload = ( #( Tuple::Payload ),* ); ); - for_tuples!( type AdditionalSigned = ( #( Tuple::AdditionalSigned ),* ); ); -} - -/// A simplified version of signed extensions meant for producing signed transactions -/// and signed payloads in the client code. 
-#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, TypeInfo)] -pub struct GenericSignedExtension { - /// A payload that is included in the transaction. - pub payload: S::Payload, - #[codec(skip)] - // It may be set to `None` if extensions are decoded. We are never reconstructing transactions - // (and it makes no sense to do that) => decoded version of `SignedExtensions` is only used to - // read fields of the `payload`. And when resigning transaction, we're reconstructing - // `SignedExtensions` from scratch. - additional_signed: Option, -} - -impl GenericSignedExtension { - /// Create new `GenericSignedExtension` object. - pub fn new(payload: S::Payload, additional_signed: Option) -> Self { - Self { payload, additional_signed } - } -} - -impl SignedExtension for GenericSignedExtension -where - S: SignedExtensionSchema, - S::Payload: Send + Sync, - S::AdditionalSigned: Send + Sync, -{ - const IDENTIFIER: &'static str = "Not needed."; - type AccountId = (); - type Call = (); - type AdditionalSigned = S::AdditionalSigned; - type Pre = (); - - fn additional_signed(&self) -> Result { - // we shall not ever see this error in relay, because we are never signing decoded - // transactions. Instead we're constructing and signing new transactions. So the error code - // is kinda random here - self.additional_signed.clone().ok_or( - frame_support::unsigned::TransactionValidityError::Unknown( - frame_support::unsigned::UnknownTransaction::Custom(0xFF), - ), - ) - } - - fn pre_dispatch( - self, - _who: &Self::AccountId, - _call: &Self::Call, - _info: &DispatchInfoOf, - _len: usize, - ) -> Result { - Ok(()) - } -} diff --git a/primitives/runtime/src/lib.rs b/primitives/runtime/src/lib.rs deleted file mode 100644 index c9c5c9412..000000000 --- a/primitives/runtime/src/lib.rs +++ /dev/null @@ -1,545 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives that may be used at (bridges) runtime level. - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -use codec::{Decode, Encode, FullCodec, MaxEncodedLen}; -use frame_support::{ - pallet_prelude::DispatchResult, weights::Weight, PalletError, StorageHasher, StorageValue, -}; -use frame_system::RawOrigin; -use scale_info::TypeInfo; -use serde::{Deserialize, Serialize}; -use sp_core::storage::StorageKey; -use sp_runtime::{ - traits::{BadOrigin, Header as HeaderT, UniqueSaturatedInto}, - RuntimeDebug, -}; -use sp_std::{convert::TryFrom, fmt::Debug, ops::RangeInclusive, vec, vec::Vec}; - -pub use chain::{ - AccountIdOf, AccountPublicOf, BalanceOf, BlockNumberOf, Chain, EncodedOrDecodedCall, HashOf, - HasherOf, HeaderOf, NonceOf, Parachain, ParachainIdOf, SignatureOf, TransactionEraOf, - UnderlyingChainOf, UnderlyingChainProvider, -}; -pub use frame_support::storage::storage_prefix as storage_value_final_key; -use num_traits::{CheckedAdd, CheckedSub, One, SaturatingAdd, Zero}; -pub use storage_proof::{ - record_all_keys as record_all_trie_keys, Error as StorageProofError, - ProofSize as StorageProofSize, RawStorageProof, StorageProofChecker, -}; -pub use storage_types::BoundedStorageValue; - -#[cfg(feature = "std")] -pub use storage_proof::craft_valid_storage_proof; - -pub mod extensions; -pub mod 
messages; - -mod chain; -mod storage_proof; -mod storage_types; - -// Re-export macro to avoid include paste dependency everywhere -pub use sp_runtime::paste; - -/// Use this when something must be shared among all instances. -pub const NO_INSTANCE_ID: ChainId = [0, 0, 0, 0]; - -/// Generic header Id. -#[derive( - RuntimeDebug, - Default, - Clone, - Encode, - Decode, - Copy, - Eq, - Hash, - MaxEncodedLen, - PartialEq, - PartialOrd, - Ord, - TypeInfo, -)] -pub struct HeaderId(pub Number, pub Hash); - -impl HeaderId { - /// Return header number. - pub fn number(&self) -> Number { - self.0 - } - - /// Return header hash. - pub fn hash(&self) -> Hash { - self.1 - } -} - -/// Header id used by the chain. -pub type HeaderIdOf = HeaderId, BlockNumberOf>; - -/// Generic header id provider. -pub trait HeaderIdProvider { - /// Get the header id. - fn id(&self) -> HeaderId; - - /// Get the header id for the parent block. - fn parent_id(&self) -> Option>; -} - -impl HeaderIdProvider
for Header { - fn id(&self) -> HeaderId { - HeaderId(*self.number(), self.hash()) - } - - fn parent_id(&self) -> Option> { - self.number() - .checked_sub(&One::one()) - .map(|parent_number| HeaderId(parent_number, *self.parent_hash())) - } -} - -/// Unique identifier of the chain. -/// -/// In addition to its main function (identifying the chain), this type may also be used to -/// identify module instance. We have a bunch of pallets that may be used in different bridges. E.g. -/// messages pallet may be deployed twice in the same runtime to bridge ThisChain with Chain1 and -/// Chain2. Sometimes we need to be able to identify deployed instance dynamically. This type may be -/// used for that. -pub type ChainId = [u8; 4]; - -/// Anything that has size. -pub trait Size { - /// Return size of this object (in bytes). - fn size(&self) -> u32; -} - -impl Size for () { - fn size(&self) -> u32 { - 0 - } -} - -impl Size for Vec { - fn size(&self) -> u32 { - self.len() as _ - } -} - -/// Pre-computed size. -pub struct PreComputedSize(pub usize); - -impl Size for PreComputedSize { - fn size(&self) -> u32 { - u32::try_from(self.0).unwrap_or(u32::MAX) - } -} - -/// Era of specific transaction. -#[derive(RuntimeDebug, Clone, Copy, PartialEq)] -pub enum TransactionEra { - /// Transaction is immortal. - Immortal, - /// Transaction is valid for a given number of blocks, starting from given block. - Mortal(HeaderId, u32), -} - -impl, BlockHash: Copy> - TransactionEra -{ - /// Prepare transaction era, based on mortality period and current best block number. - pub fn new( - best_block_id: HeaderId, - mortality_period: Option, - ) -> Self { - mortality_period - .map(|mortality_period| TransactionEra::Mortal(best_block_id, mortality_period)) - .unwrap_or(TransactionEra::Immortal) - } - - /// Create new immortal transaction era. - pub fn immortal() -> Self { - TransactionEra::Immortal - } - - /// Returns mortality period if transaction is mortal. 
- pub fn mortality_period(&self) -> Option { - match *self { - TransactionEra::Immortal => None, - TransactionEra::Mortal(_, period) => Some(period), - } - } - - /// Returns era that is used by FRAME-based runtimes. - pub fn frame_era(&self) -> sp_runtime::generic::Era { - match *self { - TransactionEra::Immortal => sp_runtime::generic::Era::immortal(), - // `unique_saturated_into` is fine here - mortality `u64::MAX` is not something we - // expect to see on any chain - TransactionEra::Mortal(header_id, period) => - sp_runtime::generic::Era::mortal(period as _, header_id.0.unique_saturated_into()), - } - } - - /// Returns header hash that needs to be included in the signature payload. - pub fn signed_payload(&self, genesis_hash: BlockHash) -> BlockHash { - match *self { - TransactionEra::Immortal => genesis_hash, - TransactionEra::Mortal(header_id, _) => header_id.1, - } - } -} - -/// This is a copy of the -/// `frame_support::storage::generator::StorageMap::storage_map_final_key` for maps based -/// on selected hasher. -/// -/// We're using it because to call `storage_map_final_key` directly, we need access to the runtime -/// and pallet instance, which (sometimes) is impossible. -pub fn storage_map_final_key( - pallet_prefix: &str, - map_name: &str, - key: &[u8], -) -> StorageKey { - let key_hashed = H::hash(key); - let pallet_prefix_hashed = frame_support::Twox128::hash(pallet_prefix.as_bytes()); - let storage_prefix_hashed = frame_support::Twox128::hash(map_name.as_bytes()); - - let mut final_key = Vec::with_capacity( - pallet_prefix_hashed.len() + storage_prefix_hashed.len() + key_hashed.as_ref().len(), - ); - - final_key.extend_from_slice(&pallet_prefix_hashed[..]); - final_key.extend_from_slice(&storage_prefix_hashed[..]); - final_key.extend_from_slice(key_hashed.as_ref()); - - StorageKey(final_key) -} - -/// This is how a storage key of storage value is computed. -/// -/// Copied from `frame_support::storage::storage_prefix`. 
-pub fn storage_value_key(pallet_prefix: &str, value_name: &str) -> StorageKey { - let pallet_hash = sp_io::hashing::twox_128(pallet_prefix.as_bytes()); - let storage_hash = sp_io::hashing::twox_128(value_name.as_bytes()); - - let mut final_key = vec![0u8; 32]; - final_key[..16].copy_from_slice(&pallet_hash); - final_key[16..].copy_from_slice(&storage_hash); - - StorageKey(final_key) -} - -/// Can be use to access the runtime storage key of a `StorageMap`. -pub trait StorageMapKeyProvider { - /// The name of the variable that holds the `StorageMap`. - const MAP_NAME: &'static str; - - /// The same as `StorageMap::Hasher1`. - type Hasher: StorageHasher; - /// The same as `StorageMap::Key1`. - type Key: FullCodec; - /// The same as `StorageMap::Value`. - type Value: FullCodec; - - /// This is a copy of the - /// `frame_support::storage::generator::StorageMap::storage_map_final_key`. - /// - /// We're using it because to call `storage_map_final_key` directly, we need access - /// to the runtime and pallet instance, which (sometimes) is impossible. - fn final_key(pallet_prefix: &str, key: &Self::Key) -> StorageKey { - storage_map_final_key::(pallet_prefix, Self::MAP_NAME, &key.encode()) - } -} - -/// Can be use to access the runtime storage key of a `StorageDoubleMap`. -pub trait StorageDoubleMapKeyProvider { - /// The name of the variable that holds the `StorageDoubleMap`. - const MAP_NAME: &'static str; - - /// The same as `StorageDoubleMap::Hasher1`. - type Hasher1: StorageHasher; - /// The same as `StorageDoubleMap::Key1`. - type Key1: FullCodec; - /// The same as `StorageDoubleMap::Hasher2`. - type Hasher2: StorageHasher; - /// The same as `StorageDoubleMap::Key2`. - type Key2: FullCodec; - /// The same as `StorageDoubleMap::Value`. - type Value: FullCodec; - - /// This is a copy of the - /// `frame_support::storage::generator::StorageDoubleMap::storage_double_map_final_key`. 
- /// - /// We're using it because to call `storage_double_map_final_key` directly, we need access - /// to the runtime and pallet instance, which (sometimes) is impossible. - fn final_key(pallet_prefix: &str, key1: &Self::Key1, key2: &Self::Key2) -> StorageKey { - let key1_hashed = Self::Hasher1::hash(&key1.encode()); - let key2_hashed = Self::Hasher2::hash(&key2.encode()); - let pallet_prefix_hashed = frame_support::Twox128::hash(pallet_prefix.as_bytes()); - let storage_prefix_hashed = frame_support::Twox128::hash(Self::MAP_NAME.as_bytes()); - - let mut final_key = Vec::with_capacity( - pallet_prefix_hashed.len() + - storage_prefix_hashed.len() + - key1_hashed.as_ref().len() + - key2_hashed.as_ref().len(), - ); - - final_key.extend_from_slice(&pallet_prefix_hashed[..]); - final_key.extend_from_slice(&storage_prefix_hashed[..]); - final_key.extend_from_slice(key1_hashed.as_ref()); - final_key.extend_from_slice(key2_hashed.as_ref()); - - StorageKey(final_key) - } -} - -/// Error generated by the `OwnedBridgeModule` trait. -#[derive(Encode, Decode, PartialEq, Eq, TypeInfo, PalletError)] -pub enum OwnedBridgeModuleError { - /// All pallet operations are halted. - Halted, -} - -/// Operating mode for a bridge module. -pub trait OperatingMode: Send + Copy + Debug + FullCodec { - /// Returns true if the bridge module is halted. - fn is_halted(&self) -> bool; -} - -/// Basic operating modes for a bridges module (Normal/Halted). -#[derive( - Encode, - Decode, - Clone, - Copy, - PartialEq, - Eq, - RuntimeDebug, - TypeInfo, - MaxEncodedLen, - Serialize, - Deserialize, -)] -pub enum BasicOperatingMode { - /// Normal mode, when all operations are allowed. - Normal, - /// The pallet is halted. All operations (except operating mode change) are prohibited. 
- Halted, -} - -impl Default for BasicOperatingMode { - fn default() -> Self { - Self::Normal - } -} - -impl OperatingMode for BasicOperatingMode { - fn is_halted(&self) -> bool { - *self == BasicOperatingMode::Halted - } -} - -/// Bridge module that has owner and operating mode -pub trait OwnedBridgeModule { - /// The target that will be used when publishing logs related to this module. - const LOG_TARGET: &'static str; - - /// A storage entry that holds the module `Owner` account. - type OwnerStorage: StorageValue>; - /// Operating mode type of the pallet. - type OperatingMode: OperatingMode; - /// A storage value that holds the pallet operating mode. - type OperatingModeStorage: StorageValue; - - /// Check if the module is halted. - fn is_halted() -> bool { - Self::OperatingModeStorage::get().is_halted() - } - - /// Ensure that the origin is either root, or `PalletOwner`. - fn ensure_owner_or_root(origin: T::RuntimeOrigin) -> Result<(), BadOrigin> { - match origin.into() { - Ok(RawOrigin::Root) => Ok(()), - Ok(RawOrigin::Signed(ref signer)) - if Self::OwnerStorage::get().as_ref() == Some(signer) => - Ok(()), - _ => Err(BadOrigin), - } - } - - /// Ensure that the module is not halted. - fn ensure_not_halted() -> Result<(), OwnedBridgeModuleError> { - match Self::is_halted() { - true => Err(OwnedBridgeModuleError::Halted), - false => Ok(()), - } - } - - /// Change the owner of the module. - fn set_owner(origin: T::RuntimeOrigin, maybe_owner: Option) -> DispatchResult { - Self::ensure_owner_or_root(origin)?; - match maybe_owner { - Some(owner) => { - Self::OwnerStorage::put(&owner); - log::info!(target: Self::LOG_TARGET, "Setting pallet Owner to: {:?}", owner); - }, - None => { - Self::OwnerStorage::kill(); - log::info!(target: Self::LOG_TARGET, "Removed Owner of pallet."); - }, - } - - Ok(()) - } - - /// Halt or resume all/some module operations. 
- fn set_operating_mode( - origin: T::RuntimeOrigin, - operating_mode: Self::OperatingMode, - ) -> DispatchResult { - Self::ensure_owner_or_root(origin)?; - Self::OperatingModeStorage::put(operating_mode); - log::info!(target: Self::LOG_TARGET, "Setting operating mode to {:?}.", operating_mode); - Ok(()) - } -} - -/// All extra operations with weights that we need in bridges. -pub trait WeightExtraOps { - /// Checked division of individual components of two weights. - /// - /// Divides components and returns minimal division result. Returns `None` if one - /// of `other` weight components is zero. - fn min_components_checked_div(&self, other: Weight) -> Option; -} - -impl WeightExtraOps for Weight { - fn min_components_checked_div(&self, other: Weight) -> Option { - Some(sp_std::cmp::min( - self.ref_time().checked_div(other.ref_time())?, - self.proof_size().checked_div(other.proof_size())?, - )) - } -} - -/// Trait that provides a static `str`. -pub trait StaticStrProvider { - /// Static string. - const STR: &'static str; -} - -/// A macro that generates `StaticStrProvider` with the string set to its stringified argument. -#[macro_export] -macro_rules! generate_static_str_provider { - ($str:expr) => { - $crate::paste::item! { - pub struct []; - - impl $crate::StaticStrProvider for [] { - const STR: &'static str = stringify!($str); - } - } - }; -} - -/// Error message that is only displayable in `std` environment. 
-#[derive(Encode, Decode, Clone, Eq, PartialEq, PalletError, TypeInfo)] -#[scale_info(skip_type_params(T))] -pub struct StrippableError { - _phantom_data: sp_std::marker::PhantomData, - #[codec(skip)] - #[cfg(feature = "std")] - message: String, -} - -impl From for StrippableError { - fn from(_err: T) -> Self { - Self { - _phantom_data: Default::default(), - #[cfg(feature = "std")] - message: format!("{:?}", _err), - } - } -} - -impl Debug for StrippableError { - #[cfg(feature = "std")] - fn fmt(&self, f: &mut sp_std::fmt::Formatter<'_>) -> sp_std::fmt::Result { - f.write_str(&self.message) - } - - #[cfg(not(feature = "std"))] - fn fmt(&self, f: &mut sp_std::fmt::Formatter<'_>) -> sp_std::fmt::Result { - f.write_str("Stripped error") - } -} - -/// A trait defining helper methods for `RangeInclusive` (start..=end) -pub trait RangeInclusiveExt { - /// Computes the length of the `RangeInclusive`, checking for underflow and overflow. - fn checked_len(&self) -> Option; - /// Computes the length of the `RangeInclusive`, saturating in case of underflow or overflow. 
- fn saturating_len(&self) -> Idx; -} - -impl RangeInclusiveExt for RangeInclusive -where - Idx: CheckedSub + CheckedAdd + SaturatingAdd + One + Zero, -{ - fn checked_len(&self) -> Option { - self.end() - .checked_sub(self.start()) - .and_then(|len| len.checked_add(&Idx::one())) - } - - fn saturating_len(&self) -> Idx { - let len = match self.end().checked_sub(self.start()) { - Some(len) => len, - None => return Idx::zero(), - }; - len.saturating_add(&Idx::one()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn storage_value_key_works() { - assert_eq!( - storage_value_key("PalletTransactionPayment", "NextFeeMultiplier"), - StorageKey( - hex_literal::hex!( - "f0e954dfcca51a255ab12c60c789256a3f2edf3bdf381debe331ab7446addfdc" - ) - .to_vec() - ), - ); - } - - #[test] - fn generate_static_str_provider_works() { - generate_static_str_provider!(Test); - assert_eq!(StrTest::STR, "Test"); - } -} diff --git a/primitives/runtime/src/messages.rs b/primitives/runtime/src/messages.rs deleted file mode 100644 index 0f219e984..000000000 --- a/primitives/runtime/src/messages.rs +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives that may be used by different message delivery and dispatch mechanisms. 
- -use codec::{Decode, Encode}; -use frame_support::weights::Weight; -use scale_info::TypeInfo; -use sp_runtime::RuntimeDebug; - -/// Message dispatch result. -#[derive(Encode, Decode, RuntimeDebug, Clone, PartialEq, Eq, TypeInfo)] -pub struct MessageDispatchResult { - /// Unspent dispatch weight. This weight that will be deducted from total delivery transaction - /// weight, thus reducing the transaction cost. This shall not be zero in (at least) two cases: - /// - /// 1) if message has been dispatched successfully, but post-dispatch weight is less than the - /// weight, declared by the message sender; - /// 2) if message has not been dispatched at all. - pub unspent_weight: Weight, - /// Fine-grained result of single message dispatch (for better diagnostic purposes) - pub dispatch_level_result: DispatchLevelResult, -} diff --git a/primitives/runtime/src/storage_proof.rs b/primitives/runtime/src/storage_proof.rs deleted file mode 100644 index 1b706aa66..000000000 --- a/primitives/runtime/src/storage_proof.rs +++ /dev/null @@ -1,272 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Logic for checking Substrate storage proofs. 
- -use crate::StrippableError; -use codec::{Decode, Encode}; -use frame_support::PalletError; -use hash_db::{HashDB, Hasher, EMPTY_PREFIX}; -use scale_info::TypeInfo; -use sp_std::{boxed::Box, collections::btree_set::BTreeSet, vec::Vec}; -use sp_trie::{ - read_trie_value, LayoutV1, MemoryDB, Recorder, StorageProof, Trie, TrieConfiguration, - TrieDBBuilder, TrieError, TrieHash, -}; - -/// Raw storage proof type (just raw trie nodes). -pub type RawStorageProof = Vec>; - -/// Storage proof size requirements. -/// -/// This is currently used by benchmarks when generating storage proofs. -#[derive(Clone, Copy, Debug)] -pub enum ProofSize { - /// The proof is expected to be minimal. If value size may be changed, then it is expected to - /// have given size. - Minimal(u32), - /// The proof is expected to have at least given size and grow by increasing value that is - /// stored in the trie. - HasLargeLeaf(u32), -} - -/// This struct is used to read storage values from a subset of a Merklized database. The "proof" -/// is a subset of the nodes in the Merkle structure of the database, so that it provides -/// authentication against a known Merkle root as well as the values in the -/// database themselves. -pub struct StorageProofChecker -where - H: Hasher, -{ - proof_nodes_count: usize, - root: H::Out, - db: MemoryDB, - recorder: Recorder>, -} - -impl StorageProofChecker -where - H: Hasher, -{ - /// Constructs a new storage proof checker. - /// - /// This returns an error if the given proof is invalid with respect to the given root. - pub fn new(root: H::Out, proof: RawStorageProof) -> Result { - // 1. we don't want extra items in the storage proof - // 2. 
`StorageProof` is storing all trie nodes in the `BTreeSet` - // - // => someone could simply add duplicate items to the proof and we won't be - // able to detect that by just using `StorageProof` - // - // => let's check it when we are converting our "raw proof" into `StorageProof` - let proof_nodes_count = proof.len(); - let proof = StorageProof::new(proof); - if proof_nodes_count != proof.iter_nodes().count() { - return Err(Error::DuplicateNodesInProof) - } - - let db = proof.into_memory_db(); - if !db.contains(&root, EMPTY_PREFIX) { - return Err(Error::StorageRootMismatch) - } - - let recorder = Recorder::default(); - let checker = StorageProofChecker { proof_nodes_count, root, db, recorder }; - Ok(checker) - } - - /// Returns error if the proof has some nodes that are left intact by previous `read_value` - /// calls. - pub fn ensure_no_unused_nodes(mut self) -> Result<(), Error> { - let visited_nodes = self - .recorder - .drain() - .into_iter() - .map(|record| record.data) - .collect::>(); - let visited_nodes_count = visited_nodes.len(); - if self.proof_nodes_count == visited_nodes_count { - Ok(()) - } else { - Err(Error::UnusedNodesInTheProof) - } - } - - /// Reads a value from the available subset of storage. If the value cannot be read due to an - /// incomplete or otherwise invalid proof, this function returns an error. - pub fn read_value(&mut self, key: &[u8]) -> Result>, Error> { - // LayoutV1 or LayoutV0 is identical for proof that only read values. - read_trie_value::, _>(&self.db, &self.root, key, Some(&mut self.recorder), None) - .map_err(|_| Error::StorageValueUnavailable) - } - - /// Reads and decodes a value from the available subset of storage. If the value cannot be read - /// due to an incomplete or otherwise invalid proof, this function returns an error. If value is - /// read, but decoding fails, this function returns an error. 
- pub fn read_and_decode_value(&mut self, key: &[u8]) -> Result, Error> { - self.read_value(key).and_then(|v| { - v.map(|v| T::decode(&mut &v[..]).map_err(|e| Error::StorageValueDecodeFailed(e.into()))) - .transpose() - }) - } - - /// Reads and decodes a value from the available subset of storage. If the value cannot be read - /// due to an incomplete or otherwise invalid proof, or if the value is `None`, this function - /// returns an error. If value is read, but decoding fails, this function returns an error. - pub fn read_and_decode_mandatory_value(&mut self, key: &[u8]) -> Result { - self.read_and_decode_value(key)?.ok_or(Error::StorageValueEmpty) - } - - /// Reads and decodes a value from the available subset of storage. If the value cannot be read - /// due to an incomplete or otherwise invalid proof, this function returns `Ok(None)`. - /// If value is read, but decoding fails, this function returns an error. - pub fn read_and_decode_opt_value(&mut self, key: &[u8]) -> Result, Error> { - match self.read_and_decode_value(key) { - Ok(outbound_lane_data) => Ok(outbound_lane_data), - Err(Error::StorageValueUnavailable) => Ok(None), - Err(e) => Err(e), - } - } -} - -/// Storage proof related errors. -#[derive(Encode, Decode, Clone, Eq, PartialEq, PalletError, Debug, TypeInfo)] -pub enum Error { - /// Duplicate trie nodes are found in the proof. - DuplicateNodesInProof, - /// Unused trie nodes are found in the proof. - UnusedNodesInTheProof, - /// Expected storage root is missing from the proof. - StorageRootMismatch, - /// Unable to reach expected storage value using provided trie nodes. - StorageValueUnavailable, - /// The storage value is `None`. - StorageValueEmpty, - /// Failed to decode storage value. - StorageValueDecodeFailed(StrippableError), -} - -/// Return valid storage proof and state root. -/// -/// NOTE: This should only be used for **testing**. 
-#[cfg(feature = "std")] -pub fn craft_valid_storage_proof() -> (sp_core::H256, RawStorageProof) { - use sp_state_machine::{backend::Backend, prove_read, InMemoryBackend}; - - let state_version = sp_runtime::StateVersion::default(); - - // construct storage proof - let backend = >::from(( - vec![ - (None, vec![(b"key1".to_vec(), Some(b"value1".to_vec()))]), - (None, vec![(b"key2".to_vec(), Some(b"value2".to_vec()))]), - (None, vec![(b"key3".to_vec(), Some(b"value3".to_vec()))]), - (None, vec![(b"key4".to_vec(), Some((42u64, 42u32, 42u16, 42u8).encode()))]), - // Value is too big to fit in a branch node - (None, vec![(b"key11".to_vec(), Some(vec![0u8; 32]))]), - ], - state_version, - )); - let root = backend.storage_root(std::iter::empty(), state_version).0; - let proof = - prove_read(backend, &[&b"key1"[..], &b"key2"[..], &b"key4"[..], &b"key22"[..]]).unwrap(); - - (root, proof.into_nodes().into_iter().collect()) -} - -/// Record all keys for a given root. -pub fn record_all_keys( - db: &DB, - root: &TrieHash, -) -> Result>> -where - DB: hash_db::HashDBRef, -{ - let mut recorder = Recorder::::new(); - let trie = TrieDBBuilder::::new(db, root).with_recorder(&mut recorder).build(); - for x in trie.iter()? 
{ - let (key, _) = x?; - trie.get(&key)?; - } - - // recorder may record the same trie node multiple times and we don't want duplicate nodes - // in our proofs => let's deduplicate it by collecting to the BTreeSet first - Ok(recorder - .drain() - .into_iter() - .map(|n| n.data.to_vec()) - .collect::>() - .into_iter() - .collect()) -} - -#[cfg(test)] -pub mod tests { - use super::*; - use codec::Encode; - - #[test] - fn storage_proof_check() { - let (root, proof) = craft_valid_storage_proof(); - - // check proof in runtime - let mut checker = - >::new(root, proof.clone()).unwrap(); - assert_eq!(checker.read_value(b"key1"), Ok(Some(b"value1".to_vec()))); - assert_eq!(checker.read_value(b"key2"), Ok(Some(b"value2".to_vec()))); - assert_eq!(checker.read_value(b"key4"), Ok(Some((42u64, 42u32, 42u16, 42u8).encode()))); - assert_eq!(checker.read_value(b"key11111"), Err(Error::StorageValueUnavailable)); - assert_eq!(checker.read_value(b"key22"), Ok(None)); - assert_eq!(checker.read_and_decode_value(b"key4"), Ok(Some((42u64, 42u32, 42u16, 42u8))),); - assert!(matches!( - checker.read_and_decode_value::<[u8; 64]>(b"key4"), - Err(Error::StorageValueDecodeFailed(_)), - )); - - // checking proof against invalid commitment fails - assert_eq!( - >::new(sp_core::H256::random(), proof).err(), - Some(Error::StorageRootMismatch) - ); - } - - #[test] - fn proof_with_duplicate_items_is_rejected() { - let (root, mut proof) = craft_valid_storage_proof(); - proof.push(proof.first().unwrap().clone()); - - assert_eq!( - StorageProofChecker::::new(root, proof).map(drop), - Err(Error::DuplicateNodesInProof), - ); - } - - #[test] - fn proof_with_unused_items_is_rejected() { - let (root, proof) = craft_valid_storage_proof(); - - let mut checker = - StorageProofChecker::::new(root, proof.clone()).unwrap(); - checker.read_value(b"key1").unwrap(); - checker.read_value(b"key2").unwrap(); - checker.read_value(b"key4").unwrap(); - checker.read_value(b"key22").unwrap(); - 
assert_eq!(checker.ensure_no_unused_nodes(), Ok(())); - - let checker = StorageProofChecker::::new(root, proof).unwrap(); - assert_eq!(checker.ensure_no_unused_nodes(), Err(Error::UnusedNodesInTheProof)); - } -} diff --git a/primitives/runtime/src/storage_types.rs b/primitives/runtime/src/storage_types.rs deleted file mode 100644 index 91c545180..000000000 --- a/primitives/runtime/src/storage_types.rs +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Wrapper for a runtime storage value that checks if value exceeds given maximum -//! during conversion. - -use codec::{Decode, Encode, MaxEncodedLen}; -use frame_support::traits::Get; -use scale_info::{Type, TypeInfo}; -use sp_runtime::RuntimeDebug; -use sp_std::{marker::PhantomData, ops::Deref}; - -/// Error that is returned when the value size exceeds maximal configured size. -#[derive(RuntimeDebug)] -pub struct MaximalSizeExceededError { - /// Size of the value. - pub value_size: usize, - /// Maximal configured size. - pub maximal_size: usize, -} - -/// A bounded runtime storage value. 
-#[derive(Clone, Decode, Encode, Eq, PartialEq)] -pub struct BoundedStorageValue { - value: V, - _phantom: PhantomData, -} - -impl sp_std::fmt::Debug for BoundedStorageValue { - fn fmt(&self, fmt: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { - self.value.fmt(fmt) - } -} - -impl, V: Encode> BoundedStorageValue { - /// Construct `BoundedStorageValue` from the underlying `value` with all required checks. - /// - /// Returns error if value size exceeds given bounds. - pub fn try_from_inner(value: V) -> Result { - // this conversion is heavy (since we do encoding here), so we may want to optimize it later - // (e.g. by introducing custom Encode implementation, and turning `BoundedStorageValue` into - // `enum BoundedStorageValue { Decoded(V), Encoded(Vec) }`) - let value_size = value.encoded_size(); - let maximal_size = B::get() as usize; - if value_size > maximal_size { - Err(MaximalSizeExceededError { value_size, maximal_size }) - } else { - Ok(BoundedStorageValue { value, _phantom: Default::default() }) - } - } - - /// Convert into the inner type - pub fn into_inner(self) -> V { - self.value - } -} - -impl Deref for BoundedStorageValue { - type Target = V; - - fn deref(&self) -> &Self::Target { - &self.value - } -} - -impl TypeInfo for BoundedStorageValue { - type Identity = Self; - - fn type_info() -> Type { - V::type_info() - } -} - -impl, V: Encode> MaxEncodedLen for BoundedStorageValue { - fn max_encoded_len() -> usize { - B::get() as usize - } -} diff --git a/primitives/test-utils/Cargo.toml b/primitives/test-utils/Cargo.toml deleted file mode 100644 index b46868a0a..000000000 --- a/primitives/test-utils/Cargo.toml +++ /dev/null @@ -1,44 +0,0 @@ -[package] -name = "bp-test-utils" -version = "0.7.0" -description = "Utilities for testing substrate-based runtime bridge code" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - 
-[dependencies] -bp-header-chain = { path = "../header-chain", default-features = false } -bp-parachains = { path = "../parachains", default-features = false } -bp-polkadot-core = { path = "../polkadot-core", default-features = false } -bp-runtime = { path = "../runtime", default-features = false } -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } -ed25519-dalek = { version = "2.1", default-features = false } -finality-grandpa = { version = "0.16.2", default-features = false } -sp-application-crypto = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-trie = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[features] -default = ["std"] -std = [ - "bp-header-chain/std", - "bp-parachains/std", - "bp-polkadot-core/std", - "bp-runtime/std", - "codec/std", - "ed25519-dalek/std", - "finality-grandpa/std", - "sp-application-crypto/std", - "sp-consensus-grandpa/std", - "sp-core/std", - "sp-runtime/std", - "sp-std/std", - "sp-trie/std", -] diff --git a/primitives/test-utils/src/keyring.rs b/primitives/test-utils/src/keyring.rs deleted file mode 100644 index 22691183a..000000000 --- a/primitives/test-utils/src/keyring.rs +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Utilities for working with test accounts. - -use bp_header_chain::{justification::JustificationVerificationContext, AuthoritySet}; -use codec::Encode; -use ed25519_dalek::{Signature, SigningKey, VerifyingKey}; -use finality_grandpa::voter_set::VoterSet; -use sp_consensus_grandpa::{AuthorityId, AuthorityList, AuthorityWeight, SetId}; -use sp_runtime::RuntimeDebug; -use sp_std::prelude::*; - -/// Set of test accounts with friendly names: Alice. -pub const ALICE: Account = Account(0); -/// Set of test accounts with friendly names: Bob. -pub const BOB: Account = Account(1); -/// Set of test accounts with friendly names: Charlie. -pub const CHARLIE: Account = Account(2); -/// Set of test accounts with friendly names: Dave. -pub const DAVE: Account = Account(3); -/// Set of test accounts with friendly names: Eve. -pub const EVE: Account = Account(4); -/// Set of test accounts with friendly names: Ferdie. -pub const FERDIE: Account = Account(5); - -/// A test account which can be used to sign messages. -#[derive(RuntimeDebug, Clone, Copy)] -pub struct Account(pub u16); - -impl Account { - /// Returns public key of this account. - pub fn public(&self) -> VerifyingKey { - self.pair().verifying_key() - } - - /// Returns key pair, used to sign data on behalf of this account. 
- pub fn pair(&self) -> SigningKey { - let data = self.0.encode(); - let mut bytes = [0_u8; 32]; - bytes[0..data.len()].copy_from_slice(&data); - SigningKey::from_bytes(&bytes) - } - - /// Generate a signature of given message. - pub fn sign(&self, msg: &[u8]) -> Signature { - use ed25519_dalek::Signer; - self.pair().sign(msg) - } -} - -impl From for AuthorityId { - fn from(p: Account) -> Self { - sp_application_crypto::UncheckedFrom::unchecked_from(p.public().to_bytes()) - } -} - -/// Get a valid set of voters for a Grandpa round. -pub fn voter_set() -> VoterSet { - VoterSet::new(authority_list()).unwrap() -} - -/// Get a valid justification verification context for a GRANDPA round. -pub fn verification_context(set_id: SetId) -> JustificationVerificationContext { - AuthoritySet { authorities: authority_list(), set_id }.try_into().unwrap() -} - -/// Convenience function to get a list of Grandpa authorities. -pub fn authority_list() -> AuthorityList { - test_keyring().iter().map(|(id, w)| (AuthorityId::from(*id), *w)).collect() -} - -/// Get the corresponding identities from the keyring for the "standard" authority set. -pub fn test_keyring() -> Vec<(Account, AuthorityWeight)> { - vec![(ALICE, 1), (BOB, 1), (CHARLIE, 1)] -} - -/// Get a list of "unique" accounts. -pub fn accounts(len: u16) -> Vec { - (0..len).map(Account).collect() -} diff --git a/primitives/test-utils/src/lib.rs b/primitives/test-utils/src/lib.rs deleted file mode 100644 index f4fe4a242..000000000 --- a/primitives/test-utils/src/lib.rs +++ /dev/null @@ -1,347 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Utilities for testing runtime code. - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -use bp_header_chain::justification::{required_justification_precommits, GrandpaJustification}; -use bp_parachains::parachain_head_storage_key_at_source; -use bp_polkadot_core::parachains::{ParaHash, ParaHead, ParaHeadsProof, ParaId}; -use bp_runtime::record_all_trie_keys; -use codec::Encode; -use sp_consensus_grandpa::{AuthorityId, AuthoritySignature, AuthorityWeight, SetId}; -use sp_runtime::traits::{Header as HeaderT, One, Zero}; -use sp_std::prelude::*; -use sp_trie::{trie_types::TrieDBMutBuilderV1, LayoutV1, MemoryDB, TrieMut}; - -// Re-export all our test account utilities -pub use keyring::*; - -mod keyring; - -/// GRANDPA round number used across tests. -pub const TEST_GRANDPA_ROUND: u64 = 1; -/// GRANDPA validators set id used across tests. -pub const TEST_GRANDPA_SET_ID: SetId = 1; -/// Name of the `Paras` pallet used across tests. -pub const PARAS_PALLET_NAME: &str = "Paras"; - -/// Configuration parameters when generating test GRANDPA justifications. -#[derive(Clone)] -pub struct JustificationGeneratorParams { - /// The header which we want to finalize. - pub header: H, - /// The GRANDPA round number for the current authority set. - pub round: u64, - /// The current authority set ID. - pub set_id: SetId, - /// The current GRANDPA authority set. - /// - /// The size of the set will determine the number of pre-commits in our justification. 
- pub authorities: Vec<(Account, AuthorityWeight)>, - /// The total number of precommit ancestors in the `votes_ancestries` field our justification. - /// - /// These may be distributed among many forks. - pub ancestors: u32, - /// The number of forks. - /// - /// Useful for creating a "worst-case" scenario in which each authority is on its own fork. - pub forks: u32, -} - -impl Default for JustificationGeneratorParams { - fn default() -> Self { - let required_signatures = required_justification_precommits(test_keyring().len() as _); - Self { - header: test_header(One::one()), - round: TEST_GRANDPA_ROUND, - set_id: TEST_GRANDPA_SET_ID, - authorities: test_keyring().into_iter().take(required_signatures as _).collect(), - ancestors: 2, - forks: 1, - } - } -} - -/// Make a valid GRANDPA justification with sensible defaults -pub fn make_default_justification(header: &H) -> GrandpaJustification { - let params = JustificationGeneratorParams:: { header: header.clone(), ..Default::default() }; - - make_justification_for_header(params) -} - -/// Generate justifications in a way where we are able to tune the number of pre-commits -/// and vote ancestries which are included in the justification. -/// -/// This is useful for benchmarks where we want to generate valid justifications with -/// a specific number of pre-commits (tuned with the number of "authorities") and/or a specific -/// number of vote ancestries (tuned with the "votes" parameter). -/// -/// Note: This needs at least three authorities or else the verifier will complain about -/// being given an invalid commit. 
-pub fn make_justification_for_header( - params: JustificationGeneratorParams, -) -> GrandpaJustification { - let JustificationGeneratorParams { header, round, set_id, authorities, mut ancestors, forks } = - params; - let (target_hash, target_number) = (header.hash(), *header.number()); - let mut votes_ancestries = vec![]; - let mut precommits = vec![]; - - assert!(forks != 0, "Need at least one fork to have a chain.."); - assert!( - forks as usize <= authorities.len(), - "If we have more forks than authorities we can't create valid pre-commits for all the forks." - ); - - // Roughly, how many vote ancestries do we want per fork - let target_depth = (ancestors + forks - 1) / forks; - - let mut unsigned_precommits = vec![]; - for i in 0..forks { - let depth = if ancestors >= target_depth { - ancestors -= target_depth; - target_depth - } else { - ancestors - }; - - // Note: Adding 1 to account for the target header - let chain = generate_chain(i, depth + 1, &header); - - // We don't include our finality target header in the vote ancestries - for child in &chain[1..] { - votes_ancestries.push(child.clone()); - } - - // The header we need to use when pre-committing is the one at the highest height - // on our chain. 
- let precommit_candidate = chain.last().map(|h| (h.hash(), *h.number())).unwrap(); - unsigned_precommits.push(precommit_candidate); - } - - for (i, (id, _weight)) in authorities.iter().enumerate() { - // Assign authorities to sign pre-commits in a round-robin fashion - let target = unsigned_precommits[i % forks as usize]; - let precommit = signed_precommit::(id, target, round, set_id); - - precommits.push(precommit); - } - - GrandpaJustification { - round, - commit: finality_grandpa::Commit { target_hash, target_number, precommits }, - votes_ancestries, - } -} - -fn generate_chain(fork_id: u32, depth: u32, ancestor: &H) -> Vec { - let mut headers = vec![ancestor.clone()]; - - for i in 1..depth { - let parent = &headers[(i - 1) as usize]; - let (hash, num) = (parent.hash(), *parent.number()); - - let mut header = test_header::(num + One::one()); - header.set_parent_hash(hash); - - // Modifying the digest so headers at the same height but in different forks have different - // hashes - header.digest_mut().logs.push(sp_runtime::DigestItem::Other(fork_id.encode())); - - headers.push(header); - } - - headers -} - -/// Make valid proof for parachain `heads` -pub fn prepare_parachain_heads_proof( - heads: Vec<(u32, ParaHead)>, -) -> (H::Hash, ParaHeadsProof, Vec<(ParaId, ParaHash)>) { - let mut parachains = Vec::with_capacity(heads.len()); - let mut root = Default::default(); - let mut mdb = MemoryDB::default(); - { - let mut trie = TrieDBMutBuilderV1::::new(&mut mdb, &mut root).build(); - for (parachain, head) in heads { - let storage_key = - parachain_head_storage_key_at_source(PARAS_PALLET_NAME, ParaId(parachain)); - trie.insert(&storage_key.0, &head.encode()) - .map_err(|_| "TrieMut::insert has failed") - .expect("TrieMut::insert should not fail in tests"); - parachains.push((ParaId(parachain), head.hash())); - } - } - - // generate storage proof to be delivered to This chain - let storage_proof = record_all_trie_keys::, _>(&mdb, &root) - .map_err(|_| 
"record_all_trie_keys has failed") - .expect("record_all_trie_keys should not fail in benchmarks"); - - (root, ParaHeadsProof { storage_proof }, parachains) -} - -/// Create signed precommit with given target. -pub fn signed_precommit( - signer: &Account, - target: (H::Hash, H::Number), - round: u64, - set_id: SetId, -) -> finality_grandpa::SignedPrecommit { - let precommit = finality_grandpa::Precommit { target_hash: target.0, target_number: target.1 }; - - let encoded = sp_consensus_grandpa::localized_payload( - round, - set_id, - &finality_grandpa::Message::Precommit(precommit.clone()), - ); - - let signature = signer.sign(&encoded); - let raw_signature: Vec = signature.to_bytes().into(); - - // Need to wrap our signature and id types that they match what our `SignedPrecommit` is - // expecting - let signature = AuthoritySignature::try_from(raw_signature).expect( - "We know our Keypair is good, - so our signature must also be good.", - ); - let id = (*signer).into(); - - finality_grandpa::SignedPrecommit { precommit, signature, id } -} - -/// Get a header for testing. -/// -/// The correct parent hash will be used if given a non-zero header. -pub fn test_header(number: H::Number) -> H { - let default = |num| { - H::new(num, Default::default(), Default::default(), Default::default(), Default::default()) - }; - - let mut header = default(number); - if number != Zero::zero() { - let parent_hash = default(number - One::one()).hash(); - header.set_parent_hash(parent_hash); - } - - header -} - -/// Get a header for testing with given `state_root`. -/// -/// The correct parent hash will be used if given a non-zero header. -pub fn test_header_with_root(number: H::Number, state_root: H::Hash) -> H { - let mut header: H = test_header(number); - header.set_state_root(state_root); - header -} - -/// Convenience function for generating a Header ID at a given block number. 
-pub fn header_id(index: u8) -> (H::Hash, H::Number) { - (test_header::(index.into()).hash(), index.into()) -} - -#[macro_export] -/// Adds methods for testing the `set_owner()` and `set_operating_mode()` for a pallet. -/// Some values are hardcoded like: -/// - `run_test()` -/// - `Pallet::` -/// - `PalletOwner::` -/// - `PalletOperatingMode::` -/// While this is not ideal, all the pallets use the same names, so it works for the moment. -/// We can revisit this in the future if anything changes. -macro_rules! generate_owned_bridge_module_tests { - ($normal_operating_mode: expr, $halted_operating_mode: expr) => { - #[test] - fn test_set_owner() { - run_test(|| { - PalletOwner::::put(1); - - // The root should be able to change the owner. - assert_ok!(Pallet::::set_owner(RuntimeOrigin::root(), Some(2))); - assert_eq!(PalletOwner::::get(), Some(2)); - - // The owner should be able to change the owner. - assert_ok!(Pallet::::set_owner(RuntimeOrigin::signed(2), Some(3))); - assert_eq!(PalletOwner::::get(), Some(3)); - - // Other users shouldn't be able to change the owner. - assert_noop!( - Pallet::::set_owner(RuntimeOrigin::signed(1), Some(4)), - DispatchError::BadOrigin - ); - assert_eq!(PalletOwner::::get(), Some(3)); - }); - } - - #[test] - fn test_set_operating_mode() { - run_test(|| { - PalletOwner::::put(1); - PalletOperatingMode::::put($normal_operating_mode); - - // The root should be able to halt the pallet. - assert_ok!(Pallet::::set_operating_mode( - RuntimeOrigin::root(), - $halted_operating_mode - )); - assert_eq!(PalletOperatingMode::::get(), $halted_operating_mode); - // The root should be able to resume the pallet. - assert_ok!(Pallet::::set_operating_mode( - RuntimeOrigin::root(), - $normal_operating_mode - )); - assert_eq!(PalletOperatingMode::::get(), $normal_operating_mode); - - // The owner should be able to halt the pallet. 
- assert_ok!(Pallet::::set_operating_mode( - RuntimeOrigin::signed(1), - $halted_operating_mode - )); - assert_eq!(PalletOperatingMode::::get(), $halted_operating_mode); - // The owner should be able to resume the pallet. - assert_ok!(Pallet::::set_operating_mode( - RuntimeOrigin::signed(1), - $normal_operating_mode - )); - assert_eq!(PalletOperatingMode::::get(), $normal_operating_mode); - - // Other users shouldn't be able to halt the pallet. - assert_noop!( - Pallet::::set_operating_mode( - RuntimeOrigin::signed(2), - $halted_operating_mode - ), - DispatchError::BadOrigin - ); - assert_eq!(PalletOperatingMode::::get(), $normal_operating_mode); - // Other users shouldn't be able to resume the pallet. - PalletOperatingMode::::put($halted_operating_mode); - assert_noop!( - Pallet::::set_operating_mode( - RuntimeOrigin::signed(2), - $normal_operating_mode - ), - DispatchError::BadOrigin - ); - assert_eq!(PalletOperatingMode::::get(), $halted_operating_mode); - }); - } - }; -} diff --git a/primitives/xcm-bridge-hub-router/Cargo.toml b/primitives/xcm-bridge-hub-router/Cargo.toml deleted file mode 100644 index c3fe409b6..000000000 --- a/primitives/xcm-bridge-hub-router/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -name = "bp-xcm-bridge-hub-router" -description = "Primitives of the xcm-bridge-hub fee pallet." 
-version = "0.6.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["bit-vec", "derive"] } -scale-info = { version = "2.11.1", default-features = false, features = ["bit-vec", "derive"] } - -# Substrate Dependencies -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[features] -default = ["std"] -std = ["codec/std", "scale-info/std", "sp-core/std", "sp-runtime/std"] diff --git a/primitives/xcm-bridge-hub-router/src/lib.rs b/primitives/xcm-bridge-hub-router/src/lib.rs deleted file mode 100644 index dbedb7a52..000000000 --- a/primitives/xcm-bridge-hub-router/src/lib.rs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives of the `xcm-bridge-hub-router` pallet. 
- -#![cfg_attr(not(feature = "std"), no_std)] - -use codec::{Decode, Encode, MaxEncodedLen}; -use scale_info::TypeInfo; -use sp_core::H256; -use sp_runtime::{FixedU128, RuntimeDebug}; - -/// Minimal delivery fee factor. -pub const MINIMAL_DELIVERY_FEE_FACTOR: FixedU128 = FixedU128::from_u32(1); - -/// XCM channel status provider that may report whether it is congested or not. -/// -/// By channel we mean the physical channel that is used to deliver messages of one -/// of the bridge queues. -pub trait XcmChannelStatusProvider { - /// Returns true if the channel is currently congested. - fn is_congested() -> bool; -} - -impl XcmChannelStatusProvider for () { - fn is_congested() -> bool { - false - } -} - -/// Current status of the bridge. -#[derive(Clone, Decode, Encode, Eq, PartialEq, TypeInfo, MaxEncodedLen, RuntimeDebug)] -pub struct BridgeState { - /// Current delivery fee factor. - pub delivery_fee_factor: FixedU128, - /// Bridge congestion flag. - pub is_congested: bool, -} - -impl Default for BridgeState { - fn default() -> BridgeState { - BridgeState { delivery_fee_factor: MINIMAL_DELIVERY_FEE_FACTOR, is_congested: false } - } -} - -/// A minimized version of `pallet-xcm-bridge-hub-router::Call` that can be used without a runtime. -#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, TypeInfo)] -#[allow(non_camel_case_types)] -pub enum XcmBridgeHubRouterCall { - /// `pallet-xcm-bridge-hub-router::Call::report_bridge_status` - #[codec(index = 0)] - report_bridge_status { bridge_id: H256, is_congested: bool }, -} diff --git a/primitives/xcm-bridge-hub/Cargo.toml b/primitives/xcm-bridge-hub/Cargo.toml deleted file mode 100644 index 904307100..000000000 --- a/primitives/xcm-bridge-hub/Cargo.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -name = "bp-xcm-bridge-hub" -description = "Primitives of the xcm-bridge-hub pallet." 
-version = "0.2.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true - -[lints] -workspace = true - -[dependencies] - -# Substrate Dependencies -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master", default-features = false } - -[features] -default = ["std"] -std = ["sp-std/std"] diff --git a/primitives/xcm-bridge-hub/src/lib.rs b/primitives/xcm-bridge-hub/src/lib.rs deleted file mode 100644 index 9745011c9..000000000 --- a/primitives/xcm-bridge-hub/src/lib.rs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives of the xcm-bridge-hub pallet. - -#![warn(missing_docs)] -#![cfg_attr(not(feature = "std"), no_std)] - -/// Encoded XCM blob. We expect the bridge messages pallet to use this blob type for both inbound -/// and outbound payloads. 
-pub type XcmAsPlainPayload = sp_std::vec::Vec; diff --git a/relay-clients/client-bridge-hub-kusama/Cargo.toml b/relay-clients/client-bridge-hub-kusama/Cargo.toml index b1bffa0f6..ff2754f5a 100644 --- a/relay-clients/client-bridge-hub-kusama/Cargo.toml +++ b/relay-clients/client-bridge-hub-kusama/Cargo.toml @@ -12,19 +12,18 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -#relay-substrate-client = { path = "../client-substrate" } subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies -bp-bridge-hub-kusama = { path = "../../chains/chain-bridge-hub-kusama" } -bp-header-chain = { path = "../../primitives/header-chain" } -bp-messages = { path = "../../primitives/messages" } -bp-parachains = { path = "../../primitives/parachains" } -bp-polkadot = { path = "../../chains/chain-polkadot" } -bp-polkadot-core = { path = "../../primitives/polkadot-core" } -bridge-runtime-common = { path = "../../bin/runtime-common" } -relay-substrate-client = { path = "../../relays/client-substrate" } +bp-bridge-hub-kusama = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-header-chain = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-messages = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-parachains = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-polkadot = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-polkadot-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bridge-runtime-common = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +relay-substrate-client = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } # Substrate Dependencies diff --git 
a/relay-clients/client-bridge-hub-polkadot/Cargo.toml b/relay-clients/client-bridge-hub-polkadot/Cargo.toml index c7de55c89..89874eff4 100644 --- a/relay-clients/client-bridge-hub-polkadot/Cargo.toml +++ b/relay-clients/client-bridge-hub-polkadot/Cargo.toml @@ -16,16 +16,16 @@ subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies -bp-bridge-hub-polkadot = { path = "../../chains/chain-bridge-hub-polkadot" } -bp-header-chain = { path = "../../primitives/header-chain" } -bp-messages = { path = "../../primitives/messages" } -bp-parachains = { path = "../../primitives/parachains" } -bp-polkadot = { path = "../../chains/chain-polkadot" } -bp-polkadot-core = { path = "../../primitives/polkadot-core" } -bp-kusama = { path = "../../chains/chain-kusama" } -bp-runtime = { path = "../../primitives/runtime" } -bridge-runtime-common = { path = "../../bin/runtime-common" } -relay-substrate-client = { path = "../../relays/client-substrate" } +bp-bridge-hub-polkadot = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-header-chain = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-messages = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-parachains = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-polkadot = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-polkadot-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-kusama = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bridge-runtime-common = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +relay-substrate-client = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } # Substrate Dependencies diff --git 
a/relay-clients/client-bridge-hub-rococo/Cargo.toml b/relay-clients/client-bridge-hub-rococo/Cargo.toml index b354773a0..d9d6d5ebd 100644 --- a/relay-clients/client-bridge-hub-rococo/Cargo.toml +++ b/relay-clients/client-bridge-hub-rococo/Cargo.toml @@ -16,15 +16,15 @@ subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies -bp-bridge-hub-rococo = { path = "../../chains/chain-bridge-hub-rococo" } -bp-header-chain = { path = "../../primitives/header-chain" } -bp-messages = { path = "../../primitives/messages" } -bp-parachains = { path = "../../primitives/parachains" } -bp-polkadot-core = { path = "../../primitives/polkadot-core" } -bp-runtime = { path = "../../primitives/runtime" } +bp-bridge-hub-rococo = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-header-chain = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-messages = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-parachains = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-polkadot-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -bridge-runtime-common = { path = "../../bin/runtime-common" } -relay-substrate-client = { path = "../../relays/client-substrate" } +bridge-runtime-common = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +relay-substrate-client = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } # Substrate Dependencies diff --git a/relay-clients/client-bridge-hub-westend/Cargo.toml b/relay-clients/client-bridge-hub-westend/Cargo.toml index b7fc3bcb1..f28c97c40 100644 --- a/relay-clients/client-bridge-hub-westend/Cargo.toml +++ b/relay-clients/client-bridge-hub-westend/Cargo.toml @@ -16,15 +16,15 @@ subxt = { version = "0.32.1", default-features = false, features = 
["native"] } # Bridge dependencies -bp-bridge-hub-westend = { path = "../../chains/chain-bridge-hub-westend" } -bp-header-chain = { path = "../../primitives/header-chain" } -bp-messages = { path = "../../primitives/messages" } -bp-parachains = { path = "../../primitives/parachains" } -bp-polkadot-core = { path = "../../primitives/polkadot-core" } -bp-rococo = { path = "../../chains/chain-rococo" } +bp-bridge-hub-westend = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-header-chain = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-messages = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-parachains = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-polkadot-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-rococo = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -bridge-runtime-common = { path = "../../bin/runtime-common" } -relay-substrate-client = { path = "../../relays/client-substrate" } +bridge-runtime-common = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +relay-substrate-client = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } # Substrate Dependencies @@ -33,5 +33,5 @@ sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "mas sp-weights = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } [dev-dependencies] -bp-runtime = { path = "../../primitives/runtime" } +bp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } sp-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } diff --git a/relay-clients/client-kusama/Cargo.toml b/relay-clients/client-kusama/Cargo.toml index ea3e9f305..a19038fe3 100644 --- a/relay-clients/client-kusama/Cargo.toml +++ b/relay-clients/client-kusama/Cargo.toml @@ -16,12 +16,12 @@ subxt = { 
version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies -bp-kusama = { path = "../../chains/chain-kusama" } -bp-polkadot-core = { path = "../../primitives/polkadot-core" } -bp-runtime = { path = "../../primitives/runtime" } +bp-kusama = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-polkadot-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -relay-substrate-client = { path = "../../relays/client-substrate" } -relay-utils = { path = "../../relays/utils" } +relay-substrate-client = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +relay-utils = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } # Substrate Dependencies diff --git a/relay-clients/client-polkadot-bulletin/Cargo.toml b/relay-clients/client-polkadot-bulletin/Cargo.toml index 6a1b61525..c9f3762fe 100644 --- a/relay-clients/client-polkadot-bulletin/Cargo.toml +++ b/relay-clients/client-polkadot-bulletin/Cargo.toml @@ -16,14 +16,14 @@ subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies -bp-header-chain = { path = "../../primitives/header-chain" } -bp-messages = { path = "../../primitives/messages" } -bp-polkadot-core = { path = "../../primitives/polkadot-core" } -bp-polkadot-bulletin = { path = "../../chains/chain-polkadot-bulletin" } -bp-runtime = { path = "../../primitives/runtime" } -bridge-runtime-common = { path = "../../bin/runtime-common" } -relay-substrate-client = { path = "../../relays/client-substrate" } -relay-utils = { path = "../../relays/utils" } +bp-header-chain = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-messages = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-polkadot-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = 
"master" } +bp-polkadot-bulletin = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bridge-runtime-common = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +relay-substrate-client = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +relay-utils = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } # Substrate Dependencies diff --git a/relay-clients/client-polkadot/Cargo.toml b/relay-clients/client-polkadot/Cargo.toml index 969fc37a8..9233e529e 100644 --- a/relay-clients/client-polkadot/Cargo.toml +++ b/relay-clients/client-polkadot/Cargo.toml @@ -16,12 +16,12 @@ subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies -bp-polkadot = { path = "../../chains/chain-polkadot" } -bp-polkadot-core = { path = "../../primitives/polkadot-core" } -bp-runtime = { path = "../../primitives/runtime" } +bp-polkadot = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-polkadot-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -relay-substrate-client = { path = "../../relays/client-substrate" } -relay-utils = { path = "../../relays/utils" } +relay-substrate-client = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +relay-utils = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } # Substrate Dependencies diff --git a/relay-clients/client-rococo/Cargo.toml b/relay-clients/client-rococo/Cargo.toml index 445707f5a..3df01015e 100644 --- a/relay-clients/client-rococo/Cargo.toml +++ b/relay-clients/client-rococo/Cargo.toml @@ -16,11 +16,11 @@ subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies -bp-polkadot-core = { path = 
"../../primitives/polkadot-core" } -bp-rococo = { path = "../../chains/chain-rococo" } +bp-polkadot-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-rococo = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -relay-substrate-client = { path = "../../relays/client-substrate" } -relay-utils = { path = "../../relays/utils" } +relay-substrate-client = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +relay-utils = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } # Substrate Dependencies diff --git a/relay-clients/client-westend/Cargo.toml b/relay-clients/client-westend/Cargo.toml index b69ddd990..dc252ea47 100644 --- a/relay-clients/client-westend/Cargo.toml +++ b/relay-clients/client-westend/Cargo.toml @@ -16,12 +16,12 @@ subxt = { version = "0.32.1", default-features = false, features = ["native"] } # Bridge dependencies -bp-polkadot-core = { path = "../../primitives/polkadot-core" } -bp-runtime = { path = "../../primitives/runtime" } -bp-westend = { path = "../../chains/chain-westend" } +bp-polkadot-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-westend = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -relay-substrate-client = { path = "../../relays/client-substrate" } -relay-utils = { path = "../../relays/utils" } +relay-substrate-client = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +relay-utils = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } # Substrate Dependencies diff --git a/relays/client-substrate/Cargo.toml b/relays/client-substrate/Cargo.toml deleted file mode 100644 index 85ebce1f9..000000000 --- a/relays/client-substrate/Cargo.toml +++ /dev/null @@ -1,61 +0,0 @@ -[package] -name = "relay-substrate-client" -version = "0.1.0" 
-authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true -publish = false - -[lints] -workspace = true - -[dependencies] -async-std = { version = "1.9.0", features = ["attributes"] } -async-trait = "0.1.79" -codec = { package = "parity-scale-codec", version = "3.6.1" } -futures = "0.3.30" -jsonrpsee = { version = "0.22", features = ["macros", "ws-client"] } -log = { workspace = true } -num-traits = "0.2" -rand = "0.8.5" -scale-info = { version = "2.11.1", features = ["derive"] } -tokio = { version = "1.37", features = ["rt-multi-thread"] } -thiserror = { workspace = true } - -# Bridge dependencies - -bp-header-chain = { path = "../../primitives/header-chain" } -bp-messages = { path = "../../primitives/messages" } -bp-polkadot-core = { path = "../../primitives/polkadot-core" } -bp-runtime = { path = "../../primitives/runtime" } -pallet-bridge-messages = { path = "../../modules/messages" } -finality-relay = { path = "../finality" } -relay-utils = { path = "../utils" } - -# Substrate Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -frame-system = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -pallet-balances = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -pallet-transaction-payment = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -pallet-transaction-payment-rpc-runtime-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -pallet-utility = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sc-chain-spec = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sc-rpc-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sc-transaction-pool-api = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-consensus-grandpa = { git = 
"https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-rpc = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-std = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-trie = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-version = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } - -# Polkadot Dependencies - -xcm = { package = "staging-xcm", git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } - -[features] -default = [] -test-helpers = [] diff --git a/relays/client-substrate/src/calls.rs b/relays/client-substrate/src/calls.rs deleted file mode 100644 index 71b9ec84a..000000000 --- a/relays/client-substrate/src/calls.rs +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2019-2023 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Basic runtime calls. - -use codec::{Decode, Encode}; -use scale_info::TypeInfo; -use sp_std::{boxed::Box, vec::Vec}; - -use xcm::{VersionedLocation, VersionedXcm}; - -/// A minimized version of `frame-system::Call` that can be used without a runtime. 
-#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, TypeInfo)] -#[allow(non_camel_case_types)] -pub enum SystemCall { - /// `frame-system::Call::remark` - #[codec(index = 1)] - remark(Vec), -} - -/// A minimized version of `pallet-utility::Call` that can be used without a runtime. -#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, TypeInfo)] -#[allow(non_camel_case_types)] -pub enum UtilityCall { - /// `pallet-utility::Call::batch_all` - #[codec(index = 2)] - batch_all(Vec), -} - -/// A minimized version of `pallet-sudo::Call` that can be used without a runtime. -#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, TypeInfo)] -#[allow(non_camel_case_types)] -pub enum SudoCall { - /// `pallet-sudo::Call::sudo` - #[codec(index = 0)] - sudo(Box), -} - -/// A minimized version of `pallet-xcm::Call`, that can be used without a runtime. -#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, TypeInfo)] -#[allow(non_camel_case_types)] -pub enum XcmCall { - /// `pallet-xcm::Call::send` - #[codec(index = 0)] - send(Box, Box>), -} diff --git a/relays/client-substrate/src/chain.rs b/relays/client-substrate/src/chain.rs deleted file mode 100644 index 2aba5f567..000000000 --- a/relays/client-substrate/src/chain.rs +++ /dev/null @@ -1,286 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. 
If not, see . - -use crate::calls::UtilityCall; - -use crate::SimpleRuntimeVersion; -use bp_header_chain::ChainWithGrandpa as ChainWithGrandpaBase; -use bp_messages::ChainWithMessages as ChainWithMessagesBase; -use bp_runtime::{ - Chain as ChainBase, EncodedOrDecodedCall, HashOf, Parachain as ParachainBase, TransactionEra, - TransactionEraOf, UnderlyingChainProvider, -}; -use codec::{Codec, Decode, Encode}; -use jsonrpsee::core::{DeserializeOwned, Serialize}; -use num_traits::Zero; -use sc_transaction_pool_api::TransactionStatus; -use scale_info::TypeInfo; -use sp_core::{storage::StorageKey, Pair}; -use sp_runtime::{ - generic::SignedBlock, - traits::{Block as BlockT, Member}, - ConsensusEngineId, EncodedJustification, -}; -use std::{fmt::Debug, time::Duration}; - -/// Substrate-based chain from minimal relay-client point of view. -pub trait Chain: ChainBase + Clone { - /// Chain name. - const NAME: &'static str; - /// Name of the runtime API method that is returning best known finalized header number - /// and hash (as tuple). - /// - /// Keep in mind that this method is normally provided by the other chain, which is - /// bridged with this chain. - const BEST_FINALIZED_HEADER_ID_METHOD: &'static str; - - /// Average block interval. - /// - /// How often blocks are produced on that chain. It's suggested to set this value - /// to match the block time of the chain. - const AVERAGE_BLOCK_INTERVAL: Duration; - - /// Block type. - type SignedBlock: Member + Serialize + DeserializeOwned + BlockWithJustification; - /// The aggregated `Call` type. - type Call: Clone + Codec + Debug + Send + Sync; -} - -/// Bridge-supported network definition. -/// -/// Used to abstract away CLI commands. -pub trait ChainWithRuntimeVersion: Chain { - /// Current version of the chain runtime, known to relay. - /// - /// can be `None` if relay is not going to submit transactions to that chain. - const RUNTIME_VERSION: Option; -} - -/// Substrate-based relay chain that supports parachains. 
-/// -/// We assume that the parachains are supported using `runtime_parachains::paras` pallet. -pub trait RelayChain: Chain { - /// Name of the `runtime_parachains::paras` pallet in the runtime of this chain. - const PARAS_PALLET_NAME: &'static str; -} - -/// Substrate-based chain that is using direct GRANDPA finality from minimal relay-client point of -/// view. -/// -/// Keep in mind that parachains are relying on relay chain GRANDPA, so they should not implement -/// this trait. -pub trait ChainWithGrandpa: Chain + ChainWithGrandpaBase { - /// Name of the runtime API method that is returning the GRANDPA info associated with the - /// headers accepted by the `submit_finality_proofs` extrinsic in the queried block. - /// - /// Keep in mind that this method is normally provided by the other chain, which is - /// bridged with this chain. - const SYNCED_HEADERS_GRANDPA_INFO_METHOD: &'static str; - - /// The type of the key owner proof used by the grandpa engine. - type KeyOwnerProof: Decode + TypeInfo + Send; -} - -/// Substrate-based parachain from minimal relay-client point of view. -pub trait Parachain: Chain + ParachainBase {} - -impl Parachain for T where T: UnderlyingChainProvider + Chain + ParachainBase {} - -/// Substrate-based chain with messaging support from minimal relay-client point of view. -pub trait ChainWithMessages: Chain + ChainWithMessagesBase { - // TODO (https://github.com/paritytech/parity-bridges-common/issues/1692): check all the names - // after the issue is fixed - all names must be changed - - /// Name of the bridge relayers pallet (used in `construct_runtime` macro call) that is deployed - /// at some other chain to bridge with this `ChainWithMessages`. - /// - /// We assume that all chains that are bridging with this `ChainWithMessages` are using - /// the same name. - const WITH_CHAIN_RELAYERS_PALLET_NAME: Option<&'static str>; - - /// Name of the `ToOutboundLaneApi::message_details` runtime API method. 
- /// The method is provided by the runtime that is bridged with this `ChainWithMessages`. - const TO_CHAIN_MESSAGE_DETAILS_METHOD: &'static str; - - /// Name of the `FromInboundLaneApi::message_details` runtime API method. - /// The method is provided by the runtime that is bridged with this `ChainWithMessages`. - const FROM_CHAIN_MESSAGE_DETAILS_METHOD: &'static str; -} - -/// Call type used by the chain. -pub type CallOf = ::Call; -/// Transaction status of the chain. -pub type TransactionStatusOf = TransactionStatus, HashOf>; - -/// Substrate-based chain with `AccountData` generic argument of `frame_system::AccountInfo` set to -/// the `pallet_balances::AccountData`. -pub trait ChainWithBalances: Chain { - /// Return runtime storage key for getting `frame_system::AccountInfo` of given account. - fn account_info_storage_key(account_id: &Self::AccountId) -> StorageKey; -} - -/// SCALE-encoded extrinsic. -pub type EncodedExtrinsic = Vec; - -/// Block with justification. -pub trait BlockWithJustification
{ - /// Return block header. - fn header(&self) -> Header; - /// Return encoded block extrinsics. - fn extrinsics(&self) -> Vec; - /// Return block justification, if known. - fn justification(&self, engine_id: ConsensusEngineId) -> Option<&EncodedJustification>; -} - -/// Transaction before it is signed. -#[derive(Clone, Debug, PartialEq)] -pub struct UnsignedTransaction { - /// Runtime call of this transaction. - pub call: EncodedOrDecodedCall, - /// Transaction nonce. - pub nonce: C::Nonce, - /// Tip included into transaction. - pub tip: C::Balance, - /// Transaction era used by the chain. - pub era: TransactionEraOf, -} - -impl UnsignedTransaction { - /// Create new unsigned transaction with given call, nonce, era and zero tip. - pub fn new(call: EncodedOrDecodedCall, nonce: C::Nonce) -> Self { - Self { call, nonce, era: TransactionEra::Immortal, tip: Zero::zero() } - } - - /// Convert to the transaction of the other compatible chain. - pub fn switch_chain(self) -> UnsignedTransaction - where - Other: Chain< - Nonce = C::Nonce, - Balance = C::Balance, - BlockNumber = C::BlockNumber, - Hash = C::Hash, - >, - { - UnsignedTransaction { - call: EncodedOrDecodedCall::Encoded(self.call.into_encoded()), - nonce: self.nonce, - tip: self.tip, - era: self.era, - } - } - - /// Set transaction tip. - #[must_use] - pub fn tip(mut self, tip: C::Balance) -> Self { - self.tip = tip; - self - } - - /// Set transaction era. - #[must_use] - pub fn era(mut self, era: TransactionEraOf) -> Self { - self.era = era; - self - } -} - -/// Account key pair used by transactions signing scheme. -pub type AccountKeyPairOf = ::AccountKeyPair; - -/// Substrate-based chain transactions signing scheme. -pub trait ChainWithTransactions: Chain { - /// Type of key pairs used to sign transactions. - type AccountKeyPair: Pair + Clone + Send + Sync; - /// Signed transaction. 
- type SignedTransaction: Clone + Debug + Codec + Send + 'static; - - /// Create transaction for given runtime call, signed by given account. - fn sign_transaction( - param: SignParam, - unsigned: UnsignedTransaction, - ) -> Result - where - Self: Sized; -} - -/// Sign transaction parameters -pub struct SignParam { - /// Version of the runtime specification. - pub spec_version: u32, - /// Transaction version - pub transaction_version: u32, - /// Hash of the genesis block. - pub genesis_hash: HashOf, - /// Signer account - pub signer: AccountKeyPairOf, -} - -impl BlockWithJustification for SignedBlock { - fn header(&self) -> Block::Header { - self.block.header().clone() - } - - fn extrinsics(&self) -> Vec { - self.block.extrinsics().iter().map(Encode::encode).collect() - } - - fn justification(&self, engine_id: ConsensusEngineId) -> Option<&EncodedJustification> { - self.justifications.as_ref().and_then(|j| j.get(engine_id)) - } -} - -/// Trait that provides functionality defined inside `pallet-utility` -pub trait UtilityPallet { - /// Create batch call from given calls vector. - fn build_batch_call(calls: Vec) -> C::Call; -} - -/// Structure that implements `UtilityPalletProvider` based on a full runtime. -pub struct FullRuntimeUtilityPallet { - _phantom: std::marker::PhantomData, -} - -impl UtilityPallet for FullRuntimeUtilityPallet -where - C: Chain, - R: pallet_utility::Config, - ::RuntimeCall: From>, -{ - fn build_batch_call(calls: Vec) -> C::Call { - pallet_utility::Call::batch_all { calls }.into() - } -} - -/// Structure that implements `UtilityPalletProvider` based on a call conversion. -pub struct MockedRuntimeUtilityPallet { - _phantom: std::marker::PhantomData, -} - -impl UtilityPallet for MockedRuntimeUtilityPallet -where - C: Chain, - C::Call: From>, -{ - fn build_batch_call(calls: Vec) -> C::Call { - UtilityCall::batch_all(calls).into() - } -} - -/// Substrate-based chain that uses `pallet-utility`. 
-pub trait ChainWithUtilityPallet: Chain { - /// The utility pallet provider. - type UtilityPallet: UtilityPallet; -} diff --git a/relays/client-substrate/src/client.rs b/relays/client-substrate/src/client.rs deleted file mode 100644 index afbda8599..000000000 --- a/relays/client-substrate/src/client.rs +++ /dev/null @@ -1,990 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Substrate node client. 
- -use crate::{ - chain::{Chain, ChainWithTransactions}, - guard::Environment, - rpc::{ - SubstrateAuthorClient, SubstrateChainClient, SubstrateFinalityClient, - SubstrateFrameSystemClient, SubstrateStateClient, SubstrateSystemClient, - }, - transaction_stall_timeout, AccountKeyPairOf, ChainWithGrandpa, ConnectionParams, Error, HashOf, - HeaderIdOf, Result, SignParam, TransactionTracker, UnsignedTransaction, -}; - -use async_std::sync::{Arc, Mutex, RwLock}; -use async_trait::async_trait; -use bp_runtime::{HeaderIdProvider, StorageDoubleMapKeyProvider, StorageMapKeyProvider}; -use codec::{Decode, Encode}; -use frame_support::weights::Weight; -use futures::{SinkExt, StreamExt}; -use jsonrpsee::{ - core::DeserializeOwned, - ws_client::{WsClient as RpcClient, WsClientBuilder as RpcClientBuilder}, -}; -use num_traits::{Saturating, Zero}; -use pallet_transaction_payment::RuntimeDispatchInfo; -use relay_utils::{relay_loop::RECONNECT_DELAY, STALL_TIMEOUT}; -use sp_core::{ - storage::{StorageData, StorageKey}, - Bytes, Hasher, Pair, -}; -use sp_runtime::{ - traits::Header as HeaderT, - transaction_validity::{TransactionSource, TransactionValidity}, -}; -use sp_trie::StorageProof; -use sp_version::RuntimeVersion; -use std::{cmp::Ordering, future::Future}; - -const SUB_API_GRANDPA_AUTHORITIES: &str = "GrandpaApi_grandpa_authorities"; -const SUB_API_GRANDPA_GENERATE_KEY_OWNERSHIP_PROOF: &str = - "GrandpaApi_generate_key_ownership_proof"; -const SUB_API_TXPOOL_VALIDATE_TRANSACTION: &str = "TaggedTransactionQueue_validate_transaction"; -const SUB_API_TX_PAYMENT_QUERY_INFO: &str = "TransactionPaymentApi_query_info"; -const MAX_SUBSCRIPTION_CAPACITY: usize = 4096; - -/// The difference between best block number and number of its ancestor, that is enough -/// for us to consider that ancestor an "ancient" block with dropped state. -/// -/// The relay does not assume that it is connected to the archive node, so it always tries -/// to use the best available chain state. 
But sometimes it still may use state of some -/// old block. If the state of that block is already dropped, relay will see errors when -/// e.g. it tries to prove something. -/// -/// By default Substrate-based nodes are storing state for last 256 blocks. We'll use -/// half of this value. -pub const ANCIENT_BLOCK_THRESHOLD: u32 = 128; - -/// Returns `true` if we think that the state is already discarded for given block. -pub fn is_ancient_block + PartialOrd + Saturating>(block: N, best: N) -> bool { - best.saturating_sub(block) >= N::from(ANCIENT_BLOCK_THRESHOLD) -} - -/// Opaque justifications subscription type. -pub struct Subscription(pub(crate) Mutex>>); - -/// Opaque GRANDPA authorities set. -pub type OpaqueGrandpaAuthoritiesSet = Vec; - -/// A simple runtime version. It only includes the `spec_version` and `transaction_version`. -#[derive(Copy, Clone, Debug)] -pub struct SimpleRuntimeVersion { - /// Version of the runtime specification. - pub spec_version: u32, - /// All existing dispatches are fully compatible when this number doesn't change. - pub transaction_version: u32, -} - -impl SimpleRuntimeVersion { - /// Create a new instance of `SimpleRuntimeVersion` from a `RuntimeVersion`. - pub const fn from_runtime_version(runtime_version: &RuntimeVersion) -> Self { - Self { - spec_version: runtime_version.spec_version, - transaction_version: runtime_version.transaction_version, - } - } -} - -/// Chain runtime version in client -#[derive(Copy, Clone, Debug)] -pub enum ChainRuntimeVersion { - /// Auto query from chain. - Auto, - /// Custom runtime version, defined by user. - Custom(SimpleRuntimeVersion), -} - -/// Substrate client type. -/// -/// Cloning `Client` is a cheap operation that only clones internal references. Different -/// clones of the same client are guaranteed to use the same references. -pub struct Client { - // Lock order: `submit_signed_extrinsic_lock`, `data` - /// Client connection params. - params: Arc, - /// Saved chain runtime version. 
- chain_runtime_version: ChainRuntimeVersion, - /// If several tasks are submitting their transactions simultaneously using - /// `submit_signed_extrinsic` method, they may get the same transaction nonce. So one of - /// transactions will be rejected from the pool. This lock is here to prevent situations like - /// that. - submit_signed_extrinsic_lock: Arc>, - /// Genesis block hash. - genesis_hash: HashOf, - /// Shared dynamic data. - data: Arc>, -} - -/// Client data, shared by all `Client` clones. -struct ClientData { - /// Tokio runtime handle. - tokio: Arc, - /// Substrate RPC client. - client: Arc, -} - -/// Already encoded value. -struct PreEncoded(Vec); - -impl Encode for PreEncoded { - fn encode(&self) -> Vec { - self.0.clone() - } -} - -#[async_trait] -impl relay_utils::relay_loop::Client for Client { - type Error = Error; - - async fn reconnect(&mut self) -> Result<()> { - let mut data = self.data.write().await; - let (tokio, client) = Self::build_client(&self.params).await?; - data.tokio = tokio; - data.client = client; - Ok(()) - } -} - -impl Clone for Client { - fn clone(&self) -> Self { - Client { - params: self.params.clone(), - chain_runtime_version: self.chain_runtime_version, - submit_signed_extrinsic_lock: self.submit_signed_extrinsic_lock.clone(), - genesis_hash: self.genesis_hash, - data: self.data.clone(), - } - } -} - -impl std::fmt::Debug for Client { - fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { - fmt.debug_struct("Client").field("genesis_hash", &self.genesis_hash).finish() - } -} - -impl Client { - /// Returns client that is able to call RPCs on Substrate node over websocket connection. - /// - /// This function will keep connecting to given Substrate node until connection is established - /// and is functional. If attempt fail, it will wait for `RECONNECT_DELAY` and retry again. 
- pub async fn new(params: ConnectionParams) -> Self { - let params = Arc::new(params); - loop { - match Self::try_connect(params.clone()).await { - Ok(client) => return client, - Err(error) => log::error!( - target: "bridge", - "Failed to connect to {} node: {:?}. Going to retry in {}s", - C::NAME, - error, - RECONNECT_DELAY.as_secs(), - ), - } - - async_std::task::sleep(RECONNECT_DELAY).await; - } - } - - /// Try to connect to Substrate node over websocket. Returns Substrate RPC client if connection - /// has been established or error otherwise. - pub async fn try_connect(params: Arc) -> Result { - let (tokio, client) = Self::build_client(¶ms).await?; - - let number: C::BlockNumber = Zero::zero(); - let genesis_hash_client = client.clone(); - let genesis_hash = tokio - .spawn(async move { - SubstrateChainClient::::block_hash(&*genesis_hash_client, Some(number)).await - }) - .await??; - - let chain_runtime_version = params.chain_runtime_version; - let mut client = Self { - params, - chain_runtime_version, - submit_signed_extrinsic_lock: Arc::new(Mutex::new(())), - genesis_hash, - data: Arc::new(RwLock::new(ClientData { tokio, client })), - }; - Self::ensure_correct_runtime_version(&mut client, chain_runtime_version).await?; - Ok(client) - } - - // Check runtime version to understand if we need are connected to expected version, or we - // need to wait for upgrade, we need to abort immediately. 
- async fn ensure_correct_runtime_version>( - env: &mut E, - expected: ChainRuntimeVersion, - ) -> Result<()> { - // we are only interested if version mode is bundled or passed using CLI - let expected = match expected { - ChainRuntimeVersion::Auto => return Ok(()), - ChainRuntimeVersion::Custom(expected) => expected, - }; - - // we need to wait if actual version is < than expected, we are OK of versions are the - // same and we need to abort if actual version is > than expected - let actual = SimpleRuntimeVersion::from_runtime_version(&env.runtime_version().await?); - match actual.spec_version.cmp(&expected.spec_version) { - Ordering::Less => - Err(Error::WaitingForRuntimeUpgrade { chain: C::NAME.into(), expected, actual }), - Ordering::Equal => Ok(()), - Ordering::Greater => { - log::error!( - target: "bridge", - "The {} client is configured to use runtime version {expected:?} and actual \ - version is {actual:?}. Aborting", - C::NAME, - ); - env.abort().await; - Err(Error::Custom("Aborted".into())) - }, - } - } - - /// Build client to use in connection. - async fn build_client( - params: &ConnectionParams, - ) -> Result<(Arc, Arc)> { - let tokio = tokio::runtime::Runtime::new()?; - - let uri = match params.uri { - Some(ref uri) => uri.clone(), - None => { - format!( - "{}://{}:{}{}", - if params.secure { "wss" } else { "ws" }, - params.host, - params.port, - match params.path { - Some(ref path) => format!("/{}", path), - None => String::new(), - }, - ) - }, - }; - log::info!(target: "bridge", "Connecting to {} node at {}", C::NAME, uri); - - let client = tokio - .spawn(async move { - RpcClientBuilder::default() - .max_buffer_capacity_per_subscription(MAX_SUBSCRIPTION_CAPACITY) - .build(&uri) - .await - }) - .await??; - - Ok((Arc::new(tokio), Arc::new(client))) - } -} - -impl Client { - /// Return simple runtime version, only include `spec_version` and `transaction_version`. 
- pub async fn simple_runtime_version(&self) -> Result { - Ok(match &self.chain_runtime_version { - ChainRuntimeVersion::Auto => { - let runtime_version = self.runtime_version().await?; - SimpleRuntimeVersion::from_runtime_version(&runtime_version) - }, - ChainRuntimeVersion::Custom(version) => *version, - }) - } - - /// Returns true if client is connected to at least one peer and is in synced state. - pub async fn ensure_synced(&self) -> Result<()> { - self.jsonrpsee_execute(|client| async move { - let health = SubstrateSystemClient::::health(&*client).await?; - let is_synced = !health.is_syncing && (!health.should_have_peers || health.peers > 0); - if is_synced { - Ok(()) - } else { - Err(Error::ClientNotSynced(health)) - } - }) - .await - } - - /// Return hash of the genesis block. - pub fn genesis_hash(&self) -> &C::Hash { - &self.genesis_hash - } - - /// Return hash of the best finalized block. - pub async fn best_finalized_header_hash(&self) -> Result { - self.jsonrpsee_execute(|client| async move { - Ok(SubstrateChainClient::::finalized_head(&*client).await?) - }) - .await - .map_err(|e| Error::FailedToReadBestFinalizedHeaderHash { - chain: C::NAME.into(), - error: e.boxed(), - }) - } - - /// Return number of the best finalized block. - pub async fn best_finalized_header_number(&self) -> Result { - Ok(*self.best_finalized_header().await?.number()) - } - - /// Return header of the best finalized block. - pub async fn best_finalized_header(&self) -> Result { - self.header_by_hash(self.best_finalized_header_hash().await?).await - } - - /// Returns the best Substrate header. - pub async fn best_header(&self) -> Result - where - C::Header: DeserializeOwned, - { - self.jsonrpsee_execute(|client| async move { - Ok(SubstrateChainClient::::header(&*client, None).await?) - }) - .await - .map_err(|e| Error::FailedToReadBestHeader { chain: C::NAME.into(), error: e.boxed() }) - } - - /// Get a Substrate block from its hash. 
- pub async fn get_block(&self, block_hash: Option) -> Result { - self.jsonrpsee_execute(move |client| async move { - Ok(SubstrateChainClient::::block(&*client, block_hash).await?) - }) - .await - } - - /// Get a Substrate header by its hash. - pub async fn header_by_hash(&self, block_hash: C::Hash) -> Result - where - C::Header: DeserializeOwned, - { - self.jsonrpsee_execute(move |client| async move { - Ok(SubstrateChainClient::::header(&*client, Some(block_hash)).await?) - }) - .await - .map_err(|e| Error::FailedToReadHeaderByHash { - chain: C::NAME.into(), - hash: format!("{block_hash}"), - error: e.boxed(), - }) - } - - /// Get a Substrate block hash by its number. - pub async fn block_hash_by_number(&self, number: C::BlockNumber) -> Result { - self.jsonrpsee_execute(move |client| async move { - Ok(SubstrateChainClient::::block_hash(&*client, Some(number)).await?) - }) - .await - } - - /// Get a Substrate header by its number. - pub async fn header_by_number(&self, block_number: C::BlockNumber) -> Result - where - C::Header: DeserializeOwned, - { - let block_hash = Self::block_hash_by_number(self, block_number).await?; - let header_by_hash = Self::header_by_hash(self, block_hash).await?; - Ok(header_by_hash) - } - - /// Return runtime version. - pub async fn runtime_version(&self) -> Result { - self.jsonrpsee_execute(move |client| async move { - Ok(SubstrateStateClient::::runtime_version(&*client).await?) - }) - .await - } - - /// Read value from runtime storage. - pub async fn storage_value( - &self, - storage_key: StorageKey, - block_hash: Option, - ) -> Result> { - self.raw_storage_value(storage_key, block_hash) - .await? - .map(|encoded_value| { - T::decode(&mut &encoded_value.0[..]).map_err(Error::ResponseParseFailed) - }) - .transpose() - } - - /// Read `MapStorage` value from runtime storage. 
- pub async fn storage_map_value( - &self, - pallet_prefix: &str, - key: &T::Key, - block_hash: Option, - ) -> Result> { - let storage_key = T::final_key(pallet_prefix, key); - - self.raw_storage_value(storage_key, block_hash) - .await? - .map(|encoded_value| { - T::Value::decode(&mut &encoded_value.0[..]).map_err(Error::ResponseParseFailed) - }) - .transpose() - } - - /// Read `DoubleMapStorage` value from runtime storage. - pub async fn storage_double_map_value( - &self, - pallet_prefix: &str, - key1: &T::Key1, - key2: &T::Key2, - block_hash: Option, - ) -> Result> { - let storage_key = T::final_key(pallet_prefix, key1, key2); - - self.raw_storage_value(storage_key, block_hash) - .await? - .map(|encoded_value| { - T::Value::decode(&mut &encoded_value.0[..]).map_err(Error::ResponseParseFailed) - }) - .transpose() - } - - /// Read raw value from runtime storage. - pub async fn raw_storage_value( - &self, - storage_key: StorageKey, - block_hash: Option, - ) -> Result> { - let cloned_storage_key = storage_key.clone(); - self.jsonrpsee_execute(move |client| async move { - Ok(SubstrateStateClient::::storage(&*client, storage_key.clone(), block_hash) - .await?) - }) - .await - .map_err(|e| Error::FailedToReadRuntimeStorageValue { - chain: C::NAME.into(), - key: cloned_storage_key, - error: e.boxed(), - }) - } - - /// Get the nonce of the given Substrate account. - /// - /// Note: It's the caller's responsibility to make sure `account` is a valid SS58 address. - pub async fn next_account_index(&self, account: C::AccountId) -> Result { - self.jsonrpsee_execute(move |client| async move { - Ok(SubstrateFrameSystemClient::::account_next_index(&*client, account).await?) - }) - .await - } - - /// Submit unsigned extrinsic for inclusion in a block. - /// - /// Note: The given transaction needs to be SCALE encoded beforehand. - pub async fn submit_unsigned_extrinsic(&self, transaction: Bytes) -> Result { - // one last check that the transaction is valid. 
Most of checks happen in the relay loop and - // it is the "final" check before submission. - let best_header_hash = self.best_header().await?.hash(); - self.validate_transaction(best_header_hash, PreEncoded(transaction.0.clone())) - .await - .map_err(|e| { - log::error!(target: "bridge", "Pre-submit {} transaction validation failed: {:?}", C::NAME, e); - e - })??; - - self.jsonrpsee_execute(move |client| async move { - let tx_hash = SubstrateAuthorClient::::submit_extrinsic(&*client, transaction) - .await - .map_err(|e| { - log::error!(target: "bridge", "Failed to send transaction to {} node: {:?}", C::NAME, e); - e - })?; - log::trace!(target: "bridge", "Sent transaction to {} node: {:?}", C::NAME, tx_hash); - Ok(tx_hash) - }) - .await - } - - async fn build_sign_params(&self, signer: AccountKeyPairOf) -> Result> - where - C: ChainWithTransactions, - { - let runtime_version = self.simple_runtime_version().await?; - Ok(SignParam:: { - spec_version: runtime_version.spec_version, - transaction_version: runtime_version.transaction_version, - genesis_hash: self.genesis_hash, - signer, - }) - } - - /// Submit an extrinsic signed by given account. - /// - /// All calls of this method are synchronized, so there can't be more than one active - /// `submit_signed_extrinsic()` call. This guarantees that no nonces collision may happen - /// if all client instances are clones of the same initial `Client`. - /// - /// Note: The given transaction needs to be SCALE encoded beforehand. 
- pub async fn submit_signed_extrinsic( - &self, - signer: &AccountKeyPairOf, - prepare_extrinsic: impl FnOnce(HeaderIdOf, C::Nonce) -> Result> - + Send - + 'static, - ) -> Result - where - C: ChainWithTransactions, - C::AccountId: From<::Public>, - { - let _guard = self.submit_signed_extrinsic_lock.lock().await; - let transaction_nonce = self.next_account_index(signer.public().into()).await?; - let best_header = self.best_header().await?; - let signing_data = self.build_sign_params(signer.clone()).await?; - - // By using parent of best block here, we are protecing again best-block reorganizations. - // E.g. transaction may have been submitted when the best block was `A[num=100]`. Then it - // has been changed to `B[num=100]`. Hash of `A` has been included into transaction - // signature payload. So when signature will be checked, the check will fail and transaction - // will be dropped from the pool. - let best_header_id = best_header.parent_id().unwrap_or_else(|| best_header.id()); - - let extrinsic = prepare_extrinsic(best_header_id, transaction_nonce)?; - let signed_extrinsic = C::sign_transaction(signing_data, extrinsic)?.encode(); - - // one last check that the transaction is valid. Most of checks happen in the relay loop and - // it is the "final" check before submission. 
- self.validate_transaction(best_header_id.1, PreEncoded(signed_extrinsic.clone())) - .await - .map_err(|e| { - log::error!(target: "bridge", "Pre-submit {} transaction validation failed: {:?}", C::NAME, e); - e - })??; - - self.jsonrpsee_execute(move |client| async move { - let tx_hash = - SubstrateAuthorClient::::submit_extrinsic(&*client, Bytes(signed_extrinsic)) - .await - .map_err(|e| { - log::error!(target: "bridge", "Failed to send transaction to {} node: {:?}", C::NAME, e); - e - })?; - log::trace!(target: "bridge", "Sent transaction to {} node: {:?}", C::NAME, tx_hash); - Ok(tx_hash) - }) - .await - } - - /// Does exactly the same as `submit_signed_extrinsic`, but keeps watching for extrinsic status - /// after submission. - pub async fn submit_and_watch_signed_extrinsic( - &self, - signer: &AccountKeyPairOf, - prepare_extrinsic: impl FnOnce(HeaderIdOf, C::Nonce) -> Result> - + Send - + 'static, - ) -> Result> - where - C: ChainWithTransactions, - C::AccountId: From<::Public>, - { - let self_clone = self.clone(); - let signing_data = self.build_sign_params(signer.clone()).await?; - let _guard = self.submit_signed_extrinsic_lock.lock().await; - let transaction_nonce = self.next_account_index(signer.public().into()).await?; - let best_header = self.best_header().await?; - let best_header_id = best_header.id(); - - let extrinsic = prepare_extrinsic(best_header_id, transaction_nonce)?; - let stall_timeout = transaction_stall_timeout( - extrinsic.era.mortality_period(), - C::AVERAGE_BLOCK_INTERVAL, - STALL_TIMEOUT, - ); - let signed_extrinsic = C::sign_transaction(signing_data, extrinsic)?.encode(); - - // one last check that the transaction is valid. Most of checks happen in the relay loop and - // it is the "final" check before submission. 
- self.validate_transaction(best_header_id.1, PreEncoded(signed_extrinsic.clone())) - .await - .map_err(|e| { - log::error!(target: "bridge", "Pre-submit {} transaction validation failed: {:?}", C::NAME, e); - e - })??; - - let (sender, receiver) = futures::channel::mpsc::channel(MAX_SUBSCRIPTION_CAPACITY); - let (tracker, subscription) = self - .jsonrpsee_execute(move |client| async move { - let tx_hash = C::Hasher::hash(&signed_extrinsic); - let subscription = SubstrateAuthorClient::::submit_and_watch_extrinsic( - &*client, - Bytes(signed_extrinsic), - ) - .await - .map_err(|e| { - log::error!(target: "bridge", "Failed to send transaction to {} node: {:?}", C::NAME, e); - e - })?; - log::trace!(target: "bridge", "Sent transaction to {} node: {:?}", C::NAME, tx_hash); - let tracker = TransactionTracker::new( - self_clone, - stall_timeout, - tx_hash, - Subscription(Mutex::new(receiver)), - ); - Ok((tracker, subscription)) - }) - .await?; - self.data.read().await.tokio.spawn(Subscription::background_worker( - C::NAME.into(), - "extrinsic".into(), - subscription, - sender, - )); - Ok(tracker) - } - - /// Returns pending extrinsics from transaction pool. - pub async fn pending_extrinsics(&self) -> Result> { - self.jsonrpsee_execute(move |client| async move { - Ok(SubstrateAuthorClient::::pending_extrinsics(&*client).await?) - }) - .await - } - - /// Validate transaction at given block state. 
- pub async fn validate_transaction( - &self, - at_block: C::Hash, - transaction: SignedTransaction, - ) -> Result { - self.jsonrpsee_execute(move |client| async move { - let call = SUB_API_TXPOOL_VALIDATE_TRANSACTION.to_string(); - let data = Bytes((TransactionSource::External, transaction, at_block).encode()); - - let encoded_response = - SubstrateStateClient::::call(&*client, call, data, Some(at_block)).await?; - let validity = TransactionValidity::decode(&mut &encoded_response.0[..]) - .map_err(Error::ResponseParseFailed)?; - - Ok(validity) - }) - .await - } - - /// Returns weight of the given transaction. - pub async fn extimate_extrinsic_weight( - &self, - transaction: SignedTransaction, - ) -> Result { - self.jsonrpsee_execute(move |client| async move { - let transaction_len = transaction.encoded_size() as u32; - - let call = SUB_API_TX_PAYMENT_QUERY_INFO.to_string(); - let data = Bytes((transaction, transaction_len).encode()); - - let encoded_response = - SubstrateStateClient::::call(&*client, call, data, None).await?; - let dispatch_info = - RuntimeDispatchInfo::::decode(&mut &encoded_response.0[..]) - .map_err(Error::ResponseParseFailed)?; - - Ok(dispatch_info.weight) - }) - .await - } - - /// Get the GRANDPA authority set at given block. - pub async fn grandpa_authorities_set( - &self, - block: C::Hash, - ) -> Result { - self.jsonrpsee_execute(move |client| async move { - let call = SUB_API_GRANDPA_AUTHORITIES.to_string(); - let data = Bytes(Vec::new()); - - let encoded_response = - SubstrateStateClient::::call(&*client, call, data, Some(block)).await?; - let authority_list = encoded_response.0; - - Ok(authority_list) - }) - .await - } - - /// Execute runtime call at given block, provided the input and output types. - /// It also performs the input encode and output decode. 
- pub async fn typed_state_call( - &self, - method_name: String, - input: Input, - at_block: Option, - ) -> Result { - let encoded_output = self - .state_call(method_name.clone(), Bytes(input.encode()), at_block) - .await - .map_err(|e| Error::ErrorExecutingRuntimeCall { - chain: C::NAME.into(), - method: method_name, - error: e.boxed(), - })?; - Output::decode(&mut &encoded_output.0[..]).map_err(Error::ResponseParseFailed) - } - - /// Execute runtime call at given block. - pub async fn state_call( - &self, - method: String, - data: Bytes, - at_block: Option, - ) -> Result { - self.jsonrpsee_execute(move |client| async move { - SubstrateStateClient::::call(&*client, method, data, at_block) - .await - .map_err(Into::into) - }) - .await - } - - /// Returns storage proof of given storage keys. - pub async fn prove_storage( - &self, - keys: Vec, - at_block: C::Hash, - ) -> Result { - self.jsonrpsee_execute(move |client| async move { - SubstrateStateClient::::prove_storage(&*client, keys, Some(at_block)) - .await - .map(|proof| { - StorageProof::new(proof.proof.into_iter().map(|b| b.0).collect::>()) - }) - .map_err(Into::into) - }) - .await - } - - /// Return `tokenDecimals` property from the set of chain properties. - pub async fn token_decimals(&self) -> Result> { - self.jsonrpsee_execute(move |client| async move { - let system_properties = SubstrateSystemClient::::properties(&*client).await?; - Ok(system_properties.get("tokenDecimals").and_then(|v| v.as_u64())) - }) - .await - } - - /// Return new finality justifications stream. - pub async fn subscribe_finality_justifications>( - &self, - ) -> Result> { - let subscription = self - .jsonrpsee_execute(move |client| async move { - Ok(FC::subscribe_justifications(&client).await?) 
- }) - .await?; - let (sender, receiver) = futures::channel::mpsc::channel(MAX_SUBSCRIPTION_CAPACITY); - self.data.read().await.tokio.spawn(Subscription::background_worker( - C::NAME.into(), - "justification".into(), - subscription, - sender, - )); - Ok(Subscription(Mutex::new(receiver))) - } - - /// Generates a proof of key ownership for the given authority in the given set. - pub async fn generate_grandpa_key_ownership_proof( - &self, - at: HashOf, - set_id: sp_consensus_grandpa::SetId, - authority_id: sp_consensus_grandpa::AuthorityId, - ) -> Result> - where - C: ChainWithGrandpa, - { - self.typed_state_call( - SUB_API_GRANDPA_GENERATE_KEY_OWNERSHIP_PROOF.into(), - (set_id, authority_id), - Some(at), - ) - .await - } - - /// Execute jsonrpsee future in tokio context. - async fn jsonrpsee_execute(&self, make_jsonrpsee_future: MF) -> Result - where - MF: FnOnce(Arc) -> F + Send + 'static, - F: Future> + Send + 'static, - T: Send + 'static, - { - let data = self.data.read().await; - let client = data.client.clone(); - data.tokio.spawn(make_jsonrpsee_future(client)).await? - } - - /// Returns `true` if version guard can be started. - /// - /// There's no reason to run version guard when version mode is set to `Auto`. It can - /// lead to relay shutdown when chain is upgraded, even though we have explicitly - /// said that we don't want to shutdown. - pub fn can_start_version_guard(&self) -> bool { - !matches!(self.chain_runtime_version, ChainRuntimeVersion::Auto) - } -} - -impl Subscription { - /// Consumes subscription and returns future statuses stream. - pub fn into_stream(self) -> impl futures::Stream { - futures::stream::unfold(self, |this| async { - let item = this.0.lock().await.next().await.unwrap_or(None); - item.map(|i| (i, this)) - }) - } - - /// Return next item from the subscription. 
- pub async fn next(&self) -> Result> { - let mut receiver = self.0.lock().await; - let item = receiver.next().await; - Ok(item.unwrap_or(None)) - } - - /// Background worker that is executed in tokio context as `jsonrpsee` requires. - async fn background_worker( - chain_name: String, - item_type: String, - mut subscription: jsonrpsee::core::client::Subscription, - mut sender: futures::channel::mpsc::Sender>, - ) { - loop { - match subscription.next().await { - Some(Ok(item)) => - if sender.send(Some(item)).await.is_err() { - break - }, - Some(Err(e)) => { - log::trace!( - target: "bridge", - "{} {} subscription stream has returned '{:?}'. Stream needs to be restarted.", - chain_name, - item_type, - e, - ); - let _ = sender.send(None).await; - break - }, - None => { - log::trace!( - target: "bridge", - "{} {} subscription stream has returned None. Stream needs to be restarted.", - chain_name, - item_type, - ); - let _ = sender.send(None).await; - break - }, - } - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{guard::tests::TestEnvironment, test_chain::TestChain}; - use futures::{channel::mpsc::unbounded, FutureExt}; - - async fn run_ensure_correct_runtime_version( - expected: ChainRuntimeVersion, - actual: RuntimeVersion, - ) -> Result<()> { - let ( - (mut runtime_version_tx, runtime_version_rx), - (slept_tx, _slept_rx), - (aborted_tx, mut aborted_rx), - ) = (unbounded(), unbounded(), unbounded()); - runtime_version_tx.send(actual).await.unwrap(); - let mut env = TestEnvironment { runtime_version_rx, slept_tx, aborted_tx }; - - let ensure_correct_runtime_version = - Client::::ensure_correct_runtime_version(&mut env, expected).boxed(); - let aborted = aborted_rx.next().map(|_| Err(Error::Custom("".into()))).boxed(); - futures::pin_mut!(ensure_correct_runtime_version, aborted); - futures::future::select(ensure_correct_runtime_version, aborted) - .await - .into_inner() - .0 - } - - #[async_std::test] - async fn 
ensure_correct_runtime_version_works() { - // when we are configured to use auto version - assert!(matches!( - run_ensure_correct_runtime_version( - ChainRuntimeVersion::Auto, - RuntimeVersion { - spec_version: 100, - transaction_version: 100, - ..Default::default() - }, - ) - .await, - Ok(()), - )); - // when actual == expected - assert!(matches!( - run_ensure_correct_runtime_version( - ChainRuntimeVersion::Custom(SimpleRuntimeVersion { - spec_version: 100, - transaction_version: 100 - }), - RuntimeVersion { - spec_version: 100, - transaction_version: 100, - ..Default::default() - }, - ) - .await, - Ok(()), - )); - // when actual spec version < expected spec version - assert!(matches!( - run_ensure_correct_runtime_version( - ChainRuntimeVersion::Custom(SimpleRuntimeVersion { - spec_version: 100, - transaction_version: 100 - }), - RuntimeVersion { spec_version: 99, transaction_version: 100, ..Default::default() }, - ) - .await, - Err(Error::WaitingForRuntimeUpgrade { - expected: SimpleRuntimeVersion { spec_version: 100, transaction_version: 100 }, - actual: SimpleRuntimeVersion { spec_version: 99, transaction_version: 100 }, - .. - }), - )); - // when actual spec version > expected spec version - assert!(matches!( - run_ensure_correct_runtime_version( - ChainRuntimeVersion::Custom(SimpleRuntimeVersion { - spec_version: 100, - transaction_version: 100 - }), - RuntimeVersion { - spec_version: 101, - transaction_version: 100, - ..Default::default() - }, - ) - .await, - Err(Error::Custom(_)), - )); - } -} diff --git a/relays/client-substrate/src/error.rs b/relays/client-substrate/src/error.rs deleted file mode 100644 index 0b4466818..000000000 --- a/relays/client-substrate/src/error.rs +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Substrate node RPC errors. - -use crate::SimpleRuntimeVersion; -use bp_polkadot_core::parachains::ParaId; -use jsonrpsee::core::ClientError as RpcError; -use relay_utils::MaybeConnectionError; -use sc_rpc_api::system::Health; -use sp_core::storage::StorageKey; -use sp_runtime::transaction_validity::TransactionValidityError; -use thiserror::Error; - -/// Result type used by Substrate client. -pub type Result = std::result::Result; - -/// Errors that can occur only when interacting with -/// a Substrate node through RPC. -#[derive(Error, Debug)] -pub enum Error { - /// IO error. - #[error("IO error: {0}")] - Io(#[from] std::io::Error), - /// An error that can occur when making a request to - /// an JSON-RPC server. - #[error("RPC error: {0}")] - RpcError(#[from] RpcError), - /// The response from the server could not be SCALE decoded. - #[error("Response parse failed: {0}")] - ResponseParseFailed(#[from] codec::Error), - /// Account does not exist on the chain. - #[error("Account does not exist on the chain.")] - AccountDoesNotExist, - /// Runtime storage is missing some mandatory value. - #[error("Mandatory storage value is missing from the runtime storage.")] - MissingMandatoryStorageValue, - /// Required parachain head is not present at the relay chain. 
- #[error("Parachain {0:?} head {1} is missing from the relay chain storage.")] - MissingRequiredParachainHead(ParaId, u64), - /// Failed to find finality proof for the given header. - #[error("Failed to find finality proof for header {0}.")] - FinalityProofNotFound(u64), - /// The client we're connected to is not synced, so we can't rely on its state. - #[error("Substrate client is not synced {0}.")] - ClientNotSynced(Health), - /// Failed to read best finalized header hash from given chain. - #[error("Failed to read best finalized header hash of {chain}: {error:?}.")] - FailedToReadBestFinalizedHeaderHash { - /// Name of the chain where the error has happened. - chain: String, - /// Underlying error. - error: Box, - }, - /// Failed to read best finalized header from given chain. - #[error("Failed to read best header of {chain}: {error:?}.")] - FailedToReadBestHeader { - /// Name of the chain where the error has happened. - chain: String, - /// Underlying error. - error: Box, - }, - /// Failed to read header by hash from given chain. - #[error("Failed to read header {hash} of {chain}: {error:?}.")] - FailedToReadHeaderByHash { - /// Name of the chain where the error has happened. - chain: String, - /// Hash of the header we've tried to read. - hash: String, - /// Underlying error. - error: Box, - }, - /// Failed to execute runtime call at given chain. - #[error("Failed to execute runtime call {method} at {chain}: {error:?}.")] - ErrorExecutingRuntimeCall { - /// Name of the chain where the error has happened. - chain: String, - /// Runtime method name. - method: String, - /// Underlying error. - error: Box, - }, - /// Failed to read sotrage value at given chain. - #[error("Failed to read storage value {key:?} at {chain}: {error:?}.")] - FailedToReadRuntimeStorageValue { - /// Name of the chain where the error has happened. - chain: String, - /// Runtime storage key - key: StorageKey, - /// Underlying error. 
- error: Box, - }, - /// The bridge pallet is halted and all transactions will be rejected. - #[error("Bridge pallet is halted.")] - BridgePalletIsHalted, - /// The bridge pallet is not yet initialized and all transactions will be rejected. - #[error("Bridge pallet is not initialized.")] - BridgePalletIsNotInitialized, - /// There's no best head of the parachain at the `pallet-bridge-parachains` at the target side. - #[error("No head of the ParaId({0}) at the bridge parachains pallet at {1}.")] - NoParachainHeadAtTarget(u32, String), - /// An error has happened when we have tried to parse storage proof. - #[error("Error when parsing storage proof: {0:?}.")] - StorageProofError(bp_runtime::StorageProofError), - /// The Substrate transaction is invalid. - #[error("Substrate transaction is invalid: {0:?}")] - TransactionInvalid(#[from] TransactionValidityError), - /// The client is configured to use newer runtime version than the connected chain uses. - /// The client will keep waiting until chain is upgraded to given version. - #[error("Waiting for {chain} runtime upgrade: expected {expected:?} actual {actual:?}")] - WaitingForRuntimeUpgrade { - /// Name of the chain where the error has happened. - chain: String, - /// Expected runtime version. - expected: SimpleRuntimeVersion, - /// Actual runtime version. - actual: SimpleRuntimeVersion, - }, - /// Custom logic error. - #[error("{0}")] - Custom(String), -} - -impl From for Error { - fn from(error: tokio::task::JoinError) -> Self { - Error::Custom(format!("Failed to wait tokio task: {error}")) - } -} - -impl Error { - /// Box the error. - pub fn boxed(self) -> Box { - Box::new(self) - } -} - -impl MaybeConnectionError for Error { - fn is_connection_error(&self) -> bool { - match *self { - Error::RpcError(RpcError::Transport(_)) | - Error::RpcError(RpcError::RestartNeeded(_)) | - Error::ClientNotSynced(_) => true, - Error::FailedToReadBestFinalizedHeaderHash { ref error, .. 
} => - error.is_connection_error(), - Error::FailedToReadBestHeader { ref error, .. } => error.is_connection_error(), - Error::FailedToReadHeaderByHash { ref error, .. } => error.is_connection_error(), - Error::ErrorExecutingRuntimeCall { ref error, .. } => error.is_connection_error(), - Error::FailedToReadRuntimeStorageValue { ref error, .. } => error.is_connection_error(), - _ => false, - } - } -} diff --git a/relays/client-substrate/src/guard.rs b/relays/client-substrate/src/guard.rs deleted file mode 100644 index 47454892c..000000000 --- a/relays/client-substrate/src/guard.rs +++ /dev/null @@ -1,196 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Pallet provides a set of guard functions that are running in background threads -//! and are aborting process if some condition fails. - -use crate::{error::Error, Chain, Client}; - -use async_trait::async_trait; -use sp_version::RuntimeVersion; -use std::{ - fmt::Display, - time::{Duration, Instant}, -}; - -/// Guards environment. -#[async_trait] -pub trait Environment: Send + Sync + 'static { - /// Error type. - type Error: Display + Send + Sync + 'static; - - /// Return current runtime version. - async fn runtime_version(&mut self) -> Result; - - /// Return current time. 
- fn now(&self) -> Instant { - Instant::now() - } - - /// Sleep given amount of time. - async fn sleep(&mut self, duration: Duration) { - async_std::task::sleep(duration).await - } - - /// Abort current process. Called when guard condition check fails. - async fn abort(&mut self) { - std::process::abort(); - } -} - -/// Abort when runtime spec version is different from specified. -pub fn abort_on_spec_version_change( - mut env: impl Environment, - expected_spec_version: u32, -) { - async_std::task::spawn(async move { - log::info!( - target: "bridge-guard", - "Starting spec_version guard for {}. Expected spec_version: {}", - C::NAME, - expected_spec_version, - ); - - loop { - let actual_spec_version = env.runtime_version().await; - match actual_spec_version { - Ok(version) if version.spec_version == expected_spec_version => (), - Ok(version) => { - log::error!( - target: "bridge-guard", - "{} runtime spec version has changed from {} to {}. Aborting relay", - C::NAME, - expected_spec_version, - version.spec_version, - ); - - env.abort().await; - }, - Err(error) => log::warn!( - target: "bridge-guard", - "Failed to read {} runtime version: {}. Relay may need to be stopped manually", - C::NAME, - error, - ), - } - - env.sleep(conditions_check_delay::()).await; - } - }); -} - -/// Delay between conditions check. 
-fn conditions_check_delay() -> Duration { - C::AVERAGE_BLOCK_INTERVAL * (10 + rand::random::() % 10) -} - -#[async_trait] -impl Environment for Client { - type Error = Error; - - async fn runtime_version(&mut self) -> Result { - Client::::runtime_version(self).await - } -} - -#[cfg(test)] -pub(crate) mod tests { - use super::*; - use crate::test_chain::TestChain; - use futures::{ - channel::mpsc::{unbounded, UnboundedReceiver, UnboundedSender}, - future::FutureExt, - stream::StreamExt, - SinkExt, - }; - - pub struct TestEnvironment { - pub runtime_version_rx: UnboundedReceiver, - pub slept_tx: UnboundedSender<()>, - pub aborted_tx: UnboundedSender<()>, - } - - #[async_trait] - impl Environment for TestEnvironment { - type Error = Error; - - async fn runtime_version(&mut self) -> Result { - Ok(self.runtime_version_rx.next().await.unwrap_or_default()) - } - - async fn sleep(&mut self, _duration: Duration) { - let _ = self.slept_tx.send(()).await; - } - - async fn abort(&mut self) { - let _ = self.aborted_tx.send(()).await; - // simulate process abort :) - async_std::task::sleep(Duration::from_secs(60)).await; - } - } - - #[test] - fn aborts_when_spec_version_is_changed() { - async_std::task::block_on(async { - let ( - (mut runtime_version_tx, runtime_version_rx), - (slept_tx, mut slept_rx), - (aborted_tx, mut aborted_rx), - ) = (unbounded(), unbounded(), unbounded()); - abort_on_spec_version_change( - TestEnvironment { runtime_version_rx, slept_tx, aborted_tx }, - 0, - ); - - // client responds with wrong version - runtime_version_tx - .send(RuntimeVersion { spec_version: 42, ..Default::default() }) - .await - .unwrap(); - - // then the `abort` function is called - aborted_rx.next().await; - // and we do not reach the `sleep` function call - assert!(slept_rx.next().now_or_never().is_none()); - }); - } - - #[test] - fn does_not_aborts_when_spec_version_is_unchanged() { - async_std::task::block_on(async { - let ( - (mut runtime_version_tx, runtime_version_rx), - 
(slept_tx, mut slept_rx), - (aborted_tx, mut aborted_rx), - ) = (unbounded(), unbounded(), unbounded()); - abort_on_spec_version_change( - TestEnvironment { runtime_version_rx, slept_tx, aborted_tx }, - 42, - ); - - // client responds with the same version - runtime_version_tx - .send(RuntimeVersion { spec_version: 42, ..Default::default() }) - .await - .unwrap(); - - // then the `sleep` function is called - slept_rx.next().await; - // and the `abort` function is not called - assert!(aborted_rx.next().now_or_never().is_none()); - }); - } -} diff --git a/relays/client-substrate/src/lib.rs b/relays/client-substrate/src/lib.rs deleted file mode 100644 index d5b8d4dcc..000000000 --- a/relays/client-substrate/src/lib.rs +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Tools to interact with Substrate node using RPC methods. 
- -#![warn(missing_docs)] - -mod chain; -mod client; -mod error; -mod rpc; -mod sync_header; -mod transaction_tracker; - -pub mod calls; -pub mod guard; -pub mod metrics; -pub mod test_chain; - -use std::time::Duration; - -pub use crate::{ - chain::{ - AccountKeyPairOf, BlockWithJustification, CallOf, Chain, ChainWithBalances, - ChainWithGrandpa, ChainWithMessages, ChainWithRuntimeVersion, ChainWithTransactions, - ChainWithUtilityPallet, FullRuntimeUtilityPallet, MockedRuntimeUtilityPallet, Parachain, - RelayChain, SignParam, TransactionStatusOf, UnsignedTransaction, UtilityPallet, - }, - client::{ - is_ancient_block, ChainRuntimeVersion, Client, OpaqueGrandpaAuthoritiesSet, - SimpleRuntimeVersion, Subscription, ANCIENT_BLOCK_THRESHOLD, - }, - error::{Error, Result}, - rpc::{SubstrateBeefyFinalityClient, SubstrateFinalityClient, SubstrateGrandpaFinalityClient}, - sync_header::SyncHeader, - transaction_tracker::TransactionTracker, -}; -pub use bp_runtime::{ - AccountIdOf, AccountPublicOf, BalanceOf, BlockNumberOf, Chain as ChainBase, HashOf, HeaderIdOf, - HeaderOf, NonceOf, Parachain as ParachainBase, SignatureOf, TransactionEra, TransactionEraOf, - UnderlyingChainProvider, -}; - -/// Substrate-over-websocket connection params. -#[derive(Debug, Clone)] -pub struct ConnectionParams { - /// Websocket endpoint URL. Overrides all other URL components (`host`, `port`, `path` and - /// `secure`). - pub uri: Option, - /// Websocket server host name. - pub host: String, - /// Websocket server TCP port. - pub port: u16, - /// Websocket endpoint path at server. - pub path: Option, - /// Use secure websocket connection. 
- pub secure: bool, - /// Defined chain runtime version - pub chain_runtime_version: ChainRuntimeVersion, -} - -impl Default for ConnectionParams { - fn default() -> Self { - ConnectionParams { - uri: None, - host: "localhost".into(), - port: 9944, - path: None, - secure: false, - chain_runtime_version: ChainRuntimeVersion::Auto, - } - } -} - -/// Returns stall timeout for relay loop. -/// -/// Relay considers himself stalled if he has submitted transaction to the node, but it has not -/// been mined for this period. -pub fn transaction_stall_timeout( - mortality_period: Option, - average_block_interval: Duration, - default_stall_timeout: Duration, -) -> Duration { - // 1 extra block for transaction to reach the pool && 1 for relayer to awake after it is mined - mortality_period - .map(|mortality_period| average_block_interval.saturating_mul(mortality_period + 1 + 1)) - .unwrap_or(default_stall_timeout) -} diff --git a/relays/client-substrate/src/metrics/float_storage_value.rs b/relays/client-substrate/src/metrics/float_storage_value.rs deleted file mode 100644 index 7bb92693b..000000000 --- a/relays/client-substrate/src/metrics/float_storage_value.rs +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . 
- -use crate::{chain::Chain, client::Client, Error as SubstrateError}; - -use async_std::sync::{Arc, RwLock}; -use async_trait::async_trait; -use codec::Decode; -use num_traits::One; -use relay_utils::metrics::{ - metric_name, register, F64SharedRef, Gauge, Metric, PrometheusError, Registry, - StandaloneMetric, F64, -}; -use sp_core::storage::{StorageData, StorageKey}; -use sp_runtime::{traits::UniqueSaturatedInto, FixedPointNumber, FixedU128}; -use std::{marker::PhantomData, time::Duration}; - -/// Storage value update interval (in blocks). -const UPDATE_INTERVAL_IN_BLOCKS: u32 = 5; - -/// Fied-point storage value and the way it is decoded from the raw storage value. -pub trait FloatStorageValue: 'static + Clone + Send + Sync { - /// Type of the value. - type Value: FixedPointNumber; - /// Try to decode value from the raw storage value. - fn decode( - &self, - maybe_raw_value: Option, - ) -> Result, SubstrateError>; -} - -/// Implementation of `FloatStorageValue` that expects encoded `FixedU128` value and returns `1` if -/// value is missing from the storage. -#[derive(Clone, Debug, Default)] -pub struct FixedU128OrOne; - -impl FloatStorageValue for FixedU128OrOne { - type Value = FixedU128; - - fn decode( - &self, - maybe_raw_value: Option, - ) -> Result, SubstrateError> { - maybe_raw_value - .map(|raw_value| { - FixedU128::decode(&mut &raw_value.0[..]) - .map_err(SubstrateError::ResponseParseFailed) - .map(Some) - }) - .unwrap_or_else(|| Ok(Some(FixedU128::one()))) - } -} - -/// Metric that represents fixed-point runtime storage value as float gauge. -#[derive(Clone, Debug)] -pub struct FloatStorageValueMetric { - value_converter: V, - client: Client, - storage_key: StorageKey, - metric: Gauge, - shared_value_ref: F64SharedRef, - _phantom: PhantomData, -} - -impl FloatStorageValueMetric { - /// Create new metric. 
- pub fn new( - value_converter: V, - client: Client, - storage_key: StorageKey, - name: String, - help: String, - ) -> Result { - let shared_value_ref = Arc::new(RwLock::new(None)); - Ok(FloatStorageValueMetric { - value_converter, - client, - storage_key, - metric: Gauge::new(metric_name(None, &name), help)?, - shared_value_ref, - _phantom: Default::default(), - }) - } - - /// Get shared reference to metric value. - pub fn shared_value_ref(&self) -> F64SharedRef { - self.shared_value_ref.clone() - } -} - -impl Metric for FloatStorageValueMetric { - fn register(&self, registry: &Registry) -> Result<(), PrometheusError> { - register(self.metric.clone(), registry).map(drop) - } -} - -#[async_trait] -impl StandaloneMetric for FloatStorageValueMetric { - fn update_interval(&self) -> Duration { - C::AVERAGE_BLOCK_INTERVAL * UPDATE_INTERVAL_IN_BLOCKS - } - - async fn update(&self) { - let value = self - .client - .raw_storage_value(self.storage_key.clone(), None) - .await - .and_then(|maybe_storage_value| { - self.value_converter.decode(maybe_storage_value).map(|maybe_fixed_point_value| { - maybe_fixed_point_value.map(|fixed_point_value| { - fixed_point_value.into_inner().unique_saturated_into() as f64 / - V::Value::DIV.unique_saturated_into() as f64 - }) - }) - }) - .map_err(|e| e.to_string()); - relay_utils::metrics::set_gauge_value(&self.metric, value.clone()); - *self.shared_value_ref.write().await = value.ok().and_then(|x| x); - } -} diff --git a/relays/client-substrate/src/metrics/mod.rs b/relays/client-substrate/src/metrics/mod.rs deleted file mode 100644 index fe200e2d3..000000000 --- a/relays/client-substrate/src/metrics/mod.rs +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2019-2020 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Contains several Substrate-specific metrics that may be exposed by relay. - -pub use float_storage_value::{FixedU128OrOne, FloatStorageValue, FloatStorageValueMetric}; - -mod float_storage_value; diff --git a/relays/client-substrate/src/rpc.rs b/relays/client-substrate/src/rpc.rs deleted file mode 100644 index 60c29cdeb..000000000 --- a/relays/client-substrate/src/rpc.rs +++ /dev/null @@ -1,176 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! The most generic Substrate node RPC interface. 
- -use async_trait::async_trait; - -use crate::{Chain, ChainWithGrandpa, TransactionStatusOf}; - -use jsonrpsee::{ - core::{client::Subscription, ClientError}, - proc_macros::rpc, - ws_client::WsClient, -}; -use pallet_transaction_payment_rpc_runtime_api::FeeDetails; -use sc_rpc_api::{state::ReadProof, system::Health}; -use sp_core::{ - storage::{StorageData, StorageKey}, - Bytes, -}; -use sp_rpc::number::NumberOrHex; -use sp_version::RuntimeVersion; - -/// RPC methods of Substrate `system` namespace, that we are using. -#[rpc(client, client_bounds(C: Chain), namespace = "system")] -pub(crate) trait SubstrateSystem { - /// Return node health. - #[method(name = "health")] - async fn health(&self) -> RpcResult; - /// Return system properties. - #[method(name = "properties")] - async fn properties(&self) -> RpcResult; -} - -/// RPC methods of Substrate `chain` namespace, that we are using. -#[rpc(client, client_bounds(C: Chain), namespace = "chain")] -pub(crate) trait SubstrateChain { - /// Get block hash by its number. - #[method(name = "getBlockHash")] - async fn block_hash(&self, block_number: Option) -> RpcResult; - /// Return block header by its hash. - #[method(name = "getHeader")] - async fn header(&self, block_hash: Option) -> RpcResult; - /// Return best finalized block hash. - #[method(name = "getFinalizedHead")] - async fn finalized_head(&self) -> RpcResult; - /// Return signed block (with justifications) by its hash. - #[method(name = "getBlock")] - async fn block(&self, block_hash: Option) -> RpcResult; -} - -/// RPC methods of Substrate `author` namespace, that we are using. -#[rpc(client, client_bounds(C: Chain), namespace = "author")] -pub(crate) trait SubstrateAuthor { - /// Submit extrinsic to the transaction pool. - #[method(name = "submitExtrinsic")] - async fn submit_extrinsic(&self, extrinsic: Bytes) -> RpcResult; - /// Return vector of pending extrinsics from the transaction pool. 
- #[method(name = "pendingExtrinsics")] - async fn pending_extrinsics(&self) -> RpcResult>; - /// Submit and watch for extrinsic state. - #[subscription(name = "submitAndWatchExtrinsic", unsubscribe = "unwatchExtrinsic", item = TransactionStatusOf)] - async fn submit_and_watch_extrinsic(&self, extrinsic: Bytes); -} - -/// RPC methods of Substrate `state` namespace, that we are using. -#[rpc(client, client_bounds(C: Chain), namespace = "state")] -pub(crate) trait SubstrateState { - /// Get current runtime version. - #[method(name = "getRuntimeVersion")] - async fn runtime_version(&self) -> RpcResult; - /// Call given runtime method. - #[method(name = "call")] - async fn call( - &self, - method: String, - data: Bytes, - at_block: Option, - ) -> RpcResult; - /// Get value of the runtime storage. - #[method(name = "getStorage")] - async fn storage( - &self, - key: StorageKey, - at_block: Option, - ) -> RpcResult>; - /// Get proof of the runtime storage value. - #[method(name = "getReadProof")] - async fn prove_storage( - &self, - keys: Vec, - hash: Option, - ) -> RpcResult>; -} - -/// RPC methods that we are using for a certain finality gadget. -#[async_trait] -pub trait SubstrateFinalityClient { - /// Subscribe to finality justifications. - async fn subscribe_justifications( - client: &WsClient, - ) -> Result, ClientError>; -} - -/// RPC methods of Substrate `grandpa` namespace, that we are using. -#[rpc(client, client_bounds(C: ChainWithGrandpa), namespace = "grandpa")] -pub(crate) trait SubstrateGrandpa { - /// Subscribe to GRANDPA justifications. - #[subscription(name = "subscribeJustifications", unsubscribe = "unsubscribeJustifications", item = Bytes)] - async fn subscribe_justifications(&self); -} - -/// RPC finality methods of Substrate `grandpa` namespace, that we are using. 
-pub struct SubstrateGrandpaFinalityClient; -#[async_trait] -impl SubstrateFinalityClient for SubstrateGrandpaFinalityClient { - async fn subscribe_justifications( - client: &WsClient, - ) -> Result, ClientError> { - SubstrateGrandpaClient::::subscribe_justifications(client).await - } -} - -// TODO: Use `ChainWithBeefy` instead of `Chain` after #1606 is merged -/// RPC methods of Substrate `beefy` namespace, that we are using. -#[rpc(client, client_bounds(C: Chain), namespace = "beefy")] -pub(crate) trait SubstrateBeefy { - /// Subscribe to BEEFY justifications. - #[subscription(name = "subscribeJustifications", unsubscribe = "unsubscribeJustifications", item = Bytes)] - async fn subscribe_justifications(&self); -} - -/// RPC finality methods of Substrate `beefy` namespace, that we are using. -pub struct SubstrateBeefyFinalityClient; -// TODO: Use `ChainWithBeefy` instead of `Chain` after #1606 is merged -#[async_trait] -impl SubstrateFinalityClient for SubstrateBeefyFinalityClient { - async fn subscribe_justifications( - client: &WsClient, - ) -> Result, ClientError> { - SubstrateBeefyClient::::subscribe_justifications(client).await - } -} - -/// RPC methods of Substrate `system` frame pallet, that we are using. -#[rpc(client, client_bounds(C: Chain), namespace = "system")] -pub(crate) trait SubstrateFrameSystem { - /// Return index of next account transaction. - #[method(name = "accountNextIndex")] - async fn account_next_index(&self, account_id: C::AccountId) -> RpcResult; -} - -/// RPC methods of Substrate `pallet_transaction_payment` frame pallet, that we are using. -#[rpc(client, client_bounds(C: Chain), namespace = "payment")] -pub(crate) trait SubstrateTransactionPayment { - /// Query transaction fee details. 
- #[method(name = "queryFeeDetails")] - async fn fee_details( - &self, - extrinsic: Bytes, - at_block: Option, - ) -> RpcResult>; -} diff --git a/relays/client-substrate/src/sync_header.rs b/relays/client-substrate/src/sync_header.rs deleted file mode 100644 index fdfd1f22c..000000000 --- a/relays/client-substrate/src/sync_header.rs +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -use bp_header_chain::ConsensusLogReader; -use finality_relay::SourceHeader as FinalitySourceHeader; -use sp_runtime::traits::Header as HeaderT; - -/// Generic wrapper for `sp_runtime::traits::Header` based headers, that -/// implements `finality_relay::SourceHeader` and may be used in headers sync directly. -#[derive(Clone, Debug, PartialEq)] -pub struct SyncHeader
(Header); - -impl
SyncHeader
{ - /// Extracts wrapped header from self. - pub fn into_inner(self) -> Header { - self.0 - } -} - -impl
std::ops::Deref for SyncHeader
{ - type Target = Header; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl
From
for SyncHeader
{ - fn from(header: Header) -> Self { - Self(header) - } -} - -impl FinalitySourceHeader - for SyncHeader
-{ - fn hash(&self) -> Header::Hash { - self.0.hash() - } - - fn number(&self) -> Header::Number { - *self.0.number() - } - - fn is_mandatory(&self) -> bool { - R::schedules_authorities_change(self.digest()) - } -} diff --git a/relays/client-substrate/src/test_chain.rs b/relays/client-substrate/src/test_chain.rs deleted file mode 100644 index 77240d158..000000000 --- a/relays/client-substrate/src/test_chain.rs +++ /dev/null @@ -1,132 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Pallet provides a set of guard functions that are running in background threads -//! and are aborting process if some condition fails. - -//! Test chain implementation to use in tests. - -#![cfg(any(feature = "test-helpers", test))] - -use crate::{Chain, ChainWithBalances, ChainWithMessages}; -use bp_messages::{ChainWithMessages as ChainWithMessagesBase, MessageNonce}; -use bp_runtime::ChainId; -use frame_support::weights::Weight; -use std::time::Duration; - -/// Chain that may be used in tests. 
-#[derive(Clone, Debug, PartialEq, Eq)] -pub struct TestChain; - -impl bp_runtime::Chain for TestChain { - const ID: ChainId = *b"test"; - - type BlockNumber = u32; - type Hash = sp_core::H256; - type Hasher = sp_runtime::traits::BlakeTwo256; - type Header = sp_runtime::generic::Header; - - type AccountId = u32; - type Balance = u32; - type Nonce = u32; - type Signature = sp_runtime::testing::TestSignature; - - fn max_extrinsic_size() -> u32 { - 100000 - } - - fn max_extrinsic_weight() -> Weight { - unreachable!() - } -} - -impl Chain for TestChain { - const NAME: &'static str = "Test"; - const BEST_FINALIZED_HEADER_ID_METHOD: &'static str = "TestMethod"; - const AVERAGE_BLOCK_INTERVAL: Duration = Duration::from_millis(0); - - type SignedBlock = sp_runtime::generic::SignedBlock< - sp_runtime::generic::Block, - >; - type Call = (); -} - -impl ChainWithBalances for TestChain { - fn account_info_storage_key(_account_id: &u32) -> sp_core::storage::StorageKey { - unreachable!() - } -} - -impl ChainWithMessagesBase for TestChain { - const WITH_CHAIN_MESSAGES_PALLET_NAME: &'static str = "Test"; - const MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX: MessageNonce = 0; - const MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX: MessageNonce = 0; -} - -impl ChainWithMessages for TestChain { - const WITH_CHAIN_RELAYERS_PALLET_NAME: Option<&'static str> = None; - const TO_CHAIN_MESSAGE_DETAILS_METHOD: &'static str = "TestMessagesDetailsMethod"; - const FROM_CHAIN_MESSAGE_DETAILS_METHOD: &'static str = "TestFromMessagesDetailsMethod"; -} - -/// Primitives-level parachain that may be used in tests. 
-#[derive(Clone, Debug, PartialEq, Eq)] -pub struct TestParachainBase; - -impl bp_runtime::Chain for TestParachainBase { - const ID: ChainId = *b"tstp"; - - type BlockNumber = u32; - type Hash = sp_core::H256; - type Hasher = sp_runtime::traits::BlakeTwo256; - type Header = sp_runtime::generic::Header; - - type AccountId = u32; - type Balance = u32; - type Nonce = u32; - type Signature = sp_runtime::testing::TestSignature; - - fn max_extrinsic_size() -> u32 { - unreachable!() - } - - fn max_extrinsic_weight() -> Weight { - unreachable!() - } -} - -impl bp_runtime::Parachain for TestParachainBase { - const PARACHAIN_ID: u32 = 1000; -} - -/// Parachain that may be used in tests. -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct TestParachain; - -impl bp_runtime::UnderlyingChainProvider for TestParachain { - type Chain = TestParachainBase; -} - -impl Chain for TestParachain { - const NAME: &'static str = "TestParachain"; - const BEST_FINALIZED_HEADER_ID_METHOD: &'static str = "TestParachainMethod"; - const AVERAGE_BLOCK_INTERVAL: Duration = Duration::from_millis(0); - - type SignedBlock = sp_runtime::generic::SignedBlock< - sp_runtime::generic::Block, - >; - type Call = (); -} diff --git a/relays/client-substrate/src/transaction_tracker.rs b/relays/client-substrate/src/transaction_tracker.rs deleted file mode 100644 index 00375768c..000000000 --- a/relays/client-substrate/src/transaction_tracker.rs +++ /dev/null @@ -1,447 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Helper for tracking transaction invalidation events. - -use crate::{Chain, Client, Error, HashOf, HeaderIdOf, Subscription, TransactionStatusOf}; - -use async_trait::async_trait; -use futures::{future::Either, Future, FutureExt, Stream, StreamExt}; -use relay_utils::{HeaderId, TrackedTransactionStatus}; -use sp_runtime::traits::Header as _; -use std::time::Duration; - -/// Transaction tracker environment. -#[async_trait] -pub trait Environment: Send + Sync { - /// Returns header id by its hash. - async fn header_id_by_hash(&self, hash: HashOf) -> Result, Error>; -} - -#[async_trait] -impl Environment for Client { - async fn header_id_by_hash(&self, hash: HashOf) -> Result, Error> { - self.header_by_hash(hash).await.map(|h| HeaderId(*h.number(), hash)) - } -} - -/// Substrate transaction tracker implementation. -/// -/// Substrate node provides RPC API to submit and watch for transaction events. This way -/// we may know when transaction is included into block, finalized or rejected. There are -/// some edge cases, when we can't fully trust this mechanism - e.g. transaction may broadcasted -/// and then dropped out of node transaction pool (some other cases are also possible - node -/// restarts, connection lost, ...). Then we can't know for sure - what is currently happening -/// with our transaction. Is the transaction really lost? Is it still alive on the chain network? 
-/// -/// We have several options to handle such cases: -/// -/// 1) hope that the transaction is still alive and wait for its mining until it is spoiled; -/// -/// 2) assume that the transaction is lost and resubmit another transaction instantly; -/// -/// 3) wait for some time (if transaction is mortal - then until block where it dies; if it is -/// immortal - then for some time that we assume is long enough to mine it) and assume that it is -/// lost. -/// -/// This struct implements third option as it seems to be the most optimal. -pub struct TransactionTracker { - environment: E, - transaction_hash: HashOf, - stall_timeout: Duration, - subscription: Subscription>, -} - -impl> TransactionTracker { - /// Create transaction tracker. - pub fn new( - environment: E, - stall_timeout: Duration, - transaction_hash: HashOf, - subscription: Subscription>, - ) -> Self { - Self { environment, stall_timeout, transaction_hash, subscription } - } - - /// Wait for final transaction status and return it along with last known internal invalidation - /// status. 
- async fn do_wait( - self, - wait_for_stall_timeout: impl Future, - wait_for_stall_timeout_rest: impl Future, - ) -> (TrackedTransactionStatus>, Option>>) { - // sometimes we want to wait for the rest of the stall timeout even if - // `wait_for_invalidation` has been "select"ed first => it is shared - let wait_for_invalidation = watch_transaction_status::<_, C, _>( - self.environment, - self.transaction_hash, - self.subscription.into_stream(), - ); - futures::pin_mut!(wait_for_stall_timeout, wait_for_invalidation); - - match futures::future::select(wait_for_stall_timeout, wait_for_invalidation).await { - Either::Left((_, _)) => { - log::trace!( - target: "bridge", - "{} transaction {:?} is considered lost after timeout (no status response from the node)", - C::NAME, - self.transaction_hash, - ); - - (TrackedTransactionStatus::Lost, None) - }, - Either::Right((invalidation_status, _)) => match invalidation_status { - InvalidationStatus::Finalized(at_block) => - (TrackedTransactionStatus::Finalized(at_block), Some(invalidation_status)), - InvalidationStatus::Invalid => - (TrackedTransactionStatus::Lost, Some(invalidation_status)), - InvalidationStatus::Lost => { - // wait for the rest of stall timeout - this way we'll be sure that the - // transaction is actually dead if it has been crafted properly - wait_for_stall_timeout_rest.await; - // if someone is still watching for our transaction, then we're reporting - // an error here (which is treated as "transaction lost") - log::trace!( - target: "bridge", - "{} transaction {:?} is considered lost after timeout", - C::NAME, - self.transaction_hash, - ); - - (TrackedTransactionStatus::Lost, Some(invalidation_status)) - }, - }, - } - } -} - -#[async_trait] -impl> relay_utils::TransactionTracker for TransactionTracker { - type HeaderId = HeaderIdOf; - - async fn wait(self) -> TrackedTransactionStatus> { - let wait_for_stall_timeout = async_std::task::sleep(self.stall_timeout).shared(); - let wait_for_stall_timeout_rest = 
wait_for_stall_timeout.clone(); - self.do_wait(wait_for_stall_timeout, wait_for_stall_timeout_rest).await.0 - } -} - -/// Transaction invalidation status. -/// -/// Note that in places where the `TransactionTracker` is used, the finalization event will be -/// ignored - relay loops are detecting the mining/finalization using their own -/// techniques. That's why we're using `InvalidationStatus` here. -#[derive(Debug, PartialEq)] -enum InvalidationStatus { - /// Transaction has been included into block and finalized at given block. - Finalized(BlockId), - /// Transaction has been invalidated. - Invalid, - /// We have lost track of transaction status. - Lost, -} - -/// Watch for transaction status until transaction is finalized or we lose track of its status. -async fn watch_transaction_status< - E: Environment, - C: Chain, - S: Stream>, ->( - environment: E, - transaction_hash: HashOf, - subscription: S, -) -> InvalidationStatus> { - futures::pin_mut!(subscription); - - loop { - match subscription.next().await { - Some(TransactionStatusOf::::Finalized((block_hash, _))) => { - // the only "successful" outcome of this method is when the block with transaction - // has been finalized - log::trace!( - target: "bridge", - "{} transaction {:?} has been finalized at block: {:?}", - C::NAME, - transaction_hash, - block_hash, - ); - - let header_id = match environment.header_id_by_hash(block_hash).await { - Ok(header_id) => header_id, - Err(e) => { - log::error!( - target: "bridge", - "Failed to read header {:?} when watching for {} transaction {:?}: {:?}", - block_hash, - C::NAME, - transaction_hash, - e, - ); - // that's the best option we have here - return InvalidationStatus::Lost - }, - }; - return InvalidationStatus::Finalized(header_id) - }, - Some(TransactionStatusOf::::Invalid) => { - // if node says that the transaction is invalid, there are still chances that - // it is not actually invalid - e.g. 
if the block where transaction has been - // revalidated is retracted and transaction (at some other node pool) becomes - // valid again on other fork. But let's assume that the chances of this event - // are almost zero - there's a lot of things that must happen for this to be the - // case. - log::trace!( - target: "bridge", - "{} transaction {:?} has been invalidated", - C::NAME, - transaction_hash, - ); - return InvalidationStatus::Invalid - }, - Some(TransactionStatusOf::::Future) | - Some(TransactionStatusOf::::Ready) | - Some(TransactionStatusOf::::Broadcast(_)) => { - // nothing important (for us) has happened - }, - Some(TransactionStatusOf::::InBlock(block_hash)) => { - // TODO: read matching system event (ExtrinsicSuccess or ExtrinsicFailed), log it - // here and use it later (on finality) for reporting invalid transaction - // https://github.com/paritytech/parity-bridges-common/issues/1464 - log::trace!( - target: "bridge", - "{} transaction {:?} has been included in block: {:?}", - C::NAME, - transaction_hash, - block_hash, - ); - }, - Some(TransactionStatusOf::::Retracted(block_hash)) => { - log::trace!( - target: "bridge", - "{} transaction {:?} at block {:?} has been retracted", - C::NAME, - transaction_hash, - block_hash, - ); - }, - Some(TransactionStatusOf::::FinalityTimeout(block_hash)) => { - // finality is lagging? let's wait a bit more and report a stall - log::trace!( - target: "bridge", - "{} transaction {:?} block {:?} has not been finalized for too long", - C::NAME, - transaction_hash, - block_hash, - ); - return InvalidationStatus::Lost - }, - Some(TransactionStatusOf::::Usurped(new_transaction_hash)) => { - // this may be result of our transaction resubmitter work or some manual - // intervention. 
In both cases - let's start stall timeout, because the meaning - // of transaction may have changed - log::trace!( - target: "bridge", - "{} transaction {:?} has been usurped by new transaction: {:?}", - C::NAME, - transaction_hash, - new_transaction_hash, - ); - return InvalidationStatus::Lost - }, - Some(TransactionStatusOf::::Dropped) => { - // the transaction has been removed from the pool because of its limits. Let's wait - // a bit and report a stall - log::trace!( - target: "bridge", - "{} transaction {:?} has been dropped from the pool", - C::NAME, - transaction_hash, - ); - return InvalidationStatus::Lost - }, - None => { - // the status of transaction is unknown to us (the subscription has been closed?). - // Let's wait a bit and report a stall - return InvalidationStatus::Lost - }, - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::test_chain::TestChain; - use futures::{FutureExt, SinkExt}; - use sc_transaction_pool_api::TransactionStatus; - - struct TestEnvironment(Result, Error>); - - #[async_trait] - impl Environment for TestEnvironment { - async fn header_id_by_hash( - &self, - _hash: HashOf, - ) -> Result, Error> { - self.0.as_ref().map_err(|_| Error::BridgePalletIsNotInitialized).cloned() - } - } - - async fn on_transaction_status( - status: TransactionStatus, HashOf>, - ) -> Option<( - TrackedTransactionStatus>, - InvalidationStatus>, - )> { - let (mut sender, receiver) = futures::channel::mpsc::channel(1); - let tx_tracker = TransactionTracker::::new( - TestEnvironment(Ok(HeaderId(0, Default::default()))), - Duration::from_secs(0), - Default::default(), - Subscription(async_std::sync::Mutex::new(receiver)), - ); - - let wait_for_stall_timeout = futures::future::pending(); - let wait_for_stall_timeout_rest = futures::future::ready(()); - sender.send(Some(status)).await.unwrap(); - tx_tracker - .do_wait(wait_for_stall_timeout, wait_for_stall_timeout_rest) - .now_or_never() - .map(|(ts, is)| (ts, is.unwrap())) - } - - 
#[async_std::test] - async fn returns_finalized_on_finalized() { - assert_eq!( - on_transaction_status(TransactionStatus::Finalized(Default::default())).await, - Some(( - TrackedTransactionStatus::Finalized(Default::default()), - InvalidationStatus::Finalized(Default::default()) - )), - ); - } - - #[async_std::test] - async fn returns_lost_on_finalized_and_environment_error() { - assert_eq!( - watch_transaction_status::<_, TestChain, _>( - TestEnvironment(Err(Error::BridgePalletIsNotInitialized)), - Default::default(), - futures::stream::iter([TransactionStatus::Finalized(Default::default())]) - ) - .now_or_never(), - Some(InvalidationStatus::Lost), - ); - } - - #[async_std::test] - async fn returns_invalid_on_invalid() { - assert_eq!( - on_transaction_status(TransactionStatus::Invalid).await, - Some((TrackedTransactionStatus::Lost, InvalidationStatus::Invalid)), - ); - } - - #[async_std::test] - async fn waits_on_future() { - assert_eq!(on_transaction_status(TransactionStatus::Future).await, None,); - } - - #[async_std::test] - async fn waits_on_ready() { - assert_eq!(on_transaction_status(TransactionStatus::Ready).await, None,); - } - - #[async_std::test] - async fn waits_on_broadcast() { - assert_eq!( - on_transaction_status(TransactionStatus::Broadcast(Default::default())).await, - None, - ); - } - - #[async_std::test] - async fn waits_on_in_block() { - assert_eq!( - on_transaction_status(TransactionStatus::InBlock(Default::default())).await, - None, - ); - } - - #[async_std::test] - async fn waits_on_retracted() { - assert_eq!( - on_transaction_status(TransactionStatus::Retracted(Default::default())).await, - None, - ); - } - - #[async_std::test] - async fn lost_on_finality_timeout() { - assert_eq!( - on_transaction_status(TransactionStatus::FinalityTimeout(Default::default())).await, - Some((TrackedTransactionStatus::Lost, InvalidationStatus::Lost)), - ); - } - - #[async_std::test] - async fn lost_on_usurped() { - assert_eq!( - 
on_transaction_status(TransactionStatus::Usurped(Default::default())).await, - Some((TrackedTransactionStatus::Lost, InvalidationStatus::Lost)), - ); - } - - #[async_std::test] - async fn lost_on_dropped() { - assert_eq!( - on_transaction_status(TransactionStatus::Dropped).await, - Some((TrackedTransactionStatus::Lost, InvalidationStatus::Lost)), - ); - } - - #[async_std::test] - async fn lost_on_subscription_error() { - assert_eq!( - watch_transaction_status::<_, TestChain, _>( - TestEnvironment(Ok(HeaderId(0, Default::default()))), - Default::default(), - futures::stream::iter([]) - ) - .now_or_never(), - Some(InvalidationStatus::Lost), - ); - } - - #[async_std::test] - async fn lost_on_timeout_when_waiting_for_invalidation_status() { - let (_sender, receiver) = futures::channel::mpsc::channel(1); - let tx_tracker = TransactionTracker::::new( - TestEnvironment(Ok(HeaderId(0, Default::default()))), - Duration::from_secs(0), - Default::default(), - Subscription(async_std::sync::Mutex::new(receiver)), - ); - - let wait_for_stall_timeout = futures::future::ready(()).shared(); - let wait_for_stall_timeout_rest = wait_for_stall_timeout.clone(); - let wait_result = tx_tracker - .do_wait(wait_for_stall_timeout, wait_for_stall_timeout_rest) - .now_or_never(); - - assert_eq!(wait_result, Some((TrackedTransactionStatus::Lost, None))); - } -} diff --git a/relays/equivocation/Cargo.toml b/relays/equivocation/Cargo.toml deleted file mode 100644 index e7146e05f..000000000 --- a/relays/equivocation/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -name = "equivocation-detector" -version = "0.1.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true -description = "Equivocation detector" -publish = false - -[lints] -workspace = true - -[dependencies] -async-std = { version = "1.9.0", features = ["attributes"] } -async-trait = "0.1.79" -bp-header-chain = { path = 
"../../primitives/header-chain" } -finality-relay = { path = "../finality" } -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -futures = "0.3.30" -log = { workspace = true } -num-traits = "0.2" -relay-utils = { path = "../utils" } diff --git a/relays/equivocation/src/block_checker.rs b/relays/equivocation/src/block_checker.rs deleted file mode 100644 index c8131e5b9..000000000 --- a/relays/equivocation/src/block_checker.rs +++ /dev/null @@ -1,471 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -use crate::{ - handle_client_error, reporter::EquivocationsReporter, EquivocationDetectionPipeline, - EquivocationReportingContext, HeaderFinalityInfo, SourceClient, TargetClient, -}; - -use bp_header_chain::{FinalityProof, FindEquivocations as FindEquivocationsT}; -use finality_relay::FinalityProofsBuf; -use futures::future::{BoxFuture, FutureExt}; -use num_traits::Saturating; - -/// First step in the block checking state machine. -/// -/// Getting the finality info associated to the source headers synced with the target chain -/// at the specified block. -#[cfg_attr(test, derive(Debug, PartialEq))] -pub struct ReadSyncedHeaders { - pub target_block_num: P::TargetNumber, -} - -impl ReadSyncedHeaders

{ - pub async fn next>( - self, - target_client: &mut TC, - ) -> Result, Self> { - match target_client.synced_headers_finality_info(self.target_block_num).await { - Ok(synced_headers) => - Ok(ReadContext { target_block_num: self.target_block_num, synced_headers }), - Err(e) => { - log::error!( - target: "bridge", - "Could not get {} headers synced to {} at block {}: {e:?}", - P::SOURCE_NAME, - P::TARGET_NAME, - self.target_block_num - ); - - // Reconnect target client in case of a connection error. - handle_client_error(target_client, e).await; - - Err(self) - }, - } - } -} - -/// Second step in the block checking state machine. -/// -/// Reading the equivocation reporting context from the target chain. -#[cfg_attr(test, derive(Debug))] -pub struct ReadContext { - target_block_num: P::TargetNumber, - synced_headers: Vec>, -} - -impl ReadContext

{ - pub async fn next>( - self, - target_client: &mut TC, - ) -> Result>, Self> { - match EquivocationReportingContext::try_read_from_target::( - target_client, - self.target_block_num.saturating_sub(1.into()), - ) - .await - { - Ok(Some(context)) => Ok(Some(FindEquivocations { - target_block_num: self.target_block_num, - synced_headers: self.synced_headers, - context, - })), - Ok(None) => Ok(None), - Err(e) => { - log::error!( - target: "bridge", - "Could not read {} `EquivocationReportingContext` from {} at block {}: {e:?}", - P::SOURCE_NAME, - P::TARGET_NAME, - self.target_block_num.saturating_sub(1.into()), - ); - - // Reconnect target client in case of a connection error. - handle_client_error(target_client, e).await; - - Err(self) - }, - } - } -} - -/// Third step in the block checking state machine. -/// -/// Searching for equivocations in the source headers synced with the target chain. -#[cfg_attr(test, derive(Debug))] -pub struct FindEquivocations { - target_block_num: P::TargetNumber, - synced_headers: Vec>, - context: EquivocationReportingContext

, -} - -impl FindEquivocations

{ - pub fn next( - mut self, - finality_proofs_buf: &mut FinalityProofsBuf

, - ) -> Vec> { - let mut result = vec![]; - for synced_header in self.synced_headers { - match P::EquivocationsFinder::find_equivocations( - &self.context.synced_verification_context, - &synced_header.finality_proof, - finality_proofs_buf.buf().as_slice(), - ) { - Ok(equivocations) => - if !equivocations.is_empty() { - result.push(ReportEquivocations { - source_block_hash: self.context.synced_header_hash, - equivocations, - }) - }, - Err(e) => { - log::error!( - target: "bridge", - "Could not search for equivocations in the finality proof \ - for source header {:?} synced at target block {}: {e:?}", - synced_header.finality_proof.target_header_hash(), - self.target_block_num - ); - }, - }; - - finality_proofs_buf.prune(synced_header.finality_proof.target_header_number(), None); - self.context.update(synced_header); - } - - result - } -} - -/// Fourth step in the block checking state machine. -/// -/// Reporting the detected equivocations (if any). -#[cfg_attr(test, derive(Debug))] -pub struct ReportEquivocations { - source_block_hash: P::Hash, - equivocations: Vec, -} - -impl ReportEquivocations

{ - pub async fn next>( - mut self, - source_client: &mut SC, - reporter: &mut EquivocationsReporter<'_, P, SC>, - ) -> Result<(), Self> { - let mut unprocessed_equivocations = vec![]; - for equivocation in self.equivocations { - match reporter - .submit_report(source_client, self.source_block_hash, equivocation.clone()) - .await - { - Ok(_) => {}, - Err(e) => { - log::error!( - target: "bridge", - "Could not submit equivocation report to {} for {equivocation:?}: {e:?}", - P::SOURCE_NAME, - ); - - // Mark the equivocation as unprocessed - unprocessed_equivocations.push(equivocation); - // Reconnect source client in case of a connection error. - handle_client_error(source_client, e).await; - }, - } - } - - self.equivocations = unprocessed_equivocations; - if !self.equivocations.is_empty() { - return Err(self) - } - - Ok(()) - } -} - -/// Block checking state machine. -#[cfg_attr(test, derive(Debug))] -pub enum BlockChecker { - ReadSyncedHeaders(ReadSyncedHeaders

), - ReadContext(ReadContext

), - ReportEquivocations(Vec>), -} - -impl BlockChecker

{ - pub fn new(target_block_num: P::TargetNumber) -> Self { - Self::ReadSyncedHeaders(ReadSyncedHeaders { target_block_num }) - } - - pub fn run<'a, SC: SourceClient

, TC: TargetClient

>( - self, - source_client: &'a mut SC, - target_client: &'a mut TC, - finality_proofs_buf: &'a mut FinalityProofsBuf

, - reporter: &'a mut EquivocationsReporter, - ) -> BoxFuture<'a, Result<(), Self>> { - async move { - match self { - Self::ReadSyncedHeaders(state) => { - let read_context = - state.next(target_client).await.map_err(Self::ReadSyncedHeaders)?; - Self::ReadContext(read_context) - .run(source_client, target_client, finality_proofs_buf, reporter) - .await - }, - Self::ReadContext(state) => { - let maybe_find_equivocations = - state.next(target_client).await.map_err(Self::ReadContext)?; - let find_equivocations = match maybe_find_equivocations { - Some(find_equivocations) => find_equivocations, - None => return Ok(()), - }; - Self::ReportEquivocations(find_equivocations.next(finality_proofs_buf)) - .run(source_client, target_client, finality_proofs_buf, reporter) - .await - }, - Self::ReportEquivocations(state) => { - let mut failures = vec![]; - for report_equivocations in state { - if let Err(failure) = - report_equivocations.next(source_client, reporter).await - { - failures.push(failure); - } - } - - if !failures.is_empty() { - return Err(Self::ReportEquivocations(failures)) - } - - Ok(()) - }, - } - } - .boxed() - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::mock::*; - use std::collections::HashMap; - - impl PartialEq for ReadContext { - fn eq(&self, other: &Self) -> bool { - self.target_block_num == other.target_block_num && - self.synced_headers == other.synced_headers - } - } - - impl PartialEq for FindEquivocations { - fn eq(&self, other: &Self) -> bool { - self.target_block_num == other.target_block_num && - self.synced_headers == other.synced_headers && - self.context == other.context - } - } - - impl PartialEq for ReportEquivocations { - fn eq(&self, other: &Self) -> bool { - self.source_block_hash == other.source_block_hash && - self.equivocations == other.equivocations - } - } - - impl PartialEq for BlockChecker { - fn eq(&self, _other: &Self) -> bool { - matches!(self, _other) - } - } - - #[async_std::test] - async fn 
block_checker_works() { - let mut source_client = TestSourceClient { ..Default::default() }; - let mut target_client = TestTargetClient { - best_synced_header_hash: HashMap::from([(9, Ok(Some(5)))]), - finality_verification_context: HashMap::from([( - 9, - Ok(TestFinalityVerificationContext { check_equivocations: true }), - )]), - synced_headers_finality_info: HashMap::from([( - 10, - Ok(vec![ - new_header_finality_info(6, None), - new_header_finality_info(7, Some(false)), - new_header_finality_info(8, None), - new_header_finality_info(9, Some(true)), - new_header_finality_info(10, None), - new_header_finality_info(11, None), - new_header_finality_info(12, None), - ]), - )]), - ..Default::default() - }; - let mut reporter = - EquivocationsReporter::::new(); - - let block_checker = BlockChecker::new(10); - assert!(block_checker - .run( - &mut source_client, - &mut target_client, - &mut FinalityProofsBuf::new(vec![ - TestFinalityProof(6, vec!["6-1"]), - TestFinalityProof(7, vec![]), - TestFinalityProof(8, vec!["8-1"]), - TestFinalityProof(9, vec!["9-1"]), - TestFinalityProof(10, vec![]), - TestFinalityProof(11, vec!["11-1", "11-2"]), - TestFinalityProof(12, vec!["12-1"]) - ]), - &mut reporter - ) - .await - .is_ok()); - assert_eq!( - *source_client.reported_equivocations.lock().unwrap(), - HashMap::from([(5, vec!["6-1"]), (9, vec!["11-1", "11-2", "12-1"])]) - ); - } - - #[async_std::test] - async fn block_checker_works_with_empty_context() { - let mut target_client = TestTargetClient { - best_synced_header_hash: HashMap::from([(9, Ok(None))]), - finality_verification_context: HashMap::from([( - 9, - Ok(TestFinalityVerificationContext { check_equivocations: true }), - )]), - synced_headers_finality_info: HashMap::from([( - 10, - Ok(vec![new_header_finality_info(6, None)]), - )]), - ..Default::default() - }; - let mut source_client = TestSourceClient { ..Default::default() }; - let mut reporter = - EquivocationsReporter::::new(); - - let block_checker = 
BlockChecker::new(10); - assert!(block_checker - .run( - &mut source_client, - &mut target_client, - &mut FinalityProofsBuf::new(vec![TestFinalityProof(6, vec!["6-1"])]), - &mut reporter - ) - .await - .is_ok()); - assert_eq!(*source_client.reported_equivocations.lock().unwrap(), HashMap::default()); - } - - #[async_std::test] - async fn read_synced_headers_handles_errors() { - let mut target_client = TestTargetClient { - synced_headers_finality_info: HashMap::from([ - (10, Err(TestClientError::NonConnection)), - (11, Err(TestClientError::Connection)), - ]), - ..Default::default() - }; - let mut source_client = TestSourceClient { ..Default::default() }; - let mut reporter = - EquivocationsReporter::::new(); - - // NonConnection error - let block_checker = BlockChecker::new(10); - assert_eq!( - block_checker - .run( - &mut source_client, - &mut target_client, - &mut FinalityProofsBuf::new(vec![]), - &mut reporter - ) - .await, - Err(BlockChecker::ReadSyncedHeaders(ReadSyncedHeaders { target_block_num: 10 })) - ); - assert_eq!(target_client.num_reconnects, 0); - - // Connection error - let block_checker = BlockChecker::new(11); - assert_eq!( - block_checker - .run( - &mut source_client, - &mut target_client, - &mut FinalityProofsBuf::new(vec![]), - &mut reporter - ) - .await, - Err(BlockChecker::ReadSyncedHeaders(ReadSyncedHeaders { target_block_num: 11 })) - ); - assert_eq!(target_client.num_reconnects, 1); - } - - #[async_std::test] - async fn read_context_handles_errors() { - let mut target_client = TestTargetClient { - synced_headers_finality_info: HashMap::from([(10, Ok(vec![])), (11, Ok(vec![]))]), - best_synced_header_hash: HashMap::from([ - (9, Err(TestClientError::NonConnection)), - (10, Err(TestClientError::Connection)), - ]), - ..Default::default() - }; - let mut source_client = TestSourceClient { ..Default::default() }; - let mut reporter = - EquivocationsReporter::::new(); - - // NonConnection error - let block_checker = BlockChecker::new(10); - 
assert_eq!( - block_checker - .run( - &mut source_client, - &mut target_client, - &mut FinalityProofsBuf::new(vec![]), - &mut reporter - ) - .await, - Err(BlockChecker::ReadContext(ReadContext { - target_block_num: 10, - synced_headers: vec![] - })) - ); - assert_eq!(target_client.num_reconnects, 0); - - // Connection error - let block_checker = BlockChecker::new(11); - assert_eq!( - block_checker - .run( - &mut source_client, - &mut target_client, - &mut FinalityProofsBuf::new(vec![]), - &mut reporter - ) - .await, - Err(BlockChecker::ReadContext(ReadContext { - target_block_num: 11, - synced_headers: vec![] - })) - ); - assert_eq!(target_client.num_reconnects, 1); - } -} diff --git a/relays/equivocation/src/equivocation_loop.rs b/relays/equivocation/src/equivocation_loop.rs deleted file mode 100644 index dfc4af0d4..000000000 --- a/relays/equivocation/src/equivocation_loop.rs +++ /dev/null @@ -1,308 +0,0 @@ -// Copyright 2019-2023 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . 
- -use crate::{ - handle_client_error, reporter::EquivocationsReporter, EquivocationDetectionPipeline, - SourceClient, TargetClient, -}; - -use crate::block_checker::BlockChecker; -use finality_relay::{FinalityProofsBuf, FinalityProofsStream}; -use futures::{select_biased, FutureExt}; -use num_traits::Saturating; -use relay_utils::{metrics::MetricsParams, FailedClient}; -use std::{future::Future, time::Duration}; - -/// Equivocations detection loop state. -struct EquivocationDetectionLoop< - P: EquivocationDetectionPipeline, - SC: SourceClient

, - TC: TargetClient

, -> { - source_client: SC, - target_client: TC, - - from_block_num: Option, - until_block_num: Option, - - reporter: EquivocationsReporter<'static, P, SC>, - - finality_proofs_stream: FinalityProofsStream, - finality_proofs_buf: FinalityProofsBuf

, -} - -impl, TC: TargetClient

> - EquivocationDetectionLoop -{ - async fn ensure_finality_proofs_stream(&mut self) { - match self.finality_proofs_stream.ensure_stream(&self.source_client).await { - Ok(_) => {}, - Err(e) => { - log::error!( - target: "bridge", - "Could not connect to the {} `FinalityProofsStream`: {e:?}", - P::SOURCE_NAME, - ); - - // Reconnect to the source client if needed - handle_client_error(&mut self.source_client, e).await; - }, - } - } - - async fn best_finalized_target_block_number(&mut self) -> Option { - match self.target_client.best_finalized_header_number().await { - Ok(block_num) => Some(block_num), - Err(e) => { - log::error!( - target: "bridge", - "Could not read best finalized header number from {}: {e:?}", - P::TARGET_NAME, - ); - - // Reconnect target client and move on - handle_client_error(&mut self.target_client, e).await; - - None - }, - } - } - - async fn do_run(&mut self, tick: Duration, exit_signal: impl Future) { - let exit_signal = exit_signal.fuse(); - futures::pin_mut!(exit_signal); - - loop { - // Make sure that we are connected to the source finality proofs stream. - self.ensure_finality_proofs_stream().await; - // Check the status of the pending equivocation reports - self.reporter.process_pending_reports().await; - - // Update blocks range. 
- if let Some(block_number) = self.best_finalized_target_block_number().await { - self.from_block_num.get_or_insert(block_number); - self.until_block_num = Some(block_number); - } - let (from, until) = match (self.from_block_num, self.until_block_num) { - (Some(from), Some(until)) => (from, until), - _ => continue, - }; - - // Check the available blocks - let mut current_block_number = from; - while current_block_number <= until { - self.finality_proofs_buf.fill(&mut self.finality_proofs_stream); - let block_checker = BlockChecker::new(current_block_number); - let _ = block_checker - .run( - &mut self.source_client, - &mut self.target_client, - &mut self.finality_proofs_buf, - &mut self.reporter, - ) - .await; - current_block_number = current_block_number.saturating_add(1.into()); - } - self.from_block_num = Some(current_block_number); - - select_biased! { - _ = exit_signal => return, - _ = async_std::task::sleep(tick).fuse() => {}, - } - } - } - - pub async fn run( - source_client: SC, - target_client: TC, - tick: Duration, - exit_signal: impl Future, - ) -> Result<(), FailedClient> { - let mut equivocation_detection_loop = Self { - source_client, - target_client, - from_block_num: None, - until_block_num: None, - reporter: EquivocationsReporter::::new(), - finality_proofs_stream: FinalityProofsStream::new(), - finality_proofs_buf: FinalityProofsBuf::new(vec![]), - }; - - equivocation_detection_loop.do_run(tick, exit_signal).await; - Ok(()) - } -} - -/// Spawn the equivocations detection loop. -pub async fn run( - source_client: impl SourceClient

, - target_client: impl TargetClient

, - tick: Duration, - metrics_params: MetricsParams, - exit_signal: impl Future + 'static + Send, -) -> Result<(), relay_utils::Error> { - let exit_signal = exit_signal.shared(); - relay_utils::relay_loop(source_client, target_client) - .with_metrics(metrics_params) - .expose() - .await? - .run( - format!("{}_to_{}_EquivocationDetection", P::SOURCE_NAME, P::TARGET_NAME), - move |source_client, target_client, _metrics| { - EquivocationDetectionLoop::run( - source_client, - target_client, - tick, - exit_signal.clone(), - ) - }, - ) - .await -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::mock::*; - use futures::{channel::mpsc::UnboundedSender, StreamExt}; - use std::{ - collections::{HashMap, VecDeque}, - sync::{Arc, Mutex}, - }; - - fn best_finalized_header_number( - best_finalized_headers: &Mutex>>, - exit_sender: &UnboundedSender<()>, - ) -> Result { - let mut best_finalized_headers = best_finalized_headers.lock().unwrap(); - let result = best_finalized_headers.pop_front().unwrap(); - if best_finalized_headers.is_empty() { - exit_sender.unbounded_send(()).unwrap(); - } - result - } - - #[async_std::test] - async fn multiple_blocks_are_checked_correctly() { - let best_finalized_headers = Arc::new(Mutex::new(VecDeque::from([Ok(10), Ok(12), Ok(13)]))); - let (exit_sender, exit_receiver) = futures::channel::mpsc::unbounded(); - - let source_client = TestSourceClient { - finality_proofs: Arc::new(Mutex::new(vec![ - TestFinalityProof(2, vec!["2-1"]), - TestFinalityProof(3, vec!["3-1", "3-2"]), - TestFinalityProof(4, vec!["4-1"]), - TestFinalityProof(5, vec!["5-1"]), - TestFinalityProof(6, vec!["6-1", "6-2"]), - TestFinalityProof(7, vec!["7-1", "7-2"]), - ])), - ..Default::default() - }; - let reported_equivocations = source_client.reported_equivocations.clone(); - let target_client = TestTargetClient { - best_finalized_header_number: Arc::new(move || { - best_finalized_header_number(&best_finalized_headers, &exit_sender) - }), - best_synced_header_hash: 
HashMap::from([ - (9, Ok(Some(1))), - (10, Ok(Some(3))), - (11, Ok(Some(5))), - (12, Ok(Some(6))), - ]), - finality_verification_context: HashMap::from([ - (9, Ok(TestFinalityVerificationContext { check_equivocations: true })), - (10, Ok(TestFinalityVerificationContext { check_equivocations: true })), - (11, Ok(TestFinalityVerificationContext { check_equivocations: false })), - (12, Ok(TestFinalityVerificationContext { check_equivocations: true })), - ]), - synced_headers_finality_info: HashMap::from([ - ( - 10, - Ok(vec![new_header_finality_info(2, None), new_header_finality_info(3, None)]), - ), - ( - 11, - Ok(vec![ - new_header_finality_info(4, None), - new_header_finality_info(5, Some(false)), - ]), - ), - (12, Ok(vec![new_header_finality_info(6, None)])), - (13, Ok(vec![new_header_finality_info(7, None)])), - ]), - ..Default::default() - }; - - assert!(run::( - source_client, - target_client, - Duration::from_secs(0), - MetricsParams { address: None, registry: Default::default() }, - exit_receiver.into_future().map(|(_, _)| ()), - ) - .await - .is_ok()); - assert_eq!( - *reported_equivocations.lock().unwrap(), - HashMap::from([ - (1, vec!["2-1", "3-1", "3-2"]), - (3, vec!["4-1", "5-1"]), - (6, vec!["7-1", "7-2"]) - ]) - ); - } - - #[async_std::test] - async fn blocks_following_error_are_checked_correctly() { - let best_finalized_headers = Mutex::new(VecDeque::from([Ok(10), Ok(11)])); - let (exit_sender, exit_receiver) = futures::channel::mpsc::unbounded(); - - let source_client = TestSourceClient { - finality_proofs: Arc::new(Mutex::new(vec![ - TestFinalityProof(2, vec!["2-1"]), - TestFinalityProof(3, vec!["3-1"]), - ])), - ..Default::default() - }; - let reported_equivocations = source_client.reported_equivocations.clone(); - let target_client = TestTargetClient { - best_finalized_header_number: Arc::new(move || { - best_finalized_header_number(&best_finalized_headers, &exit_sender) - }), - best_synced_header_hash: HashMap::from([(9, Ok(Some(1))), (10, 
Ok(Some(2)))]), - finality_verification_context: HashMap::from([ - (9, Ok(TestFinalityVerificationContext { check_equivocations: true })), - (10, Ok(TestFinalityVerificationContext { check_equivocations: true })), - ]), - synced_headers_finality_info: HashMap::from([ - (10, Err(TestClientError::NonConnection)), - (11, Ok(vec![new_header_finality_info(3, None)])), - ]), - ..Default::default() - }; - - assert!(run::( - source_client, - target_client, - Duration::from_secs(0), - MetricsParams { address: None, registry: Default::default() }, - exit_receiver.into_future().map(|(_, _)| ()), - ) - .await - .is_ok()); - assert_eq!(*reported_equivocations.lock().unwrap(), HashMap::from([(2, vec!["3-1"]),])); - } -} diff --git a/relays/equivocation/src/lib.rs b/relays/equivocation/src/lib.rs deleted file mode 100644 index 56a71ef3b..000000000 --- a/relays/equivocation/src/lib.rs +++ /dev/null @@ -1,137 +0,0 @@ -// Copyright 2019-2023 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . 
- -mod block_checker; -mod equivocation_loop; -mod mock; -mod reporter; - -use async_trait::async_trait; -use bp_header_chain::{FinalityProof, FindEquivocations}; -use finality_relay::{FinalityPipeline, SourceClientBase}; -use relay_utils::{relay_loop::Client as RelayClient, MaybeConnectionError, TransactionTracker}; -use std::{fmt::Debug, time::Duration}; - -pub use equivocation_loop::run; - -#[cfg(not(test))] -const RECONNECT_DELAY: Duration = relay_utils::relay_loop::RECONNECT_DELAY; -#[cfg(test)] -const RECONNECT_DELAY: Duration = mock::TEST_RECONNECT_DELAY; - -pub trait EquivocationDetectionPipeline: FinalityPipeline { - /// Block number of the target chain. - type TargetNumber: relay_utils::BlockNumberBase; - /// The context needed for validating finality proofs. - type FinalityVerificationContext: Debug + Send; - /// The type of the equivocation proof. - type EquivocationProof: Clone + Debug + Send + Sync; - /// The equivocations finder. - type EquivocationsFinder: FindEquivocations< - Self::FinalityProof, - Self::FinalityVerificationContext, - Self::EquivocationProof, - >; -} - -type HeaderFinalityInfo

= bp_header_chain::HeaderFinalityInfo< -

::FinalityProof, -

::FinalityVerificationContext, ->; - -/// Source client used in equivocation detection loop. -#[async_trait] -pub trait SourceClient: SourceClientBase

{ - /// Transaction tracker to track submitted transactions. - type TransactionTracker: TransactionTracker; - - /// Report equivocation. - async fn report_equivocation( - &self, - at: P::Hash, - equivocation: P::EquivocationProof, - ) -> Result; -} - -/// Target client used in equivocation detection loop. -#[async_trait] -pub trait TargetClient: RelayClient { - /// Get the best finalized header number. - async fn best_finalized_header_number(&self) -> Result; - - /// Get the hash of the best source header known by the target at the provided block number. - async fn best_synced_header_hash( - &self, - at: P::TargetNumber, - ) -> Result, Self::Error>; - - /// Get the data stored by the target at the specified block for validating source finality - /// proofs. - async fn finality_verification_context( - &self, - at: P::TargetNumber, - ) -> Result; - - /// Get the finality info associated to the source headers synced with the target chain at the - /// specified block. - async fn synced_headers_finality_info( - &self, - at: P::TargetNumber, - ) -> Result>, Self::Error>; -} - -/// The context needed for finding equivocations inside finality proofs and reporting them. -#[derive(Debug, PartialEq)] -struct EquivocationReportingContext { - pub synced_header_hash: P::Hash, - pub synced_verification_context: P::FinalityVerificationContext, -} - -impl EquivocationReportingContext

{ - /// Try to get the `EquivocationReportingContext` used by the target chain - /// at the provided block. - pub async fn try_read_from_target>( - target_client: &TC, - at: P::TargetNumber, - ) -> Result, TC::Error> { - let maybe_best_synced_header_hash = target_client.best_synced_header_hash(at).await?; - Ok(match maybe_best_synced_header_hash { - Some(best_synced_header_hash) => Some(EquivocationReportingContext { - synced_header_hash: best_synced_header_hash, - synced_verification_context: target_client - .finality_verification_context(at) - .await?, - }), - None => None, - }) - } - - /// Update with the new context introduced by the `HeaderFinalityInfo

` if any. - pub fn update(&mut self, info: HeaderFinalityInfo

) { - if let Some(new_verification_context) = info.new_verification_context { - self.synced_header_hash = info.finality_proof.target_header_hash(); - self.synced_verification_context = new_verification_context; - } - } -} - -async fn handle_client_error(client: &mut C, e: C::Error) { - if e.is_connection_error() { - client.reconnect_until_success(RECONNECT_DELAY).await; - } else { - async_std::task::sleep(RECONNECT_DELAY).await; - } -} diff --git a/relays/equivocation/src/mock.rs b/relays/equivocation/src/mock.rs deleted file mode 100644 index ced5c6f35..000000000 --- a/relays/equivocation/src/mock.rs +++ /dev/null @@ -1,285 +0,0 @@ -// Copyright (C) Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . 
- -#![cfg(test)] - -use crate::{EquivocationDetectionPipeline, HeaderFinalityInfo, SourceClient, TargetClient}; -use async_trait::async_trait; -use bp_header_chain::{FinalityProof, FindEquivocations}; -use finality_relay::{FinalityPipeline, SourceClientBase}; -use futures::{Stream, StreamExt}; -use relay_utils::{ - relay_loop::Client as RelayClient, HeaderId, MaybeConnectionError, TrackedTransactionStatus, - TransactionTracker, -}; -use std::{ - collections::HashMap, - pin::Pin, - sync::{Arc, Mutex}, - time::Duration, -}; - -pub type TestSourceHashAndNumber = u64; -pub type TestTargetNumber = u64; -pub type TestEquivocationProof = &'static str; - -pub const TEST_RECONNECT_DELAY: Duration = Duration::from_secs(0); - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct TestFinalityProof(pub TestSourceHashAndNumber, pub Vec); - -impl FinalityProof for TestFinalityProof { - fn target_header_hash(&self) -> TestSourceHashAndNumber { - self.0 - } - - fn target_header_number(&self) -> TestSourceHashAndNumber { - self.0 - } -} - -#[derive(Debug, Clone, PartialEq)] -pub struct TestEquivocationDetectionPipeline; - -impl FinalityPipeline for TestEquivocationDetectionPipeline { - const SOURCE_NAME: &'static str = "TestSource"; - const TARGET_NAME: &'static str = "TestTarget"; - - type Hash = TestSourceHashAndNumber; - type Number = TestSourceHashAndNumber; - type FinalityProof = TestFinalityProof; -} - -#[derive(Clone, Debug, PartialEq)] -pub struct TestFinalityVerificationContext { - pub check_equivocations: bool, -} - -pub struct TestEquivocationsFinder; - -impl FindEquivocations - for TestEquivocationsFinder -{ - type Error = (); - - fn find_equivocations( - verification_context: &TestFinalityVerificationContext, - synced_proof: &TestFinalityProof, - source_proofs: &[TestFinalityProof], - ) -> Result, Self::Error> { - if verification_context.check_equivocations { - // Get the equivocations from the source proofs, in order to make sure - // that they are correctly provided. 
- if let Some(proof) = source_proofs.iter().find(|proof| proof.0 == synced_proof.0) { - return Ok(proof.1.clone()) - } - } - - Ok(vec![]) - } -} - -impl EquivocationDetectionPipeline for TestEquivocationDetectionPipeline { - type TargetNumber = TestTargetNumber; - type FinalityVerificationContext = TestFinalityVerificationContext; - type EquivocationProof = TestEquivocationProof; - type EquivocationsFinder = TestEquivocationsFinder; -} - -#[derive(Debug, Clone)] -pub enum TestClientError { - Connection, - NonConnection, -} - -impl MaybeConnectionError for TestClientError { - fn is_connection_error(&self) -> bool { - match self { - TestClientError::Connection => true, - TestClientError::NonConnection => false, - } - } -} - -#[derive(Clone)] -pub struct TestSourceClient { - pub num_reconnects: u32, - pub finality_proofs: Arc>>, - pub reported_equivocations: - Arc>>>, -} - -impl Default for TestSourceClient { - fn default() -> Self { - Self { - num_reconnects: 0, - finality_proofs: Arc::new(Mutex::new(vec![])), - reported_equivocations: Arc::new(Mutex::new(Default::default())), - } - } -} - -#[async_trait] -impl RelayClient for TestSourceClient { - type Error = TestClientError; - - async fn reconnect(&mut self) -> Result<(), Self::Error> { - self.num_reconnects += 1; - - Ok(()) - } -} - -#[async_trait] -impl SourceClientBase for TestSourceClient { - type FinalityProofsStream = Pin + 'static + Send>>; - - async fn finality_proofs(&self) -> Result { - let finality_proofs = std::mem::take(&mut *self.finality_proofs.lock().unwrap()); - Ok(futures::stream::iter(finality_proofs).boxed()) - } -} - -#[derive(Clone, Debug)] -pub struct TestTransactionTracker( - pub TrackedTransactionStatus>, -); - -impl Default for TestTransactionTracker { - fn default() -> TestTransactionTracker { - TestTransactionTracker(TrackedTransactionStatus::Finalized(Default::default())) - } -} - -#[async_trait] -impl TransactionTracker for TestTransactionTracker { - type HeaderId = HeaderId; - - async 
fn wait( - self, - ) -> TrackedTransactionStatus> { - self.0 - } -} - -#[async_trait] -impl SourceClient for TestSourceClient { - type TransactionTracker = TestTransactionTracker; - - async fn report_equivocation( - &self, - at: TestSourceHashAndNumber, - equivocation: TestEquivocationProof, - ) -> Result { - self.reported_equivocations - .lock() - .unwrap() - .entry(at) - .or_default() - .push(equivocation); - - Ok(TestTransactionTracker::default()) - } -} - -#[derive(Clone)] -pub struct TestTargetClient { - pub num_reconnects: u32, - pub best_finalized_header_number: - Arc Result + Send + Sync>, - pub best_synced_header_hash: - HashMap, TestClientError>>, - pub finality_verification_context: - HashMap>, - pub synced_headers_finality_info: HashMap< - TestTargetNumber, - Result>, TestClientError>, - >, -} - -impl Default for TestTargetClient { - fn default() -> Self { - Self { - num_reconnects: 0, - best_finalized_header_number: Arc::new(|| Ok(0)), - best_synced_header_hash: Default::default(), - finality_verification_context: Default::default(), - synced_headers_finality_info: Default::default(), - } - } -} - -#[async_trait] -impl RelayClient for TestTargetClient { - type Error = TestClientError; - - async fn reconnect(&mut self) -> Result<(), Self::Error> { - self.num_reconnects += 1; - - Ok(()) - } -} - -#[async_trait] -impl TargetClient for TestTargetClient { - async fn best_finalized_header_number(&self) -> Result { - (self.best_finalized_header_number)() - } - - async fn best_synced_header_hash( - &self, - at: TestTargetNumber, - ) -> Result, Self::Error> { - self.best_synced_header_hash - .get(&at) - .unwrap_or(&Err(TestClientError::NonConnection)) - .clone() - } - - async fn finality_verification_context( - &self, - at: TestTargetNumber, - ) -> Result { - self.finality_verification_context - .get(&at) - .unwrap_or(&Err(TestClientError::NonConnection)) - .clone() - } - - async fn synced_headers_finality_info( - &self, - at: TestTargetNumber, - ) -> Result>, 
Self::Error> { - self.synced_headers_finality_info - .get(&at) - .unwrap_or(&Err(TestClientError::NonConnection)) - .clone() - } -} - -pub fn new_header_finality_info( - source_hdr: TestSourceHashAndNumber, - check_following_equivocations: Option, -) -> HeaderFinalityInfo { - HeaderFinalityInfo:: { - finality_proof: TestFinalityProof(source_hdr, vec![]), - new_verification_context: check_following_equivocations.map( - |check_following_equivocations| TestFinalityVerificationContext { - check_equivocations: check_following_equivocations, - }, - ), - } -} diff --git a/relays/equivocation/src/reporter.rs b/relays/equivocation/src/reporter.rs deleted file mode 100644 index 9c4642383..000000000 --- a/relays/equivocation/src/reporter.rs +++ /dev/null @@ -1,129 +0,0 @@ -// Copyright 2019-2023 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Helper struct used for submitting finality reports and tracking their status. - -use crate::{EquivocationDetectionPipeline, SourceClient}; - -use futures::FutureExt; -use relay_utils::{TrackedTransactionFuture, TrackedTransactionStatus, TransactionTracker}; -use std::{ - future::poll_fn, - task::{Context, Poll}, -}; - -pub struct EquivocationsReporter<'a, P: EquivocationDetectionPipeline, SC: SourceClient

> { - pending_reports: Vec>, -} - -impl<'a, P: EquivocationDetectionPipeline, SC: SourceClient

> EquivocationsReporter<'a, P, SC> { - pub fn new() -> Self { - Self { pending_reports: vec![] } - } - - /// Submit a `report_equivocation()` transaction to the source chain. - /// - /// We store the transaction tracker for future monitoring. - pub async fn submit_report( - &mut self, - source_client: &SC, - at: P::Hash, - equivocation: P::EquivocationProof, - ) -> Result<(), SC::Error> { - let pending_report = source_client.report_equivocation(at, equivocation).await?; - self.pending_reports.push(pending_report.wait()); - - Ok(()) - } - - fn do_process_pending_reports(&mut self, cx: &mut Context<'_>) -> Poll<()> { - self.pending_reports.retain_mut(|pending_report| { - match pending_report.poll_unpin(cx) { - Poll::Ready(tx_status) => { - match tx_status { - TrackedTransactionStatus::Lost => { - log::error!(target: "bridge", "Equivocation report tx was lost"); - }, - TrackedTransactionStatus::Finalized(id) => { - log::error!(target: "bridge", "Equivocation report tx was finalized in source block {id:?}"); - }, - } - - // The future was processed. Drop it. - false - }, - Poll::Pending => { - // The future is still pending. Retain it. - true - }, - } - }); - - Poll::Ready(()) - } - - /// Iterate through all the pending `report_equivocation()` transactions - /// and log the ones that finished. 
- pub async fn process_pending_reports(&mut self) { - poll_fn(|cx| self.do_process_pending_reports(cx)).await - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::mock::*; - use relay_utils::HeaderId; - use std::sync::Mutex; - - #[async_std::test] - async fn process_pending_reports_works() { - let polled_reports = Mutex::new(vec![]); - let finished_reports = Mutex::new(vec![]); - - let mut reporter = - EquivocationsReporter:: { - pending_reports: vec![ - Box::pin(async { - polled_reports.lock().unwrap().push(1); - finished_reports.lock().unwrap().push(1); - TrackedTransactionStatus::Finalized(HeaderId(1, 1)) - }), - Box::pin(async { - polled_reports.lock().unwrap().push(2); - finished_reports.lock().unwrap().push(2); - TrackedTransactionStatus::Finalized(HeaderId(2, 2)) - }), - Box::pin(async { - polled_reports.lock().unwrap().push(3); - std::future::pending::<()>().await; - finished_reports.lock().unwrap().push(3); - TrackedTransactionStatus::Finalized(HeaderId(3, 3)) - }), - Box::pin(async { - polled_reports.lock().unwrap().push(4); - finished_reports.lock().unwrap().push(4); - TrackedTransactionStatus::Finalized(HeaderId(4, 4)) - }), - ], - }; - - reporter.process_pending_reports().await; - assert_eq!(*polled_reports.lock().unwrap(), vec![1, 2, 3, 4]); - assert_eq!(*finished_reports.lock().unwrap(), vec![1, 2, 4]); - assert_eq!(reporter.pending_reports.len(), 1); - } -} diff --git a/relays/finality/Cargo.toml b/relays/finality/Cargo.toml deleted file mode 100644 index 5ee4b10fa..000000000 --- a/relays/finality/Cargo.toml +++ /dev/null @@ -1,25 +0,0 @@ -[package] -name = "finality-relay" -version = "0.1.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true -description = "Finality proofs relay" -publish = false - -[lints] -workspace = true - -[dependencies] -async-std = "1.9.0" -async-trait = "0.1.79" -backoff = "0.4" -bp-header-chain = { path = 
"../../primitives/header-chain" } -futures = "0.3.30" -log = { workspace = true } -num-traits = "0.2" -relay-utils = { path = "../utils" } - -[dev-dependencies] -parking_lot = "0.12.1" diff --git a/relays/finality/README.md b/relays/finality/README.md deleted file mode 100644 index 92e765cea..000000000 --- a/relays/finality/README.md +++ /dev/null @@ -1,60 +0,0 @@ -# GRANDPA Finality Relay - -The finality relay is able to work with different finality engines. In the modern Substrate world they are GRANDPA -and BEEFY. Let's talk about GRANDPA here, because BEEFY relay and bridge BEEFY pallet are in development. - -In general, the relay works as follows: it connects to the source and target chain. The source chain must have the -[GRANDPA gadget](https://github.com/paritytech/finality-grandpa) running (so it can't be a parachain). The target -chain must have the [bridge GRANDPA pallet](../../modules/grandpa/) deployed at its runtime. The relay subscribes -to the GRANDPA finality notifications at the source chain and when the new justification is received, it is submitted -to the pallet at the target chain. - -Apart from that, the relay is watching for every source header that is missing at target. If it finds the missing -mandatory header (header that is changing the current GRANDPA validators set), it submits the justification for -this header. The case when the source node can't return the mandatory justification is considered a fatal error, -because the pallet can't proceed without it. - -More: [GRANDPA Finality Relay Sequence Diagram](../../docs/grandpa-finality-relay.html). - -## How to Use the Finality Relay - -The most important trait is the [`FinalitySyncPipeline`](./src/lib.rs), which defines the basic primitives of the -source chain (like block hash and number) and the type of finality proof (GRANDPA justification or MMR proof). 
Once -that is defined, there are two other traits - [`SourceClient`](./src/finality_loop.rs) and -[`TargetClient`](./src/finality_loop.rs). - -The `SourceClient` represents the Substrate node client that connects to the source chain. The client needs to -be able to return the best finalized header number, finalized header and its finality proof and the stream of -finality proofs. - -The `TargetClient` implementation must be able to craft finality delivery transaction and submit it to the target -node. The transaction is then tracked by the relay until it is mined and finalized. - -The main entrypoint for the crate is the [`run` function](./src/finality_loop.rs), which takes source and target -clients and [`FinalitySyncParams`](./src/finality_loop.rs) parameters. The most important parameter is the -`only_mandatory_headers` - it is set to `true`, the relay will only submit mandatory headers. Since transactions -with mandatory headers are fee-free, the cost of running such relay is zero (in terms of fees). - -## Finality Relay Metrics - -Finality relay provides several metrics. Metrics names depend on names of source and target chains. The list below -shows metrics names for Rococo (source chain) to BridgeHubWestend (target chain) finality relay. For other -chains, simply change chain names. So the metrics are: - -- `Rococo_to_BridgeHubWestend_Sync_best_source_block_number` - returns best finalized source chain (Rococo) block - number, known to the relay. - If relay is running in [on-demand mode](../bin-substrate/src/cli/relay_headers_and_messages/), the - number may not match (it may be far behind) the actual best finalized number; - -- `Rococo_to_BridgeHubWestend_Sync_best_source_at_target_block_number` - returns best finalized source chain (Rococo) - block number that is known to the bridge GRANDPA pallet at the target chain. 
- -- `Rococo_to_BridgeHubWestend_Sync_is_source_and_source_at_target_using_different_forks` - if this metrics is set - to `1`, then the best source chain header known to the target chain doesn't match the same-number-header - at the source chain. It means that the GRANDPA validators set has crafted the duplicate justification - and it has been submitted to the target chain. - Normally (if majority of validators are honest and if you're running finality relay without large breaks) - this shall not happen and the metric will have `0` value. - -If relay operates properly, you should see that the `Rococo_to_BridgeHubWestend_Sync_best_source_at_target_block_number` -tries to reach the `Rococo_to_BridgeHubWestend_Sync_best_source_block_number`. And the latter one always increases. diff --git a/relays/finality/src/base.rs b/relays/finality/src/base.rs deleted file mode 100644 index 4253468ea..000000000 --- a/relays/finality/src/base.rs +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2019-2023 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -use async_trait::async_trait; -use bp_header_chain::FinalityProof; -use futures::Stream; -use relay_utils::relay_loop::Client as RelayClient; -use std::fmt::Debug; - -/// Base finality pipeline. 
-pub trait FinalityPipeline: 'static + Clone + Debug + Send + Sync { - /// Name of the finality proofs source. - const SOURCE_NAME: &'static str; - /// Name of the finality proofs target. - const TARGET_NAME: &'static str; - - /// Synced headers are identified by this hash. - type Hash: Eq + Clone + Copy + Send + Sync + Debug; - /// Synced headers are identified by this number. - type Number: relay_utils::BlockNumberBase; - /// Finality proof type. - type FinalityProof: FinalityProof; -} - -/// Source client used in finality related loops. -#[async_trait] -pub trait SourceClientBase: RelayClient { - /// Stream of new finality proofs. The stream is allowed to miss proofs for some - /// headers, even if those headers are mandatory. - type FinalityProofsStream: Stream + Send + Unpin; - - /// Subscribe to new finality proofs. - async fn finality_proofs(&self) -> Result; -} - -/// Target client used in finality related loops. -#[async_trait] -pub trait TargetClientBase: RelayClient {} diff --git a/relays/finality/src/finality_loop.rs b/relays/finality/src/finality_loop.rs deleted file mode 100644 index e31d8a708..000000000 --- a/relays/finality/src/finality_loop.rs +++ /dev/null @@ -1,698 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! 
The loop basically reads all missing headers and their finality proofs from the source client. -//! The proof for the best possible header is then submitted to the target node. The only exception -//! is the mandatory headers, which we always submit to the target node. For such headers, we -//! assume that the persistent proof either exists, or will eventually become available. - -use crate::{sync_loop_metrics::SyncLoopMetrics, Error, FinalitySyncPipeline, SourceHeader}; - -use crate::{ - base::SourceClientBase, - finality_proofs::{FinalityProofsBuf, FinalityProofsStream}, - headers::{JustifiedHeader, JustifiedHeaderSelector}, -}; -use async_trait::async_trait; -use backoff::{backoff::Backoff, ExponentialBackoff}; -use futures::{future::Fuse, select, Future, FutureExt}; -use num_traits::Saturating; -use relay_utils::{ - metrics::MetricsParams, relay_loop::Client as RelayClient, retry_backoff, FailedClient, - HeaderId, MaybeConnectionError, TrackedTransactionStatus, TransactionTracker, -}; -use std::{ - fmt::Debug, - time::{Duration, Instant}, -}; - -/// Finality proof synchronization loop parameters. -#[derive(Debug, Clone)] -pub struct FinalitySyncParams { - /// Interval at which we check updates on both clients. Normally should be larger than - /// `min(source_block_time, target_block_time)`. - /// - /// This parameter may be used to limit transactions rate. Increase the value && you'll get - /// infrequent updates => sparse headers => potential slow down of bridge applications, but - /// pallet storage won't be super large. Decrease the value to near `source_block_time` and - /// you'll get transaction for (almost) every block of the source chain => all source headers - /// will be known to the target chain => bridge applications will run faster, but pallet - /// storage may explode (but if pruning is there, then it's fine). - pub tick: Duration, - /// Number of finality proofs to keep in internal buffer between loop iterations. 
- /// - /// While in "major syncing" state, we still read finality proofs from the stream. They're - /// stored in the internal buffer between loop iterations. When we're close to the tip of the - /// chain, we may meet finality delays if headers are not finalized frequently. So instead of - /// waiting for next finality proof to appear in the stream, we may use existing proof from - /// that buffer. - pub recent_finality_proofs_limit: usize, - /// Timeout before we treat our transactions as lost and restart the whole sync process. - pub stall_timeout: Duration, - /// If true, only mandatory headers are relayed. - pub only_mandatory_headers: bool, -} - -/// Source client used in finality synchronization loop. -#[async_trait] -pub trait SourceClient: SourceClientBase

{ - /// Get best finalized block number. - async fn best_finalized_block_number(&self) -> Result; - - /// Get canonical header and its finality proof by number. - async fn header_and_finality_proof( - &self, - number: P::Number, - ) -> Result<(P::Header, Option), Self::Error>; -} - -/// Target client used in finality synchronization loop. -#[async_trait] -pub trait TargetClient: RelayClient { - /// Transaction tracker to track submitted transactions. - type TransactionTracker: TransactionTracker; - - /// Get best finalized source block number. - async fn best_finalized_source_block_id( - &self, - ) -> Result, Self::Error>; - - /// Submit header finality proof. - async fn submit_finality_proof( - &self, - header: P::Header, - proof: P::FinalityProof, - ) -> Result; -} - -/// Return prefix that will be used by default to expose Prometheus metrics of the finality proofs -/// sync loop. -pub fn metrics_prefix() -> String { - format!("{}_to_{}_Sync", P::SOURCE_NAME, P::TARGET_NAME) -} - -pub struct SyncInfo { - pub best_number_at_source: P::Number, - pub best_number_at_target: P::Number, - pub is_using_same_fork: bool, -} - -impl SyncInfo

{ - /// Checks if both clients are on the same fork. - async fn is_on_same_fork>( - source_client: &SC, - id_at_target: &HeaderId, - ) -> Result { - let header_at_source = source_client.header_and_finality_proof(id_at_target.0).await?.0; - let header_hash_at_source = header_at_source.hash(); - Ok(if id_at_target.1 == header_hash_at_source { - true - } else { - log::error!( - target: "bridge", - "Source node ({}) and pallet at target node ({}) have different headers at the same height {:?}: \ - at-source {:?} vs at-target {:?}", - P::SOURCE_NAME, - P::TARGET_NAME, - id_at_target.0, - header_hash_at_source, - id_at_target.1, - ); - - false - }) - } - - async fn new, TC: TargetClient

>( - source_client: &SC, - target_client: &TC, - ) -> Result> { - let best_number_at_source = - source_client.best_finalized_block_number().await.map_err(Error::Source)?; - let best_id_at_target = - target_client.best_finalized_source_block_id().await.map_err(Error::Target)?; - let best_number_at_target = best_id_at_target.0; - - let is_using_same_fork = Self::is_on_same_fork(source_client, &best_id_at_target) - .await - .map_err(Error::Source)?; - - Ok(Self { best_number_at_source, best_number_at_target, is_using_same_fork }) - } - - fn update_metrics(&self, metrics_sync: &Option) { - if let Some(metrics_sync) = metrics_sync { - metrics_sync.update_best_block_at_source(self.best_number_at_source); - metrics_sync.update_best_block_at_target(self.best_number_at_target); - metrics_sync.update_using_same_fork(self.is_using_same_fork); - } - } - - pub fn num_headers(&self) -> P::Number { - self.best_number_at_source.saturating_sub(self.best_number_at_target) - } -} - -/// Information about transaction that we have submitted. -#[derive(Debug, Clone)] -pub struct Transaction { - /// Submitted transaction tracker. - tracker: Tracker, - /// The number of the header we have submitted. - header_number: Number, -} - -impl Transaction { - pub async fn submit< - P: FinalitySyncPipeline, - TC: TargetClient, - >( - target_client: &TC, - header: P::Header, - justification: P::FinalityProof, - ) -> Result { - let header_number = header.number(); - log::debug!( - target: "bridge", - "Going to submit finality proof of {} header #{:?} to {}", - P::SOURCE_NAME, - header_number, - P::TARGET_NAME, - ); - - let tracker = target_client.submit_finality_proof(header, justification).await?; - Ok(Transaction { tracker, header_number }) - } - - async fn track< - P: FinalitySyncPipeline, - SC: SourceClient

, - TC: TargetClient

, - >( - self, - target_client: TC, - ) -> Result<(), Error> { - match self.tracker.wait().await { - TrackedTransactionStatus::Finalized(_) => { - // The transaction has been finalized, but it may have been finalized in the - // "failed" state. So let's check if the block number was actually updated. - target_client - .best_finalized_source_block_id() - .await - .map_err(Error::Target) - .and_then(|best_id_at_target| { - if self.header_number > best_id_at_target.0 { - return Err(Error::ProofSubmissionTxFailed { - submitted_number: self.header_number, - best_number_at_target: best_id_at_target.0, - }) - } - Ok(()) - }) - }, - TrackedTransactionStatus::Lost => Err(Error::ProofSubmissionTxLost), - } - } -} - -/// Finality synchronization loop state. -struct FinalityLoop, TC: TargetClient

> { - source_client: SC, - target_client: TC, - - sync_params: FinalitySyncParams, - metrics_sync: Option, - - progress: (Instant, Option), - retry_backoff: ExponentialBackoff, - finality_proofs_stream: FinalityProofsStream, - finality_proofs_buf: FinalityProofsBuf

, - best_submitted_number: Option, -} - -impl, TC: TargetClient

> FinalityLoop { - pub fn new( - source_client: SC, - target_client: TC, - sync_params: FinalitySyncParams, - metrics_sync: Option, - ) -> Self { - Self { - source_client, - target_client, - sync_params, - metrics_sync, - progress: (Instant::now(), None), - retry_backoff: retry_backoff(), - finality_proofs_stream: FinalityProofsStream::new(), - finality_proofs_buf: FinalityProofsBuf::new(vec![]), - best_submitted_number: None, - } - } - - fn update_progress(&mut self, info: &SyncInfo

) { - let (prev_time, prev_best_number_at_target) = self.progress; - let now = Instant::now(); - - let needs_update = now - prev_time > Duration::from_secs(10) || - prev_best_number_at_target - .map(|prev_best_number_at_target| { - info.best_number_at_target.saturating_sub(prev_best_number_at_target) > - 10.into() - }) - .unwrap_or(true); - - if !needs_update { - return - } - - log::info!( - target: "bridge", - "Synced {:?} of {:?} headers", - info.best_number_at_target, - info.best_number_at_source, - ); - - self.progress = (now, Some(info.best_number_at_target)) - } - - pub async fn select_header_to_submit( - &mut self, - info: &SyncInfo

, - ) -> Result>, Error> { - // to see that the loop is progressing - log::trace!( - target: "bridge", - "Considering range of headers ({}; {}]", - info.best_number_at_target, - info.best_number_at_source - ); - - // read missing headers - let selector = JustifiedHeaderSelector::new::(&self.source_client, info).await?; - // if we see that the header schedules GRANDPA change, we need to submit it - if self.sync_params.only_mandatory_headers { - return Ok(selector.select_mandatory()) - } - - // all headers that are missing from the target client are non-mandatory - // => even if we have already selected some header and its persistent finality proof, - // we may try to select better header by reading non-persistent proofs from the stream - self.finality_proofs_buf.fill(&mut self.finality_proofs_stream); - let maybe_justified_header = selector.select(&self.finality_proofs_buf); - - // remove obsolete 'recent' finality proofs + keep its size under certain limit - let oldest_finality_proof_to_keep = maybe_justified_header - .as_ref() - .map(|justified_header| justified_header.number()) - .unwrap_or(info.best_number_at_target); - self.finality_proofs_buf.prune( - oldest_finality_proof_to_keep, - Some(self.sync_params.recent_finality_proofs_limit), - ); - - Ok(maybe_justified_header) - } - - pub async fn run_iteration( - &mut self, - ) -> Result< - Option>, - Error, - > { - // read best source headers ids from source and target nodes - let info = SyncInfo::new(&self.source_client, &self.target_client).await?; - info.update_metrics(&self.metrics_sync); - self.update_progress(&info); - - // if we have already submitted header, then we just need to wait for it - // if we're waiting too much, then we believe our transaction has been lost and restart sync - if Some(info.best_number_at_target) < self.best_submitted_number { - return Ok(None) - } - - // submit new header if we have something new - match self.select_header_to_submit(&info).await? 
{ - Some(header) => { - let transaction = - Transaction::submit(&self.target_client, header.header, header.proof) - .await - .map_err(Error::Target)?; - self.best_submitted_number = Some(transaction.header_number); - Ok(Some(transaction)) - }, - None => Ok(None), - } - } - - async fn ensure_finality_proofs_stream(&mut self) -> Result<(), FailedClient> { - if let Err(e) = self.finality_proofs_stream.ensure_stream(&self.source_client).await { - if e.is_connection_error() { - return Err(FailedClient::Source) - } - } - - Ok(()) - } - - /// Run finality relay loop until connection to one of nodes is lost. - async fn run_until_connection_lost( - &mut self, - exit_signal: impl Future, - ) -> Result<(), FailedClient> { - self.ensure_finality_proofs_stream().await?; - let proof_submission_tx_tracker = Fuse::terminated(); - let exit_signal = exit_signal.fuse(); - futures::pin_mut!(exit_signal, proof_submission_tx_tracker); - - loop { - // run loop iteration - let next_tick = match self.run_iteration().await { - Ok(Some(tx)) => { - proof_submission_tx_tracker - .set(tx.track::(self.target_client.clone()).fuse()); - self.retry_backoff.reset(); - self.sync_params.tick - }, - Ok(None) => { - self.retry_backoff.reset(); - self.sync_params.tick - }, - Err(error) => { - log::error!(target: "bridge", "Finality sync loop iteration has failed with error: {:?}", error); - error.fail_if_connection_error()?; - self.retry_backoff - .next_backoff() - .unwrap_or(relay_utils::relay_loop::RECONNECT_DELAY) - }, - }; - self.ensure_finality_proofs_stream().await?; - - // wait till exit signal, or new source block - select! 
{ - proof_submission_result = proof_submission_tx_tracker => { - if let Err(e) = proof_submission_result { - log::error!( - target: "bridge", - "Finality sync proof submission tx to {} has failed with error: {:?}.", - P::TARGET_NAME, - e, - ); - self.best_submitted_number = None; - e.fail_if_connection_error()?; - } - }, - _ = async_std::task::sleep(next_tick).fuse() => {}, - _ = exit_signal => return Ok(()), - } - } - } - - pub async fn run( - source_client: SC, - target_client: TC, - sync_params: FinalitySyncParams, - metrics_sync: Option, - exit_signal: impl Future, - ) -> Result<(), FailedClient> { - let mut finality_loop = Self::new(source_client, target_client, sync_params, metrics_sync); - finality_loop.run_until_connection_lost(exit_signal).await - } -} - -/// Run finality proofs synchronization loop. -pub async fn run( - source_client: impl SourceClient

, - target_client: impl TargetClient

, - sync_params: FinalitySyncParams, - metrics_params: MetricsParams, - exit_signal: impl Future + 'static + Send, -) -> Result<(), relay_utils::Error> { - let exit_signal = exit_signal.shared(); - relay_utils::relay_loop(source_client, target_client) - .with_metrics(metrics_params) - .loop_metric(SyncLoopMetrics::new( - Some(&metrics_prefix::

()), - "source", - "source_at_target", - )?)? - .expose() - .await? - .run(metrics_prefix::

(), move |source_client, target_client, metrics| { - FinalityLoop::run( - source_client, - target_client, - sync_params.clone(), - metrics, - exit_signal.clone(), - ) - }) - .await -} - -#[cfg(test)] -mod tests { - use super::*; - - use crate::mock::*; - use futures::{FutureExt, StreamExt}; - use parking_lot::Mutex; - use relay_utils::{FailedClient, HeaderId, TrackedTransactionStatus}; - use std::{collections::HashMap, sync::Arc}; - - fn prepare_test_clients( - exit_sender: futures::channel::mpsc::UnboundedSender<()>, - state_function: impl Fn(&mut ClientsData) -> bool + Send + Sync + 'static, - source_headers: HashMap)>, - ) -> (TestSourceClient, TestTargetClient) { - let internal_state_function: Arc = - Arc::new(move |data| { - if state_function(data) { - exit_sender.unbounded_send(()).unwrap(); - } - }); - let clients_data = Arc::new(Mutex::new(ClientsData { - source_best_block_number: 10, - source_headers, - source_proofs: vec![TestFinalityProof(12), TestFinalityProof(14)], - - target_best_block_id: HeaderId(5, 5), - target_headers: vec![], - target_transaction_tracker: TestTransactionTracker( - TrackedTransactionStatus::Finalized(Default::default()), - ), - })); - ( - TestSourceClient { - on_method_call: internal_state_function.clone(), - data: clients_data.clone(), - }, - TestTargetClient { on_method_call: internal_state_function, data: clients_data }, - ) - } - - fn test_sync_params() -> FinalitySyncParams { - FinalitySyncParams { - tick: Duration::from_secs(0), - recent_finality_proofs_limit: 1024, - stall_timeout: Duration::from_secs(1), - only_mandatory_headers: false, - } - } - - fn run_sync_loop( - state_function: impl Fn(&mut ClientsData) -> bool + Send + Sync + 'static, - ) -> (ClientsData, Result<(), FailedClient>) { - let (exit_sender, exit_receiver) = futures::channel::mpsc::unbounded(); - let (source_client, target_client) = prepare_test_clients( - exit_sender, - state_function, - vec![ - (5, (TestSourceHeader(false, 5, 5), None)), - (6, 
(TestSourceHeader(false, 6, 6), None)), - (7, (TestSourceHeader(false, 7, 7), Some(TestFinalityProof(7)))), - (8, (TestSourceHeader(true, 8, 8), Some(TestFinalityProof(8)))), - (9, (TestSourceHeader(false, 9, 9), Some(TestFinalityProof(9)))), - (10, (TestSourceHeader(false, 10, 10), None)), - ] - .into_iter() - .collect(), - ); - let sync_params = test_sync_params(); - - let clients_data = source_client.data.clone(); - let result = async_std::task::block_on(FinalityLoop::run( - source_client, - target_client, - sync_params, - None, - exit_receiver.into_future().map(|(_, _)| ()), - )); - - let clients_data = clients_data.lock().clone(); - (clients_data, result) - } - - #[test] - fn finality_sync_loop_works() { - let (client_data, result) = run_sync_loop(|data| { - // header#7 has persistent finality proof, but it isn't mandatory => it isn't submitted, - // because header#8 has persistent finality proof && it is mandatory => it is submitted - // header#9 has persistent finality proof, but it isn't mandatory => it is submitted, - // because there are no more persistent finality proofs - // - // once this ^^^ is done, we generate more blocks && read proof for blocks 12 and 14 - // from the stream - if data.target_best_block_id.0 == 9 { - data.source_best_block_number = 14; - data.source_headers.insert(11, (TestSourceHeader(false, 11, 11), None)); - data.source_headers - .insert(12, (TestSourceHeader(false, 12, 12), Some(TestFinalityProof(12)))); - data.source_headers.insert(13, (TestSourceHeader(false, 13, 13), None)); - data.source_headers - .insert(14, (TestSourceHeader(false, 14, 14), Some(TestFinalityProof(14)))); - } - // once this ^^^ is done, we generate more blocks && read persistent proof for block 16 - if data.target_best_block_id.0 == 14 { - data.source_best_block_number = 17; - data.source_headers.insert(15, (TestSourceHeader(false, 15, 15), None)); - data.source_headers - .insert(16, (TestSourceHeader(false, 16, 16), Some(TestFinalityProof(16)))); - 
data.source_headers.insert(17, (TestSourceHeader(false, 17, 17), None)); - } - - data.target_best_block_id.0 == 16 - }); - - assert_eq!(result, Ok(())); - assert_eq!( - client_data.target_headers, - vec![ - // before adding 11..14: finality proof for mandatory header#8 - (TestSourceHeader(true, 8, 8), TestFinalityProof(8)), - // before adding 11..14: persistent finality proof for non-mandatory header#9 - (TestSourceHeader(false, 9, 9), TestFinalityProof(9)), - // after adding 11..14: ephemeral finality proof for non-mandatory header#14 - (TestSourceHeader(false, 14, 14), TestFinalityProof(14)), - // after adding 15..17: persistent finality proof for non-mandatory header#16 - (TestSourceHeader(false, 16, 16), TestFinalityProof(16)), - ], - ); - } - - fn run_only_mandatory_headers_mode_test( - only_mandatory_headers: bool, - has_mandatory_headers: bool, - ) -> Option> { - let (exit_sender, _) = futures::channel::mpsc::unbounded(); - let (source_client, target_client) = prepare_test_clients( - exit_sender, - |_| false, - vec![ - (6, (TestSourceHeader(false, 6, 6), Some(TestFinalityProof(6)))), - (7, (TestSourceHeader(false, 7, 7), Some(TestFinalityProof(7)))), - (8, (TestSourceHeader(has_mandatory_headers, 8, 8), Some(TestFinalityProof(8)))), - (9, (TestSourceHeader(false, 9, 9), Some(TestFinalityProof(9)))), - (10, (TestSourceHeader(false, 10, 10), Some(TestFinalityProof(10)))), - ] - .into_iter() - .collect(), - ); - async_std::task::block_on(async { - let mut finality_loop = FinalityLoop::new( - source_client, - target_client, - FinalitySyncParams { - tick: Duration::from_secs(0), - recent_finality_proofs_limit: 0, - stall_timeout: Duration::from_secs(0), - only_mandatory_headers, - }, - None, - ); - let info = SyncInfo { - best_number_at_source: 10, - best_number_at_target: 5, - is_using_same_fork: true, - }; - finality_loop.select_header_to_submit(&info).await.unwrap() - }) - } - - #[test] - fn 
select_header_to_submit_skips_non_mandatory_headers_when_only_mandatory_headers_are_required( - ) { - assert_eq!(run_only_mandatory_headers_mode_test(true, false), None); - assert_eq!( - run_only_mandatory_headers_mode_test(false, false), - Some(JustifiedHeader { - header: TestSourceHeader(false, 10, 10), - proof: TestFinalityProof(10) - }), - ); - } - - #[test] - fn select_header_to_submit_selects_mandatory_headers_when_only_mandatory_headers_are_required() - { - assert_eq!( - run_only_mandatory_headers_mode_test(true, true), - Some(JustifiedHeader { - header: TestSourceHeader(true, 8, 8), - proof: TestFinalityProof(8) - }), - ); - assert_eq!( - run_only_mandatory_headers_mode_test(false, true), - Some(JustifiedHeader { - header: TestSourceHeader(true, 8, 8), - proof: TestFinalityProof(8) - }), - ); - } - - #[test] - fn different_forks_at_source_and_at_target_are_detected() { - let (exit_sender, _exit_receiver) = futures::channel::mpsc::unbounded(); - let (source_client, target_client) = prepare_test_clients( - exit_sender, - |_| false, - vec![ - (5, (TestSourceHeader(false, 5, 42), None)), - (6, (TestSourceHeader(false, 6, 6), None)), - (7, (TestSourceHeader(false, 7, 7), None)), - (8, (TestSourceHeader(false, 8, 8), None)), - (9, (TestSourceHeader(false, 9, 9), None)), - (10, (TestSourceHeader(false, 10, 10), None)), - ] - .into_iter() - .collect(), - ); - - let metrics_sync = SyncLoopMetrics::new(None, "source", "target").unwrap(); - async_std::task::block_on(async { - let mut finality_loop = FinalityLoop::new( - source_client, - target_client, - test_sync_params(), - Some(metrics_sync.clone()), - ); - finality_loop.run_iteration().await.unwrap() - }); - - assert!(!metrics_sync.is_using_same_fork()); - } -} diff --git a/relays/finality/src/finality_proofs.rs b/relays/finality/src/finality_proofs.rs deleted file mode 100644 index e78cf8d62..000000000 --- a/relays/finality/src/finality_proofs.rs +++ /dev/null @@ -1,222 +0,0 @@ -// Copyright 2019-2023 Parity 
Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -use crate::{base::SourceClientBase, FinalityPipeline}; - -use bp_header_chain::FinalityProof; -use futures::{FutureExt, Stream, StreamExt}; -use std::pin::Pin; - -/// Source finality proofs stream that may be restarted. -#[derive(Default)] -pub struct FinalityProofsStream> { - /// The underlying stream. 
- stream: Option>>, -} - -impl> FinalityProofsStream { - pub fn new() -> Self { - Self { stream: None } - } - - pub fn from_stream(stream: SC::FinalityProofsStream) -> Self { - Self { stream: Some(Box::pin(stream)) } - } - - fn next(&mut self) -> Option<::Item> { - let stream = match &mut self.stream { - Some(stream) => stream, - None => return None, - }; - - match stream.next().now_or_never() { - Some(Some(finality_proof)) => Some(finality_proof), - Some(None) => { - self.stream = None; - None - }, - None => None, - } - } - - pub async fn ensure_stream(&mut self, source_client: &SC) -> Result<(), SC::Error> { - if self.stream.is_none() { - log::warn!(target: "bridge", "{} finality proofs stream is being started / restarted", - P::SOURCE_NAME); - - let stream = source_client.finality_proofs().await.map_err(|error| { - log::error!( - target: "bridge", - "Failed to subscribe to {} justifications: {:?}", - P::SOURCE_NAME, - error, - ); - - error - })?; - self.stream = Some(Box::pin(stream)); - } - - Ok(()) - } -} - -/// Source finality proofs buffer. -pub struct FinalityProofsBuf { - /// Proofs buffer. Ordered by target header number. - buf: Vec, -} - -impl FinalityProofsBuf

{ - pub fn new(buf: Vec) -> Self { - Self { buf } - } - - pub fn buf(&self) -> &Vec { - &self.buf - } - - pub fn fill>(&mut self, stream: &mut FinalityProofsStream) { - let mut proofs_count = 0; - let mut first_header_number = None; - let mut last_header_number = None; - while let Some(finality_proof) = stream.next() { - let target_header_number = finality_proof.target_header_number(); - first_header_number.get_or_insert(target_header_number); - last_header_number = Some(target_header_number); - proofs_count += 1; - - self.buf.push(finality_proof); - } - - if proofs_count != 0 { - log::trace!( - target: "bridge", - "Read {} finality proofs from {} finality stream for headers in range [{:?}; {:?}]", - proofs_count, - P::SOURCE_NAME, - first_header_number, - last_header_number, - ); - } - } - - /// Prune all finality proofs that target header numbers older than `first_to_keep`. - pub fn prune(&mut self, first_to_keep: P::Number, maybe_buf_limit: Option) { - let first_to_keep_idx = self - .buf - .binary_search_by_key(&first_to_keep, |hdr| hdr.target_header_number()) - .map(|idx| idx + 1) - .unwrap_or_else(|idx| idx); - let buf_limit_idx = match maybe_buf_limit { - Some(buf_limit) => self.buf.len().saturating_sub(buf_limit), - None => 0, - }; - - self.buf = self.buf.split_off(std::cmp::max(first_to_keep_idx, buf_limit_idx)); - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::mock::*; - - #[test] - fn finality_proofs_buf_fill_works() { - // when stream is currently empty, nothing is changed - let mut finality_proofs_buf = - FinalityProofsBuf:: { buf: vec![TestFinalityProof(1)] }; - let mut stream = - FinalityProofsStream::::from_stream( - Box::pin(futures::stream::pending()), - ); - finality_proofs_buf.fill(&mut stream); - assert_eq!(finality_proofs_buf.buf, vec![TestFinalityProof(1)]); - assert!(stream.stream.is_some()); - - // when stream has entry with target, it is added to the recent proofs container - let mut stream = - 
FinalityProofsStream::::from_stream( - Box::pin( - futures::stream::iter(vec![TestFinalityProof(4)]) - .chain(futures::stream::pending()), - ), - ); - finality_proofs_buf.fill(&mut stream); - assert_eq!(finality_proofs_buf.buf, vec![TestFinalityProof(1), TestFinalityProof(4)]); - assert!(stream.stream.is_some()); - - // when stream has ended, we'll need to restart it - let mut stream = - FinalityProofsStream::::from_stream( - Box::pin(futures::stream::empty()), - ); - finality_proofs_buf.fill(&mut stream); - assert_eq!(finality_proofs_buf.buf, vec![TestFinalityProof(1), TestFinalityProof(4)]); - assert!(stream.stream.is_none()); - } - - #[test] - fn finality_proofs_buf_prune_works() { - let original_finality_proofs_buf: Vec< - ::FinalityProof, - > = vec![ - TestFinalityProof(10), - TestFinalityProof(13), - TestFinalityProof(15), - TestFinalityProof(17), - TestFinalityProof(19), - ] - .into_iter() - .collect(); - - // when there's proof for justified header in the vec - let mut finality_proofs_buf = FinalityProofsBuf:: { - buf: original_finality_proofs_buf.clone(), - }; - finality_proofs_buf.prune(10, None); - assert_eq!(&original_finality_proofs_buf[1..], finality_proofs_buf.buf,); - - // when there are no proof for justified header in the vec - let mut finality_proofs_buf = FinalityProofsBuf:: { - buf: original_finality_proofs_buf.clone(), - }; - finality_proofs_buf.prune(11, None); - assert_eq!(&original_finality_proofs_buf[1..], finality_proofs_buf.buf,); - - // when there are too many entries after initial prune && they also need to be pruned - let mut finality_proofs_buf = FinalityProofsBuf:: { - buf: original_finality_proofs_buf.clone(), - }; - finality_proofs_buf.prune(10, Some(2)); - assert_eq!(&original_finality_proofs_buf[3..], finality_proofs_buf.buf,); - - // when last entry is pruned - let mut finality_proofs_buf = FinalityProofsBuf:: { - buf: original_finality_proofs_buf.clone(), - }; - finality_proofs_buf.prune(19, Some(2)); - 
assert_eq!(&original_finality_proofs_buf[5..], finality_proofs_buf.buf,); - - // when post-last entry is pruned - let mut finality_proofs_buf = FinalityProofsBuf:: { - buf: original_finality_proofs_buf.clone(), - }; - finality_proofs_buf.prune(20, Some(2)); - assert_eq!(&original_finality_proofs_buf[5..], finality_proofs_buf.buf,); - } -} diff --git a/relays/finality/src/headers.rs b/relays/finality/src/headers.rs deleted file mode 100644 index 91f7cd037..000000000 --- a/relays/finality/src/headers.rs +++ /dev/null @@ -1,238 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -use crate::{ - finality_loop::SyncInfo, finality_proofs::FinalityProofsBuf, Error, FinalitySyncPipeline, - SourceClient, SourceHeader, TargetClient, -}; - -use bp_header_chain::FinalityProof; -use std::cmp::Ordering; - -/// Unjustified headers container. Ordered by header number. -pub type UnjustifiedHeaders = Vec; - -#[derive(Debug)] -#[cfg_attr(test, derive(Clone, PartialEq))] -pub struct JustifiedHeader { - pub header: P::Header, - pub proof: P::FinalityProof, -} - -impl JustifiedHeader

{ - pub fn number(&self) -> P::Number { - self.header.number() - } -} - -/// Finality proof that has been selected by the `read_missing_headers` function. -pub enum JustifiedHeaderSelector { - /// Mandatory header and its proof has been selected. We shall submit proof for this header. - Mandatory(JustifiedHeader

), - /// Regular header and its proof has been selected. We may submit this proof, or proof for - /// some better header. - Regular(UnjustifiedHeaders, JustifiedHeader

), - /// We haven't found any missing header with persistent proof at the target client. - None(UnjustifiedHeaders), -} - -impl JustifiedHeaderSelector

{ - pub(crate) async fn new, TC: TargetClient

>( - source_client: &SC, - info: &SyncInfo

, - ) -> Result> { - let mut unjustified_headers = Vec::new(); - let mut maybe_justified_header = None; - - let mut header_number = info.best_number_at_target + 1.into(); - while header_number <= info.best_number_at_source { - let (header, maybe_proof) = source_client - .header_and_finality_proof(header_number) - .await - .map_err(Error::Source)?; - - match (header.is_mandatory(), maybe_proof) { - (true, Some(proof)) => { - log::trace!(target: "bridge", "Header {:?} is mandatory", header_number); - return Ok(Self::Mandatory(JustifiedHeader { header, proof })) - }, - (true, None) => return Err(Error::MissingMandatoryFinalityProof(header.number())), - (false, Some(proof)) => { - log::trace!(target: "bridge", "Header {:?} has persistent finality proof", header_number); - unjustified_headers.clear(); - maybe_justified_header = Some(JustifiedHeader { header, proof }); - }, - (false, None) => { - unjustified_headers.push(header); - }, - } - - header_number = header_number + 1.into(); - } - - log::trace!( - target: "bridge", - "Read {} {} headers. Selected finality proof for header: {:?}", - info.num_headers(), - P::SOURCE_NAME, - maybe_justified_header.as_ref().map(|justified_header| &justified_header.header), - ); - - Ok(match maybe_justified_header { - Some(justified_header) => Self::Regular(unjustified_headers, justified_header), - None => Self::None(unjustified_headers), - }) - } - - pub fn select_mandatory(self) -> Option> { - match self { - JustifiedHeaderSelector::Mandatory(header) => Some(header), - _ => None, - } - } - - pub fn select(self, buf: &FinalityProofsBuf

) -> Option> { - let (unjustified_headers, maybe_justified_header) = match self { - JustifiedHeaderSelector::Mandatory(justified_header) => return Some(justified_header), - JustifiedHeaderSelector::Regular(unjustified_headers, justified_header) => - (unjustified_headers, Some(justified_header)), - JustifiedHeaderSelector::None(unjustified_headers) => (unjustified_headers, None), - }; - - let mut finality_proofs_iter = buf.buf().iter().rev(); - let mut maybe_finality_proof = finality_proofs_iter.next(); - - let mut unjustified_headers_iter = unjustified_headers.iter().rev(); - let mut maybe_unjustified_header = unjustified_headers_iter.next(); - - while let (Some(finality_proof), Some(unjustified_header)) = - (maybe_finality_proof, maybe_unjustified_header) - { - match finality_proof.target_header_number().cmp(&unjustified_header.number()) { - Ordering::Equal => { - log::trace!( - target: "bridge", - "Managed to improve selected {} finality proof {:?} to {:?}.", - P::SOURCE_NAME, - maybe_justified_header.as_ref().map(|justified_header| justified_header.number()), - finality_proof.target_header_number() - ); - return Some(JustifiedHeader { - header: unjustified_header.clone(), - proof: finality_proof.clone(), - }) - }, - Ordering::Less => maybe_unjustified_header = unjustified_headers_iter.next(), - Ordering::Greater => { - maybe_finality_proof = finality_proofs_iter.next(); - }, - } - } - - log::trace!( - target: "bridge", - "Could not improve selected {} finality proof {:?}.", - P::SOURCE_NAME, - maybe_justified_header.as_ref().map(|justified_header| justified_header.number()) - ); - maybe_justified_header - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::mock::*; - - #[test] - fn select_better_recent_finality_proof_works() { - // if there are no unjustified headers, nothing is changed - let finality_proofs_buf = - FinalityProofsBuf::::new(vec![TestFinalityProof(5)]); - let justified_header = - JustifiedHeader { header: TestSourceHeader(false, 2, 2), 
proof: TestFinalityProof(2) }; - let selector = JustifiedHeaderSelector::Regular(vec![], justified_header.clone()); - assert_eq!(selector.select(&finality_proofs_buf), Some(justified_header)); - - // if there are no buffered finality proofs, nothing is changed - let finality_proofs_buf = FinalityProofsBuf::::new(vec![]); - let justified_header = - JustifiedHeader { header: TestSourceHeader(false, 2, 2), proof: TestFinalityProof(2) }; - let selector = JustifiedHeaderSelector::Regular( - vec![TestSourceHeader(false, 5, 5)], - justified_header.clone(), - ); - assert_eq!(selector.select(&finality_proofs_buf), Some(justified_header)); - - // if there's no intersection between recent finality proofs and unjustified headers, - // nothing is changed - let finality_proofs_buf = FinalityProofsBuf::::new(vec![ - TestFinalityProof(1), - TestFinalityProof(4), - ]); - let justified_header = - JustifiedHeader { header: TestSourceHeader(false, 2, 2), proof: TestFinalityProof(2) }; - let selector = JustifiedHeaderSelector::Regular( - vec![TestSourceHeader(false, 9, 9), TestSourceHeader(false, 10, 10)], - justified_header.clone(), - ); - assert_eq!(selector.select(&finality_proofs_buf), Some(justified_header)); - - // if there's intersection between recent finality proofs and unjustified headers, but there - // are no proofs in this intersection, nothing is changed - let finality_proofs_buf = FinalityProofsBuf::::new(vec![ - TestFinalityProof(7), - TestFinalityProof(11), - ]); - let justified_header = - JustifiedHeader { header: TestSourceHeader(false, 2, 2), proof: TestFinalityProof(2) }; - let selector = JustifiedHeaderSelector::Regular( - vec![ - TestSourceHeader(false, 8, 8), - TestSourceHeader(false, 9, 9), - TestSourceHeader(false, 10, 10), - ], - justified_header.clone(), - ); - assert_eq!(selector.select(&finality_proofs_buf), Some(justified_header)); - - // if there's intersection between recent finality proofs and unjustified headers and - // there's a proof in this 
intersection: - // - this better (last from intersection) proof is selected; - // - 'obsolete' unjustified headers are pruned. - let finality_proofs_buf = FinalityProofsBuf::::new(vec![ - TestFinalityProof(7), - TestFinalityProof(9), - ]); - let justified_header = - JustifiedHeader { header: TestSourceHeader(false, 2, 2), proof: TestFinalityProof(2) }; - let selector = JustifiedHeaderSelector::Regular( - vec![ - TestSourceHeader(false, 8, 8), - TestSourceHeader(false, 9, 9), - TestSourceHeader(false, 10, 10), - ], - justified_header, - ); - assert_eq!( - selector.select(&finality_proofs_buf), - Some(JustifiedHeader { - header: TestSourceHeader(false, 9, 9), - proof: TestFinalityProof(9) - }) - ); - } -} diff --git a/relays/finality/src/lib.rs b/relays/finality/src/lib.rs deleted file mode 100644 index 3579e68e1..000000000 --- a/relays/finality/src/lib.rs +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! This crate has single entrypoint to run synchronization loop that is built around finality -//! proofs, as opposed to headers synchronization loop, which is built around headers. The headers -//! are still submitted to the target node, but are treated as auxiliary data as we are not trying -//! 
to submit all source headers to the target node. - -pub use crate::{ - base::{FinalityPipeline, SourceClientBase}, - finality_loop::{metrics_prefix, run, FinalitySyncParams, SourceClient, TargetClient}, - finality_proofs::{FinalityProofsBuf, FinalityProofsStream}, - sync_loop_metrics::SyncLoopMetrics, -}; - -use bp_header_chain::ConsensusLogReader; -use relay_utils::{FailedClient, MaybeConnectionError}; -use std::fmt::Debug; - -mod base; -mod finality_loop; -mod finality_proofs; -mod headers; -mod mock; -mod sync_loop_metrics; - -/// Finality proofs synchronization pipeline. -pub trait FinalitySyncPipeline: FinalityPipeline { - /// A reader that can extract the consensus log from the header digest and interpret it. - type ConsensusLogReader: ConsensusLogReader; - /// Type of header that we're syncing. - type Header: SourceHeader; -} - -/// Header that we're receiving from source node. -pub trait SourceHeader: Clone + Debug + PartialEq + Send + Sync { - /// Returns hash of header. - fn hash(&self) -> Hash; - /// Returns number of header. - fn number(&self) -> Number; - /// Returns true if this header needs to be submitted to target node. - fn is_mandatory(&self) -> bool; -} - -/// Error that may happen inside finality synchronization loop. -#[derive(Debug)] -enum Error { - /// Source client request has failed with given error. - Source(SourceError), - /// Target client request has failed with given error. - Target(TargetError), - /// Finality proof for mandatory header is missing from the source node. 
- MissingMandatoryFinalityProof(P::Number), - /// `submit_finality_proof` transaction failed - ProofSubmissionTxFailed { - #[allow(dead_code)] - submitted_number: P::Number, - #[allow(dead_code)] - best_number_at_target: P::Number, - }, - /// `submit_finality_proof` transaction lost - ProofSubmissionTxLost, -} - -impl Error -where - P: FinalitySyncPipeline, - SourceError: MaybeConnectionError, - TargetError: MaybeConnectionError, -{ - fn fail_if_connection_error(&self) -> Result<(), FailedClient> { - match *self { - Error::Source(ref error) if error.is_connection_error() => Err(FailedClient::Source), - Error::Target(ref error) if error.is_connection_error() => Err(FailedClient::Target), - _ => Ok(()), - } - } -} diff --git a/relays/finality/src/mock.rs b/relays/finality/src/mock.rs deleted file mode 100644 index e3ec4e4d0..000000000 --- a/relays/finality/src/mock.rs +++ /dev/null @@ -1,213 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Tests for finality synchronization loop. 
- -#![cfg(test)] - -use crate::{ - base::SourceClientBase, - finality_loop::{SourceClient, TargetClient}, - FinalityPipeline, FinalitySyncPipeline, SourceHeader, -}; - -use async_trait::async_trait; -use bp_header_chain::{FinalityProof, GrandpaConsensusLogReader}; -use futures::{Stream, StreamExt}; -use parking_lot::Mutex; -use relay_utils::{ - relay_loop::Client as RelayClient, HeaderId, MaybeConnectionError, TrackedTransactionStatus, - TransactionTracker, -}; -use std::{collections::HashMap, pin::Pin, sync::Arc}; - -type IsMandatory = bool; -pub type TestNumber = u64; -type TestHash = u64; - -#[derive(Clone, Debug)] -pub struct TestTransactionTracker(pub TrackedTransactionStatus>); - -impl Default for TestTransactionTracker { - fn default() -> TestTransactionTracker { - TestTransactionTracker(TrackedTransactionStatus::Finalized(Default::default())) - } -} - -#[async_trait] -impl TransactionTracker for TestTransactionTracker { - type HeaderId = HeaderId; - - async fn wait(self) -> TrackedTransactionStatus> { - self.0 - } -} - -#[derive(Debug, Clone)] -pub enum TestError { - NonConnection, -} - -impl MaybeConnectionError for TestError { - fn is_connection_error(&self) -> bool { - false - } -} - -#[derive(Debug, Clone, PartialEq)] -pub struct TestFinalitySyncPipeline; - -impl FinalityPipeline for TestFinalitySyncPipeline { - const SOURCE_NAME: &'static str = "TestSource"; - const TARGET_NAME: &'static str = "TestTarget"; - - type Hash = TestHash; - type Number = TestNumber; - type FinalityProof = TestFinalityProof; -} - -impl FinalitySyncPipeline for TestFinalitySyncPipeline { - type ConsensusLogReader = GrandpaConsensusLogReader; - type Header = TestSourceHeader; -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct TestSourceHeader(pub IsMandatory, pub TestNumber, pub TestHash); - -impl SourceHeader> - for TestSourceHeader -{ - fn hash(&self) -> TestHash { - self.2 - } - - fn number(&self) -> TestNumber { - self.1 - } - - fn is_mandatory(&self) -> bool { - 
self.0 - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct TestFinalityProof(pub TestNumber); - -impl FinalityProof for TestFinalityProof { - fn target_header_hash(&self) -> TestHash { - Default::default() - } - - fn target_header_number(&self) -> TestNumber { - self.0 - } -} - -#[derive(Debug, Clone, Default)] -pub struct ClientsData { - pub source_best_block_number: TestNumber, - pub source_headers: HashMap)>, - pub source_proofs: Vec, - - pub target_best_block_id: HeaderId, - pub target_headers: Vec<(TestSourceHeader, TestFinalityProof)>, - pub target_transaction_tracker: TestTransactionTracker, -} - -#[derive(Clone)] -pub struct TestSourceClient { - pub on_method_call: Arc, - pub data: Arc>, -} - -#[async_trait] -impl RelayClient for TestSourceClient { - type Error = TestError; - - async fn reconnect(&mut self) -> Result<(), TestError> { - unreachable!() - } -} - -#[async_trait] -impl SourceClientBase for TestSourceClient { - type FinalityProofsStream = Pin + 'static + Send>>; - - async fn finality_proofs(&self) -> Result { - let mut data = self.data.lock(); - (self.on_method_call)(&mut data); - Ok(futures::stream::iter(data.source_proofs.clone()).boxed()) - } -} - -#[async_trait] -impl SourceClient for TestSourceClient { - async fn best_finalized_block_number(&self) -> Result { - let mut data = self.data.lock(); - (self.on_method_call)(&mut data); - Ok(data.source_best_block_number) - } - - async fn header_and_finality_proof( - &self, - number: TestNumber, - ) -> Result<(TestSourceHeader, Option), TestError> { - let mut data = self.data.lock(); - (self.on_method_call)(&mut data); - data.source_headers.get(&number).cloned().ok_or(TestError::NonConnection) - } -} - -#[derive(Clone)] -pub struct TestTargetClient { - pub on_method_call: Arc, - pub data: Arc>, -} - -#[async_trait] -impl RelayClient for TestTargetClient { - type Error = TestError; - - async fn reconnect(&mut self) -> Result<(), TestError> { - unreachable!() - } -} - -#[async_trait] -impl 
TargetClient for TestTargetClient { - type TransactionTracker = TestTransactionTracker; - - async fn best_finalized_source_block_id( - &self, - ) -> Result, TestError> { - let mut data = self.data.lock(); - (self.on_method_call)(&mut data); - Ok(data.target_best_block_id) - } - - async fn submit_finality_proof( - &self, - header: TestSourceHeader, - proof: TestFinalityProof, - ) -> Result { - let mut data = self.data.lock(); - (self.on_method_call)(&mut data); - data.target_best_block_id = HeaderId(header.number(), header.hash()); - data.target_headers.push((header, proof)); - (self.on_method_call)(&mut data); - Ok(data.target_transaction_tracker.clone()) - } -} diff --git a/relays/finality/src/sync_loop_metrics.rs b/relays/finality/src/sync_loop_metrics.rs deleted file mode 100644 index 4da1df811..000000000 --- a/relays/finality/src/sync_loop_metrics.rs +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Metrics for headers synchronization relay loop. - -use relay_utils::{ - metrics::{metric_name, register, IntGauge, Metric, PrometheusError, Registry}, - UniqueSaturatedInto, -}; - -/// Headers sync metrics. -#[derive(Clone)] -pub struct SyncLoopMetrics { - /// Best syncing header at the source. 
- best_source_block_number: IntGauge, - /// Best syncing header at the target. - best_target_block_number: IntGauge, - /// Flag that has `0` value when best source headers at the source node and at-target-chain - /// are matching and `1` otherwise. - using_different_forks: IntGauge, -} - -impl SyncLoopMetrics { - /// Create and register headers loop metrics. - pub fn new( - prefix: Option<&str>, - at_source_chain_label: &str, - at_target_chain_label: &str, - ) -> Result { - Ok(SyncLoopMetrics { - best_source_block_number: IntGauge::new( - metric_name(prefix, &format!("best_{at_source_chain_label}_block_number")), - format!("Best block number at the {at_source_chain_label}"), - )?, - best_target_block_number: IntGauge::new( - metric_name(prefix, &format!("best_{at_target_chain_label}_block_number")), - format!("Best block number at the {at_target_chain_label}"), - )?, - using_different_forks: IntGauge::new( - metric_name(prefix, &format!("is_{at_source_chain_label}_and_{at_target_chain_label}_using_different_forks")), - "Whether the best finalized source block at target node is different (value 1) from the \ - corresponding block at the source node", - )?, - }) - } - - /// Returns current value of the using-same-fork flag. - #[cfg(test)] - pub(crate) fn is_using_same_fork(&self) -> bool { - self.using_different_forks.get() == 0 - } - - /// Update best block number at source. - pub fn update_best_block_at_source>( - &self, - source_best_number: Number, - ) { - self.best_source_block_number.set(source_best_number.unique_saturated_into()); - } - - /// Update best block number at target. - pub fn update_best_block_at_target>( - &self, - target_best_number: Number, - ) { - self.best_target_block_number.set(target_best_number.unique_saturated_into()); - } - - /// Update using-same-fork flag. 
- pub fn update_using_same_fork(&self, using_same_fork: bool) { - self.using_different_forks.set((!using_same_fork).into()) - } -} - -impl Metric for SyncLoopMetrics { - fn register(&self, registry: &Registry) -> Result<(), PrometheusError> { - register(self.best_source_block_number.clone(), registry)?; - register(self.best_target_block_number.clone(), registry)?; - register(self.using_different_forks.clone(), registry)?; - Ok(()) - } -} diff --git a/relays/lib-substrate-relay/Cargo.toml b/relays/lib-substrate-relay/Cargo.toml deleted file mode 100644 index 7e7e774d7..000000000 --- a/relays/lib-substrate-relay/Cargo.toml +++ /dev/null @@ -1,62 +0,0 @@ -[package] -name = "substrate-relay-helper" -version = "0.1.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true -publish = false - -[lints] -workspace = true - -[dependencies] -anyhow = "1.0" -async-std = "1.9.0" -async-trait = "0.1.79" -codec = { package = "parity-scale-codec", version = "3.6.1" } -futures = "0.3.30" -hex = "0.4" -log = { workspace = true } -num-traits = "0.2" -rbtag = "0.3" -structopt = "0.3" -strum = { version = "0.26.2", features = ["derive"] } -thiserror = { workspace = true } - -# Bridge dependencies - -bp-header-chain = { path = "../../primitives/header-chain" } -bp-parachains = { path = "../../primitives/parachains" } -bp-polkadot-core = { path = "../../primitives/polkadot-core" } -bp-relayers = { path = "../../primitives/relayers" } -bridge-runtime-common = { path = "../../bin/runtime-common" } - -equivocation-detector = { path = "../equivocation" } -finality-grandpa = { version = "0.16.2" } -finality-relay = { path = "../finality" } -parachains-relay = { path = "../parachains" } -relay-utils = { path = "../utils" } -messages-relay = { path = "../messages" } -relay-substrate-client = { path = "../client-substrate" } - -pallet-bridge-grandpa = { path = "../../modules/grandpa" } -pallet-bridge-messages = 
{ path = "../../modules/messages" } -pallet-bridge-parachains = { path = "../../modules/parachains" } - -bp-runtime = { path = "../../primitives/runtime" } -bp-messages = { path = "../../primitives/messages" } - -# Substrate Dependencies - -frame-support = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -frame-system = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -pallet-balances = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -pallet-grandpa = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-consensus-grandpa = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } - -[dev-dependencies] -pallet-transaction-payment = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -relay-substrate-client = { path = "../client-substrate", features = ["test-helpers"] } diff --git a/relays/lib-substrate-relay/src/cli/bridge.rs b/relays/lib-substrate-relay/src/cli/bridge.rs deleted file mode 100644 index 316f59a2b..000000000 --- a/relays/lib-substrate-relay/src/cli/bridge.rs +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Basic traits for exposing bridges in the CLI. - -use crate::{ - equivocation::SubstrateEquivocationDetectionPipeline, - finality::SubstrateFinalitySyncPipeline, - messages_lane::{MessagesRelayLimits, SubstrateMessageLane}, - parachains::SubstrateParachainsPipeline, -}; -use pallet_bridge_parachains::{RelayBlockHash, RelayBlockHasher, RelayBlockNumber}; -use relay_substrate_client::{ - Chain, ChainWithRuntimeVersion, ChainWithTransactions, Parachain, RelayChain, -}; - -/// Minimal bridge representation that can be used from the CLI. -/// It connects a source chain to a target chain. -pub trait CliBridgeBase: Sized { - /// The source chain. - type Source: Chain + ChainWithRuntimeVersion; - /// The target chain. - type Target: ChainWithTransactions + ChainWithRuntimeVersion; -} - -/// Bridge representation that can be used from the CLI for relaying headers -/// from a relay chain to a relay chain. -pub trait RelayToRelayHeadersCliBridge: CliBridgeBase { - /// Finality proofs synchronization pipeline. - type Finality: SubstrateFinalitySyncPipeline< - SourceChain = Self::Source, - TargetChain = Self::Target, - >; -} - -/// Convenience trait that adds bounds to `CliBridgeBase`. -pub trait RelayToRelayEquivocationDetectionCliBridgeBase: CliBridgeBase { - /// The source chain with extra bounds. - type BoundedSource: ChainWithTransactions; -} - -impl RelayToRelayEquivocationDetectionCliBridgeBase for T -where - T: CliBridgeBase, - T::Source: ChainWithTransactions, -{ - type BoundedSource = T::Source; -} - -/// Bridge representation that can be used from the CLI for detecting equivocations -/// in the headers synchronized from a relay chain to a relay chain. -pub trait RelayToRelayEquivocationDetectionCliBridge: - RelayToRelayEquivocationDetectionCliBridgeBase -{ - /// Equivocation detection pipeline. 
- type Equivocation: SubstrateEquivocationDetectionPipeline< - SourceChain = Self::Source, - TargetChain = Self::Target, - >; -} - -/// Bridge representation that can be used from the CLI for relaying headers -/// from a parachain to a relay chain. -pub trait ParachainToRelayHeadersCliBridge: CliBridgeBase -where - Self::Source: Parachain, -{ - /// The `CliBridgeBase` type represents the parachain in this situation. - /// We need to add an extra type for the relay chain. - type SourceRelay: Chain - + ChainWithRuntimeVersion - + RelayChain; - /// Finality proofs synchronization pipeline (source parachain -> target). - type ParachainFinality: SubstrateParachainsPipeline< - SourceRelayChain = Self::SourceRelay, - SourceParachain = Self::Source, - TargetChain = Self::Target, - >; - /// Finality proofs synchronization pipeline (source relay chain -> target). - type RelayFinality: SubstrateFinalitySyncPipeline< - SourceChain = Self::SourceRelay, - TargetChain = Self::Target, - >; -} - -/// Bridge representation that can be used from the CLI for relaying messages. -pub trait MessagesCliBridge: CliBridgeBase { - /// The Source -> Destination messages synchronization pipeline. - type MessagesLane: SubstrateMessageLane; - - /// Optional messages delivery transaction limits that the messages relay is going - /// to use. If it returns `None`, limits are estimated using `TransactionPayment` API - /// at the target chain. - fn maybe_messages_limits() -> Option { - None - } -} diff --git a/relays/lib-substrate-relay/src/cli/chain_schema.rs b/relays/lib-substrate-relay/src/cli/chain_schema.rs deleted file mode 100644 index 6246bdbf0..000000000 --- a/relays/lib-substrate-relay/src/cli/chain_schema.rs +++ /dev/null @@ -1,261 +0,0 @@ -// Copyright 2019-2022 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives related to chain CLI options. - -use relay_substrate_client::{AccountKeyPairOf, ChainWithTransactions}; -use structopt::StructOpt; -use strum::{EnumString, VariantNames}; - -use relay_substrate_client::{ChainRuntimeVersion, ChainWithRuntimeVersion, SimpleRuntimeVersion}; - -use crate::TransactionParams; - -#[doc = "Runtime version params."] -#[derive(StructOpt, Debug, PartialEq, Eq, Clone, Copy, EnumString, VariantNames)] -pub enum RuntimeVersionType { - /// Auto query version from chain - Auto, - /// Custom `spec_version` and `transaction_version` - Custom, - /// Read version from bundle dependencies directly. - Bundle, -} - -/// Create chain-specific set of runtime version parameters. -#[macro_export] -macro_rules! declare_chain_runtime_version_params_cli_schema { - ($chain:ident, $chain_prefix:ident) => { - bp_runtime::paste::item! 
{ - #[doc = $chain " runtime version params."] - #[derive(StructOpt, Debug, PartialEq, Eq, Clone, Copy)] - pub struct [<$chain RuntimeVersionParams>] { - #[doc = "The type of runtime version for chain " $chain] - #[structopt(long, default_value = "Bundle")] - pub [<$chain_prefix _version_mode>]: RuntimeVersionType, - #[doc = "The custom sepc_version for chain " $chain] - #[structopt(long)] - pub [<$chain_prefix _spec_version>]: Option, - #[doc = "The custom transaction_version for chain " $chain] - #[structopt(long)] - pub [<$chain_prefix _transaction_version>]: Option, - } - - impl [<$chain RuntimeVersionParams>] { - /// Converts self into `ChainRuntimeVersion`. - pub fn into_runtime_version( - self, - bundle_runtime_version: Option, - ) -> anyhow::Result { - Ok(match self.[<$chain_prefix _version_mode>] { - RuntimeVersionType::Auto => ChainRuntimeVersion::Auto, - RuntimeVersionType::Custom => { - let custom_spec_version = self.[<$chain_prefix _spec_version>] - .ok_or_else(|| anyhow::Error::msg(format!("The {}-spec-version is required when choose custom mode", stringify!($chain_prefix))))?; - let custom_transaction_version = self.[<$chain_prefix _transaction_version>] - .ok_or_else(|| anyhow::Error::msg(format!("The {}-transaction-version is required when choose custom mode", stringify!($chain_prefix))))?; - ChainRuntimeVersion::Custom( - SimpleRuntimeVersion { - spec_version: custom_spec_version, - transaction_version: custom_transaction_version - } - ) - }, - RuntimeVersionType::Bundle => match bundle_runtime_version { - Some(runtime_version) => ChainRuntimeVersion::Custom(runtime_version), - None => { - return Err(anyhow::format_err!("Cannot use bundled runtime version of {}: it is not known to the relay", stringify!($chain_prefix))); - } - }, - }) - } - } - } - }; -} - -/// Create chain-specific set of runtime version parameters. -#[macro_export] -macro_rules! 
declare_chain_connection_params_cli_schema { - ($chain:ident, $chain_prefix:ident) => { - bp_runtime::paste::item! { - // TODO: https://github.com/paritytech/parity-bridges-common/issues/2909 - // remove all obsolete arguments (separate URI components) - - #[doc = $chain " connection params."] - #[derive(StructOpt, Debug, PartialEq, Eq, Clone)] - pub struct [<$chain ConnectionParams>] { - #[doc = "WS endpoint of " $chain ": full URI. Overrides all other connection string components (host, port, path, secure)."] - #[structopt(long)] - pub [<$chain_prefix _uri>]: Option, - #[doc = "WS endpoint of " $chain ": host component."] - #[structopt(long, default_value = "127.0.0.1")] - pub [<$chain_prefix _host>]: String, - #[doc = "WS endpoint of " $chain ": port component."] - #[structopt(long, default_value = "9944")] - pub [<$chain_prefix _port>]: u16, - #[doc = "WS endpoint of " $chain ": path component."] - #[structopt(long)] - pub [<$chain_prefix _path>]: Option, - #[doc = "Use secure websocket connection."] - #[structopt(long)] - pub [<$chain_prefix _secure>]: bool, - #[doc = "Custom runtime version"] - #[structopt(flatten)] - pub [<$chain_prefix _runtime_version>]: [<$chain RuntimeVersionParams>], - } - - impl [<$chain ConnectionParams>] { - /// Convert connection params into Substrate client. - #[allow(dead_code)] - pub async fn into_client( - self, - ) -> anyhow::Result> { - let chain_runtime_version = self - .[<$chain_prefix _runtime_version>] - .into_runtime_version(Chain::RUNTIME_VERSION)?; - Ok(relay_substrate_client::Client::new(relay_substrate_client::ConnectionParams { - uri: self.[<$chain_prefix _uri>], - host: self.[<$chain_prefix _host>], - port: self.[<$chain_prefix _port>], - path: self.[<$chain_prefix _path>], - secure: self.[<$chain_prefix _secure>], - chain_runtime_version, - }) - .await - ) - } - } - } - }; -} - -/// Create chain-specific set of signing parameters. -#[macro_export] -macro_rules! 
declare_chain_signing_params_cli_schema { - ($chain:ident, $chain_prefix:ident) => { - bp_runtime::paste::item! { - #[doc = $chain " signing params."] - #[derive(StructOpt, Debug, PartialEq, Eq, Clone)] - pub struct [<$chain SigningParams>] { - #[doc = "The SURI of secret key to use when transactions are submitted to the " $chain " node."] - #[structopt(long)] - pub [<$chain_prefix _signer>]: Option, - #[doc = "The password for the SURI of secret key to use when transactions are submitted to the " $chain " node."] - #[structopt(long)] - pub [<$chain_prefix _signer_password>]: Option, - - #[doc = "Path to the file, that contains SURI of secret key to use when transactions are submitted to the " $chain " node. Can be overridden with " $chain_prefix "_signer option."] - #[structopt(long)] - pub [<$chain_prefix _signer_file>]: Option, - #[doc = "Path to the file, that password for the SURI of secret key to use when transactions are submitted to the " $chain " node. Can be overridden with " $chain_prefix "_signer_password option."] - #[structopt(long)] - pub [<$chain_prefix _signer_password_file>]: Option, - - #[doc = "Transactions mortality period, in blocks. MUST be a power of two in [4; 65536] range. MAY NOT be larger than `BlockHashCount` parameter of the chain system module."] - #[structopt(long)] - pub [<$chain_prefix _transactions_mortality>]: Option, - } - - impl [<$chain SigningParams>] { - /// Return transactions mortality. - #[allow(dead_code)] - pub fn transactions_mortality(&self) -> anyhow::Result> { - self.[<$chain_prefix _transactions_mortality>] - .map(|transactions_mortality| { - if !(4..=65536).contains(&transactions_mortality) - || !transactions_mortality.is_power_of_two() - { - Err(anyhow::format_err!( - "Transactions mortality {} is not a power of two in a [4; 65536] range", - transactions_mortality, - )) - } else { - Ok(transactions_mortality) - } - }) - .transpose() - } - - /// Parse signing params into chain-specific KeyPair. 
- #[allow(dead_code)] - pub fn to_keypair(&self) -> anyhow::Result> { - let suri = match (self.[<$chain_prefix _signer>].as_ref(), self.[<$chain_prefix _signer_file>].as_ref()) { - (Some(suri), _) => suri.to_owned(), - (None, Some(suri_file)) => std::fs::read_to_string(suri_file) - .map_err(|err| anyhow::format_err!( - "Failed to read SURI from file {:?}: {}", - suri_file, - err, - ))?, - (None, None) => return Err(anyhow::format_err!( - "One of options must be specified: '{}' or '{}'", - stringify!([<$chain_prefix _signer>]), - stringify!([<$chain_prefix _signer_file>]), - )), - }; - - let suri_password = match ( - self.[<$chain_prefix _signer_password>].as_ref(), - self.[<$chain_prefix _signer_password_file>].as_ref(), - ) { - (Some(suri_password), _) => Some(suri_password.to_owned()), - (None, Some(suri_password_file)) => std::fs::read_to_string(suri_password_file) - .map(Some) - .map_err(|err| anyhow::format_err!( - "Failed to read SURI password from file {:?}: {}", - suri_password_file, - err, - ))?, - _ => None, - }; - - use sp_core::crypto::Pair; - - AccountKeyPairOf::::from_string( - &suri, - suri_password.as_deref() - ).map_err(|e| anyhow::format_err!("{:?}", e)) - } - - /// Return transaction parameters. - #[allow(dead_code)] - pub fn transaction_params( - &self, - ) -> anyhow::Result>> { - Ok(TransactionParams { - mortality: self.transactions_mortality()?, - signer: self.to_keypair::()?, - }) - } - } - } - }; -} - -/// Create chain-specific set of configuration objects: connection parameters, -/// signing parameters and bridge initialization parameters. -#[macro_export] -macro_rules! 
declare_chain_cli_schema { - ($chain:ident, $chain_prefix:ident) => { - $crate::declare_chain_runtime_version_params_cli_schema!($chain, $chain_prefix); - $crate::declare_chain_connection_params_cli_schema!($chain, $chain_prefix); - $crate::declare_chain_signing_params_cli_schema!($chain, $chain_prefix); - }; -} - -declare_chain_cli_schema!(Source, source); -declare_chain_cli_schema!(Target, target); diff --git a/relays/lib-substrate-relay/src/cli/detect_equivocations.rs b/relays/lib-substrate-relay/src/cli/detect_equivocations.rs deleted file mode 100644 index b98e41b2a..000000000 --- a/relays/lib-substrate-relay/src/cli/detect_equivocations.rs +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright 2019-2023 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives for exposing the equivocation detection functionality in the CLI. - -use crate::{ - cli::{bridge::*, chain_schema::*, PrometheusParams}, - equivocation, - equivocation::SubstrateEquivocationDetectionPipeline, -}; - -use async_trait::async_trait; -use relay_substrate_client::ChainWithTransactions; -use structopt::StructOpt; - -/// Start equivocation detection loop. 
-#[derive(StructOpt)] -pub struct DetectEquivocationsParams { - #[structopt(flatten)] - source: SourceConnectionParams, - #[structopt(flatten)] - source_sign: SourceSigningParams, - #[structopt(flatten)] - target: TargetConnectionParams, - #[structopt(flatten)] - prometheus_params: PrometheusParams, -} - -/// Trait used for starting the equivocation detection loop between 2 chains. -#[async_trait] -pub trait EquivocationsDetector: RelayToRelayEquivocationDetectionCliBridge -where - Self::Source: ChainWithTransactions, -{ - /// Start the equivocation detection loop. - async fn start(data: DetectEquivocationsParams) -> anyhow::Result<()> { - let source_client = data.source.into_client::().await?; - Self::Equivocation::start_relay_guards( - &source_client, - source_client.can_start_version_guard(), - ) - .await?; - - equivocation::run::( - source_client, - data.target.into_client::().await?, - data.source_sign.transaction_params::()?, - data.prometheus_params.into_metrics_params()?, - ) - .await - } -} diff --git a/relays/lib-substrate-relay/src/cli/init_bridge.rs b/relays/lib-substrate-relay/src/cli/init_bridge.rs deleted file mode 100644 index bf7c86437..000000000 --- a/relays/lib-substrate-relay/src/cli/init_bridge.rs +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives for exposing the bridge initialization functionality in the CLI. - -use async_trait::async_trait; -use codec::Encode; - -use crate::{ - cli::{bridge::CliBridgeBase, chain_schema::*}, - finality_base::engine::Engine, -}; -use bp_runtime::Chain as ChainBase; -use relay_substrate_client::{AccountKeyPairOf, Chain, UnsignedTransaction}; -use sp_core::Pair; -use structopt::StructOpt; - -/// Bridge initialization params. -#[derive(StructOpt)] -pub struct InitBridgeParams { - #[structopt(flatten)] - source: SourceConnectionParams, - #[structopt(flatten)] - target: TargetConnectionParams, - #[structopt(flatten)] - target_sign: TargetSigningParams, - /// Generates all required data, but does not submit extrinsic - #[structopt(long)] - dry_run: bool, -} - -/// Trait used for bridge initializing. -#[async_trait] -pub trait BridgeInitializer: CliBridgeBase -where - ::AccountId: From< as Pair>::Public>, -{ - /// The finality engine used by the source chain. - type Engine: Engine; - - /// Get the encoded call to init the bridge. - fn encode_init_bridge( - init_data: >::InitializationData, - ) -> ::Call; - - /// Initialize the bridge. 
- async fn init_bridge(data: InitBridgeParams) -> anyhow::Result<()> { - let source_client = data.source.into_client::().await?; - let target_client = data.target.into_client::().await?; - let target_sign = data.target_sign.to_keypair::()?; - let dry_run = data.dry_run; - - crate::finality::initialize::initialize::( - source_client, - target_client.clone(), - target_sign, - move |transaction_nonce, initialization_data| { - let call = Self::encode_init_bridge(initialization_data); - log::info!( - target: "bridge", - "Initialize bridge call encoded as hex string: {:?}", - format!("0x{}", hex::encode(call.encode())) - ); - Ok(UnsignedTransaction::new(call.into(), transaction_nonce)) - }, - dry_run, - ) - .await; - - Ok(()) - } -} diff --git a/relays/lib-substrate-relay/src/cli/mod.rs b/relays/lib-substrate-relay/src/cli/mod.rs deleted file mode 100644 index 0dd0d5474..000000000 --- a/relays/lib-substrate-relay/src/cli/mod.rs +++ /dev/null @@ -1,192 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Deal with CLI args of substrate-to-substrate relay. 
- -use codec::{Decode, Encode}; -use rbtag::BuildInfo; -use structopt::StructOpt; -use strum::{EnumString, VariantNames}; - -use bp_messages::LaneId; - -pub mod bridge; -pub mod chain_schema; -pub mod detect_equivocations; -pub mod init_bridge; -pub mod relay_headers; -pub mod relay_headers_and_messages; -pub mod relay_messages; -pub mod relay_parachains; - -/// The target that will be used when publishing logs related to this pallet. -pub const LOG_TARGET: &str = "bridge"; - -/// Lane id. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct HexLaneId(pub [u8; 4]); - -impl From for LaneId { - fn from(lane_id: HexLaneId) -> LaneId { - LaneId(lane_id.0) - } -} - -impl std::str::FromStr for HexLaneId { - type Err = hex::FromHexError; - - fn from_str(s: &str) -> Result { - let mut lane_id = [0u8; 4]; - hex::decode_to_slice(s, &mut lane_id)?; - Ok(HexLaneId(lane_id)) - } -} - -/// Nicer formatting for raw bytes vectors. -#[derive(Default, Encode, Decode, PartialEq, Eq)] -pub struct HexBytes(pub Vec); - -impl std::str::FromStr for HexBytes { - type Err = hex::FromHexError; - - fn from_str(s: &str) -> Result { - Ok(Self(hex::decode(s)?)) - } -} - -impl std::fmt::Debug for HexBytes { - fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { - write!(fmt, "0x{self}") - } -} - -impl std::fmt::Display for HexBytes { - fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { - write!(fmt, "{}", hex::encode(&self.0)) - } -} - -/// Prometheus metrics params. -#[derive(Clone, Debug, PartialEq, StructOpt)] -pub struct PrometheusParams { - /// Do not expose a Prometheus metric endpoint. - #[structopt(long)] - pub no_prometheus: bool, - /// Expose Prometheus endpoint at given interface. - #[structopt(long, default_value = "127.0.0.1")] - pub prometheus_host: String, - /// Expose Prometheus endpoint at given port. - #[structopt(long, default_value = "9616")] - pub prometheus_port: u16, -} - -/// Struct to get git commit info and build time. 
-#[derive(BuildInfo)] -struct SubstrateRelayBuildInfo; - -impl SubstrateRelayBuildInfo { - /// Get git commit in form ``. - pub fn get_git_commit() -> String { - // on gitlab we use images without git installed, so we can't use `rbtag` there - // locally we don't have `CI_*` env variables, so we can't rely on them - // => we are using `CI_*` env variables or else `rbtag` - let maybe_sha_from_ci = option_env!("CI_COMMIT_SHORT_SHA"); - maybe_sha_from_ci - .map(|short_sha| { - // we assume that on CI the copy is always clean - format!("{short_sha}-clean") - }) - .unwrap_or_else(|| SubstrateRelayBuildInfo.get_build_commit().into()) - } -} - -impl PrometheusParams { - /// Tries to convert CLI metrics params into metrics params, used by the relay. - pub fn into_metrics_params(self) -> anyhow::Result { - let metrics_address = if !self.no_prometheus { - Some(relay_utils::metrics::MetricsAddress { - host: self.prometheus_host, - port: self.prometheus_port, - }) - } else { - None - }; - - let relay_version = option_env!("CARGO_PKG_VERSION").unwrap_or("unknown"); - let relay_commit = SubstrateRelayBuildInfo::get_git_commit(); - relay_utils::metrics::MetricsParams::new( - metrics_address, - relay_version.into(), - relay_commit, - ) - .map_err(|e| anyhow::format_err!("{:?}", e)) - } -} - -/// Either explicit or maximal allowed value. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum ExplicitOrMaximal { - /// User has explicitly specified argument value. - Explicit(V), - /// Maximal allowed value for this argument. - Maximal, -} - -impl std::str::FromStr for ExplicitOrMaximal -where - V::Err: std::fmt::Debug, -{ - type Err = String; - - fn from_str(s: &str) -> Result { - if s.to_lowercase() == "max" { - return Ok(ExplicitOrMaximal::Maximal) - } - - V::from_str(s) - .map(ExplicitOrMaximal::Explicit) - .map_err(|e| format!("Failed to parse '{e:?}'. 
Expected 'max' or explicit value")) - } -} - -#[doc = "Runtime version params."] -#[derive(StructOpt, Debug, PartialEq, Eq, Clone, Copy, EnumString, VariantNames)] -pub enum RuntimeVersionType { - /// Auto query version from chain - Auto, - /// Custom `spec_version` and `transaction_version` - Custom, - /// Read version from bundle dependencies directly. - Bundle, -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn hex_bytes_display_matches_from_str_for_clap() { - // given - let hex = HexBytes(vec![1, 2, 3, 4]); - let display = format!("{hex}"); - - // when - let hex2: HexBytes = display.parse().unwrap(); - - // then - assert_eq!(hex.0, hex2.0); - } -} diff --git a/relays/lib-substrate-relay/src/cli/relay_headers.rs b/relays/lib-substrate-relay/src/cli/relay_headers.rs deleted file mode 100644 index 90558ed46..000000000 --- a/relays/lib-substrate-relay/src/cli/relay_headers.rs +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives for exposing the headers relaying functionality in the CLI. 
- -use async_trait::async_trait; -use structopt::StructOpt; - -use relay_utils::metrics::{GlobalMetrics, StandaloneMetric}; - -use crate::{ - cli::{bridge::*, chain_schema::*, PrometheusParams}, - finality::SubstrateFinalitySyncPipeline, -}; - -/// Chain headers relaying params. -#[derive(StructOpt)] -pub struct RelayHeadersParams { - /// If passed, only mandatory headers (headers that are changing the GRANDPA authorities set) - /// are relayed. - #[structopt(long)] - only_mandatory_headers: bool, - #[structopt(flatten)] - source: SourceConnectionParams, - #[structopt(flatten)] - target: TargetConnectionParams, - #[structopt(flatten)] - target_sign: TargetSigningParams, - #[structopt(flatten)] - prometheus_params: PrometheusParams, -} - -/// Trait used for relaying headers between 2 chains. -#[async_trait] -pub trait HeadersRelayer: RelayToRelayHeadersCliBridge { - /// Relay headers. - async fn relay_headers(data: RelayHeadersParams) -> anyhow::Result<()> { - let source_client = data.source.into_client::().await?; - let target_client = data.target.into_client::().await?; - let target_transactions_mortality = data.target_sign.target_transactions_mortality; - let target_sign = data.target_sign.to_keypair::()?; - - let metrics_params: relay_utils::metrics::MetricsParams = - data.prometheus_params.into_metrics_params()?; - GlobalMetrics::new()?.register_and_spawn(&metrics_params.registry)?; - - let target_transactions_params = crate::TransactionParams { - signer: target_sign, - mortality: target_transactions_mortality, - }; - Self::Finality::start_relay_guards(&target_client, target_client.can_start_version_guard()) - .await?; - - crate::finality::run::( - source_client, - target_client, - data.only_mandatory_headers, - target_transactions_params, - metrics_params, - ) - .await - } -} diff --git a/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/mod.rs b/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/mod.rs deleted file mode 100644 index 
27e9f1c21..000000000 --- a/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/mod.rs +++ /dev/null @@ -1,492 +0,0 @@ -// Copyright 2019-2022 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Complex 2-ways headers+messages relays support. -//! -//! To add new complex relay between `ChainA` and `ChainB`, you must: -//! -//! 1) ensure that there's a `declare_chain_cli_schema!(...)` for both chains. -//! 2) add `declare_chain_to_chain_bridge_schema!(...)` or -//! `declare_chain_to_parachain_bridge_schema` for the bridge. -//! 3) declare a new struct for the added bridge and implement the `Full2WayBridge` trait for it. 
- -#[macro_use] -pub mod parachain_to_parachain; -#[macro_use] -pub mod relay_to_relay; -#[macro_use] -pub mod relay_to_parachain; - -use async_trait::async_trait; -use std::{marker::PhantomData, sync::Arc}; -use structopt::StructOpt; - -use futures::{FutureExt, TryFutureExt}; - -use crate::{ - cli::{bridge::MessagesCliBridge, HexLaneId, PrometheusParams}, - messages_lane::{MessagesRelayLimits, MessagesRelayParams}, - on_demand::OnDemandRelay, - TaggedAccount, TransactionParams, -}; -use bp_messages::LaneId; -use bp_runtime::BalanceOf; -use relay_substrate_client::{ - AccountIdOf, AccountKeyPairOf, Chain, ChainWithBalances, ChainWithMessages, - ChainWithRuntimeVersion, ChainWithTransactions, Client, -}; -use relay_utils::metrics::MetricsParams; -use sp_core::Pair; - -/// Parameters that have the same names across all bridges. -#[derive(Debug, PartialEq, StructOpt)] -pub struct HeadersAndMessagesSharedParams { - /// Hex-encoded lane identifiers that should be served by the complex relay. - #[structopt(long, default_value = "00000000")] - pub lane: Vec, - /// If passed, only mandatory headers (headers that are changing the GRANDPA authorities set) - /// are relayed. - #[structopt(long)] - pub only_mandatory_headers: bool, - #[structopt(flatten)] - /// Prometheus metrics params. - pub prometheus_params: PrometheusParams, -} - -/// Bridge parameters, shared by all bridge types. -pub struct Full2WayBridgeCommonParams< - Left: ChainWithTransactions + ChainWithRuntimeVersion, - Right: ChainWithTransactions + ChainWithRuntimeVersion, -> { - /// Shared parameters. - pub shared: HeadersAndMessagesSharedParams, - /// Parameters of the left chain. - pub left: BridgeEndCommonParams, - /// Parameters of the right chain. - pub right: BridgeEndCommonParams, - - /// Common metric parameters. 
- pub metrics_params: MetricsParams, -} - -impl< - Left: ChainWithTransactions + ChainWithRuntimeVersion, - Right: ChainWithTransactions + ChainWithRuntimeVersion, - > Full2WayBridgeCommonParams -{ - /// Creates new bridge parameters from its components. - pub fn new>( - shared: HeadersAndMessagesSharedParams, - left: BridgeEndCommonParams, - right: BridgeEndCommonParams, - ) -> anyhow::Result { - // Create metrics registry. - let metrics_params = shared.prometheus_params.clone().into_metrics_params()?; - let metrics_params = relay_utils::relay_metrics(metrics_params).into_params(); - - Ok(Self { shared, left, right, metrics_params }) - } -} - -/// Parameters that are associated with one side of the bridge. -pub struct BridgeEndCommonParams { - /// Chain client. - pub client: Client, - /// Params used for sending transactions to the chain. - pub tx_params: TransactionParams>, - /// Accounts, which balances are exposed as metrics by the relay process. - pub accounts: Vec>>, -} - -/// All data of the bidirectional complex relay. -pub struct FullBridge< - 'a, - Source: ChainWithTransactions + ChainWithRuntimeVersion, - Target: ChainWithTransactions + ChainWithRuntimeVersion, - Bridge: MessagesCliBridge, -> { - source: &'a mut BridgeEndCommonParams, - target: &'a mut BridgeEndCommonParams, - metrics_params: &'a MetricsParams, - _phantom_data: PhantomData, -} - -impl< - 'a, - Source: ChainWithTransactions + ChainWithRuntimeVersion, - Target: ChainWithTransactions + ChainWithRuntimeVersion, - Bridge: MessagesCliBridge, - > FullBridge<'a, Source, Target, Bridge> -where - AccountIdOf: From< as Pair>::Public>, - AccountIdOf: From< as Pair>::Public>, - BalanceOf: TryFrom> + Into, -{ - /// Construct complex relay given it components. 
- fn new( - source: &'a mut BridgeEndCommonParams, - target: &'a mut BridgeEndCommonParams, - metrics_params: &'a MetricsParams, - ) -> Self { - Self { source, target, metrics_params, _phantom_data: Default::default() } - } - - /// Returns message relay parameters. - fn messages_relay_params( - &self, - source_to_target_headers_relay: Arc>, - target_to_source_headers_relay: Arc>, - lane_id: LaneId, - maybe_limits: Option, - ) -> MessagesRelayParams { - MessagesRelayParams { - source_client: self.source.client.clone(), - source_transaction_params: self.source.tx_params.clone(), - target_client: self.target.client.clone(), - target_transaction_params: self.target.tx_params.clone(), - source_to_target_headers_relay: Some(source_to_target_headers_relay), - target_to_source_headers_relay: Some(target_to_source_headers_relay), - lane_id, - limits: maybe_limits, - metrics_params: self.metrics_params.clone().disable(), - } - } -} - -/// Base portion of the bidirectional complex relay. -/// -/// This main purpose of extracting this trait is that in different relays the implementation -/// of `start_on_demand_headers_relayers` method will be different. But the number of -/// implementations is limited to relay <> relay, parachain <> relay and parachain <> parachain. -/// This trait allows us to reuse these implementations in different bridges. -#[async_trait] -pub trait Full2WayBridgeBase: Sized + Send + Sync { - /// The CLI params for the bridge. - type Params; - /// The left relay chain. - type Left: ChainWithTransactions + ChainWithRuntimeVersion; - /// The right destination chain (it can be a relay or a parachain). - type Right: ChainWithTransactions + ChainWithRuntimeVersion; - - /// Reference to common relay parameters. - fn common(&self) -> &Full2WayBridgeCommonParams; - - /// Mutable reference to common relay parameters. - fn mut_common(&mut self) -> &mut Full2WayBridgeCommonParams; - - /// Start on-demand headers relays. 
- async fn start_on_demand_headers_relayers( - &mut self, - ) -> anyhow::Result<( - Arc>, - Arc>, - )>; -} - -/// Bidirectional complex relay. -#[async_trait] -pub trait Full2WayBridge: Sized + Sync -where - AccountIdOf: From< as Pair>::Public>, - AccountIdOf: From< as Pair>::Public>, - BalanceOf: TryFrom> + Into, - BalanceOf: TryFrom> + Into, -{ - /// Base portion of the bidirectional complex relay. - type Base: Full2WayBridgeBase; - - /// The left relay chain. - type Left: ChainWithTransactions - + ChainWithBalances - + ChainWithMessages - + ChainWithRuntimeVersion; - /// The right relay chain. - type Right: ChainWithTransactions - + ChainWithBalances - + ChainWithMessages - + ChainWithRuntimeVersion; - - /// Left to Right bridge. - type L2R: MessagesCliBridge; - /// Right to Left bridge - type R2L: MessagesCliBridge; - - /// Construct new bridge. - fn new(params: ::Params) -> anyhow::Result; - - /// Reference to the base relay portion. - fn base(&self) -> &Self::Base; - - /// Mutable reference to the base relay portion. - fn mut_base(&mut self) -> &mut Self::Base; - - /// Creates and returns Left to Right complex relay. - fn left_to_right(&mut self) -> FullBridge { - let common = self.mut_base().mut_common(); - FullBridge::<_, _, Self::L2R>::new( - &mut common.left, - &mut common.right, - &common.metrics_params, - ) - } - - /// Creates and returns Right to Left complex relay. - fn right_to_left(&mut self) -> FullBridge { - let common = self.mut_base().mut_common(); - FullBridge::<_, _, Self::R2L>::new( - &mut common.right, - &mut common.left, - &common.metrics_params, - ) - } - - /// Start complex relay. - async fn run(&mut self) -> anyhow::Result<()> { - // Register standalone metrics. 
- { - let common = self.mut_base().mut_common(); - common.left.accounts.push(TaggedAccount::Messages { - id: common.left.tx_params.signer.public().into(), - bridged_chain: Self::Right::NAME.to_string(), - }); - common.right.accounts.push(TaggedAccount::Messages { - id: common.right.tx_params.signer.public().into(), - bridged_chain: Self::Left::NAME.to_string(), - }); - } - - // start on-demand header relays - let (left_to_right_on_demand_headers, right_to_left_on_demand_headers) = - self.mut_base().start_on_demand_headers_relayers().await?; - - // add balance-related metrics - let lanes = self - .base() - .common() - .shared - .lane - .iter() - .cloned() - .map(Into::into) - .collect::>(); - { - let common = self.mut_base().mut_common(); - crate::messages_metrics::add_relay_balances_metrics::<_, Self::Right>( - common.left.client.clone(), - &common.metrics_params, - &common.left.accounts, - &lanes, - ) - .await?; - crate::messages_metrics::add_relay_balances_metrics::<_, Self::Left>( - common.right.client.clone(), - &common.metrics_params, - &common.right.accounts, - &lanes, - ) - .await?; - } - - // Need 2x capacity since we consider both directions for each lane - let mut message_relays = Vec::with_capacity(lanes.len() * 2); - for lane in lanes { - let left_to_right_messages = crate::messages_lane::run::< - ::MessagesLane, - >(self.left_to_right().messages_relay_params( - left_to_right_on_demand_headers.clone(), - right_to_left_on_demand_headers.clone(), - lane, - Self::L2R::maybe_messages_limits(), - )) - .map_err(|e| anyhow::format_err!("{}", e)) - .boxed(); - message_relays.push(left_to_right_messages); - - let right_to_left_messages = crate::messages_lane::run::< - ::MessagesLane, - >(self.right_to_left().messages_relay_params( - right_to_left_on_demand_headers.clone(), - left_to_right_on_demand_headers.clone(), - lane, - Self::R2L::maybe_messages_limits(), - )) - .map_err(|e| anyhow::format_err!("{}", e)) - .boxed(); - 
message_relays.push(right_to_left_messages); - } - - relay_utils::relay_metrics(self.base().common().metrics_params.clone()) - .expose() - .await - .map_err(|e| anyhow::format_err!("{}", e))?; - - futures::future::select_all(message_relays).await.0 - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{cli::chain_schema::RuntimeVersionType, declare_chain_cli_schema}; - - use relay_substrate_client::{ChainRuntimeVersion, Parachain, SimpleRuntimeVersion}; - - #[test] - // We need `#[allow(dead_code)]` because some of the methods generated by the macros - // are not used. - #[allow(dead_code)] - fn should_parse_parachain_to_parachain_options() { - // Chains. - declare_chain_cli_schema!(Kusama, kusama); - declare_chain_cli_schema!(BridgeHubKusama, bridge_hub_kusama); - declare_chain_cli_schema!(Polkadot, polkadot); - declare_chain_cli_schema!(BridgeHubPolkadot, bridge_hub_polkadot); - // Means to override signers of different layer transactions. - declare_chain_cli_schema!( - KusamaHeadersToBridgeHubPolkadot, - kusama_headers_to_bridge_hub_polkadot - ); - declare_chain_cli_schema!( - KusamaParachainsToBridgeHubPolkadot, - kusama_parachains_to_bridge_hub_polkadot - ); - declare_chain_cli_schema!( - PolkadotHeadersToBridgeHubKusama, - polkadot_headers_to_bridge_hub_kusama - ); - declare_chain_cli_schema!( - PolkadotParachainsToBridgeHubKusama, - polkadot_parachains_to_bridge_hub_kusama - ); - // Bridges. 
- declare_parachain_to_parachain_bridge_schema!( - BridgeHubKusama, - Kusama, - BridgeHubPolkadot, - Polkadot - ); - - let res = BridgeHubKusamaBridgeHubPolkadotHeadersAndMessages::from_iter(vec![ - "bridge-hub-kusama-bridge-hub-polkadot-headers-and-messages", - "--bridge-hub-kusama-host", - "bridge-hub-kusama-node-collator1", - "--bridge-hub-kusama-port", - "9944", - "--bridge-hub-kusama-signer", - "//Iden", - "--bridge-hub-kusama-transactions-mortality", - "64", - "--kusama-host", - "kusama-alice", - "--kusama-port", - "9944", - "--bridge-hub-polkadot-host", - "bridge-hub-polkadot-collator1", - "--bridge-hub-polkadot-port", - "9944", - "--bridge-hub-polkadot-signer", - "//George", - "--bridge-hub-polkadot-transactions-mortality", - "64", - "--polkadot-host", - "polkadot-alice", - "--polkadot-port", - "9944", - "--lane", - "00000000", - "--prometheus-host", - "0.0.0.0", - ]); - - // then - assert_eq!( - res, - BridgeHubKusamaBridgeHubPolkadotHeadersAndMessages { - shared: HeadersAndMessagesSharedParams { - lane: vec![HexLaneId([0x00, 0x00, 0x00, 0x00])], - only_mandatory_headers: false, - prometheus_params: PrometheusParams { - no_prometheus: false, - prometheus_host: "0.0.0.0".into(), - prometheus_port: 9616, - }, - }, - left: BridgeHubKusamaConnectionParams { - bridge_hub_kusama_uri: None, - bridge_hub_kusama_host: "bridge-hub-kusama-node-collator1".into(), - bridge_hub_kusama_port: 9944, - bridge_hub_kusama_path: None, - bridge_hub_kusama_secure: false, - bridge_hub_kusama_runtime_version: BridgeHubKusamaRuntimeVersionParams { - bridge_hub_kusama_version_mode: RuntimeVersionType::Bundle, - bridge_hub_kusama_spec_version: None, - bridge_hub_kusama_transaction_version: None, - }, - }, - left_sign: BridgeHubKusamaSigningParams { - bridge_hub_kusama_signer: Some("//Iden".into()), - bridge_hub_kusama_signer_password: None, - bridge_hub_kusama_signer_file: None, - bridge_hub_kusama_signer_password_file: None, - bridge_hub_kusama_transactions_mortality: Some(64), - }, 
- left_relay: KusamaConnectionParams { - kusama_uri: None, - kusama_host: "kusama-alice".into(), - kusama_port: 9944, - kusama_path: None, - kusama_secure: false, - kusama_runtime_version: KusamaRuntimeVersionParams { - kusama_version_mode: RuntimeVersionType::Bundle, - kusama_spec_version: None, - kusama_transaction_version: None, - }, - }, - right: BridgeHubPolkadotConnectionParams { - bridge_hub_polkadot_uri: None, - bridge_hub_polkadot_host: "bridge-hub-polkadot-collator1".into(), - bridge_hub_polkadot_port: 9944, - bridge_hub_polkadot_path: None, - bridge_hub_polkadot_secure: false, - bridge_hub_polkadot_runtime_version: BridgeHubPolkadotRuntimeVersionParams { - bridge_hub_polkadot_version_mode: RuntimeVersionType::Bundle, - bridge_hub_polkadot_spec_version: None, - bridge_hub_polkadot_transaction_version: None, - }, - }, - right_sign: BridgeHubPolkadotSigningParams { - bridge_hub_polkadot_signer: Some("//George".into()), - bridge_hub_polkadot_signer_password: None, - bridge_hub_polkadot_signer_file: None, - bridge_hub_polkadot_signer_password_file: None, - bridge_hub_polkadot_transactions_mortality: Some(64), - }, - right_relay: PolkadotConnectionParams { - polkadot_uri: None, - polkadot_host: "polkadot-alice".into(), - polkadot_port: 9944, - polkadot_path: None, - polkadot_secure: false, - polkadot_runtime_version: PolkadotRuntimeVersionParams { - polkadot_version_mode: RuntimeVersionType::Bundle, - polkadot_spec_version: None, - polkadot_transaction_version: None, - }, - }, - } - ); - } -} diff --git a/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/parachain_to_parachain.rs b/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/parachain_to_parachain.rs deleted file mode 100644 index 76accfa29..000000000 --- a/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/parachain_to_parachain.rs +++ /dev/null @@ -1,217 +0,0 @@ -// Copyright 2019-2022 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Parachain to parachain relayer CLI primitives. - -use async_trait::async_trait; -use std::sync::Arc; - -use crate::{ - cli::{ - bridge::{CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge}, - relay_headers_and_messages::{Full2WayBridgeBase, Full2WayBridgeCommonParams}, - }, - finality::SubstrateFinalitySyncPipeline, - on_demand::{ - headers::OnDemandHeadersRelay, parachains::OnDemandParachainsRelay, OnDemandRelay, - }, -}; -use bp_polkadot_core::parachains::ParaHash; -use pallet_bridge_parachains::{RelayBlockHash, RelayBlockHasher, RelayBlockNumber}; -use relay_substrate_client::{ - AccountIdOf, AccountKeyPairOf, Chain, ChainWithRuntimeVersion, ChainWithTransactions, Client, - Parachain, -}; -use sp_core::Pair; - -/// A base relay between two parachain from different consensus systems. -/// -/// Such relay starts 2 messages relay. It also starts 2 on-demand header relays and 2 on-demand -/// parachain heads relay. -pub struct ParachainToParachainBridge< - L2R: MessagesCliBridge + ParachainToRelayHeadersCliBridge, - R2L: MessagesCliBridge + ParachainToRelayHeadersCliBridge, -> where - ::Source: Parachain, - ::Source: Parachain, -{ - /// Parameters that are shared by all bridge types. - pub common: - Full2WayBridgeCommonParams<::Target, ::Target>, - /// Client of the left relay chain. 
- pub left_relay: Client<::SourceRelay>, - /// Client of the right relay chain. - pub right_relay: Client<::SourceRelay>, -} - -/// Create set of configuration objects specific to parachain-to-parachain relayer. -#[macro_export] -macro_rules! declare_parachain_to_parachain_bridge_schema { - // left-parachain, relay-chain-of-left-parachain, right-parachain, relay-chain-of-right-parachain - ($left_parachain:ident, $left_chain:ident, $right_parachain:ident, $right_chain:ident) => { - bp_runtime::paste::item! { - #[doc = $left_parachain ", " $left_chain ", " $right_parachain " and " $right_chain " headers+parachains+messages relay params."] - #[derive(Debug, PartialEq, StructOpt)] - pub struct [<$left_parachain $right_parachain HeadersAndMessages>] { - // shared parameters - #[structopt(flatten)] - shared: HeadersAndMessagesSharedParams, - - #[structopt(flatten)] - left: [<$left_parachain ConnectionParams>], - // default signer, which is always used to sign messages relay transactions on the left chain - #[structopt(flatten)] - left_sign: [<$left_parachain SigningParams>], - - #[structopt(flatten)] - left_relay: [<$left_chain ConnectionParams>], - - #[structopt(flatten)] - right: [<$right_parachain ConnectionParams>], - // default signer, which is always used to sign messages relay transactions on the right chain - #[structopt(flatten)] - right_sign: [<$right_parachain SigningParams>], - - #[structopt(flatten)] - right_relay: [<$right_chain ConnectionParams>], - } - - impl [<$left_parachain $right_parachain HeadersAndMessages>] { - async fn into_bridge< - Left: ChainWithTransactions + ChainWithRuntimeVersion + Parachain, - LeftRelay: ChainWithRuntimeVersion, - Right: ChainWithTransactions + ChainWithRuntimeVersion + Parachain, - RightRelay: ChainWithRuntimeVersion, - L2R: $crate::cli::bridge::CliBridgeBase - + MessagesCliBridge - + $crate::cli::bridge::ParachainToRelayHeadersCliBridge, - R2L: $crate::cli::bridge::CliBridgeBase - + MessagesCliBridge - + 
$crate::cli::bridge::ParachainToRelayHeadersCliBridge, - >( - self, - ) -> anyhow::Result<$crate::cli::relay_headers_and_messages::parachain_to_parachain::ParachainToParachainBridge> { - Ok($crate::cli::relay_headers_and_messages::parachain_to_parachain::ParachainToParachainBridge { - common: Full2WayBridgeCommonParams::new::( - self.shared, - BridgeEndCommonParams { - client: self.left.into_client::().await?, - tx_params: self.left_sign.transaction_params::()?, - accounts: vec![], - }, - BridgeEndCommonParams { - client: self.right.into_client::().await?, - tx_params: self.right_sign.transaction_params::()?, - accounts: vec![], - }, - )?, - left_relay: self.left_relay.into_client::().await?, - right_relay: self.right_relay.into_client::().await?, - }) - } - } - } - }; -} - -#[async_trait] -impl< - Left: Chain + ChainWithTransactions + ChainWithRuntimeVersion + Parachain, - Right: Chain + ChainWithTransactions + ChainWithRuntimeVersion + Parachain, - LeftRelay: Chain - + ChainWithRuntimeVersion, - RightRelay: Chain - + ChainWithRuntimeVersion, - L2R: CliBridgeBase - + MessagesCliBridge - + ParachainToRelayHeadersCliBridge, - R2L: CliBridgeBase - + MessagesCliBridge - + ParachainToRelayHeadersCliBridge, - > Full2WayBridgeBase for ParachainToParachainBridge -where - AccountIdOf: From< as Pair>::Public>, - AccountIdOf: From< as Pair>::Public>, -{ - type Params = ParachainToParachainBridge; - type Left = Left; - type Right = Right; - - fn common(&self) -> &Full2WayBridgeCommonParams { - &self.common - } - - fn mut_common(&mut self) -> &mut Full2WayBridgeCommonParams { - &mut self.common - } - - async fn start_on_demand_headers_relayers( - &mut self, - ) -> anyhow::Result<( - Arc>, - Arc>, - )> { - ::RelayFinality::start_relay_guards( - &self.common.right.client, - self.common.right.client.can_start_version_guard(), - ) - .await?; - ::RelayFinality::start_relay_guards( - &self.common.left.client, - self.common.left.client.can_start_version_guard(), - ) - .await?; - - 
let left_relay_to_right_on_demand_headers = - OnDemandHeadersRelay::<::RelayFinality>::new( - self.left_relay.clone(), - self.common.right.client.clone(), - self.common.right.tx_params.clone(), - self.common.shared.only_mandatory_headers, - Some(self.common.metrics_params.clone()), - ); - let right_relay_to_left_on_demand_headers = - OnDemandHeadersRelay::<::RelayFinality>::new( - self.right_relay.clone(), - self.common.left.client.clone(), - self.common.left.tx_params.clone(), - self.common.shared.only_mandatory_headers, - Some(self.common.metrics_params.clone()), - ); - - let left_to_right_on_demand_parachains = OnDemandParachainsRelay::< - ::ParachainFinality, - >::new( - self.left_relay.clone(), - self.common.right.client.clone(), - self.common.right.tx_params.clone(), - Arc::new(left_relay_to_right_on_demand_headers), - ); - let right_to_left_on_demand_parachains = OnDemandParachainsRelay::< - ::ParachainFinality, - >::new( - self.right_relay.clone(), - self.common.left.client.clone(), - self.common.left.tx_params.clone(), - Arc::new(right_relay_to_left_on_demand_headers), - ); - - Ok(( - Arc::new(left_to_right_on_demand_parachains), - Arc::new(right_to_left_on_demand_parachains), - )) - } -} diff --git a/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/relay_to_parachain.rs b/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/relay_to_parachain.rs deleted file mode 100644 index b75ac3e60..000000000 --- a/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/relay_to_parachain.rs +++ /dev/null @@ -1,199 +0,0 @@ -// Copyright 2019-2022 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Relay chain to parachain relayer CLI primitives. - -use async_trait::async_trait; -use std::sync::Arc; - -use crate::{ - cli::{ - bridge::{ - CliBridgeBase, MessagesCliBridge, ParachainToRelayHeadersCliBridge, - RelayToRelayHeadersCliBridge, - }, - relay_headers_and_messages::{Full2WayBridgeBase, Full2WayBridgeCommonParams}, - }, - finality::SubstrateFinalitySyncPipeline, - on_demand::{ - headers::OnDemandHeadersRelay, parachains::OnDemandParachainsRelay, OnDemandRelay, - }, -}; -use bp_polkadot_core::parachains::ParaHash; -use pallet_bridge_parachains::{RelayBlockHash, RelayBlockHasher, RelayBlockNumber}; -use relay_substrate_client::{ - AccountIdOf, AccountKeyPairOf, Chain, ChainWithRuntimeVersion, ChainWithTransactions, Client, - Parachain, -}; -use sp_core::Pair; - -/// A base relay between standalone (relay) chain and a parachain from another consensus system. -/// -/// Such relay starts 2 messages relay. It also starts 2 on-demand header relays and 1 on-demand -/// parachain heads relay. -pub struct RelayToParachainBridge< - L2R: MessagesCliBridge + RelayToRelayHeadersCliBridge, - R2L: MessagesCliBridge + ParachainToRelayHeadersCliBridge, -> where - ::Source: Parachain, -{ - /// Parameters that are shared by all bridge types. - pub common: - Full2WayBridgeCommonParams<::Target, ::Target>, - /// Client of the right relay chain. - pub right_relay: Client<::SourceRelay>, -} - -/// Create set of configuration objects specific to relay-to-parachain relayer. -#[macro_export] -macro_rules! 
declare_relay_to_parachain_bridge_schema { - // chain, parachain, relay-chain-of-parachain - ($left_chain:ident, $right_parachain:ident, $right_chain:ident) => { - bp_runtime::paste::item! { - #[doc = $left_chain ", " $right_parachain " and " $right_chain " headers+parachains+messages relay params."] - #[derive(Debug, PartialEq, StructOpt)] - pub struct [<$left_chain $right_parachain HeadersAndMessages>] { - // shared parameters - #[structopt(flatten)] - shared: HeadersAndMessagesSharedParams, - - #[structopt(flatten)] - left: [<$left_chain ConnectionParams>], - // default signer, which is always used to sign messages relay transactions on the left chain - #[structopt(flatten)] - left_sign: [<$left_chain SigningParams>], - - #[structopt(flatten)] - right: [<$right_parachain ConnectionParams>], - // default signer, which is always used to sign messages relay transactions on the right chain - #[structopt(flatten)] - right_sign: [<$right_parachain SigningParams>], - - #[structopt(flatten)] - right_relay: [<$right_chain ConnectionParams>], - } - - impl [<$left_chain $right_parachain HeadersAndMessages>] { - async fn into_bridge< - Left: ChainWithTransactions + ChainWithRuntimeVersion, - Right: ChainWithTransactions + ChainWithRuntimeVersion + Parachain, - RightRelay: ChainWithRuntimeVersion, - L2R: CliBridgeBase + MessagesCliBridge + RelayToRelayHeadersCliBridge, - R2L: CliBridgeBase - + MessagesCliBridge - + ParachainToRelayHeadersCliBridge, - >( - self, - ) -> anyhow::Result> { - Ok(RelayToParachainBridge { - common: Full2WayBridgeCommonParams::new::( - self.shared, - BridgeEndCommonParams { - client: self.left.into_client::().await?, - tx_params: self.left_sign.transaction_params::()?, - accounts: vec![], - }, - BridgeEndCommonParams { - client: self.right.into_client::().await?, - tx_params: self.right_sign.transaction_params::()?, - accounts: vec![], - }, - )?, - right_relay: self.right_relay.into_client::().await?, - }) - } - } - } - }; -} - -#[async_trait] 
-impl< - Left: ChainWithTransactions + ChainWithRuntimeVersion, - Right: Chain + ChainWithTransactions + ChainWithRuntimeVersion + Parachain, - RightRelay: Chain - + ChainWithRuntimeVersion, - L2R: CliBridgeBase - + MessagesCliBridge - + RelayToRelayHeadersCliBridge, - R2L: CliBridgeBase - + MessagesCliBridge - + ParachainToRelayHeadersCliBridge, - > Full2WayBridgeBase for RelayToParachainBridge -where - AccountIdOf: From< as Pair>::Public>, - AccountIdOf: From< as Pair>::Public>, -{ - type Params = RelayToParachainBridge; - type Left = Left; - type Right = Right; - - fn common(&self) -> &Full2WayBridgeCommonParams { - &self.common - } - - fn mut_common(&mut self) -> &mut Full2WayBridgeCommonParams { - &mut self.common - } - - async fn start_on_demand_headers_relayers( - &mut self, - ) -> anyhow::Result<( - Arc>, - Arc>, - )> { - ::Finality::start_relay_guards( - &self.common.right.client, - self.common.right.client.can_start_version_guard(), - ) - .await?; - ::RelayFinality::start_relay_guards( - &self.common.left.client, - self.common.left.client.can_start_version_guard(), - ) - .await?; - - let left_to_right_on_demand_headers = - OnDemandHeadersRelay::<::Finality>::new( - self.common.left.client.clone(), - self.common.right.client.clone(), - self.common.right.tx_params.clone(), - self.common.shared.only_mandatory_headers, - None, - ); - let right_relay_to_left_on_demand_headers = - OnDemandHeadersRelay::<::RelayFinality>::new( - self.right_relay.clone(), - self.common.left.client.clone(), - self.common.left.tx_params.clone(), - self.common.shared.only_mandatory_headers, - Some(self.common.metrics_params.clone()), - ); - let right_to_left_on_demand_parachains = OnDemandParachainsRelay::< - ::ParachainFinality, - >::new( - self.right_relay.clone(), - self.common.left.client.clone(), - self.common.left.tx_params.clone(), - Arc::new(right_relay_to_left_on_demand_headers), - ); - - Ok(( - Arc::new(left_to_right_on_demand_headers), - 
Arc::new(right_to_left_on_demand_parachains), - )) - } -} diff --git a/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/relay_to_relay.rs b/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/relay_to_relay.rs deleted file mode 100644 index b397ff50a..000000000 --- a/relays/lib-substrate-relay/src/cli/relay_headers_and_messages/relay_to_relay.rs +++ /dev/null @@ -1,169 +0,0 @@ -// Copyright 2019-2022 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -// we don't have any relay/standalone <> relay/standalone chain bridges, but we may need it in a -// future -#![allow(unused_macros)] - -//! Relay chain to Relay chain relayer CLI primitives. - -use async_trait::async_trait; -use std::sync::Arc; - -use crate::{ - cli::{ - bridge::{CliBridgeBase, MessagesCliBridge, RelayToRelayHeadersCliBridge}, - relay_headers_and_messages::{Full2WayBridgeBase, Full2WayBridgeCommonParams}, - }, - finality::SubstrateFinalitySyncPipeline, - on_demand::{headers::OnDemandHeadersRelay, OnDemandRelay}, -}; -use relay_substrate_client::{ - AccountIdOf, AccountKeyPairOf, ChainWithRuntimeVersion, ChainWithTransactions, -}; -use sp_core::Pair; - -/// A base relay between two standalone (relay) chains. -/// -/// Such relay starts 2 messages relay and 2 on-demand header relays. 
-pub struct RelayToRelayBridge< - L2R: MessagesCliBridge + RelayToRelayHeadersCliBridge, - R2L: MessagesCliBridge + RelayToRelayHeadersCliBridge, -> { - /// Parameters that are shared by all bridge types. - pub common: - Full2WayBridgeCommonParams<::Target, ::Target>, -} - -/// Create set of configuration objects specific to relay-to-relay relayer. -macro_rules! declare_relay_to_relay_bridge_schema { - ($left_chain:ident, $right_chain:ident) => { - bp_runtime::paste::item! { - #[doc = $left_chain " and " $right_chain " headers+messages relay params."] - #[derive(Debug, PartialEq, StructOpt)] - pub struct [<$left_chain $right_chain HeadersAndMessages>] { - #[structopt(flatten)] - shared: HeadersAndMessagesSharedParams, - - #[structopt(flatten)] - left: [<$left_chain ConnectionParams>], - // default signer, which is always used to sign messages relay transactions on the left chain - #[structopt(flatten)] - left_sign: [<$left_chain SigningParams>], - - #[structopt(flatten)] - right: [<$right_chain ConnectionParams>], - #[structopt(flatten)] - // default signer, which is always used to sign messages relay transactions on the right chain - right_sign: [<$right_chain SigningParams>], - } - - impl [<$left_chain $right_chain HeadersAndMessages>] { - async fn into_bridge< - Left: ChainWithTransactions + CliChain, - Right: ChainWithTransactions + CliChain, - L2R: CliBridgeBase + MessagesCliBridge + RelayToRelayHeadersCliBridge, - R2L: CliBridgeBase + MessagesCliBridge + RelayToRelayHeadersCliBridge, - >( - self, - ) -> anyhow::Result> { - Ok(RelayToRelayBridge { - common: Full2WayBridgeCommonParams::new::( - self.shared, - BridgeEndCommonParams { - client: self.left.into_client::().await?, - tx_params: self.left_sign.transaction_params::()?, - accounts: vec![], - }, - BridgeEndCommonParams { - client: self.right.into_client::().await?, - tx_params: self.right_sign.transaction_params::()?, - accounts: vec![], - }, - )?, - right_to_left_transaction_params: 
self.left_sign.transaction_params::(), - left_to_right_transaction_params: self.right_sign.transaction_params::(), - }) - } - } - } - }; -} - -#[async_trait] -impl< - Left: ChainWithTransactions + ChainWithRuntimeVersion, - Right: ChainWithTransactions + ChainWithRuntimeVersion, - L2R: CliBridgeBase - + MessagesCliBridge - + RelayToRelayHeadersCliBridge, - R2L: CliBridgeBase - + MessagesCliBridge - + RelayToRelayHeadersCliBridge, - > Full2WayBridgeBase for RelayToRelayBridge -where - AccountIdOf: From< as Pair>::Public>, - AccountIdOf: From< as Pair>::Public>, -{ - type Params = RelayToRelayBridge; - type Left = Left; - type Right = Right; - - fn common(&self) -> &Full2WayBridgeCommonParams { - &self.common - } - - fn mut_common(&mut self) -> &mut Full2WayBridgeCommonParams { - &mut self.common - } - - async fn start_on_demand_headers_relayers( - &mut self, - ) -> anyhow::Result<( - Arc>, - Arc>, - )> { - ::Finality::start_relay_guards( - &self.common.right.client, - self.common.right.client.can_start_version_guard(), - ) - .await?; - ::Finality::start_relay_guards( - &self.common.left.client, - self.common.left.client.can_start_version_guard(), - ) - .await?; - - let left_to_right_on_demand_headers = - OnDemandHeadersRelay::<::Finality>::new( - self.common.left.client.clone(), - self.common.right.client.clone(), - self.common.right.tx_params.clone(), - self.common.shared.only_mandatory_headers, - None, - ); - let right_to_left_on_demand_headers = - OnDemandHeadersRelay::<::Finality>::new( - self.common.right.client.clone(), - self.common.left.client.clone(), - self.common.left.tx_params.clone(), - self.common.shared.only_mandatory_headers, - None, - ); - - Ok((Arc::new(left_to_right_on_demand_headers), Arc::new(right_to_left_on_demand_headers))) - } -} diff --git a/relays/lib-substrate-relay/src/cli/relay_messages.rs b/relays/lib-substrate-relay/src/cli/relay_messages.rs deleted file mode 100644 index b672bd4f9..000000000 --- 
a/relays/lib-substrate-relay/src/cli/relay_messages.rs +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives for exposing the messages relaying functionality in the CLI. - -use crate::{ - cli::{bridge::*, chain_schema::*, HexLaneId, PrometheusParams}, - messages_lane::MessagesRelayParams, - TransactionParams, -}; - -use async_trait::async_trait; -use sp_core::Pair; -use structopt::StructOpt; - -use relay_substrate_client::{ - AccountIdOf, AccountKeyPairOf, BalanceOf, ChainWithRuntimeVersion, ChainWithTransactions, -}; - -/// Messages relaying params. -#[derive(StructOpt)] -pub struct RelayMessagesParams { - /// Hex-encoded lane id that should be served by the relay. Defaults to `00000000`. - #[structopt(long, default_value = "00000000")] - lane: HexLaneId, - #[structopt(flatten)] - source: SourceConnectionParams, - #[structopt(flatten)] - source_sign: SourceSigningParams, - #[structopt(flatten)] - target: TargetConnectionParams, - #[structopt(flatten)] - target_sign: TargetSigningParams, - #[structopt(flatten)] - prometheus_params: PrometheusParams, -} - -/// Trait used for relaying messages between 2 chains. 
-#[async_trait] -pub trait MessagesRelayer: MessagesCliBridge -where - Self::Source: ChainWithTransactions + ChainWithRuntimeVersion, - AccountIdOf: From< as Pair>::Public>, - AccountIdOf: From< as Pair>::Public>, - BalanceOf: TryFrom>, -{ - /// Start relaying messages. - async fn relay_messages(data: RelayMessagesParams) -> anyhow::Result<()> { - let source_client = data.source.into_client::().await?; - let source_sign = data.source_sign.to_keypair::()?; - let source_transactions_mortality = data.source_sign.transactions_mortality()?; - let target_client = data.target.into_client::().await?; - let target_sign = data.target_sign.to_keypair::()?; - let target_transactions_mortality = data.target_sign.transactions_mortality()?; - - crate::messages_lane::run::(MessagesRelayParams { - source_client, - source_transaction_params: TransactionParams { - signer: source_sign, - mortality: source_transactions_mortality, - }, - target_client, - target_transaction_params: TransactionParams { - signer: target_sign, - mortality: target_transactions_mortality, - }, - source_to_target_headers_relay: None, - target_to_source_headers_relay: None, - lane_id: data.lane.into(), - limits: Self::maybe_messages_limits(), - metrics_params: data.prometheus_params.into_metrics_params()?, - }) - .await - .map_err(|e| anyhow::format_err!("{}", e)) - } -} diff --git a/relays/lib-substrate-relay/src/cli/relay_parachains.rs b/relays/lib-substrate-relay/src/cli/relay_parachains.rs deleted file mode 100644 index e5a523494..000000000 --- a/relays/lib-substrate-relay/src/cli/relay_parachains.rs +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Primitives for exposing the parachains finality relaying functionality in the CLI. - -use async_std::sync::Mutex; -use async_trait::async_trait; -use parachains_relay::parachains_loop::{AvailableHeader, SourceClient, TargetClient}; -use relay_substrate_client::Parachain; -use relay_utils::metrics::{GlobalMetrics, StandaloneMetric}; -use std::sync::Arc; -use structopt::StructOpt; - -use crate::{ - cli::{ - bridge::{CliBridgeBase, ParachainToRelayHeadersCliBridge}, - chain_schema::*, - PrometheusParams, - }, - parachains::{source::ParachainsSource, target::ParachainsTarget, ParachainsPipelineAdapter}, - TransactionParams, -}; - -/// Parachains heads relaying params. -#[derive(StructOpt)] -pub struct RelayParachainsParams { - #[structopt(flatten)] - source: SourceConnectionParams, - #[structopt(flatten)] - target: TargetConnectionParams, - #[structopt(flatten)] - target_sign: TargetSigningParams, - #[structopt(flatten)] - prometheus_params: PrometheusParams, -} - -/// Trait used for relaying parachains finality between 2 chains. -#[async_trait] -pub trait ParachainsRelayer: ParachainToRelayHeadersCliBridge -where - ParachainsSource: - SourceClient>, - ParachainsTarget: - TargetClient>, - ::Source: Parachain, -{ - /// Start relaying parachains finality. 
- async fn relay_parachains(data: RelayParachainsParams) -> anyhow::Result<()> { - let source_client = data.source.into_client::().await?; - let source_client = ParachainsSource::::new( - source_client, - Arc::new(Mutex::new(AvailableHeader::Missing)), - ); - - let target_transaction_params = TransactionParams { - signer: data.target_sign.to_keypair::()?, - mortality: data.target_sign.target_transactions_mortality, - }; - let target_client = data.target.into_client::().await?; - let target_client = ParachainsTarget::::new( - target_client.clone(), - target_transaction_params, - ); - - let metrics_params: relay_utils::metrics::MetricsParams = - data.prometheus_params.into_metrics_params()?; - GlobalMetrics::new()?.register_and_spawn(&metrics_params.registry)?; - - parachains_relay::parachains_loop::run( - source_client, - target_client, - metrics_params, - futures::future::pending(), - ) - .await - .map_err(|e| anyhow::format_err!("{}", e)) - } -} diff --git a/relays/lib-substrate-relay/src/equivocation/mod.rs b/relays/lib-substrate-relay/src/equivocation/mod.rs deleted file mode 100644 index f6d58cbaa..000000000 --- a/relays/lib-substrate-relay/src/equivocation/mod.rs +++ /dev/null @@ -1,223 +0,0 @@ -// Copyright 2019-2023 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! 
Types and functions intended to ease adding of new Substrate -> Substrate -//! equivocation detection pipelines. - -mod source; -mod target; - -use crate::{ - equivocation::{source::SubstrateEquivocationSource, target::SubstrateEquivocationTarget}, - finality_base::{engine::Engine, SubstrateFinalityPipeline, SubstrateFinalityProof}, - TransactionParams, -}; - -use async_trait::async_trait; -use bp_runtime::{AccountIdOf, BlockNumberOf, HashOf}; -use equivocation_detector::EquivocationDetectionPipeline; -use finality_relay::FinalityPipeline; -use pallet_grandpa::{Call as GrandpaCall, Config as GrandpaConfig}; -use relay_substrate_client::{AccountKeyPairOf, CallOf, Chain, ChainWithTransactions, Client}; -use relay_utils::metrics::MetricsParams; -use sp_core::Pair; -use sp_runtime::traits::{Block, Header}; -use std::marker::PhantomData; - -/// Convenience trait that adds bounds to `SubstrateEquivocationDetectionPipeline`. -pub trait BaseSubstrateEquivocationDetectionPipeline: - SubstrateFinalityPipeline -{ - /// Bounded `SubstrateFinalityPipeline::SourceChain`. - type BoundedSourceChain: ChainWithTransactions; - - /// Bounded `AccountIdOf`. - type BoundedSourceChainAccountId: From< as Pair>::Public> - + Send; -} - -impl BaseSubstrateEquivocationDetectionPipeline for T -where - T: SubstrateFinalityPipeline, - T::SourceChain: ChainWithTransactions, - AccountIdOf: From< as Pair>::Public>, -{ - type BoundedSourceChain = T::SourceChain; - type BoundedSourceChainAccountId = AccountIdOf; -} - -/// Substrate -> Substrate equivocation detection pipeline. -#[async_trait] -pub trait SubstrateEquivocationDetectionPipeline: - BaseSubstrateEquivocationDetectionPipeline -{ - /// How the `report_equivocation` call is built ? - type ReportEquivocationCallBuilder: ReportEquivocationCallBuilder; - - /// Add relay guards if required. 
- async fn start_relay_guards( - source_client: &Client, - enable_version_guard: bool, - ) -> relay_substrate_client::Result<()> { - if enable_version_guard { - relay_substrate_client::guard::abort_on_spec_version_change( - source_client.clone(), - source_client.simple_runtime_version().await?.spec_version, - ); - } - Ok(()) - } -} - -type FinalityProoffOf

= <

::FinalityEngine as Engine< -

::SourceChain, ->>::FinalityProof; -type FinalityVerificationContextfOf

= - <

::FinalityEngine as Engine< -

::SourceChain, - >>::FinalityVerificationContext; -/// The type of the equivocation proof used by the `SubstrateEquivocationDetectionPipeline` -pub type EquivocationProofOf

= <

::FinalityEngine as Engine< -

::SourceChain, ->>::EquivocationProof; -type EquivocationsFinderOf

= <

::FinalityEngine as Engine< -

::SourceChain, ->>::EquivocationsFinder; -/// The type of the key owner proof used by the `SubstrateEquivocationDetectionPipeline` -pub type KeyOwnerProofOf

= <

::FinalityEngine as Engine< -

::SourceChain, ->>::KeyOwnerProof; - -/// Adapter that allows a `SubstrateEquivocationDetectionPipeline` to act as an -/// `EquivocationDetectionPipeline`. -#[derive(Clone, Debug)] -pub struct EquivocationDetectionPipelineAdapter { - _phantom: PhantomData

, -} - -impl FinalityPipeline - for EquivocationDetectionPipelineAdapter

-{ - const SOURCE_NAME: &'static str = P::SourceChain::NAME; - const TARGET_NAME: &'static str = P::TargetChain::NAME; - - type Hash = HashOf; - type Number = BlockNumberOf; - type FinalityProof = SubstrateFinalityProof

; -} - -impl EquivocationDetectionPipeline - for EquivocationDetectionPipelineAdapter

-{ - type TargetNumber = BlockNumberOf; - type FinalityVerificationContext = FinalityVerificationContextfOf

; - type EquivocationProof = EquivocationProofOf

; - type EquivocationsFinder = EquivocationsFinderOf

; -} - -/// Different ways of building `report_equivocation` calls. -pub trait ReportEquivocationCallBuilder { - /// Build a `report_equivocation` call to be executed on the source chain. - fn build_report_equivocation_call( - equivocation_proof: EquivocationProofOf

, - key_owner_proof: KeyOwnerProofOf

, - ) -> CallOf; -} - -/// Building the `report_equivocation` call when having direct access to the target chain runtime. -pub struct DirectReportGrandpaEquivocationCallBuilder { - _phantom: PhantomData<(P, R)>, -} - -impl ReportEquivocationCallBuilder

for DirectReportGrandpaEquivocationCallBuilder -where - P: SubstrateEquivocationDetectionPipeline, - P::FinalityEngine: Engine< - P::SourceChain, - EquivocationProof = sp_consensus_grandpa::EquivocationProof< - HashOf, - BlockNumberOf, - >, - >, - R: frame_system::Config> - + GrandpaConfig>, - ::Header: Header>, - CallOf: From>, -{ - fn build_report_equivocation_call( - equivocation_proof: EquivocationProofOf

, - key_owner_proof: KeyOwnerProofOf

, - ) -> CallOf { - GrandpaCall::::report_equivocation { - equivocation_proof: Box::new(equivocation_proof), - key_owner_proof, - } - .into() - } -} - -/// Macro that generates `ReportEquivocationCallBuilder` implementation for the case where -/// we only have access to the mocked version of the source chain runtime. -#[rustfmt::skip] -#[macro_export] -macro_rules! generate_report_equivocation_call_builder { - ($pipeline:ident, $mocked_builder:ident, $grandpa:path, $report_equivocation:path) => { - pub struct $mocked_builder; - - impl $crate::equivocation::ReportEquivocationCallBuilder<$pipeline> - for $mocked_builder - { - fn build_report_equivocation_call( - equivocation_proof: $crate::equivocation::EquivocationProofOf<$pipeline>, - key_owner_proof: $crate::equivocation::KeyOwnerProofOf<$pipeline>, - ) -> relay_substrate_client::CallOf< - <$pipeline as $crate::finality_base::SubstrateFinalityPipeline>::SourceChain - > { - bp_runtime::paste::item! { - $grandpa($report_equivocation { - equivocation_proof: Box::new(equivocation_proof), - key_owner_proof: key_owner_proof - }) - } - } - } - }; -} - -/// Run Substrate-to-Substrate equivocations detection loop. -pub async fn run( - source_client: Client, - target_client: Client, - source_transaction_params: TransactionParams>, - metrics_params: MetricsParams, -) -> anyhow::Result<()> { - log::info!( - target: "bridge", - "Starting {} -> {} equivocations detection loop", - P::SourceChain::NAME, - P::TargetChain::NAME, - ); - - equivocation_detector::run( - SubstrateEquivocationSource::

::new(source_client, source_transaction_params), - SubstrateEquivocationTarget::

::new(target_client), - P::TargetChain::AVERAGE_BLOCK_INTERVAL, - metrics_params, - futures::future::pending(), - ) - .await - .map_err(|e| anyhow::format_err!("{}", e)) -} diff --git a/relays/lib-substrate-relay/src/equivocation/source.rs b/relays/lib-substrate-relay/src/equivocation/source.rs deleted file mode 100644 index a0c7dcf5c..000000000 --- a/relays/lib-substrate-relay/src/equivocation/source.rs +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2019-2023 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Default generic implementation of equivocation source for basic Substrate client. - -use crate::{ - equivocation::{ - EquivocationDetectionPipelineAdapter, EquivocationProofOf, ReportEquivocationCallBuilder, - SubstrateEquivocationDetectionPipeline, - }, - finality_base::{engine::Engine, finality_proofs, SubstrateFinalityProofsStream}, - TransactionParams, -}; - -use async_trait::async_trait; -use bp_runtime::{HashOf, TransactionEra}; -use equivocation_detector::SourceClient; -use finality_relay::SourceClientBase; -use relay_substrate_client::{ - AccountKeyPairOf, Client, Error, TransactionTracker, UnsignedTransaction, -}; -use relay_utils::relay_loop::Client as RelayClient; - -/// Substrate node as equivocation source. 
-pub struct SubstrateEquivocationSource { - client: Client, - transaction_params: TransactionParams>, -} - -impl SubstrateEquivocationSource

{ - /// Create new instance of `SubstrateEquivocationSource`. - pub fn new( - client: Client, - transaction_params: TransactionParams>, - ) -> Self { - Self { client, transaction_params } - } -} - -impl Clone for SubstrateEquivocationSource

{ - fn clone(&self) -> Self { - Self { client: self.client.clone(), transaction_params: self.transaction_params.clone() } - } -} - -#[async_trait] -impl RelayClient for SubstrateEquivocationSource

{ - type Error = Error; - - async fn reconnect(&mut self) -> Result<(), Error> { - self.client.reconnect().await - } -} - -#[async_trait] -impl - SourceClientBase> for SubstrateEquivocationSource

-{ - type FinalityProofsStream = SubstrateFinalityProofsStream

; - - async fn finality_proofs(&self) -> Result { - finality_proofs::

(&self.client).await - } -} - -#[async_trait] -impl - SourceClient> for SubstrateEquivocationSource

-{ - type TransactionTracker = TransactionTracker>; - - async fn report_equivocation( - &self, - at: HashOf, - equivocation: EquivocationProofOf

, - ) -> Result { - let key_owner_proof = - P::FinalityEngine::generate_source_key_ownership_proof(&self.client, at, &equivocation) - .await?; - - let mortality = self.transaction_params.mortality; - let call = P::ReportEquivocationCallBuilder::build_report_equivocation_call( - equivocation, - key_owner_proof, - ); - self.client - .submit_and_watch_signed_extrinsic( - &self.transaction_params.signer, - move |best_block_id, transaction_nonce| { - Ok(UnsignedTransaction::new(call.into(), transaction_nonce) - .era(TransactionEra::new(best_block_id, mortality))) - }, - ) - .await - } -} diff --git a/relays/lib-substrate-relay/src/equivocation/target.rs b/relays/lib-substrate-relay/src/equivocation/target.rs deleted file mode 100644 index 6eee2ab91..000000000 --- a/relays/lib-substrate-relay/src/equivocation/target.rs +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2019-2023 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Default generic implementation of equivocation source for basic Substrate client. 
- -use crate::{ - equivocation::{ - EquivocationDetectionPipelineAdapter, FinalityProoffOf, FinalityVerificationContextfOf, - SubstrateEquivocationDetectionPipeline, - }, - finality_base::{best_synced_header_id, engine::Engine}, -}; - -use async_trait::async_trait; -use bp_header_chain::HeaderFinalityInfo; -use bp_runtime::{BlockNumberOf, HashOf}; -use equivocation_detector::TargetClient; -use relay_substrate_client::{Client, Error}; -use relay_utils::relay_loop::Client as RelayClient; -use sp_runtime::traits::Header; -use std::marker::PhantomData; - -/// Substrate node as equivocation source. -pub struct SubstrateEquivocationTarget { - client: Client, - - _phantom: PhantomData

, -} - -impl SubstrateEquivocationTarget

{ - /// Create new instance of `SubstrateEquivocationTarget`. - pub fn new(client: Client) -> Self { - Self { client, _phantom: Default::default() } - } -} - -impl Clone for SubstrateEquivocationTarget

{ - fn clone(&self) -> Self { - Self { client: self.client.clone(), _phantom: Default::default() } - } -} - -#[async_trait] -impl RelayClient for SubstrateEquivocationTarget

{ - type Error = Error; - - async fn reconnect(&mut self) -> Result<(), Error> { - self.client.reconnect().await - } -} - -#[async_trait] -impl - TargetClient> for SubstrateEquivocationTarget

-{ - async fn best_finalized_header_number( - &self, - ) -> Result, Self::Error> { - self.client.best_finalized_header_number().await - } - - async fn best_synced_header_hash( - &self, - at: BlockNumberOf, - ) -> Result>, Self::Error> { - Ok(best_synced_header_id::( - &self.client, - self.client.header_by_number(at).await?.hash(), - ) - .await? - .map(|id| id.hash())) - } - - async fn finality_verification_context( - &self, - at: BlockNumberOf, - ) -> Result, Self::Error> { - P::FinalityEngine::finality_verification_context( - &self.client, - self.client.header_by_number(at).await?.hash(), - ) - .await - } - - async fn synced_headers_finality_info( - &self, - at: BlockNumberOf, - ) -> Result< - Vec, FinalityVerificationContextfOf

>>, - Self::Error, - > { - P::FinalityEngine::synced_headers_finality_info( - &self.client, - self.client.header_by_number(at).await?.hash(), - ) - .await - } -} diff --git a/relays/lib-substrate-relay/src/error.rs b/relays/lib-substrate-relay/src/error.rs deleted file mode 100644 index 2ebd9130f..000000000 --- a/relays/lib-substrate-relay/src/error.rs +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Relay errors. - -use relay_substrate_client as client; -use sp_consensus_grandpa::AuthorityList; -use sp_runtime::traits::MaybeDisplay; -use std::fmt::Debug; -use thiserror::Error; - -/// Relay errors. -#[derive(Error, Debug)] -pub enum Error { - /// Failed to submit signed extrinsic from to the target chain. - #[error("Failed to submit {0} transaction: {1:?}")] - SubmitTransaction(&'static str, client::Error), - /// Failed subscribe to justification stream of the source chain. - #[error("Failed to subscribe to {0} justifications: {1:?}")] - Subscribe(&'static str, client::Error), - /// Failed subscribe to read justification from the source chain (client error). 
- #[error("Failed to read {0} justification from the stream: {1}")] - ReadJustification(&'static str, client::Error), - /// Failed subscribe to read justification from the source chain (stream ended). - #[error("Failed to read {0} justification from the stream: stream has ended unexpectedly")] - ReadJustificationStreamEnded(&'static str), - /// Failed subscribe to decode justification from the source chain. - #[error("Failed to decode {0} justification: {1:?}")] - DecodeJustification(&'static str, codec::Error), - /// GRANDPA authorities read from the source chain are invalid. - #[error("Read invalid {0} authorities set: {1:?}")] - ReadInvalidAuthorities(&'static str, AuthorityList), - /// Failed to guess initial GRANDPA authorities at the given header of the source chain. - #[error("Failed to guess initial {0} GRANDPA authorities set id: checked all possible ids in range [0; {1}]")] - GuessInitialAuthorities(&'static str, HeaderNumber), - /// Failed to retrieve GRANDPA authorities at the given header from the source chain. - #[error("Failed to retrive {0} GRANDPA authorities set at header {1}: {2:?}")] - RetrieveAuthorities(&'static str, Hash, client::Error), - /// Failed to decode GRANDPA authorities at the given header of the source chain. - #[error("Failed to decode {0} GRANDPA authorities set at header {1}: {2:?}")] - DecodeAuthorities(&'static str, Hash, codec::Error), - /// Failed to retrieve header by the hash from the source chain. - #[error("Failed to retrieve {0} header with hash {1}: {2:?}")] - RetrieveHeader(&'static str, Hash, client::Error), - /// Failed to submit signed extrinsic from to the target chain. 
- #[error( - "Failed to retrieve `is_initialized` flag of the with-{0} finality pallet at {1}: {2:?}" - )] - IsInitializedRetrieve(&'static str, &'static str, client::Error), -} diff --git a/relays/lib-substrate-relay/src/finality/initialize.rs b/relays/lib-substrate-relay/src/finality/initialize.rs deleted file mode 100644 index 5dde46c39..000000000 --- a/relays/lib-substrate-relay/src/finality/initialize.rs +++ /dev/null @@ -1,163 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Initialize Substrate -> Substrate finality bridge. -//! -//! Initialization is a transaction that calls `initialize()` function of the -//! finality pallet (GRANDPA/BEEFY/...). This transaction brings initial header -//! and authorities set from source to target chain. The finality sync starts -//! with this header. - -use crate::{error::Error, finality_base::engine::Engine}; -use sp_core::Pair; - -use bp_runtime::HeaderIdOf; -use relay_substrate_client::{ - AccountKeyPairOf, Chain, ChainWithTransactions, Client, Error as SubstrateError, - UnsignedTransaction, -}; -use relay_utils::{TrackedTransactionStatus, TransactionTracker}; -use sp_runtime::traits::Header as HeaderT; - -/// Submit headers-bridge initialization transaction. 
-pub async fn initialize< - E: Engine, - SourceChain: Chain, - TargetChain: ChainWithTransactions, - F, ->( - source_client: Client, - target_client: Client, - target_signer: AccountKeyPairOf, - prepare_initialize_transaction: F, - dry_run: bool, -) where - F: FnOnce( - TargetChain::Nonce, - E::InitializationData, - ) -> Result, SubstrateError> - + Send - + 'static, - TargetChain::AccountId: From<::Public>, -{ - let result = do_initialize::( - source_client, - target_client, - target_signer, - prepare_initialize_transaction, - dry_run, - ) - .await; - - match result { - Ok(Some(tx_status)) => match tx_status { - TrackedTransactionStatus::Lost => { - log::error!( - target: "bridge", - "Failed to execute {}-headers bridge initialization transaction on {}: {:?}.", - SourceChain::NAME, - TargetChain::NAME, - tx_status - ) - }, - TrackedTransactionStatus::Finalized(_) => { - log::info!( - target: "bridge", - "Successfully executed {}-headers bridge initialization transaction on {}: {:?}.", - SourceChain::NAME, - TargetChain::NAME, - tx_status - ) - }, - }, - Ok(None) => (), - Err(err) => log::error!( - target: "bridge", - "Failed to submit {}-headers bridge initialization transaction to {}: {:?}", - SourceChain::NAME, - TargetChain::NAME, - err, - ), - } -} - -/// Craft and submit initialization transaction, returning any error that may occur. 
-async fn do_initialize< - E: Engine, - SourceChain: Chain, - TargetChain: ChainWithTransactions, - F, ->( - source_client: Client, - target_client: Client, - target_signer: AccountKeyPairOf, - prepare_initialize_transaction: F, - dry_run: bool, -) -> Result< - Option>>, - Error::Number>, -> -where - F: FnOnce( - TargetChain::Nonce, - E::InitializationData, - ) -> Result, SubstrateError> - + Send - + 'static, - TargetChain::AccountId: From<::Public>, -{ - let is_initialized = E::is_initialized(&target_client) - .await - .map_err(|e| Error::IsInitializedRetrieve(SourceChain::NAME, TargetChain::NAME, e))?; - if is_initialized { - log::info!( - target: "bridge", - "{}-headers bridge at {} is already initialized. Skipping", - SourceChain::NAME, - TargetChain::NAME, - ); - if !dry_run { - return Ok(None) - } - } - - let initialization_data = E::prepare_initialization_data(source_client).await?; - log::info!( - target: "bridge", - "Prepared initialization data for {}-headers bridge at {}: {:?}", - SourceChain::NAME, - TargetChain::NAME, - initialization_data, - ); - - let tx_status = target_client - .submit_and_watch_signed_extrinsic(&target_signer, move |_, transaction_nonce| { - let tx = prepare_initialize_transaction(transaction_nonce, initialization_data); - if dry_run { - Err(SubstrateError::Custom( - "Not submitting extrinsic in `dry-run` mode!".to_string(), - )) - } else { - tx - } - }) - .await - .map_err(|err| Error::SubmitTransaction(TargetChain::NAME, err))? - .wait() - .await; - - Ok(Some(tx_status)) -} diff --git a/relays/lib-substrate-relay/src/finality/mod.rs b/relays/lib-substrate-relay/src/finality/mod.rs deleted file mode 100644 index 206f628b1..000000000 --- a/relays/lib-substrate-relay/src/finality/mod.rs +++ /dev/null @@ -1,270 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Types and functions intended to ease adding of new Substrate -> Substrate -//! finality proofs synchronization pipelines. - -use crate::{ - finality::{source::SubstrateFinalitySource, target::SubstrateFinalityTarget}, - finality_base::{engine::Engine, SubstrateFinalityPipeline, SubstrateFinalityProof}, - TransactionParams, -}; - -use async_trait::async_trait; -use bp_header_chain::justification::{GrandpaJustification, JustificationVerificationContext}; -use finality_relay::{FinalityPipeline, FinalitySyncPipeline}; -use pallet_bridge_grandpa::{Call as BridgeGrandpaCall, Config as BridgeGrandpaConfig}; -use relay_substrate_client::{ - transaction_stall_timeout, AccountIdOf, AccountKeyPairOf, BlockNumberOf, CallOf, Chain, - ChainWithTransactions, Client, HashOf, HeaderOf, SyncHeader, -}; -use relay_utils::metrics::MetricsParams; -use sp_core::Pair; -use std::{fmt::Debug, marker::PhantomData}; - -pub mod initialize; -pub mod source; -pub mod target; - -/// Default limit of recent finality proofs. -/// -/// Finality delay of 4096 blocks is unlikely to happen in practice in -/// Substrate+GRANDPA based chains (good to know). -pub(crate) const RECENT_FINALITY_PROOFS_LIMIT: usize = 4096; - -/// Convenience trait that adds bounds to `SubstrateFinalitySyncPipeline`. 
-pub trait BaseSubstrateFinalitySyncPipeline: - SubstrateFinalityPipeline -{ - /// Bounded `SubstrateFinalityPipeline::TargetChain`. - type BoundedTargetChain: ChainWithTransactions; - - /// Bounded `AccountIdOf`. - type BoundedTargetChainAccountId: From< as Pair>::Public> - + Send; -} - -impl BaseSubstrateFinalitySyncPipeline for T -where - T: SubstrateFinalityPipeline, - T::TargetChain: ChainWithTransactions, - AccountIdOf: From< as Pair>::Public>, -{ - type BoundedTargetChain = T::TargetChain; - type BoundedTargetChainAccountId = AccountIdOf; -} - -/// Substrate -> Substrate finality proofs synchronization pipeline. -#[async_trait] -pub trait SubstrateFinalitySyncPipeline: BaseSubstrateFinalitySyncPipeline { - /// How submit finality proof call is built? - type SubmitFinalityProofCallBuilder: SubmitFinalityProofCallBuilder; - - /// Add relay guards if required. - async fn start_relay_guards( - target_client: &Client, - enable_version_guard: bool, - ) -> relay_substrate_client::Result<()> { - if enable_version_guard { - relay_substrate_client::guard::abort_on_spec_version_change( - target_client.clone(), - target_client.simple_runtime_version().await?.spec_version, - ); - } - Ok(()) - } -} - -/// Adapter that allows all `SubstrateFinalitySyncPipeline` to act as `FinalitySyncPipeline`. -#[derive(Clone, Debug)] -pub struct FinalitySyncPipelineAdapter { - _phantom: PhantomData

, -} - -impl FinalityPipeline for FinalitySyncPipelineAdapter

{ - const SOURCE_NAME: &'static str = P::SourceChain::NAME; - const TARGET_NAME: &'static str = P::TargetChain::NAME; - - type Hash = HashOf; - type Number = BlockNumberOf; - type FinalityProof = SubstrateFinalityProof

; -} - -impl FinalitySyncPipeline for FinalitySyncPipelineAdapter

{ - type ConsensusLogReader = >::ConsensusLogReader; - type Header = SyncHeader>; -} - -/// Different ways of building `submit_finality_proof` calls. -pub trait SubmitFinalityProofCallBuilder { - /// Given source chain header, its finality proof and the current authority set id, build call - /// of `submit_finality_proof` function of bridge GRANDPA module at the target chain. - fn build_submit_finality_proof_call( - header: SyncHeader>, - proof: SubstrateFinalityProof

, - context: <

::FinalityEngine as Engine>::FinalityVerificationContext, - ) -> CallOf; -} - -/// Building `submit_finality_proof` call when you have direct access to the target -/// chain runtime. -pub struct DirectSubmitGrandpaFinalityProofCallBuilder { - _phantom: PhantomData<(P, R, I)>, -} - -impl SubmitFinalityProofCallBuilder

- for DirectSubmitGrandpaFinalityProofCallBuilder -where - P: SubstrateFinalitySyncPipeline, - R: BridgeGrandpaConfig, - I: 'static, - R::BridgedChain: bp_runtime::Chain

>, - CallOf: From>, - P::FinalityEngine: Engine< - P::SourceChain, - FinalityProof = GrandpaJustification>, - FinalityVerificationContext = JustificationVerificationContext, - >, -{ - fn build_submit_finality_proof_call( - header: SyncHeader>, - proof: GrandpaJustification>, - _context: JustificationVerificationContext, - ) -> CallOf { - BridgeGrandpaCall::::submit_finality_proof { - finality_target: Box::new(header.into_inner()), - justification: proof, - } - .into() - } -} - -/// Macro that generates `SubmitFinalityProofCallBuilder` implementation for the case when -/// you only have an access to the mocked version of target chain runtime. In this case you -/// should provide "name" of the call variant for the bridge GRANDPA calls and the "name" of -/// the variant for the `submit_finality_proof` call within that first option. -#[rustfmt::skip] -#[macro_export] -macro_rules! generate_submit_finality_proof_call_builder { - ($pipeline:ident, $mocked_builder:ident, $bridge_grandpa:path, $submit_finality_proof:path) => { - pub struct $mocked_builder; - - impl $crate::finality::SubmitFinalityProofCallBuilder<$pipeline> - for $mocked_builder - { - fn build_submit_finality_proof_call( - header: relay_substrate_client::SyncHeader< - relay_substrate_client::HeaderOf< - <$pipeline as $crate::finality_base::SubstrateFinalityPipeline>::SourceChain - > - >, - proof: bp_header_chain::justification::GrandpaJustification< - relay_substrate_client::HeaderOf< - <$pipeline as $crate::finality_base::SubstrateFinalityPipeline>::SourceChain - > - >, - _context: bp_header_chain::justification::JustificationVerificationContext, - ) -> relay_substrate_client::CallOf< - <$pipeline as $crate::finality_base::SubstrateFinalityPipeline>::TargetChain - > { - bp_runtime::paste::item! 
{ - $bridge_grandpa($submit_finality_proof { - finality_target: Box::new(header.into_inner()), - justification: proof - }) - } - } - } - }; -} - -/// Macro that generates `SubmitFinalityProofCallBuilder` implementation for the case when -/// you only have an access to the mocked version of target chain runtime. In this case you -/// should provide "name" of the call variant for the bridge GRANDPA calls and the "name" of -/// the variant for the `submit_finality_proof_ex` call within that first option. -#[rustfmt::skip] -#[macro_export] -macro_rules! generate_submit_finality_proof_ex_call_builder { - ($pipeline:ident, $mocked_builder:ident, $bridge_grandpa:path, $submit_finality_proof:path) => { - pub struct $mocked_builder; - - impl $crate::finality::SubmitFinalityProofCallBuilder<$pipeline> - for $mocked_builder - { - fn build_submit_finality_proof_call( - header: relay_substrate_client::SyncHeader< - relay_substrate_client::HeaderOf< - <$pipeline as $crate::finality_base::SubstrateFinalityPipeline>::SourceChain - > - >, - proof: bp_header_chain::justification::GrandpaJustification< - relay_substrate_client::HeaderOf< - <$pipeline as $crate::finality_base::SubstrateFinalityPipeline>::SourceChain - > - >, - context: bp_header_chain::justification::JustificationVerificationContext, - ) -> relay_substrate_client::CallOf< - <$pipeline as $crate::finality_base::SubstrateFinalityPipeline>::TargetChain - > { - bp_runtime::paste::item! { - $bridge_grandpa($submit_finality_proof { - finality_target: Box::new(header.into_inner()), - justification: proof, - current_set_id: context.authority_set_id - }) - } - } - } - }; -} - -/// Run Substrate-to-Substrate finality sync loop. 
-pub async fn run( - source_client: Client, - target_client: Client, - only_mandatory_headers: bool, - transaction_params: TransactionParams>, - metrics_params: MetricsParams, -) -> anyhow::Result<()> { - log::info!( - target: "bridge", - "Starting {} -> {} finality proof relay", - P::SourceChain::NAME, - P::TargetChain::NAME, - ); - - finality_relay::run( - SubstrateFinalitySource::

::new(source_client, None), - SubstrateFinalityTarget::

::new(target_client, transaction_params.clone()), - finality_relay::FinalitySyncParams { - tick: std::cmp::max( - P::SourceChain::AVERAGE_BLOCK_INTERVAL, - P::TargetChain::AVERAGE_BLOCK_INTERVAL, - ), - recent_finality_proofs_limit: RECENT_FINALITY_PROOFS_LIMIT, - stall_timeout: transaction_stall_timeout( - transaction_params.mortality, - P::TargetChain::AVERAGE_BLOCK_INTERVAL, - relay_utils::STALL_TIMEOUT, - ), - only_mandatory_headers, - }, - metrics_params, - futures::future::pending(), - ) - .await - .map_err(|e| anyhow::format_err!("{}", e)) -} diff --git a/relays/lib-substrate-relay/src/finality/source.rs b/relays/lib-substrate-relay/src/finality/source.rs deleted file mode 100644 index c94af6108..000000000 --- a/relays/lib-substrate-relay/src/finality/source.rs +++ /dev/null @@ -1,259 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Default generic implementation of finality source for basic Substrate client. 
- -use crate::{ - finality::{FinalitySyncPipelineAdapter, SubstrateFinalitySyncPipeline}, - finality_base::{ - engine::Engine, finality_proofs, SubstrateFinalityProof, SubstrateFinalityProofsStream, - }, -}; - -use async_std::sync::{Arc, Mutex}; -use async_trait::async_trait; -use bp_header_chain::FinalityProof; -use codec::Decode; -use finality_relay::{SourceClient, SourceClientBase}; -use futures::{ - select, - stream::{try_unfold, Stream, StreamExt, TryStreamExt}, -}; -use num_traits::One; -use relay_substrate_client::{BlockNumberOf, BlockWithJustification, Client, Error, HeaderOf}; -use relay_utils::{relay_loop::Client as RelayClient, UniqueSaturatedInto}; - -/// Shared updatable reference to the maximal header number that we want to sync from the source. -pub type RequiredHeaderNumberRef = Arc::BlockNumber>>; - -/// Substrate node as finality source. -pub struct SubstrateFinalitySource { - client: Client, - maximal_header_number: Option>, -} - -impl SubstrateFinalitySource

{ - /// Create new headers source using given client. - pub fn new( - client: Client, - maximal_header_number: Option>, - ) -> Self { - SubstrateFinalitySource { client, maximal_header_number } - } - - /// Returns reference to the underlying RPC client. - pub fn client(&self) -> &Client { - &self.client - } - - /// Returns best finalized block number. - pub async fn on_chain_best_finalized_block_number( - &self, - ) -> Result, Error> { - // we **CAN** continue to relay finality proofs if source node is out of sync, because - // target node may be missing proofs that are already available at the source - self.client.best_finalized_header_number().await - } - - /// Return header and its justification of the given block or its descendant that - /// has a GRANDPA justification. - /// - /// This method is optimized for cases when `block_number` is close to the best finalized - /// chain block. - pub async fn prove_block_finality( - &self, - block_number: BlockNumberOf, - ) -> Result< - (relay_substrate_client::SyncHeader>, SubstrateFinalityProof

), - Error, - > { - // first, subscribe to proofs - let next_persistent_proof = - self.persistent_proofs_stream(block_number + One::one()).await?.fuse(); - let next_ephemeral_proof = self.ephemeral_proofs_stream(block_number).await?.fuse(); - - // in perfect world we'll need to return justfication for the requested `block_number` - let (header, maybe_proof) = self.header_and_finality_proof(block_number).await?; - if let Some(proof) = maybe_proof { - return Ok((header, proof)) - } - - // otherwise we don't care which header to return, so let's select first - futures::pin_mut!(next_persistent_proof, next_ephemeral_proof); - loop { - select! { - maybe_header_and_proof = next_persistent_proof.next() => match maybe_header_and_proof { - Some(header_and_proof) => return header_and_proof, - None => continue, - }, - maybe_header_and_proof = next_ephemeral_proof.next() => match maybe_header_and_proof { - Some(header_and_proof) => return header_and_proof, - None => continue, - }, - complete => return Err(Error::FinalityProofNotFound(block_number.unique_saturated_into())) - } - } - } - - /// Returns stream of headers and their persistent proofs, starting from given block. - async fn persistent_proofs_stream( - &self, - block_number: BlockNumberOf, - ) -> Result< - impl Stream< - Item = Result< - ( - relay_substrate_client::SyncHeader>, - SubstrateFinalityProof

, - ), - Error, - >, - >, - Error, - > { - let client = self.client.clone(); - let best_finalized_block_number = client.best_finalized_header_number().await?; - Ok(try_unfold((client, block_number), move |(client, current_block_number)| async move { - // if we've passed the `best_finalized_block_number`, we no longer need persistent - // justifications - if current_block_number > best_finalized_block_number { - return Ok(None) - } - - let (header, maybe_proof) = - header_and_finality_proof::

(&client, current_block_number).await?; - let next_block_number = current_block_number + One::one(); - let next_state = (client, next_block_number); - - Ok(Some((maybe_proof.map(|proof| (header, proof)), next_state))) - }) - .try_filter_map(|maybe_result| async { Ok(maybe_result) })) - } - - /// Returns stream of headers and their ephemeral proofs, starting from given block. - async fn ephemeral_proofs_stream( - &self, - block_number: BlockNumberOf, - ) -> Result< - impl Stream< - Item = Result< - ( - relay_substrate_client::SyncHeader>, - SubstrateFinalityProof

, - ), - Error, - >, - >, - Error, - > { - let client = self.client.clone(); - Ok(self.finality_proofs().await?.map(Ok).try_filter_map(move |proof| { - let client = client.clone(); - async move { - if proof.target_header_number() < block_number { - return Ok(None) - } - - let header = client.header_by_number(proof.target_header_number()).await?; - Ok(Some((header.into(), proof))) - } - })) - } -} - -impl Clone for SubstrateFinalitySource

{ - fn clone(&self) -> Self { - SubstrateFinalitySource { - client: self.client.clone(), - maximal_header_number: self.maximal_header_number.clone(), - } - } -} - -#[async_trait] -impl RelayClient for SubstrateFinalitySource

{ - type Error = Error; - - async fn reconnect(&mut self) -> Result<(), Error> { - self.client.reconnect().await - } -} - -#[async_trait] -impl SourceClientBase> - for SubstrateFinalitySource

-{ - type FinalityProofsStream = SubstrateFinalityProofsStream

; - - async fn finality_proofs(&self) -> Result { - finality_proofs::

(&self.client).await - } -} - -#[async_trait] -impl SourceClient> - for SubstrateFinalitySource

-{ - async fn best_finalized_block_number(&self) -> Result, Error> { - let mut finalized_header_number = self.on_chain_best_finalized_block_number().await?; - // never return block number larger than requested. This way we'll never sync headers - // past `maximal_header_number` - if let Some(ref maximal_header_number) = self.maximal_header_number { - let maximal_header_number = *maximal_header_number.lock().await; - if finalized_header_number > maximal_header_number { - finalized_header_number = maximal_header_number; - } - } - Ok(finalized_header_number) - } - - async fn header_and_finality_proof( - &self, - number: BlockNumberOf, - ) -> Result< - ( - relay_substrate_client::SyncHeader>, - Option>, - ), - Error, - > { - header_and_finality_proof::

(&self.client, number).await - } -} - -async fn header_and_finality_proof( - client: &Client, - number: BlockNumberOf, -) -> Result< - ( - relay_substrate_client::SyncHeader>, - Option>, - ), - Error, -> { - let header_hash = client.block_hash_by_number(number).await?; - let signed_block = client.get_block(Some(header_hash)).await?; - - let justification = signed_block - .justification(P::FinalityEngine::ID) - .map(|raw_justification| { - SubstrateFinalityProof::

::decode(&mut raw_justification.as_slice()) - }) - .transpose() - .map_err(Error::ResponseParseFailed)?; - - Ok((signed_block.header().into(), justification)) -} diff --git a/relays/lib-substrate-relay/src/finality/target.rs b/relays/lib-substrate-relay/src/finality/target.rs deleted file mode 100644 index 18464d523..000000000 --- a/relays/lib-substrate-relay/src/finality/target.rs +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Substrate client as Substrate finality proof target. - -use crate::{ - finality::{ - FinalitySyncPipelineAdapter, SubmitFinalityProofCallBuilder, SubstrateFinalitySyncPipeline, - }, - finality_base::{best_synced_header_id, engine::Engine, SubstrateFinalityProof}, - TransactionParams, -}; - -use async_trait::async_trait; -use finality_relay::TargetClient; -use relay_substrate_client::{ - AccountKeyPairOf, Client, Error, HeaderIdOf, HeaderOf, SyncHeader, TransactionEra, - TransactionTracker, UnsignedTransaction, -}; -use relay_utils::relay_loop::Client as RelayClient; -use sp_runtime::traits::Header; - -/// Substrate client as Substrate finality target. -pub struct SubstrateFinalityTarget { - client: Client, - transaction_params: TransactionParams>, -} - -impl SubstrateFinalityTarget

{ - /// Create new Substrate headers target. - pub fn new( - client: Client, - transaction_params: TransactionParams>, - ) -> Self { - SubstrateFinalityTarget { client, transaction_params } - } - - /// Ensure that the bridge pallet at target chain is active. - pub async fn ensure_pallet_active(&self) -> Result<(), Error> { - let is_halted = P::FinalityEngine::is_halted(&self.client).await?; - if is_halted { - return Err(Error::BridgePalletIsHalted) - } - - let is_initialized = P::FinalityEngine::is_initialized(&self.client).await?; - if !is_initialized { - return Err(Error::BridgePalletIsNotInitialized) - } - - Ok(()) - } -} - -impl Clone for SubstrateFinalityTarget

{ - fn clone(&self) -> Self { - SubstrateFinalityTarget { - client: self.client.clone(), - transaction_params: self.transaction_params.clone(), - } - } -} - -#[async_trait] -impl RelayClient for SubstrateFinalityTarget

{ - type Error = Error; - - async fn reconnect(&mut self) -> Result<(), Error> { - self.client.reconnect().await - } -} - -#[async_trait] -impl TargetClient> - for SubstrateFinalityTarget

-{ - type TransactionTracker = TransactionTracker>; - - async fn best_finalized_source_block_id(&self) -> Result, Error> { - // we can't continue to relay finality if target node is out of sync, because - // it may have already received (some of) headers that we're going to relay - self.client.ensure_synced().await?; - // we can't relay finality if bridge pallet at target chain is halted - self.ensure_pallet_active().await?; - - Ok(best_synced_header_id::( - &self.client, - self.client.best_header().await?.hash(), - ) - .await? - .ok_or(Error::BridgePalletIsNotInitialized)?) - } - - async fn submit_finality_proof( - &self, - header: SyncHeader>, - mut proof: SubstrateFinalityProof

, - ) -> Result { - // verify and runtime module at target chain may require optimized finality proof - let context = - P::FinalityEngine::verify_and_optimize_proof(&self.client, &header, &mut proof).await?; - - // now we may submit optimized finality proof - let mortality = self.transaction_params.mortality; - let call = P::SubmitFinalityProofCallBuilder::build_submit_finality_proof_call( - header, proof, context, - ); - self.client - .submit_and_watch_signed_extrinsic( - &self.transaction_params.signer, - move |best_block_id, transaction_nonce| { - Ok(UnsignedTransaction::new(call.into(), transaction_nonce) - .era(TransactionEra::new(best_block_id, mortality))) - }, - ) - .await - } -} diff --git a/relays/lib-substrate-relay/src/finality_base/engine.rs b/relays/lib-substrate-relay/src/finality_base/engine.rs deleted file mode 100644 index e517b0fd9..000000000 --- a/relays/lib-substrate-relay/src/finality_base/engine.rs +++ /dev/null @@ -1,464 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Support of different finality engines, available in Substrate. 
- -use crate::error::Error; -use async_trait::async_trait; -use bp_header_chain::{ - justification::{ - verify_and_optimize_justification, GrandpaEquivocationsFinder, GrandpaJustification, - JustificationVerificationContext, - }, - max_expected_submit_finality_proof_arguments_size, AuthoritySet, ConsensusLogReader, - FinalityProof, FindEquivocations, GrandpaConsensusLogReader, HeaderFinalityInfo, - HeaderGrandpaInfo, StoredHeaderGrandpaInfo, -}; -use bp_runtime::{BasicOperatingMode, HeaderIdProvider, OperatingMode}; -use codec::{Decode, Encode}; -use num_traits::{One, Zero}; -use relay_substrate_client::{ - BlockNumberOf, Chain, ChainWithGrandpa, Client, Error as SubstrateError, HashOf, HeaderOf, - Subscription, SubstrateFinalityClient, SubstrateGrandpaFinalityClient, -}; -use sp_consensus_grandpa::{AuthorityList as GrandpaAuthoritiesSet, GRANDPA_ENGINE_ID}; -use sp_core::{storage::StorageKey, Bytes}; -use sp_runtime::{scale_info::TypeInfo, traits::Header, ConsensusEngineId, SaturatedConversion}; -use std::{fmt::Debug, marker::PhantomData}; - -/// Result of checking maximal expected call size. -pub enum MaxExpectedCallSizeCheck { - /// Size is ok and call will be refunded. - Ok, - /// The call size exceeds the maximal expected and relayer will only get partial refund. - Exceeds { - /// Actual call size. - call_size: u32, - /// Maximal expected call size. - max_call_size: u32, - }, -} - -/// Finality engine, used by the Substrate chain. -#[async_trait] -pub trait Engine: Send { - /// Unique consensus engine identifier. - const ID: ConsensusEngineId; - /// A reader that can extract the consensus log from the header digest and interpret it. - type ConsensusLogReader: ConsensusLogReader; - /// Type of Finality RPC client used by this engine. - type FinalityClient: SubstrateFinalityClient; - /// Type of finality proofs, used by consensus engine. - type FinalityProof: FinalityProof, BlockNumberOf> + Decode + Encode; - /// The context needed for verifying finality proofs. 
- type FinalityVerificationContext: Debug + Send; - /// The type of the equivocation proof used by the consensus engine. - type EquivocationProof: Clone + Debug + Send + Sync; - /// The equivocations finder. - type EquivocationsFinder: FindEquivocations< - Self::FinalityProof, - Self::FinalityVerificationContext, - Self::EquivocationProof, - >; - /// The type of the key owner proof used by the consensus engine. - type KeyOwnerProof: Send; - /// Type of bridge pallet initialization data. - type InitializationData: Debug + Send + Sync + 'static; - /// Type of bridge pallet operating mode. - type OperatingMode: OperatingMode + 'static; - - /// Returns storage at the bridged (target) chain that corresponds to some value that is - /// missing from the storage until bridge pallet is initialized. - /// - /// Note that we don't care about type of the value - just if it present or not. - fn is_initialized_key() -> StorageKey; - - /// Returns `Ok(true)` if finality pallet at the bridged chain has already been initialized. - async fn is_initialized( - target_client: &Client, - ) -> Result { - Ok(target_client - .raw_storage_value(Self::is_initialized_key(), None) - .await? - .is_some()) - } - - /// Returns storage key at the bridged (target) chain that corresponds to the variable - /// that holds the operating mode of the pallet. - fn pallet_operating_mode_key() -> StorageKey; - - /// Returns `Ok(true)` if finality pallet at the bridged chain is halted. - async fn is_halted( - target_client: &Client, - ) -> Result { - Ok(target_client - .storage_value::(Self::pallet_operating_mode_key(), None) - .await? - .map(|operating_mode| operating_mode.is_halted()) - .unwrap_or(false)) - } - - /// A method to subscribe to encoded finality proofs, given source client. 
- async fn source_finality_proofs( - source_client: &Client, - ) -> Result, SubstrateError> { - source_client.subscribe_finality_justifications::().await - } - - /// Verify and optimize finality proof before sending it to the target node. - /// - /// Apart from optimization, we expect this method to perform all required checks - /// that the `header` and `proof` are valid at the current state of the target chain. - async fn verify_and_optimize_proof( - target_client: &Client, - header: &C::Header, - proof: &mut Self::FinalityProof, - ) -> Result; - - /// Checks whether the given `header` and its finality `proof` fit the maximal expected - /// call size limit. If result is `MaxExpectedCallSizeCheck::Exceeds { .. }`, this - /// submission won't be fully refunded and relayer will spend its own funds on that. - fn check_max_expected_call_size( - header: &C::Header, - proof: &Self::FinalityProof, - ) -> MaxExpectedCallSizeCheck; - - /// Prepare initialization data for the finality bridge pallet. - async fn prepare_initialization_data( - client: Client, - ) -> Result, BlockNumberOf>>; - - /// Get the context needed for validating a finality proof. - async fn finality_verification_context( - target_client: &Client, - at: HashOf, - ) -> Result; - - /// Returns the finality info associated to the source headers synced with the target - /// at the provided block. - async fn synced_headers_finality_info( - target_client: &Client, - at: TargetChain::Hash, - ) -> Result< - Vec>, - SubstrateError, - >; - - /// Generate key ownership proof for the provided equivocation. - async fn generate_source_key_ownership_proof( - source_client: &Client, - at: C::Hash, - equivocation: &Self::EquivocationProof, - ) -> Result; -} - -/// GRANDPA finality engine. -pub struct Grandpa(PhantomData); - -impl Grandpa { - /// Read header by hash from the source client. 
- async fn source_header( - source_client: &Client, - header_hash: C::Hash, - ) -> Result, BlockNumberOf>> { - source_client - .header_by_hash(header_hash) - .await - .map_err(|err| Error::RetrieveHeader(C::NAME, header_hash, err)) - } - - /// Read GRANDPA authorities set at given header. - async fn source_authorities_set( - source_client: &Client, - header_hash: C::Hash, - ) -> Result, BlockNumberOf>> { - let raw_authorities_set = source_client - .grandpa_authorities_set(header_hash) - .await - .map_err(|err| Error::RetrieveAuthorities(C::NAME, header_hash, err))?; - GrandpaAuthoritiesSet::decode(&mut &raw_authorities_set[..]) - .map_err(|err| Error::DecodeAuthorities(C::NAME, header_hash, err)) - } -} - -#[async_trait] -impl Engine for Grandpa { - const ID: ConsensusEngineId = GRANDPA_ENGINE_ID; - type ConsensusLogReader = GrandpaConsensusLogReader<::Number>; - type FinalityClient = SubstrateGrandpaFinalityClient; - type FinalityProof = GrandpaJustification>; - type FinalityVerificationContext = JustificationVerificationContext; - type EquivocationProof = sp_consensus_grandpa::EquivocationProof, BlockNumberOf>; - type EquivocationsFinder = GrandpaEquivocationsFinder; - type KeyOwnerProof = C::KeyOwnerProof; - type InitializationData = bp_header_chain::InitializationData; - type OperatingMode = BasicOperatingMode; - - fn is_initialized_key() -> StorageKey { - bp_header_chain::storage_keys::best_finalized_key(C::WITH_CHAIN_GRANDPA_PALLET_NAME) - } - - fn pallet_operating_mode_key() -> StorageKey { - bp_header_chain::storage_keys::pallet_operating_mode_key(C::WITH_CHAIN_GRANDPA_PALLET_NAME) - } - - async fn verify_and_optimize_proof( - target_client: &Client, - header: &C::Header, - proof: &mut Self::FinalityProof, - ) -> Result { - let verification_context = Grandpa::::finality_verification_context( - target_client, - target_client.best_header().await?.hash(), - ) - .await?; - // we're risking with race here - we have decided to submit justification some time ago 
and - // actual authorities set (which we have read now) may have changed, so this - // `optimize_justification` may fail. But if target chain is configured properly, it'll fail - // anyway, after we submit transaction and failing earlier is better. So - it is fine - verify_and_optimize_justification( - (header.hash(), *header.number()), - &verification_context, - proof, - ) - .map(|_| verification_context) - .map_err(|e| { - SubstrateError::Custom(format!( - "Failed to optimize {} GRANDPA jutification for header {:?}: {:?}", - C::NAME, - header.id(), - e, - )) - }) - } - - fn check_max_expected_call_size( - header: &C::Header, - proof: &Self::FinalityProof, - ) -> MaxExpectedCallSizeCheck { - let is_mandatory = Self::ConsensusLogReader::schedules_authorities_change(header.digest()); - let call_size: u32 = - header.encoded_size().saturating_add(proof.encoded_size()).saturated_into(); - let max_call_size = max_expected_submit_finality_proof_arguments_size::( - is_mandatory, - proof.commit.precommits.len().saturated_into(), - ); - if call_size > max_call_size { - MaxExpectedCallSizeCheck::Exceeds { call_size, max_call_size } - } else { - MaxExpectedCallSizeCheck::Ok - } - } - - /// Prepare initialization data for the GRANDPA verifier pallet. - async fn prepare_initialization_data( - source_client: Client, - ) -> Result, BlockNumberOf>> { - // In ideal world we just need to get best finalized header and then to read GRANDPA - // authorities set (`pallet_grandpa::CurrentSetId` + `GrandpaApi::grandpa_authorities()`) at - // this header. - // - // But now there are problems with this approach - `CurrentSetId` may return invalid value. - // So here we're waiting for the next justification, read the authorities set and then try - // to figure out the set id with bruteforce. 
- let justifications = Self::source_finality_proofs(&source_client) - .await - .map_err(|err| Error::Subscribe(C::NAME, err))?; - // Read next justification - the header that it finalizes will be used as initial header. - let justification = justifications - .next() - .await - .map_err(|e| Error::ReadJustification(C::NAME, e)) - .and_then(|justification| { - justification.ok_or(Error::ReadJustificationStreamEnded(C::NAME)) - })?; - - // Read initial header. - let justification: GrandpaJustification = - Decode::decode(&mut &justification.0[..]) - .map_err(|err| Error::DecodeJustification(C::NAME, err))?; - - let (initial_header_hash, initial_header_number) = - (justification.commit.target_hash, justification.commit.target_number); - - let initial_header = Self::source_header(&source_client, initial_header_hash).await?; - log::trace!(target: "bridge", "Selected {} initial header: {}/{}", - C::NAME, - initial_header_number, - initial_header_hash, - ); - - // Read GRANDPA authorities set at initial header. - let initial_authorities_set = - Self::source_authorities_set(&source_client, initial_header_hash).await?; - log::trace!(target: "bridge", "Selected {} initial authorities set: {:?}", - C::NAME, - initial_authorities_set, - ); - - // If initial header changes the GRANDPA authorities set, then we need previous authorities - // to verify justification. - let mut authorities_for_verification = initial_authorities_set.clone(); - let scheduled_change = GrandpaConsensusLogReader::>::find_scheduled_change( - initial_header.digest(), - ); - assert!( - scheduled_change.as_ref().map(|c| c.delay.is_zero()).unwrap_or(true), - "GRANDPA authorities change at {} scheduled to happen in {:?} blocks. 
We expect\ - regular change to have zero delay", - initial_header_hash, - scheduled_change.as_ref().map(|c| c.delay), - ); - let schedules_change = scheduled_change.is_some(); - if schedules_change { - authorities_for_verification = - Self::source_authorities_set(&source_client, *initial_header.parent_hash()).await?; - log::trace!( - target: "bridge", - "Selected {} header is scheduling GRANDPA authorities set changes. Using previous set: {:?}", - C::NAME, - authorities_for_verification, - ); - } - - // Now let's try to guess authorities set id by verifying justification. - let mut initial_authorities_set_id = 0; - let mut min_possible_block_number = C::BlockNumber::zero(); - loop { - log::trace!( - target: "bridge", "Trying {} GRANDPA authorities set id: {}", - C::NAME, - initial_authorities_set_id, - ); - - let is_valid_set_id = verify_and_optimize_justification( - (initial_header_hash, initial_header_number), - &AuthoritySet { - authorities: authorities_for_verification.clone(), - set_id: initial_authorities_set_id, - } - .try_into() - .map_err(|_| { - Error::ReadInvalidAuthorities(C::NAME, authorities_for_verification.clone()) - })?, - &mut justification.clone(), - ) - .is_ok(); - - if is_valid_set_id { - break - } - - initial_authorities_set_id += 1; - min_possible_block_number += One::one(); - if min_possible_block_number > initial_header_number { - // there can't be more authorities set changes than headers => if we have reached - // `initial_block_number` and still have not found correct value of - // `initial_authorities_set_id`, then something else is broken => fail - return Err(Error::GuessInitialAuthorities(C::NAME, initial_header_number)) - } - } - - Ok(bp_header_chain::InitializationData { - header: Box::new(initial_header), - authority_list: initial_authorities_set, - set_id: if schedules_change { - initial_authorities_set_id + 1 - } else { - initial_authorities_set_id - }, - operating_mode: BasicOperatingMode::Normal, - }) - } - - async fn 
finality_verification_context( - target_client: &Client, - at: HashOf, - ) -> Result { - let current_authority_set_key = bp_header_chain::storage_keys::current_authority_set_key( - C::WITH_CHAIN_GRANDPA_PALLET_NAME, - ); - let authority_set: AuthoritySet = target_client - .storage_value(current_authority_set_key, Some(at)) - .await? - .map(Ok) - .unwrap_or(Err(SubstrateError::Custom(format!( - "{} `CurrentAuthoritySet` is missing from the {} storage", - C::NAME, - TargetChain::NAME, - ))))?; - - authority_set.try_into().map_err(|e| { - SubstrateError::Custom(format!( - "{} `CurrentAuthoritySet` from the {} storage is invalid: {e:?}", - C::NAME, - TargetChain::NAME, - )) - }) - } - - async fn synced_headers_finality_info( - target_client: &Client, - at: TargetChain::Hash, - ) -> Result>>, SubstrateError> { - let stored_headers_grandpa_info: Vec>> = target_client - .typed_state_call(C::SYNCED_HEADERS_GRANDPA_INFO_METHOD.to_string(), (), Some(at)) - .await?; - - let mut headers_grandpa_info = vec![]; - for stored_header_grandpa_info in stored_headers_grandpa_info { - headers_grandpa_info.push(stored_header_grandpa_info.try_into().map_err(|e| { - SubstrateError::Custom(format!( - "{} `AuthoritySet` synced to {} is invalid: {e:?} ", - C::NAME, - TargetChain::NAME, - )) - })?); - } - - Ok(headers_grandpa_info) - } - - async fn generate_source_key_ownership_proof( - source_client: &Client, - at: C::Hash, - equivocation: &Self::EquivocationProof, - ) -> Result { - let set_id = equivocation.set_id(); - let offender = equivocation.offender(); - - let opaque_key_owner_proof = source_client - .generate_grandpa_key_ownership_proof(at, set_id, offender.clone()) - .await? 
- .ok_or(SubstrateError::Custom(format!( - "Couldn't get GRANDPA key ownership proof from {} at block: {at} \ - for offender: {:?}, set_id: {set_id} ", - C::NAME, - offender.clone(), - )))?; - - let key_owner_proof = - opaque_key_owner_proof.decode().ok_or(SubstrateError::Custom(format!( - "Couldn't decode GRANDPA `OpaqueKeyOwnnershipProof` from {} at block: {at} - to `{:?}` for offender: {:?}, set_id: {set_id}, at block: {at}", - C::NAME, - ::type_info().path, - offender.clone(), - )))?; - - Ok(key_owner_proof) - } -} diff --git a/relays/lib-substrate-relay/src/finality_base/mod.rs b/relays/lib-substrate-relay/src/finality_base/mod.rs deleted file mode 100644 index 825960b1b..000000000 --- a/relays/lib-substrate-relay/src/finality_base/mod.rs +++ /dev/null @@ -1,107 +0,0 @@ -// Copyright 2019-2023 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Types and functions intended to ease adding of new Substrate -> Substrate -//! finality pipelines. - -pub mod engine; - -use crate::finality_base::engine::Engine; - -use async_trait::async_trait; -use bp_runtime::{HashOf, HeaderIdOf}; -use codec::Decode; -use futures::{stream::unfold, Stream, StreamExt}; -use relay_substrate_client::{Chain, Client, Error}; -use std::{fmt::Debug, pin::Pin}; - -/// Substrate -> Substrate finality related pipeline. 
-#[async_trait] -pub trait SubstrateFinalityPipeline: 'static + Clone + Debug + Send + Sync { - /// Headers of this chain are submitted to the `TargetChain`. - type SourceChain: Chain; - /// Headers of the `SourceChain` are submitted to this chain. - type TargetChain: Chain; - /// Finality engine. - type FinalityEngine: Engine; -} - -/// Substrate finality proof. Specific to the used `FinalityEngine`. -pub type SubstrateFinalityProof

= <

::FinalityEngine as Engine< -

::SourceChain, ->>::FinalityProof; - -/// Substrate finality proofs stream. -pub type SubstrateFinalityProofsStream

= - Pin> + Send>>; - -/// Subscribe to new finality proofs. -pub async fn finality_proofs( - client: &Client, -) -> Result, Error> { - Ok(unfold( - P::FinalityEngine::source_finality_proofs(client).await?, - move |subscription| async move { - loop { - let log_error = |err| { - log::error!( - target: "bridge", - "Failed to read justification target from the {} justifications stream: {:?}", - P::SourceChain::NAME, - err, - ); - }; - - let next_justification = - subscription.next().await.map_err(|err| log_error(err.to_string())).ok()??; - - let decoded_justification = - >::FinalityProof::decode( - &mut &next_justification[..], - ); - - let justification = match decoded_justification { - Ok(j) => j, - Err(err) => { - log_error(format!("decode failed with error {err:?}")); - continue - }, - }; - - return Some((justification, subscription)) - } - }, - ) - .boxed()) -} - -/// Get the id of the best `SourceChain` header known to the `TargetChain` at the provided -/// target block using the exposed runtime API method. -/// -/// The runtime API method should be `FinalityApi::best_finalized()`. -pub async fn best_synced_header_id( - target_client: &Client, - at: HashOf, -) -> Result>, Error> -where - SourceChain: Chain, - TargetChain: Chain, -{ - // now let's read id of best finalized peer header at our best finalized block - target_client - .typed_state_call(SourceChain::BEST_FINALIZED_HEADER_ID_METHOD.into(), (), Some(at)) - .await -} diff --git a/relays/lib-substrate-relay/src/lib.rs b/relays/lib-substrate-relay/src/lib.rs deleted file mode 100644 index b90453ae0..000000000 --- a/relays/lib-substrate-relay/src/lib.rs +++ /dev/null @@ -1,129 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! The library of substrate relay. contains some public codes to provide to substrate relay. - -#![warn(missing_docs)] - -use relay_substrate_client::{Chain, ChainWithUtilityPallet, UtilityPallet}; - -use std::marker::PhantomData; - -pub mod cli; -pub mod equivocation; -pub mod error; -pub mod finality; -pub mod finality_base; -pub mod messages_lane; -pub mod messages_metrics; -pub mod messages_source; -pub mod messages_target; -pub mod on_demand; -pub mod parachains; - -/// Transaction creation parameters. -#[derive(Clone, Debug)] -pub struct TransactionParams { - /// Transactions author. - pub signer: TS, - /// Transactions mortality. - pub mortality: Option, -} - -/// Tagged relay account, which balance may be exposed as metrics by the relay. -#[derive(Clone, Debug)] -pub enum TaggedAccount { - /// Account, used to sign message (also headers and parachains) relay transactions from given - /// bridged chain. - Messages { - /// Account id. - id: AccountId, - /// Name of the bridged chain, which sends us messages or delivery confirmations. - bridged_chain: String, - }, -} - -impl TaggedAccount { - /// Returns reference to the account id. - pub fn id(&self) -> &AccountId { - match *self { - TaggedAccount::Messages { ref id, .. } => id, - } - } - - /// Returns stringified account tag. 
- pub fn tag(&self) -> String { - match *self { - TaggedAccount::Messages { ref bridged_chain, .. } => { - format!("{bridged_chain}Messages") - }, - } - } -} - -/// Batch call builder. -pub trait BatchCallBuilder: Clone + Send + Sync { - /// Create batch call from given calls vector. - fn build_batch_call(&self, _calls: Vec) -> Call; -} - -/// Batch call builder constructor. -pub trait BatchCallBuilderConstructor: Clone { - /// Call builder, used by this constructor. - type CallBuilder: BatchCallBuilder; - /// Create a new instance of a batch call builder. - fn new_builder() -> Option; -} - -/// Batch call builder based on `pallet-utility`. -#[derive(Clone)] -pub struct UtilityPalletBatchCallBuilder(PhantomData); - -impl BatchCallBuilder for UtilityPalletBatchCallBuilder -where - C: ChainWithUtilityPallet, -{ - fn build_batch_call(&self, calls: Vec) -> C::Call { - C::UtilityPallet::build_batch_call(calls) - } -} - -impl BatchCallBuilderConstructor for UtilityPalletBatchCallBuilder -where - C: ChainWithUtilityPallet, -{ - type CallBuilder = Self; - - fn new_builder() -> Option { - Some(Self(Default::default())) - } -} - -// A `BatchCallBuilderConstructor` that always returns `None`. -impl BatchCallBuilderConstructor for () { - type CallBuilder = (); - fn new_builder() -> Option { - None - } -} - -// Dummy `BatchCallBuilder` implementation that must never be used outside -// of the `impl BatchCallBuilderConstructor for ()` code. -impl BatchCallBuilder for () { - fn build_batch_call(&self, _calls: Vec) -> Call { - unreachable!("never called, because ()::new_builder() returns None; qed") - } -} diff --git a/relays/lib-substrate-relay/src/messages_lane.rs b/relays/lib-substrate-relay/src/messages_lane.rs deleted file mode 100644 index abeab8c14..000000000 --- a/relays/lib-substrate-relay/src/messages_lane.rs +++ /dev/null @@ -1,587 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Tools for supporting message lanes between two Substrate-based chains. - -use crate::{ - messages_source::{SubstrateMessagesProof, SubstrateMessagesSource}, - messages_target::{SubstrateMessagesDeliveryProof, SubstrateMessagesTarget}, - on_demand::OnDemandRelay, - BatchCallBuilder, BatchCallBuilderConstructor, TransactionParams, -}; - -use async_std::sync::Arc; -use bp_messages::{ChainWithMessages as _, LaneId, MessageNonce}; -use bp_runtime::{ - AccountIdOf, Chain as _, EncodedOrDecodedCall, HeaderIdOf, TransactionEra, WeightExtraOps, -}; -use bridge_runtime_common::messages::{ - source::FromBridgedChainMessagesDeliveryProof, target::FromBridgedChainMessagesProof, -}; -use codec::Encode; -use frame_support::{dispatch::GetDispatchInfo, weights::Weight}; -use messages_relay::{message_lane::MessageLane, message_lane_loop::BatchTransaction}; -use pallet_bridge_messages::{Call as BridgeMessagesCall, Config as BridgeMessagesConfig}; -use relay_substrate_client::{ - transaction_stall_timeout, AccountKeyPairOf, BalanceOf, BlockNumberOf, CallOf, Chain, - ChainWithMessages, ChainWithTransactions, Client, Error as SubstrateError, HashOf, SignParam, - UnsignedTransaction, -}; -use relay_utils::{ - metrics::{GlobalMetrics, MetricsParams, StandaloneMetric}, - STALL_TIMEOUT, -}; -use sp_core::Pair; -use sp_runtime::traits::Zero; -use 
std::{convert::TryFrom, fmt::Debug, marker::PhantomData}; - -/// Substrate -> Substrate messages synchronization pipeline. -pub trait SubstrateMessageLane: 'static + Clone + Debug + Send + Sync { - /// Messages of this chain are relayed to the `TargetChain`. - type SourceChain: ChainWithMessages + ChainWithTransactions; - /// Messages from the `SourceChain` are dispatched on this chain. - type TargetChain: ChainWithMessages + ChainWithTransactions; - - /// How receive messages proof call is built? - type ReceiveMessagesProofCallBuilder: ReceiveMessagesProofCallBuilder; - /// How receive messages delivery proof call is built? - type ReceiveMessagesDeliveryProofCallBuilder: ReceiveMessagesDeliveryProofCallBuilder; - - /// How batch calls are built at the source chain? - type SourceBatchCallBuilder: BatchCallBuilderConstructor>; - /// How batch calls are built at the target chain? - type TargetBatchCallBuilder: BatchCallBuilderConstructor>; -} - -/// Adapter that allows all `SubstrateMessageLane` to act as `MessageLane`. -#[derive(Clone, Debug)] -pub struct MessageLaneAdapter { - _phantom: PhantomData

, -} - -impl MessageLane for MessageLaneAdapter

{ - const SOURCE_NAME: &'static str = P::SourceChain::NAME; - const TARGET_NAME: &'static str = P::TargetChain::NAME; - - type MessagesProof = SubstrateMessagesProof; - type MessagesReceivingProof = SubstrateMessagesDeliveryProof; - - type SourceChainBalance = BalanceOf; - type SourceHeaderNumber = BlockNumberOf; - type SourceHeaderHash = HashOf; - - type TargetHeaderNumber = BlockNumberOf; - type TargetHeaderHash = HashOf; -} - -/// Substrate <-> Substrate messages relay parameters. -pub struct MessagesRelayParams { - /// Messages source client. - pub source_client: Client, - /// Source transaction params. - pub source_transaction_params: TransactionParams>, - /// Messages target client. - pub target_client: Client, - /// Target transaction params. - pub target_transaction_params: TransactionParams>, - /// Optional on-demand source to target headers relay. - pub source_to_target_headers_relay: - Option>>, - /// Optional on-demand target to source headers relay. - pub target_to_source_headers_relay: - Option>>, - /// Identifier of lane that needs to be served. - pub lane_id: LaneId, - /// Messages relay limits. If not provided, the relay tries to determine it automatically, - /// using `TransactionPayment` pallet runtime API. - pub limits: Option, - /// Metrics parameters. - pub metrics_params: MetricsParams, -} - -/// Delivery transaction limits. -pub struct MessagesRelayLimits { - /// Maximal number of messages in the delivery transaction. - pub max_messages_in_single_batch: MessageNonce, - /// Maximal cumulative weight of messages in the delivery transaction. - pub max_messages_weight_in_single_batch: Weight, -} - -/// Batch transaction that brings headers + and messages delivery/receiving confirmations to the -/// source node. -#[derive(Clone)] -pub struct BatchProofTransaction>> { - builder: B::CallBuilder, - proved_header: HeaderIdOf, - prove_calls: Vec>, - - /// Using `fn() -> B` in order to avoid implementing `Send` for `B`. 
- _phantom: PhantomData B>, -} - -impl>> std::fmt::Debug - for BatchProofTransaction -{ - fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { - fmt.debug_struct("BatchProofTransaction") - .field("proved_header", &self.proved_header) - .finish() - } -} - -impl>> - BatchProofTransaction -{ - /// Creates a new instance of `BatchProofTransaction`. - pub async fn new( - relay: Arc>, - block_num: BlockNumberOf, - ) -> Result, SubstrateError> { - if let Some(builder) = B::new_builder() { - let (proved_header, prove_calls) = relay.prove_header(block_num).await?; - return Ok(Some(Self { - builder, - proved_header, - prove_calls, - _phantom: Default::default(), - })) - } - - Ok(None) - } - - /// Return a batch call that includes the provided call. - pub fn append_call_and_build(mut self, call: CallOf) -> CallOf { - self.prove_calls.push(call); - self.builder.build_batch_call(self.prove_calls) - } -} - -impl>> - BatchTransaction> for BatchProofTransaction -{ - fn required_header_id(&self) -> HeaderIdOf { - self.proved_header - } -} - -/// Run Substrate-to-Substrate messages sync loop. -pub async fn run(params: MessagesRelayParams

) -> anyhow::Result<()> -where - AccountIdOf: From< as Pair>::Public>, - AccountIdOf: From< as Pair>::Public>, - BalanceOf: TryFrom>, -{ - // 2/3 is reserved for proofs and tx overhead - let max_messages_size_in_single_batch = P::TargetChain::max_extrinsic_size() / 3; - let limits = match params.limits { - Some(limits) => limits, - None => - select_delivery_transaction_limits_rpc::

( - ¶ms, - P::TargetChain::max_extrinsic_weight(), - P::SourceChain::MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX, - ) - .await?, - }; - let (max_messages_in_single_batch, max_messages_weight_in_single_batch) = - (limits.max_messages_in_single_batch / 2, limits.max_messages_weight_in_single_batch / 2); - - let source_client = params.source_client; - let target_client = params.target_client; - let relayer_id_at_source: AccountIdOf = - params.source_transaction_params.signer.public().into(); - - log::info!( - target: "bridge", - "Starting {} -> {} messages relay.\n\t\ - {} relayer account id: {:?}\n\t\ - Max messages in single transaction: {}\n\t\ - Max messages size in single transaction: {}\n\t\ - Max messages weight in single transaction: {}\n\t\ - Tx mortality: {:?} (~{}m)/{:?} (~{}m)", - P::SourceChain::NAME, - P::TargetChain::NAME, - P::SourceChain::NAME, - relayer_id_at_source, - max_messages_in_single_batch, - max_messages_size_in_single_batch, - max_messages_weight_in_single_batch, - params.source_transaction_params.mortality, - transaction_stall_timeout( - params.source_transaction_params.mortality, - P::SourceChain::AVERAGE_BLOCK_INTERVAL, - STALL_TIMEOUT, - ).as_secs_f64() / 60.0f64, - params.target_transaction_params.mortality, - transaction_stall_timeout( - params.target_transaction_params.mortality, - P::TargetChain::AVERAGE_BLOCK_INTERVAL, - STALL_TIMEOUT, - ).as_secs_f64() / 60.0f64, - ); - - messages_relay::message_lane_loop::run( - messages_relay::message_lane_loop::Params { - lane: params.lane_id, - source_tick: P::SourceChain::AVERAGE_BLOCK_INTERVAL, - target_tick: P::TargetChain::AVERAGE_BLOCK_INTERVAL, - reconnect_delay: relay_utils::relay_loop::RECONNECT_DELAY, - delivery_params: messages_relay::message_lane_loop::MessageDeliveryParams { - max_unrewarded_relayer_entries_at_target: - P::SourceChain::MAX_UNREWARDED_RELAYERS_IN_CONFIRMATION_TX, - max_unconfirmed_nonces_at_target: - P::SourceChain::MAX_UNCONFIRMED_MESSAGES_IN_CONFIRMATION_TX, - 
max_messages_in_single_batch, - max_messages_weight_in_single_batch, - max_messages_size_in_single_batch, - }, - }, - SubstrateMessagesSource::

::new( - source_client.clone(), - target_client.clone(), - params.lane_id, - params.source_transaction_params, - params.target_to_source_headers_relay, - ), - SubstrateMessagesTarget::

::new( - target_client, - source_client, - params.lane_id, - relayer_id_at_source, - params.target_transaction_params, - params.source_to_target_headers_relay, - ), - { - GlobalMetrics::new()?.register_and_spawn(¶ms.metrics_params.registry)?; - params.metrics_params - }, - futures::future::pending(), - ) - .await - .map_err(Into::into) -} - -/// Different ways of building `receive_messages_proof` calls. -pub trait ReceiveMessagesProofCallBuilder { - /// Given messages proof, build call of `receive_messages_proof` function of bridge - /// messages module at the target chain. - fn build_receive_messages_proof_call( - relayer_id_at_source: AccountIdOf, - proof: SubstrateMessagesProof, - messages_count: u32, - dispatch_weight: Weight, - trace_call: bool, - ) -> CallOf; -} - -/// Building `receive_messages_proof` call when you have direct access to the target -/// chain runtime. -pub struct DirectReceiveMessagesProofCallBuilder { - _phantom: PhantomData<(P, R, I)>, -} - -impl ReceiveMessagesProofCallBuilder

for DirectReceiveMessagesProofCallBuilder -where - P: SubstrateMessageLane, - R: BridgeMessagesConfig>, - I: 'static, - R::SourceHeaderChain: bp_messages::target_chain::SourceHeaderChain< - MessagesProof = FromBridgedChainMessagesProof>, - >, - CallOf: From> + GetDispatchInfo, -{ - fn build_receive_messages_proof_call( - relayer_id_at_source: AccountIdOf, - proof: SubstrateMessagesProof, - messages_count: u32, - dispatch_weight: Weight, - trace_call: bool, - ) -> CallOf { - let call: CallOf = BridgeMessagesCall::::receive_messages_proof { - relayer_id_at_bridged_chain: relayer_id_at_source, - proof: proof.1, - messages_count, - dispatch_weight, - } - .into(); - if trace_call { - // this trace isn't super-accurate, because limits are for transactions and we - // have a call here, but it provides required information - log::trace!( - target: "bridge", - "Prepared {} -> {} messages delivery call. Weight: {}/{}, size: {}/{}", - P::SourceChain::NAME, - P::TargetChain::NAME, - call.get_dispatch_info().weight, - P::TargetChain::max_extrinsic_weight(), - call.encode().len(), - P::TargetChain::max_extrinsic_size(), - ); - } - call - } -} - -/// Macro that generates `ReceiveMessagesProofCallBuilder` implementation for the case when -/// you only have an access to the mocked version of target chain runtime. In this case you -/// should provide "name" of the call variant for the bridge messages calls and the "name" of -/// the variant for the `receive_messages_proof` call within that first option. -#[rustfmt::skip] -#[macro_export] -macro_rules! 
generate_receive_message_proof_call_builder { - ($pipeline:ident, $mocked_builder:ident, $bridge_messages:path, $receive_messages_proof:path) => { - pub struct $mocked_builder; - - impl $crate::messages_lane::ReceiveMessagesProofCallBuilder<$pipeline> - for $mocked_builder - { - fn build_receive_messages_proof_call( - relayer_id_at_source: relay_substrate_client::AccountIdOf< - <$pipeline as $crate::messages_lane::SubstrateMessageLane>::SourceChain - >, - proof: $crate::messages_source::SubstrateMessagesProof< - <$pipeline as $crate::messages_lane::SubstrateMessageLane>::SourceChain - >, - messages_count: u32, - dispatch_weight: bp_messages::Weight, - _trace_call: bool, - ) -> relay_substrate_client::CallOf< - <$pipeline as $crate::messages_lane::SubstrateMessageLane>::TargetChain - > { - bp_runtime::paste::item! { - $bridge_messages($receive_messages_proof { - relayer_id_at_bridged_chain: relayer_id_at_source, - proof: proof.1, - messages_count: messages_count, - dispatch_weight: dispatch_weight, - }) - } - } - } - }; -} - -/// Different ways of building `receive_messages_delivery_proof` calls. -pub trait ReceiveMessagesDeliveryProofCallBuilder { - /// Given messages delivery proof, build call of `receive_messages_delivery_proof` function of - /// bridge messages module at the source chain. - fn build_receive_messages_delivery_proof_call( - proof: SubstrateMessagesDeliveryProof, - trace_call: bool, - ) -> CallOf; -} - -/// Building `receive_messages_delivery_proof` call when you have direct access to the source -/// chain runtime. -pub struct DirectReceiveMessagesDeliveryProofCallBuilder { - _phantom: PhantomData<(P, R, I)>, -} - -impl ReceiveMessagesDeliveryProofCallBuilder

- for DirectReceiveMessagesDeliveryProofCallBuilder -where - P: SubstrateMessageLane, - R: BridgeMessagesConfig, - I: 'static, - R::TargetHeaderChain: bp_messages::source_chain::TargetHeaderChain< - R::OutboundPayload, - R::AccountId, - MessagesDeliveryProof = FromBridgedChainMessagesDeliveryProof>, - >, - CallOf: From> + GetDispatchInfo, -{ - fn build_receive_messages_delivery_proof_call( - proof: SubstrateMessagesDeliveryProof, - trace_call: bool, - ) -> CallOf { - let call: CallOf = - BridgeMessagesCall::::receive_messages_delivery_proof { - proof: proof.1, - relayers_state: proof.0, - } - .into(); - if trace_call { - // this trace isn't super-accurate, because limits are for transactions and we - // have a call here, but it provides required information - log::trace!( - target: "bridge", - "Prepared {} -> {} delivery confirmation transaction. Weight: {}/{}, size: {}/{}", - P::TargetChain::NAME, - P::SourceChain::NAME, - call.get_dispatch_info().weight, - P::SourceChain::max_extrinsic_weight(), - call.encode().len(), - P::SourceChain::max_extrinsic_size(), - ); - } - call - } -} - -/// Macro that generates `ReceiveMessagesDeliveryProofCallBuilder` implementation for the case when -/// you only have an access to the mocked version of source chain runtime. In this case you -/// should provide "name" of the call variant for the bridge messages calls and the "name" of -/// the variant for the `receive_messages_delivery_proof` call within that first option. -#[rustfmt::skip] -#[macro_export] -macro_rules! 
generate_receive_message_delivery_proof_call_builder { - ($pipeline:ident, $mocked_builder:ident, $bridge_messages:path, $receive_messages_delivery_proof:path) => { - pub struct $mocked_builder; - - impl $crate::messages_lane::ReceiveMessagesDeliveryProofCallBuilder<$pipeline> - for $mocked_builder - { - fn build_receive_messages_delivery_proof_call( - proof: $crate::messages_target::SubstrateMessagesDeliveryProof< - <$pipeline as $crate::messages_lane::SubstrateMessageLane>::TargetChain - >, - _trace_call: bool, - ) -> relay_substrate_client::CallOf< - <$pipeline as $crate::messages_lane::SubstrateMessageLane>::SourceChain - > { - bp_runtime::paste::item! { - $bridge_messages($receive_messages_delivery_proof { - proof: proof.1, - relayers_state: proof.0 - }) - } - } - } - }; -} - -/// Returns maximal number of messages and their maximal cumulative dispatch weight. -async fn select_delivery_transaction_limits_rpc( - params: &MessagesRelayParams

, - max_extrinsic_weight: Weight, - max_unconfirmed_messages_at_inbound_lane: MessageNonce, -) -> anyhow::Result -where - AccountIdOf: From< as Pair>::Public>, -{ - // We may try to guess accurate value, based on maximal number of messages and per-message - // weight overhead, but the relay loop isn't using this info in a super-accurate way anyway. - // So just a rough guess: let's say 1/3 of max tx weight is for tx itself and the rest is - // for messages dispatch. - - // Another thing to keep in mind is that our runtimes (when this code was written) accept - // messages with dispatch weight <= max_extrinsic_weight/2. So we can't reserve less than - // that for dispatch. - - let weight_for_delivery_tx = max_extrinsic_weight / 3; - let weight_for_messages_dispatch = max_extrinsic_weight - weight_for_delivery_tx; - - // weight of empty message delivery with outbound lane state - let delivery_tx_with_zero_messages = dummy_messages_delivery_transaction::

(params, 0)?; - let delivery_tx_with_zero_messages_weight = params - .target_client - .extimate_extrinsic_weight(delivery_tx_with_zero_messages) - .await - .map_err(|e| { - anyhow::format_err!("Failed to estimate delivery extrinsic weight: {:?}", e) - })?; - - // weight of single message delivery with outbound lane state - let delivery_tx_with_one_message = dummy_messages_delivery_transaction::

(params, 1)?; - let delivery_tx_with_one_message_weight = params - .target_client - .extimate_extrinsic_weight(delivery_tx_with_one_message) - .await - .map_err(|e| { - anyhow::format_err!("Failed to estimate delivery extrinsic weight: {:?}", e) - })?; - - // message overhead is roughly `delivery_tx_with_one_message_weight - - // delivery_tx_with_zero_messages_weight` - let delivery_tx_weight_rest = weight_for_delivery_tx - delivery_tx_with_zero_messages_weight; - let delivery_tx_message_overhead = - delivery_tx_with_one_message_weight.saturating_sub(delivery_tx_with_zero_messages_weight); - - let max_number_of_messages = std::cmp::min( - delivery_tx_weight_rest - .min_components_checked_div(delivery_tx_message_overhead) - .unwrap_or(u64::MAX), - max_unconfirmed_messages_at_inbound_lane, - ); - - assert!( - max_number_of_messages > 0, - "Relay should fit at least one message in every delivery transaction", - ); - assert!( - weight_for_messages_dispatch.ref_time() >= max_extrinsic_weight.ref_time() / 2, - "Relay shall be able to deliver messages with dispatch weight = max_extrinsic_weight / 2", - ); - - Ok(MessagesRelayLimits { - max_messages_in_single_batch: max_number_of_messages, - max_messages_weight_in_single_batch: weight_for_messages_dispatch, - }) -} - -/// Returns dummy message delivery transaction with zero messages and `1kb` proof. -fn dummy_messages_delivery_transaction( - params: &MessagesRelayParams

, - messages: u32, -) -> anyhow::Result<::SignedTransaction> -where - AccountIdOf: From< as Pair>::Public>, -{ - // we don't care about any call values here, because all that the estimation RPC does - // is calls `GetDispatchInfo::get_dispatch_info` for the wrapped call. So we only are - // interested in values that affect call weight - e.g. number of messages and the - // storage proof size - - let dummy_messages_delivery_call = - P::ReceiveMessagesProofCallBuilder::build_receive_messages_proof_call( - params.source_transaction_params.signer.public().into(), - ( - Weight::zero(), - FromBridgedChainMessagesProof { - bridged_header_hash: Default::default(), - // we may use per-chain `EXTRA_STORAGE_PROOF_SIZE`, but since we don't need - // exact values, this global estimation is fine - storage_proof: vec![vec![ - 42u8; - pallet_bridge_messages::EXTRA_STORAGE_PROOF_SIZE - as usize - ]], - lane: Default::default(), - nonces_start: 1, - nonces_end: messages as u64, - }, - ), - messages, - Weight::zero(), - false, - ); - P::TargetChain::sign_transaction( - SignParam { - spec_version: 0, - transaction_version: 0, - genesis_hash: Default::default(), - signer: params.target_transaction_params.signer.clone(), - }, - UnsignedTransaction { - call: EncodedOrDecodedCall::Decoded(dummy_messages_delivery_call), - nonce: Zero::zero(), - tip: Zero::zero(), - era: TransactionEra::Immortal, - }, - ) - .map_err(Into::into) -} diff --git a/relays/lib-substrate-relay/src/messages_metrics.rs b/relays/lib-substrate-relay/src/messages_metrics.rs deleted file mode 100644 index 27bf6186c..000000000 --- a/relays/lib-substrate-relay/src/messages_metrics.rs +++ /dev/null @@ -1,190 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Tools for supporting message lanes between two Substrate-based chains. - -use crate::TaggedAccount; - -use bp_messages::LaneId; -use bp_relayers::{RewardsAccountOwner, RewardsAccountParams}; -use bp_runtime::StorageDoubleMapKeyProvider; -use codec::Decode; -use frame_system::AccountInfo; -use pallet_balances::AccountData; -use relay_substrate_client::{ - metrics::{FloatStorageValue, FloatStorageValueMetric}, - AccountIdOf, BalanceOf, Chain, ChainWithBalances, ChainWithMessages, Client, - Error as SubstrateError, NonceOf, -}; -use relay_utils::metrics::{MetricsParams, StandaloneMetric}; -use sp_core::storage::StorageData; -use sp_runtime::{FixedPointNumber, FixedU128}; -use std::{convert::TryFrom, fmt::Debug, marker::PhantomData}; - -/// Add relay accounts balance metrics. -pub async fn add_relay_balances_metrics( - client: Client, - metrics: &MetricsParams, - relay_accounts: &Vec>>, - lanes: &[LaneId], -) -> anyhow::Result<()> -where - BalanceOf: Into + std::fmt::Debug, -{ - if relay_accounts.is_empty() { - return Ok(()) - } - - // if `tokenDecimals` is missing from system properties, we'll be using - let token_decimals = client - .token_decimals() - .await? 
- .map(|token_decimals| { - log::info!(target: "bridge", "Read `tokenDecimals` for {}: {}", C::NAME, token_decimals); - token_decimals - }) - .unwrap_or_else(|| { - // turns out it is normal not to have this property - e.g. when polkadot binary is - // started using `polkadot-local` chain. Let's use minimal nominal here - log::info!(target: "bridge", "Using default (zero) `tokenDecimals` value for {}", C::NAME); - 0 - }); - let token_decimals = u32::try_from(token_decimals).map_err(|e| { - anyhow::format_err!( - "Token decimals value ({}) of {} doesn't fit into u32: {:?}", - token_decimals, - C::NAME, - e, - ) - })?; - - for account in relay_accounts { - let relay_account_balance_metric = FloatStorageValueMetric::new( - AccountBalanceFromAccountInfo:: { token_decimals, _phantom: Default::default() }, - client.clone(), - C::account_info_storage_key(account.id()), - format!("at_{}_relay_{}_balance", C::NAME, account.tag()), - format!("Balance of the {} relay account at the {}", account.tag(), C::NAME), - )?; - relay_account_balance_metric.register_and_spawn(&metrics.registry)?; - - if let Some(relayers_pallet_name) = BC::WITH_CHAIN_RELAYERS_PALLET_NAME { - for lane in lanes { - FloatStorageValueMetric::new( - AccountBalance:: { token_decimals, _phantom: Default::default() }, - client.clone(), - bp_relayers::RelayerRewardsKeyProvider::, BalanceOf>::final_key( - relayers_pallet_name, - account.id(), - &RewardsAccountParams::new(*lane, BC::ID, RewardsAccountOwner::ThisChain), - ), - format!("at_{}_relay_{}_reward_for_msgs_from_{}_on_lane_{}", C::NAME, account.tag(), BC::NAME, hex::encode(lane.as_ref())), - format!("Reward of the {} relay account at {} for delivering messages from {} on lane {:?}", account.tag(), C::NAME, BC::NAME, lane), - )?.register_and_spawn(&metrics.registry)?; - - FloatStorageValueMetric::new( - AccountBalance:: { token_decimals, _phantom: Default::default() }, - client.clone(), - bp_relayers::RelayerRewardsKeyProvider::, BalanceOf>::final_key( - 
relayers_pallet_name, - account.id(), - &RewardsAccountParams::new(*lane, BC::ID, RewardsAccountOwner::BridgedChain), - ), - format!("at_{}_relay_{}_reward_for_msgs_to_{}_on_lane_{}", C::NAME, account.tag(), BC::NAME, hex::encode(lane.as_ref())), - format!("Reward of the {} relay account at {} for delivering messages confirmations from {} on lane {:?}", account.tag(), C::NAME, BC::NAME, lane), - )?.register_and_spawn(&metrics.registry)?; - } - } - } - - Ok(()) -} - -/// Adapter for `FloatStorageValueMetric` to decode account free balance. -#[derive(Clone, Debug)] -struct AccountBalanceFromAccountInfo { - token_decimals: u32, - _phantom: PhantomData, -} - -impl FloatStorageValue for AccountBalanceFromAccountInfo -where - C: Chain, - BalanceOf: Into, -{ - type Value = FixedU128; - - fn decode( - &self, - maybe_raw_value: Option, - ) -> Result, SubstrateError> { - maybe_raw_value - .map(|raw_value| { - AccountInfo::, AccountData>>::decode(&mut &raw_value.0[..]) - .map_err(SubstrateError::ResponseParseFailed) - .map(|account_data| { - convert_to_token_balance(account_data.data.free.into(), self.token_decimals) - }) - }) - .transpose() - } -} - -/// Adapter for `FloatStorageValueMetric` to decode account free balance. -#[derive(Clone, Debug)] -struct AccountBalance { - token_decimals: u32, - _phantom: PhantomData, -} - -impl FloatStorageValue for AccountBalance -where - C: Chain, - BalanceOf: Into, -{ - type Value = FixedU128; - - fn decode( - &self, - maybe_raw_value: Option, - ) -> Result, SubstrateError> { - maybe_raw_value - .map(|raw_value| { - BalanceOf::::decode(&mut &raw_value.0[..]) - .map_err(SubstrateError::ResponseParseFailed) - .map(|balance| convert_to_token_balance(balance.into(), self.token_decimals)) - }) - .transpose() - } -} - -/// Convert from raw `u128` balance (nominated in smallest chain token units) to the float regular -/// tokens value. 
-fn convert_to_token_balance(balance: u128, token_decimals: u32) -> FixedU128 { - FixedU128::from_inner(balance.saturating_mul(FixedU128::DIV / 10u128.pow(token_decimals))) -} - -#[cfg(test)] -mod tests { - use super::*; - #[test] - fn token_decimals_used_properly() { - let plancks = 425_000_000_000; - let token_decimals = 10; - let dots = convert_to_token_balance(plancks, token_decimals); - assert_eq!(dots, FixedU128::saturating_from_rational(425, 10)); - } -} diff --git a/relays/lib-substrate-relay/src/messages_source.rs b/relays/lib-substrate-relay/src/messages_source.rs deleted file mode 100644 index 49deff046..000000000 --- a/relays/lib-substrate-relay/src/messages_source.rs +++ /dev/null @@ -1,713 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Substrate client as Substrate messages source. The chain we connect to should have -//! runtime that implements `HeaderApi` to allow bridging with -//! `` chain. 
- -use crate::{ - finality_base::best_synced_header_id, - messages_lane::{ - BatchProofTransaction, MessageLaneAdapter, ReceiveMessagesDeliveryProofCallBuilder, - SubstrateMessageLane, - }, - on_demand::OnDemandRelay, - TransactionParams, -}; - -use async_std::sync::Arc; -use async_trait::async_trait; -use bp_messages::{ - storage_keys::{operating_mode_key, outbound_lane_data_key}, - ChainWithMessages as _, InboundMessageDetails, LaneId, MessageNonce, MessagePayload, - MessagesOperatingMode, OutboundLaneData, OutboundMessageDetails, -}; -use bp_runtime::{BasicOperatingMode, HeaderIdProvider}; -use bridge_runtime_common::messages::target::FromBridgedChainMessagesProof; -use codec::Encode; -use frame_support::weights::Weight; -use messages_relay::{ - message_lane::{MessageLane, SourceHeaderIdOf, TargetHeaderIdOf}, - message_lane_loop::{ - ClientState, MessageDetails, MessageDetailsMap, MessageProofParameters, SourceClient, - SourceClientState, - }, -}; -use num_traits::Zero; -use relay_substrate_client::{ - AccountIdOf, AccountKeyPairOf, BalanceOf, Chain, ChainWithMessages, Client, - Error as SubstrateError, HashOf, HeaderIdOf, TransactionEra, TransactionTracker, - UnsignedTransaction, -}; -use relay_utils::relay_loop::Client as RelayClient; -use sp_core::Pair; -use std::ops::RangeInclusive; - -/// Intermediate message proof returned by the source Substrate node. Includes everything -/// required to submit to the target node: cumulative dispatch weight of bundled messages and -/// the proof itself. -pub type SubstrateMessagesProof = (Weight, FromBridgedChainMessagesProof>); -type MessagesToRefine<'a> = Vec<(MessagePayload, &'a mut OutboundMessageDetails)>; - -/// Substrate client as Substrate messages source. -pub struct SubstrateMessagesSource { - source_client: Client, - target_client: Client, - lane_id: LaneId, - transaction_params: TransactionParams>, - target_to_source_headers_relay: Option>>, -} - -impl SubstrateMessagesSource

{ - /// Create new Substrate headers source. - pub fn new( - source_client: Client, - target_client: Client, - lane_id: LaneId, - transaction_params: TransactionParams>, - target_to_source_headers_relay: Option< - Arc>, - >, - ) -> Self { - SubstrateMessagesSource { - source_client, - target_client, - lane_id, - transaction_params, - target_to_source_headers_relay, - } - } - - /// Read outbound lane state from the on-chain storage at given block. - async fn outbound_lane_data( - &self, - id: SourceHeaderIdOf>, - ) -> Result, SubstrateError> { - self.source_client - .storage_value( - outbound_lane_data_key( - P::TargetChain::WITH_CHAIN_MESSAGES_PALLET_NAME, - &self.lane_id, - ), - Some(id.1), - ) - .await - } - - /// Ensure that the messages pallet at source chain is active. - async fn ensure_pallet_active(&self) -> Result<(), SubstrateError> { - ensure_messages_pallet_active::(&self.source_client).await - } -} - -impl Clone for SubstrateMessagesSource

{ - fn clone(&self) -> Self { - Self { - source_client: self.source_client.clone(), - target_client: self.target_client.clone(), - lane_id: self.lane_id, - transaction_params: self.transaction_params.clone(), - target_to_source_headers_relay: self.target_to_source_headers_relay.clone(), - } - } -} - -#[async_trait] -impl RelayClient for SubstrateMessagesSource

{ - type Error = SubstrateError; - - async fn reconnect(&mut self) -> Result<(), SubstrateError> { - // since the client calls RPC methods on both sides, we need to reconnect both - self.source_client.reconnect().await?; - self.target_client.reconnect().await?; - - // call reconnect on on-demand headers relay, because we may use different chains there - // and the error that has lead to reconnect may have came from those other chains - // (see `require_target_header_on_source`) - // - // this may lead to multiple reconnects to the same node during the same call and it - // needs to be addressed in the future - // TODO: https://github.com/paritytech/parity-bridges-common/issues/1928 - if let Some(ref mut target_to_source_headers_relay) = self.target_to_source_headers_relay { - target_to_source_headers_relay.reconnect().await?; - } - - Ok(()) - } -} - -#[async_trait] -impl SourceClient> for SubstrateMessagesSource

-where - AccountIdOf: From< as Pair>::Public>, -{ - type BatchTransaction = - BatchProofTransaction; - type TransactionTracker = TransactionTracker>; - - async fn state(&self) -> Result>, SubstrateError> { - // we can't continue to deliver confirmations if source node is out of sync, because - // it may have already received confirmations that we're going to deliver - // - // we can't continue to deliver messages if target node is out of sync, because - // it may have already received (some of) messages that we're going to deliver - self.source_client.ensure_synced().await?; - self.target_client.ensure_synced().await?; - // we can't relay confirmations if messages pallet at source chain is halted - self.ensure_pallet_active().await?; - - read_client_state(&self.source_client, Some(&self.target_client)).await - } - - async fn latest_generated_nonce( - &self, - id: SourceHeaderIdOf>, - ) -> Result<(SourceHeaderIdOf>, MessageNonce), SubstrateError> { - // lane data missing from the storage is fine until first message is sent - let latest_generated_nonce = self - .outbound_lane_data(id) - .await? - .map(|data| data.latest_generated_nonce) - .unwrap_or(0); - Ok((id, latest_generated_nonce)) - } - - async fn latest_confirmed_received_nonce( - &self, - id: SourceHeaderIdOf>, - ) -> Result<(SourceHeaderIdOf>, MessageNonce), SubstrateError> { - // lane data missing from the storage is fine until first message is sent - let latest_received_nonce = self - .outbound_lane_data(id) - .await? 
- .map(|data| data.latest_received_nonce) - .unwrap_or(0); - Ok((id, latest_received_nonce)) - } - - async fn generated_message_details( - &self, - id: SourceHeaderIdOf>, - nonces: RangeInclusive, - ) -> Result>, SubstrateError> { - let mut out_msgs_details = self - .source_client - .typed_state_call::<_, Vec<_>>( - P::TargetChain::TO_CHAIN_MESSAGE_DETAILS_METHOD.into(), - (self.lane_id, *nonces.start(), *nonces.end()), - Some(id.1), - ) - .await?; - validate_out_msgs_details::(&out_msgs_details, nonces)?; - - // prepare arguments of the inbound message details call (if we need it) - let mut msgs_to_refine = vec![]; - for out_msg_details in out_msgs_details.iter_mut() { - // in our current strategy all messages are supposed to be paid at the target chain - - // for pay-at-target messages we may want to ask target chain for - // refined dispatch weight - let msg_key = bp_messages::storage_keys::message_key( - P::TargetChain::WITH_CHAIN_MESSAGES_PALLET_NAME, - &self.lane_id, - out_msg_details.nonce, - ); - let msg_payload: MessagePayload = - self.source_client.storage_value(msg_key, Some(id.1)).await?.ok_or_else(|| { - SubstrateError::Custom(format!( - "Message to {} {:?}/{} is missing from runtime the storage of {} at {:?}", - P::TargetChain::NAME, - self.lane_id, - out_msg_details.nonce, - P::SourceChain::NAME, - id, - )) - })?; - - msgs_to_refine.push((msg_payload, out_msg_details)); - } - - for mut msgs_to_refine_batch in - split_msgs_to_refine::(self.lane_id, msgs_to_refine)? 
- { - let in_msgs_details = self - .target_client - .typed_state_call::<_, Vec>( - P::SourceChain::FROM_CHAIN_MESSAGE_DETAILS_METHOD.into(), - (self.lane_id, &msgs_to_refine_batch), - None, - ) - .await?; - if in_msgs_details.len() != msgs_to_refine_batch.len() { - return Err(SubstrateError::Custom(format!( - "Call of {} at {} has returned {} entries instead of expected {}", - P::SourceChain::FROM_CHAIN_MESSAGE_DETAILS_METHOD, - P::TargetChain::NAME, - in_msgs_details.len(), - msgs_to_refine_batch.len(), - ))) - } - for ((_, out_msg_details), in_msg_details) in - msgs_to_refine_batch.iter_mut().zip(in_msgs_details) - { - log::trace!( - target: "bridge", - "Refined weight of {}->{} message {:?}/{}: at-source: {}, at-target: {}", - P::SourceChain::NAME, - P::TargetChain::NAME, - self.lane_id, - out_msg_details.nonce, - out_msg_details.dispatch_weight, - in_msg_details.dispatch_weight, - ); - out_msg_details.dispatch_weight = in_msg_details.dispatch_weight; - } - } - - let mut msgs_details_map = MessageDetailsMap::new(); - for out_msg_details in out_msgs_details { - msgs_details_map.insert( - out_msg_details.nonce, - MessageDetails { - dispatch_weight: out_msg_details.dispatch_weight, - size: out_msg_details.size as _, - reward: Zero::zero(), - }, - ); - } - - Ok(msgs_details_map) - } - - async fn prove_messages( - &self, - id: SourceHeaderIdOf>, - nonces: RangeInclusive, - proof_parameters: MessageProofParameters, - ) -> Result< - ( - SourceHeaderIdOf>, - RangeInclusive, - as MessageLane>::MessagesProof, - ), - SubstrateError, - > { - let mut storage_keys = - Vec::with_capacity(nonces.end().saturating_sub(*nonces.start()) as usize + 1); - let mut message_nonce = *nonces.start(); - while message_nonce <= *nonces.end() { - let message_key = bp_messages::storage_keys::message_key( - P::TargetChain::WITH_CHAIN_MESSAGES_PALLET_NAME, - &self.lane_id, - message_nonce, - ); - storage_keys.push(message_key); - message_nonce += 1; - } - if 
proof_parameters.outbound_state_proof_required { - storage_keys.push(bp_messages::storage_keys::outbound_lane_data_key( - P::TargetChain::WITH_CHAIN_MESSAGES_PALLET_NAME, - &self.lane_id, - )); - } - - let proof = self - .source_client - .prove_storage(storage_keys, id.1) - .await? - .into_iter_nodes() - .collect(); - let proof = FromBridgedChainMessagesProof { - bridged_header_hash: id.1, - storage_proof: proof, - lane: self.lane_id, - nonces_start: *nonces.start(), - nonces_end: *nonces.end(), - }; - Ok((id, nonces, (proof_parameters.dispatch_weight, proof))) - } - - async fn submit_messages_receiving_proof( - &self, - maybe_batch_tx: Option, - _generated_at_block: TargetHeaderIdOf>, - proof: as MessageLane>::MessagesReceivingProof, - ) -> Result { - let messages_proof_call = - P::ReceiveMessagesDeliveryProofCallBuilder::build_receive_messages_delivery_proof_call( - proof, - maybe_batch_tx.is_none(), - ); - let final_call = match maybe_batch_tx { - Some(batch_tx) => batch_tx.append_call_and_build(messages_proof_call), - None => messages_proof_call, - }; - - let transaction_params = self.transaction_params.clone(); - self.source_client - .submit_and_watch_signed_extrinsic( - &self.transaction_params.signer, - move |best_block_id, transaction_nonce| { - Ok(UnsignedTransaction::new(final_call.into(), transaction_nonce) - .era(TransactionEra::new(best_block_id, transaction_params.mortality))) - }, - ) - .await - } - - async fn require_target_header_on_source( - &self, - id: TargetHeaderIdOf>, - ) -> Result, SubstrateError> { - if let Some(ref target_to_source_headers_relay) = self.target_to_source_headers_relay { - if let Some(batch_tx) = - BatchProofTransaction::new(target_to_source_headers_relay.clone(), id.0).await? - { - return Ok(Some(batch_tx)) - } - - target_to_source_headers_relay.require_more_headers(id.0).await; - } - - Ok(None) - } -} - -/// Ensure that the messages pallet at source chain is active. 
-pub(crate) async fn ensure_messages_pallet_active( - client: &Client, -) -> Result<(), SubstrateError> -where - AtChain: ChainWithMessages, - WithChain: ChainWithMessages, -{ - let operating_mode = client - .storage_value(operating_mode_key(WithChain::WITH_CHAIN_MESSAGES_PALLET_NAME), None) - .await?; - let is_halted = - operating_mode == Some(MessagesOperatingMode::Basic(BasicOperatingMode::Halted)); - if is_halted { - Err(SubstrateError::BridgePalletIsHalted) - } else { - Ok(()) - } -} - -/// Read best blocks from given client. -/// -/// This function assumes that the chain that is followed by the `self_client` has -/// bridge GRANDPA pallet deployed and it provides `best_finalized_header_id_method_name` -/// runtime API to read the best finalized Bridged chain header. -/// -/// If `peer_client` is `None`, the value of `actual_best_finalized_peer_at_best_self` will -/// always match the `best_finalized_peer_at_best_self`. -pub async fn read_client_state( - self_client: &Client, - peer_client: Option<&Client>, -) -> Result, HeaderIdOf>, SubstrateError> -where - SelfChain: Chain, - PeerChain: Chain, -{ - // let's read our state first: we need best finalized header hash on **this** chain - let self_best_finalized_id = self_client.best_finalized_header().await?.id(); - // now let's read our best header on **this** chain - let self_best_id = self_client.best_header().await?.id(); - - // now let's read id of best finalized peer header at our best finalized block - let peer_on_self_best_finalized_id = - best_synced_header_id::(self_client, self_best_id.hash()).await?; - - // read actual header, matching the `peer_on_self_best_finalized_id` from the peer chain - let actual_peer_on_self_best_finalized_id = - match (peer_client, peer_on_self_best_finalized_id.as_ref()) { - (Some(peer_client), Some(peer_on_self_best_finalized_id)) => { - let actual_peer_on_self_best_finalized = - peer_client.header_by_number(peer_on_self_best_finalized_id.number()).await?; - 
Some(actual_peer_on_self_best_finalized.id()) - }, - _ => peer_on_self_best_finalized_id, - }; - - Ok(ClientState { - best_self: self_best_id, - best_finalized_self: self_best_finalized_id, - best_finalized_peer_at_best_self: peer_on_self_best_finalized_id, - actual_best_finalized_peer_at_best_self: actual_peer_on_self_best_finalized_id, - }) -} - -/// Reads best `PeerChain` header known to the `SelfChain` using provided runtime API method. -/// -/// Method is supposed to be the `FinalityApi::best_finalized()` method. -pub async fn best_finalized_peer_header_at_self( - self_client: &Client, - at_self_hash: HashOf, -) -> Result>, SubstrateError> -where - SelfChain: Chain, - PeerChain: Chain, -{ - // now let's read id of best finalized peer header at our best finalized block - self_client - .typed_state_call::<_, Option<_>>( - PeerChain::BEST_FINALIZED_HEADER_ID_METHOD.into(), - (), - Some(at_self_hash), - ) - .await -} - -fn validate_out_msgs_details( - out_msgs_details: &[OutboundMessageDetails], - nonces: RangeInclusive, -) -> Result<(), SubstrateError> { - let make_missing_nonce_error = |expected_nonce| { - Err(SubstrateError::Custom(format!( - "Missing nonce {expected_nonce} in message_details call result. Expected all nonces from {nonces:?}", - ))) - }; - - if out_msgs_details.len() > nonces.clone().count() { - return Err(SubstrateError::Custom( - "More messages than requested returned by the message_details call.".into(), - )) - } - - // Check if last nonce is missing. The loop below is not checking this. - if out_msgs_details.is_empty() && !nonces.is_empty() { - return make_missing_nonce_error(*nonces.end()) - } - - let mut nonces_iter = nonces.clone().rev().peekable(); - let mut out_msgs_details_iter = out_msgs_details.iter().rev(); - while let Some((out_msg_details, &nonce)) = out_msgs_details_iter.next().zip(nonces_iter.peek()) - { - nonces_iter.next(); - if out_msg_details.nonce != nonce { - // Some nonces are missing from the middle/tail of the range. 
This is critical error. - return make_missing_nonce_error(nonce) - } - } - - // Check if some nonces from the beginning of the range are missing. This may happen if - // some messages were already pruned from the source node. This is not a critical error - // and will be auto-resolved by messages lane (and target node). - if nonces_iter.peek().is_some() { - log::info!( - target: "bridge", - "Some messages are missing from the {} node: {:?}. Target node may be out of sync?", - C::NAME, - nonces_iter.rev().collect::>(), - ); - } - - Ok(()) -} - -fn split_msgs_to_refine( - lane_id: LaneId, - msgs_to_refine: MessagesToRefine, -) -> Result, SubstrateError> { - let max_batch_size = Target::max_extrinsic_size() as usize; - let mut batches = vec![]; - - let mut current_msgs_batch = msgs_to_refine; - while !current_msgs_batch.is_empty() { - let mut next_msgs_batch = vec![]; - while (lane_id, ¤t_msgs_batch).encoded_size() > max_batch_size { - if current_msgs_batch.len() <= 1 { - return Err(SubstrateError::Custom(format!( - "Call of {} at {} can't be executed even if only one message is supplied. 
\ - max_extrinsic_size(): {}", - Source::FROM_CHAIN_MESSAGE_DETAILS_METHOD, - Target::NAME, - Target::max_extrinsic_size(), - ))) - } - - if let Some(msg) = current_msgs_batch.pop() { - next_msgs_batch.insert(0, msg); - } - } - - batches.push(current_msgs_batch); - current_msgs_batch = next_msgs_batch; - } - - Ok(batches) -} - -#[cfg(test)] -mod tests { - use super::*; - use relay_substrate_client::test_chain::TestChain; - - fn message_details_from_rpc( - nonces: RangeInclusive, - ) -> Vec { - nonces - .into_iter() - .map(|nonce| bp_messages::OutboundMessageDetails { - nonce, - dispatch_weight: Weight::zero(), - size: 0, - }) - .collect() - } - - #[test] - fn validate_out_msgs_details_succeeds_if_no_messages_are_missing() { - assert!(validate_out_msgs_details::(&message_details_from_rpc(1..=3), 1..=3,) - .is_ok()); - } - - #[test] - fn validate_out_msgs_details_succeeds_if_head_messages_are_missing() { - assert!(validate_out_msgs_details::(&message_details_from_rpc(2..=3), 1..=3,) - .is_ok()) - } - - #[test] - fn validate_out_msgs_details_fails_if_mid_messages_are_missing() { - let mut message_details_from_rpc = message_details_from_rpc(1..=3); - message_details_from_rpc.remove(1); - assert!(matches!( - validate_out_msgs_details::(&message_details_from_rpc, 1..=3,), - Err(SubstrateError::Custom(_)) - )); - } - - #[test] - fn validate_out_msgs_details_map_fails_if_tail_messages_are_missing() { - assert!(matches!( - validate_out_msgs_details::(&message_details_from_rpc(1..=2), 1..=3,), - Err(SubstrateError::Custom(_)) - )); - } - - #[test] - fn validate_out_msgs_details_fails_if_all_messages_are_missing() { - assert!(matches!( - validate_out_msgs_details::(&[], 1..=3), - Err(SubstrateError::Custom(_)) - )); - } - - #[test] - fn validate_out_msgs_details_fails_if_more_messages_than_nonces() { - assert!(matches!( - validate_out_msgs_details::(&message_details_from_rpc(1..=5), 2..=5,), - Err(SubstrateError::Custom(_)) - )); - } - - fn check_split_msgs_to_refine( - 
payload_sizes: Vec, - expected_batches: Result, ()>, - ) { - let mut out_msgs_details = vec![]; - for (idx, _) in payload_sizes.iter().enumerate() { - out_msgs_details.push(OutboundMessageDetails { - nonce: idx as MessageNonce, - dispatch_weight: Weight::zero(), - size: 0, - }); - } - - let mut msgs_to_refine = vec![]; - for (&payload_size, out_msg_details) in - payload_sizes.iter().zip(out_msgs_details.iter_mut()) - { - let payload = vec![1u8; payload_size]; - msgs_to_refine.push((payload, out_msg_details)); - } - - let maybe_batches = - split_msgs_to_refine::(Default::default(), msgs_to_refine); - match expected_batches { - Ok(expected_batches) => { - let batches = maybe_batches.unwrap(); - let mut idx = 0; - assert_eq!(batches.len(), expected_batches.len()); - for (batch, &expected_batch_size) in batches.iter().zip(expected_batches.iter()) { - assert_eq!(batch.len(), expected_batch_size); - for msg_to_refine in batch { - assert_eq!(msg_to_refine.0.len(), payload_sizes[idx]); - idx += 1; - } - } - }, - Err(_) => { - matches!(maybe_batches, Err(SubstrateError::Custom(_))); - }, - } - } - - #[test] - fn test_split_msgs_to_refine() { - let max_extrinsic_size = 100000; - - // Check that an error is returned when one of the messages is too big. - check_split_msgs_to_refine(vec![max_extrinsic_size], Err(())); - check_split_msgs_to_refine(vec![50, 100, max_extrinsic_size, 200], Err(())); - - // Otherwise check that the split is valid. 
- check_split_msgs_to_refine(vec![100, 200, 300, 400], Ok(vec![4])); - check_split_msgs_to_refine( - vec![ - 50, - 100, - max_extrinsic_size - 500, - 500, - 1000, - 1500, - max_extrinsic_size - 3500, - 5000, - 10000, - ], - Ok(vec![3, 4, 2]), - ); - check_split_msgs_to_refine( - vec![ - 50, - 100, - max_extrinsic_size - 150, - 500, - 1000, - 1500, - max_extrinsic_size - 3000, - 5000, - 10000, - ], - Ok(vec![2, 1, 3, 1, 2]), - ); - check_split_msgs_to_refine( - vec![ - 5000, - 10000, - max_extrinsic_size - 3500, - 500, - 1000, - 1500, - max_extrinsic_size - 500, - 50, - 100, - ], - Ok(vec![2, 4, 3]), - ); - } -} diff --git a/relays/lib-substrate-relay/src/messages_target.rs b/relays/lib-substrate-relay/src/messages_target.rs deleted file mode 100644 index 9396e7855..000000000 --- a/relays/lib-substrate-relay/src/messages_target.rs +++ /dev/null @@ -1,300 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Substrate client as Substrate messages target. The chain we connect to should have -//! runtime that implements `HeaderApi` to allow bridging with -//! `` chain. 
- -use crate::{ - messages_lane::{ - BatchProofTransaction, MessageLaneAdapter, ReceiveMessagesProofCallBuilder, - SubstrateMessageLane, - }, - messages_source::{ensure_messages_pallet_active, read_client_state, SubstrateMessagesProof}, - on_demand::OnDemandRelay, - TransactionParams, -}; - -use async_std::sync::Arc; -use async_trait::async_trait; -use bp_messages::{ - storage_keys::inbound_lane_data_key, ChainWithMessages as _, InboundLaneData, LaneId, - MessageNonce, UnrewardedRelayersState, -}; -use bridge_runtime_common::messages::source::FromBridgedChainMessagesDeliveryProof; -use messages_relay::{ - message_lane::{MessageLane, SourceHeaderIdOf, TargetHeaderIdOf}, - message_lane_loop::{NoncesSubmitArtifacts, TargetClient, TargetClientState}, -}; -use relay_substrate_client::{ - AccountIdOf, AccountKeyPairOf, BalanceOf, CallOf, Client, Error as SubstrateError, HashOf, - TransactionEra, TransactionTracker, UnsignedTransaction, -}; -use relay_utils::relay_loop::Client as RelayClient; -use sp_core::Pair; -use std::{convert::TryFrom, ops::RangeInclusive}; - -/// Message receiving proof returned by the target Substrate node. -pub type SubstrateMessagesDeliveryProof = - (UnrewardedRelayersState, FromBridgedChainMessagesDeliveryProof>); - -/// Substrate client as Substrate messages target. -pub struct SubstrateMessagesTarget { - target_client: Client, - source_client: Client, - lane_id: LaneId, - relayer_id_at_source: AccountIdOf, - transaction_params: TransactionParams>, - source_to_target_headers_relay: Option>>, -} - -impl SubstrateMessagesTarget

{ - /// Create new Substrate headers target. - pub fn new( - target_client: Client, - source_client: Client, - lane_id: LaneId, - relayer_id_at_source: AccountIdOf, - transaction_params: TransactionParams>, - source_to_target_headers_relay: Option< - Arc>, - >, - ) -> Self { - SubstrateMessagesTarget { - target_client, - source_client, - lane_id, - relayer_id_at_source, - transaction_params, - source_to_target_headers_relay, - } - } - - /// Read inbound lane state from the on-chain storage at given block. - async fn inbound_lane_data( - &self, - id: TargetHeaderIdOf>, - ) -> Result>>, SubstrateError> { - self.target_client - .storage_value( - inbound_lane_data_key( - P::SourceChain::WITH_CHAIN_MESSAGES_PALLET_NAME, - &self.lane_id, - ), - Some(id.1), - ) - .await - } - - /// Ensure that the messages pallet at target chain is active. - async fn ensure_pallet_active(&self) -> Result<(), SubstrateError> { - ensure_messages_pallet_active::(&self.target_client).await - } -} - -impl Clone for SubstrateMessagesTarget

{ - fn clone(&self) -> Self { - Self { - target_client: self.target_client.clone(), - source_client: self.source_client.clone(), - lane_id: self.lane_id, - relayer_id_at_source: self.relayer_id_at_source.clone(), - transaction_params: self.transaction_params.clone(), - source_to_target_headers_relay: self.source_to_target_headers_relay.clone(), - } - } -} - -#[async_trait] -impl RelayClient for SubstrateMessagesTarget

{ - type Error = SubstrateError; - - async fn reconnect(&mut self) -> Result<(), SubstrateError> { - // since the client calls RPC methods on both sides, we need to reconnect both - self.target_client.reconnect().await?; - self.source_client.reconnect().await?; - - // call reconnect on on-demand headers relay, because we may use different chains there - // and the error that has lead to reconnect may have came from those other chains - // (see `require_source_header_on_target`) - // - // this may lead to multiple reconnects to the same node during the same call and it - // needs to be addressed in the future - // TODO: https://github.com/paritytech/parity-bridges-common/issues/1928 - if let Some(ref mut source_to_target_headers_relay) = self.source_to_target_headers_relay { - source_to_target_headers_relay.reconnect().await?; - } - - Ok(()) - } -} - -#[async_trait] -impl TargetClient> for SubstrateMessagesTarget

-where - AccountIdOf: From< as Pair>::Public>, - BalanceOf: TryFrom>, -{ - type BatchTransaction = - BatchProofTransaction; - type TransactionTracker = TransactionTracker>; - - async fn state(&self) -> Result>, SubstrateError> { - // we can't continue to deliver confirmations if source node is out of sync, because - // it may have already received confirmations that we're going to deliver - // - // we can't continue to deliver messages if target node is out of sync, because - // it may have already received (some of) messages that we're going to deliver - self.source_client.ensure_synced().await?; - self.target_client.ensure_synced().await?; - // we can't relay messages if messages pallet at target chain is halted - self.ensure_pallet_active().await?; - - read_client_state(&self.target_client, Some(&self.source_client)).await - } - - async fn latest_received_nonce( - &self, - id: TargetHeaderIdOf>, - ) -> Result<(TargetHeaderIdOf>, MessageNonce), SubstrateError> { - // lane data missing from the storage is fine until first message is received - let latest_received_nonce = self - .inbound_lane_data(id) - .await? - .map(|data| data.last_delivered_nonce()) - .unwrap_or(0); - Ok((id, latest_received_nonce)) - } - - async fn latest_confirmed_received_nonce( - &self, - id: TargetHeaderIdOf>, - ) -> Result<(TargetHeaderIdOf>, MessageNonce), SubstrateError> { - // lane data missing from the storage is fine until first message is received - let last_confirmed_nonce = self - .inbound_lane_data(id) - .await? 
- .map(|data| data.last_confirmed_nonce) - .unwrap_or(0); - Ok((id, last_confirmed_nonce)) - } - - async fn unrewarded_relayers_state( - &self, - id: TargetHeaderIdOf>, - ) -> Result<(TargetHeaderIdOf>, UnrewardedRelayersState), SubstrateError> - { - let inbound_lane_data = - self.inbound_lane_data(id).await?.unwrap_or(InboundLaneData::default()); - Ok((id, (&inbound_lane_data).into())) - } - - async fn prove_messages_receiving( - &self, - id: TargetHeaderIdOf>, - ) -> Result< - ( - TargetHeaderIdOf>, - as MessageLane>::MessagesReceivingProof, - ), - SubstrateError, - > { - let (id, relayers_state) = self.unrewarded_relayers_state(id).await?; - let inbound_data_key = bp_messages::storage_keys::inbound_lane_data_key( - P::SourceChain::WITH_CHAIN_MESSAGES_PALLET_NAME, - &self.lane_id, - ); - let proof = self - .target_client - .prove_storage(vec![inbound_data_key], id.1) - .await? - .into_iter_nodes() - .collect(); - let proof = FromBridgedChainMessagesDeliveryProof { - bridged_header_hash: id.1, - storage_proof: proof, - lane: self.lane_id, - }; - Ok((id, (relayers_state, proof))) - } - - async fn submit_messages_proof( - &self, - maybe_batch_tx: Option, - _generated_at_header: SourceHeaderIdOf>, - nonces: RangeInclusive, - proof: as MessageLane>::MessagesProof, - ) -> Result, SubstrateError> { - let messages_proof_call = make_messages_delivery_call::

( - self.relayer_id_at_source.clone(), - proof.1.nonces_start..=proof.1.nonces_end, - proof, - maybe_batch_tx.is_none(), - ); - let final_call = match maybe_batch_tx { - Some(batch_tx) => batch_tx.append_call_and_build(messages_proof_call), - None => messages_proof_call, - }; - - let transaction_params = self.transaction_params.clone(); - let tx_tracker = self - .target_client - .submit_and_watch_signed_extrinsic( - &self.transaction_params.signer, - move |best_block_id, transaction_nonce| { - Ok(UnsignedTransaction::new(final_call.into(), transaction_nonce) - .era(TransactionEra::new(best_block_id, transaction_params.mortality))) - }, - ) - .await?; - Ok(NoncesSubmitArtifacts { nonces, tx_tracker }) - } - - async fn require_source_header_on_target( - &self, - id: SourceHeaderIdOf>, - ) -> Result, SubstrateError> { - if let Some(ref source_to_target_headers_relay) = self.source_to_target_headers_relay { - if let Some(batch_tx) = - BatchProofTransaction::new(source_to_target_headers_relay.clone(), id.0).await? - { - return Ok(Some(batch_tx)) - } - - source_to_target_headers_relay.require_more_headers(id.0).await; - } - - Ok(None) - } -} - -/// Make messages delivery call from given proof. -fn make_messages_delivery_call( - relayer_id_at_source: AccountIdOf, - nonces: RangeInclusive, - proof: SubstrateMessagesProof, - trace_call: bool, -) -> CallOf { - let messages_count = nonces.end() - nonces.start() + 1; - let dispatch_weight = proof.0; - P::ReceiveMessagesProofCallBuilder::build_receive_messages_proof_call( - relayer_id_at_source, - proof, - messages_count as _, - dispatch_weight, - trace_call, - ) -} diff --git a/relays/lib-substrate-relay/src/on_demand/headers.rs b/relays/lib-substrate-relay/src/on_demand/headers.rs deleted file mode 100644 index e8a2a3c6c..000000000 --- a/relays/lib-substrate-relay/src/on_demand/headers.rs +++ /dev/null @@ -1,550 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! On-demand Substrate -> Substrate header finality relay. - -use crate::{ - finality::SubmitFinalityProofCallBuilder, finality_base::engine::MaxExpectedCallSizeCheck, -}; - -use async_std::sync::{Arc, Mutex}; -use async_trait::async_trait; -use bp_header_chain::ConsensusLogReader; -use bp_runtime::HeaderIdProvider; -use futures::{select, FutureExt}; -use num_traits::{One, Saturating, Zero}; -use sp_runtime::traits::Header; - -use finality_relay::{FinalitySyncParams, TargetClient as FinalityTargetClient}; -use relay_substrate_client::{ - AccountIdOf, AccountKeyPairOf, BlockNumberOf, CallOf, Chain, Client, Error as SubstrateError, - HeaderIdOf, -}; -use relay_utils::{ - metrics::MetricsParams, relay_loop::Client as RelayClient, FailedClient, MaybeConnectionError, - STALL_TIMEOUT, -}; - -use crate::{ - finality::{ - source::{RequiredHeaderNumberRef, SubstrateFinalitySource}, - target::SubstrateFinalityTarget, - SubstrateFinalitySyncPipeline, RECENT_FINALITY_PROOFS_LIMIT, - }, - finality_base::engine::Engine, - on_demand::OnDemandRelay, - TransactionParams, -}; - -/// On-demand Substrate <-> Substrate header finality relay. -/// -/// This relay may be requested to sync more headers, whenever some other relay (e.g. messages -/// relay) needs it to continue its regular work. 
When enough headers are relayed, on-demand stops -/// syncing headers. -#[derive(Clone)] -pub struct OnDemandHeadersRelay { - /// Relay task name. - relay_task_name: String, - /// Shared reference to maximal required finalized header number. - required_header_number: RequiredHeaderNumberRef, - /// Client of the source chain. - source_client: Client, - /// Client of the target chain. - target_client: Client, -} - -impl OnDemandHeadersRelay

{ - /// Create new on-demand headers relay. - /// - /// If `metrics_params` is `Some(_)`, the metrics of the finality relay are registered. - /// Otherwise, all required metrics must be exposed outside of this method. - pub fn new( - source_client: Client, - target_client: Client, - target_transaction_params: TransactionParams>, - only_mandatory_headers: bool, - metrics_params: Option, - ) -> Self - where - AccountIdOf: - From< as sp_core::Pair>::Public>, - { - let required_header_number = Arc::new(Mutex::new(Zero::zero())); - let this = OnDemandHeadersRelay { - relay_task_name: on_demand_headers_relay_name::(), - required_header_number: required_header_number.clone(), - source_client: source_client.clone(), - target_client: target_client.clone(), - }; - async_std::task::spawn(async move { - background_task::

( - source_client, - target_client, - target_transaction_params, - only_mandatory_headers, - required_header_number, - metrics_params, - ) - .await; - }); - - this - } -} - -#[async_trait] -impl OnDemandRelay - for OnDemandHeadersRelay

-{ - async fn reconnect(&self) -> Result<(), SubstrateError> { - // using clone is fine here (to avoid mut requirement), because clone on Client clones - // internal references - self.source_client.clone().reconnect().await?; - self.target_client.clone().reconnect().await - } - - async fn require_more_headers(&self, required_header: BlockNumberOf) { - let mut required_header_number = self.required_header_number.lock().await; - if required_header > *required_header_number { - log::trace!( - target: "bridge", - "[{}] More {} headers required. Going to sync up to the {}", - self.relay_task_name, - P::SourceChain::NAME, - required_header, - ); - - *required_header_number = required_header; - } - } - - async fn prove_header( - &self, - required_header: BlockNumberOf, - ) -> Result<(HeaderIdOf, Vec>), SubstrateError> { - const MAX_ITERATIONS: u32 = 4; - let mut iterations = 0; - let mut current_required_header = required_header; - loop { - // first find proper header (either `current_required_header`) or its descendant - let finality_source = - SubstrateFinalitySource::

::new(self.source_client.clone(), None); - let (header, mut proof) = - finality_source.prove_block_finality(current_required_header).await?; - let header_id = header.id(); - - // verify and optimize justification before including it into the call - let context = P::FinalityEngine::verify_and_optimize_proof( - &self.target_client, - &header, - &mut proof, - ) - .await?; - - // now we have the header and its proof, but we want to minimize our losses, so let's - // check if we'll get the full refund for submitting this header - let check_result = P::FinalityEngine::check_max_expected_call_size(&header, &proof); - if let MaxExpectedCallSizeCheck::Exceeds { call_size, max_call_size } = check_result { - iterations += 1; - current_required_header = header_id.number().saturating_add(One::one()); - if iterations < MAX_ITERATIONS { - log::debug!( - target: "bridge", - "[{}] Requested to prove {} head {:?}. Selected to prove {} head {:?}. But it is too large: {} vs {}. \ - Going to select next header", - self.relay_task_name, - P::SourceChain::NAME, - required_header, - P::SourceChain::NAME, - header_id, - call_size, - max_call_size, - ); - - continue; - } - } - - log::debug!( - target: "bridge", - "[{}] Requested to prove {} head {:?}. Selected to prove {} head {:?} (after {} iterations)", - self.relay_task_name, - P::SourceChain::NAME, - required_header, - P::SourceChain::NAME, - header_id, - iterations, - ); - - // and then craft the submit-proof call - let call = P::SubmitFinalityProofCallBuilder::build_submit_finality_proof_call( - header, proof, context, - ); - - return Ok((header_id, vec![call])); - } - } -} - -/// Background task that is responsible for starting headers relay. 
-async fn background_task( - source_client: Client, - target_client: Client, - target_transaction_params: TransactionParams>, - only_mandatory_headers: bool, - required_header_number: RequiredHeaderNumberRef, - metrics_params: Option, -) where - AccountIdOf: From< as sp_core::Pair>::Public>, -{ - let relay_task_name = on_demand_headers_relay_name::(); - let target_transactions_mortality = target_transaction_params.mortality; - let mut finality_source = SubstrateFinalitySource::

::new( - source_client.clone(), - Some(required_header_number.clone()), - ); - let mut finality_target = - SubstrateFinalityTarget::new(target_client.clone(), target_transaction_params); - let mut latest_non_mandatory_at_source = Zero::zero(); - - let mut restart_relay = true; - let finality_relay_task = futures::future::Fuse::terminated(); - futures::pin_mut!(finality_relay_task); - - loop { - select! { - _ = async_std::task::sleep(P::TargetChain::AVERAGE_BLOCK_INTERVAL).fuse() => {}, - _ = finality_relay_task => { - // this should never happen in practice given the current code - restart_relay = true; - }, - } - - // read best finalized source header number from source - let best_finalized_source_header_at_source = - best_finalized_source_header_at_source(&finality_source, &relay_task_name).await; - if matches!(best_finalized_source_header_at_source, Err(ref e) if e.is_connection_error()) { - relay_utils::relay_loop::reconnect_failed_client( - FailedClient::Source, - relay_utils::relay_loop::RECONNECT_DELAY, - &mut finality_source, - &mut finality_target, - ) - .await; - continue - } - - // read best finalized source header number from target - let best_finalized_source_header_at_target = - best_finalized_source_header_at_target::

(&finality_target, &relay_task_name).await; - if matches!(best_finalized_source_header_at_target, Err(ref e) if e.is_connection_error()) { - relay_utils::relay_loop::reconnect_failed_client( - FailedClient::Target, - relay_utils::relay_loop::RECONNECT_DELAY, - &mut finality_source, - &mut finality_target, - ) - .await; - continue - } - - // submit mandatory header if some headers are missing - let best_finalized_source_header_at_source_fmt = - format!("{best_finalized_source_header_at_source:?}"); - let best_finalized_source_header_at_target_fmt = - format!("{best_finalized_source_header_at_target:?}"); - let required_header_number_value = *required_header_number.lock().await; - let mandatory_scan_range = mandatory_headers_scan_range::( - best_finalized_source_header_at_source.ok(), - best_finalized_source_header_at_target.ok(), - required_header_number_value, - ) - .await; - - log::trace!( - target: "bridge", - "[{}] Mandatory headers scan range: ({:?}, {:?}, {:?}) -> {:?}", - relay_task_name, - required_header_number_value, - best_finalized_source_header_at_source_fmt, - best_finalized_source_header_at_target_fmt, - mandatory_scan_range, - ); - - if let Some(mandatory_scan_range) = mandatory_scan_range { - let relay_mandatory_header_result = relay_mandatory_header_from_range( - &finality_source, - &required_header_number, - best_finalized_source_header_at_target_fmt, - ( - std::cmp::max(mandatory_scan_range.0, latest_non_mandatory_at_source), - mandatory_scan_range.1, - ), - &relay_task_name, - ) - .await; - match relay_mandatory_header_result { - Ok(true) => (), - Ok(false) => { - // there are no (or we don't need to relay them) mandatory headers in the range - // => to avoid scanning the same headers over and over again, remember that - latest_non_mandatory_at_source = mandatory_scan_range.1; - - log::trace!( - target: "bridge", - "[{}] No mandatory {} headers in the range {:?}", - relay_task_name, - P::SourceChain::NAME, - mandatory_scan_range, - ); - }, - 
Err(e) => { - log::warn!( - target: "bridge", - "[{}] Failed to scan mandatory {} headers range ({:?}): {:?}", - relay_task_name, - P::SourceChain::NAME, - mandatory_scan_range, - e, - ); - - if e.is_connection_error() { - relay_utils::relay_loop::reconnect_failed_client( - FailedClient::Source, - relay_utils::relay_loop::RECONNECT_DELAY, - &mut finality_source, - &mut finality_target, - ) - .await; - continue - } - }, - } - } - - // start/restart relay - if restart_relay { - let stall_timeout = relay_substrate_client::transaction_stall_timeout( - target_transactions_mortality, - P::TargetChain::AVERAGE_BLOCK_INTERVAL, - STALL_TIMEOUT, - ); - - log::info!( - target: "bridge", - "[{}] Starting on-demand headers relay task\n\t\ - Only mandatory headers: {}\n\t\ - Tx mortality: {:?} (~{}m)\n\t\ - Stall timeout: {:?}", - relay_task_name, - only_mandatory_headers, - target_transactions_mortality, - stall_timeout.as_secs_f64() / 60.0f64, - stall_timeout, - ); - - finality_relay_task.set( - finality_relay::run( - finality_source.clone(), - finality_target.clone(), - FinalitySyncParams { - tick: std::cmp::max( - P::SourceChain::AVERAGE_BLOCK_INTERVAL, - P::TargetChain::AVERAGE_BLOCK_INTERVAL, - ), - recent_finality_proofs_limit: RECENT_FINALITY_PROOFS_LIMIT, - stall_timeout, - only_mandatory_headers, - }, - metrics_params.clone().unwrap_or_else(MetricsParams::disabled), - futures::future::pending(), - ) - .fuse(), - ); - - restart_relay = false; - } - } -} - -/// Returns `Some()` with inclusive range of headers which must be scanned for mandatory headers -/// and the first of such headers must be submitted to the target node. 
-async fn mandatory_headers_scan_range( - best_finalized_source_header_at_source: Option, - best_finalized_source_header_at_target: Option, - required_header_number: BlockNumberOf, -) -> Option<(C::BlockNumber, C::BlockNumber)> { - // if we have been unable to read header number from the target, then let's assume - // that it is the same as required header number. Otherwise we risk submitting - // unneeded transactions - let best_finalized_source_header_at_target = - best_finalized_source_header_at_target.unwrap_or(required_header_number); - - // if we have been unable to read header number from the source, then let's assume - // that it is the same as at the target - let best_finalized_source_header_at_source = - best_finalized_source_header_at_source.unwrap_or(best_finalized_source_header_at_target); - - // if relay is already asked to sync more headers than we have at source, don't do anything yet - if required_header_number >= best_finalized_source_header_at_source { - return None - } - - Some(( - best_finalized_source_header_at_target + One::one(), - best_finalized_source_header_at_source, - )) -} - -/// Try to find mandatory header in the inclusive headers range and, if one is found, ask to relay -/// it. -/// -/// Returns `true` if header was found and (asked to be) relayed and `false` otherwise. -async fn relay_mandatory_header_from_range( - finality_source: &SubstrateFinalitySource

, - required_header_number: &RequiredHeaderNumberRef, - best_finalized_source_header_at_target: String, - range: (BlockNumberOf, BlockNumberOf), - relay_task_name: &str, -) -> Result { - // search for mandatory header first - let mandatory_source_header_number = - find_mandatory_header_in_range(finality_source, range).await?; - - // if there are no mandatory headers - we have nothing to do - let mandatory_source_header_number = match mandatory_source_header_number { - Some(mandatory_source_header_number) => mandatory_source_header_number, - None => return Ok(false), - }; - - // `find_mandatory_header` call may take a while => check if `required_header_number` is still - // less than our `mandatory_source_header_number` before logging anything - let mut required_header_number = required_header_number.lock().await; - if *required_header_number >= mandatory_source_header_number { - return Ok(false) - } - - log::trace!( - target: "bridge", - "[{}] Too many {} headers missing at target ({} vs {}). Going to sync up to the mandatory {}", - relay_task_name, - P::SourceChain::NAME, - best_finalized_source_header_at_target, - range.1, - mandatory_source_header_number, - ); - - *required_header_number = mandatory_source_header_number; - Ok(true) -} - -/// Read best finalized source block number from source client. -/// -/// Returns `None` if we have failed to read the number. -async fn best_finalized_source_header_at_source( - finality_source: &SubstrateFinalitySource

, - relay_task_name: &str, -) -> Result, relay_substrate_client::Error> { - finality_source.on_chain_best_finalized_block_number().await.map_err(|error| { - log::error!( - target: "bridge", - "[{}] Failed to read best finalized source header from source: {:?}", - relay_task_name, - error, - ); - - error - }) -} - -/// Read best finalized source block number from target client. -/// -/// Returns `None` if we have failed to read the number. -async fn best_finalized_source_header_at_target( - finality_target: &SubstrateFinalityTarget

, - relay_task_name: &str, -) -> Result, as RelayClient>::Error> -where - AccountIdOf: From< as sp_core::Pair>::Public>, -{ - finality_target - .best_finalized_source_block_id() - .await - .map_err(|error| { - log::error!( - target: "bridge", - "[{}] Failed to read best finalized source header from target: {:?}", - relay_task_name, - error, - ); - - error - }) - .map(|id| id.0) -} - -/// Read first mandatory header in given inclusive range. -/// -/// Returns `Ok(None)` if there were no mandatory headers in the range. -async fn find_mandatory_header_in_range( - finality_source: &SubstrateFinalitySource

, - range: (BlockNumberOf, BlockNumberOf), -) -> Result>, relay_substrate_client::Error> { - let mut current = range.0; - while current <= range.1 { - let header = finality_source.client().header_by_number(current).await?; - if >::ConsensusLogReader::schedules_authorities_change( - header.digest(), - ) { - return Ok(Some(current)) - } - - current += One::one(); - } - - Ok(None) -} - -/// On-demand headers relay task name. -fn on_demand_headers_relay_name() -> String { - format!("{}-to-{}-on-demand-headers", SourceChain::NAME, TargetChain::NAME) -} - -#[cfg(test)] -mod tests { - use super::*; - use relay_substrate_client::test_chain::TestChain; - - const AT_SOURCE: Option> = Some(10); - const AT_TARGET: Option> = Some(1); - - #[async_std::test] - async fn mandatory_headers_scan_range_selects_range_if_some_headers_are_missing() { - assert_eq!( - mandatory_headers_scan_range::(AT_SOURCE, AT_TARGET, 0,).await, - Some((AT_TARGET.unwrap() + 1, AT_SOURCE.unwrap())), - ); - } - - #[async_std::test] - async fn mandatory_headers_scan_range_selects_nothing_if_already_queued() { - assert_eq!( - mandatory_headers_scan_range::(AT_SOURCE, AT_TARGET, AT_SOURCE.unwrap(),) - .await, - None, - ); - } -} diff --git a/relays/lib-substrate-relay/src/on_demand/mod.rs b/relays/lib-substrate-relay/src/on_demand/mod.rs deleted file mode 100644 index 00bb33d67..000000000 --- a/relays/lib-substrate-relay/src/on_demand/mod.rs +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Types and functions intended to ease adding of new Substrate -> Substrate -//! on-demand pipelines. - -use async_trait::async_trait; -use relay_substrate_client::{BlockNumberOf, CallOf, Chain, Error as SubstrateError, HeaderIdOf}; - -pub mod headers; -pub mod parachains; - -/// On-demand headers relay that is relaying finalizing headers only when requested. -#[async_trait] -pub trait OnDemandRelay: Send + Sync { - /// Reconnect to source and target nodes. - async fn reconnect(&self) -> Result<(), SubstrateError>; - - /// Ask relay to relay source header with given number to the target chain. - /// - /// Depending on implementation, on-demand relay may also relay `required_header` ancestors - /// (e.g. if they're mandatory), or its descendants. The request is considered complete if - /// the best avbailable header at the target chain has number that is larger than or equal - /// to the `required_header`. - async fn require_more_headers(&self, required_header: BlockNumberOf); - - /// Ask relay to prove source `required_header` to the `TargetChain`. - /// - /// Returns number of header that is proved (it may be the `required_header` or one of its - /// descendants) and calls for delivering the proof. 
- async fn prove_header( - &self, - required_header: BlockNumberOf, - ) -> Result<(HeaderIdOf, Vec>), SubstrateError>; -} diff --git a/relays/lib-substrate-relay/src/on_demand/parachains.rs b/relays/lib-substrate-relay/src/on_demand/parachains.rs deleted file mode 100644 index f67c002bb..000000000 --- a/relays/lib-substrate-relay/src/on_demand/parachains.rs +++ /dev/null @@ -1,1033 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! On-demand Substrate -> Substrate parachain finality relay. 
- -use crate::{ - messages_source::best_finalized_peer_header_at_self, - on_demand::OnDemandRelay, - parachains::{ - source::ParachainsSource, target::ParachainsTarget, ParachainsPipelineAdapter, - SubmitParachainHeadsCallBuilder, SubstrateParachainsPipeline, - }, - TransactionParams, -}; - -use async_std::{ - channel::{unbounded, Receiver, Sender}, - sync::{Arc, Mutex}, -}; -use async_trait::async_trait; -use bp_polkadot_core::parachains::{ParaHash, ParaId}; -use bp_runtime::HeaderIdProvider; -use futures::{select, FutureExt}; -use num_traits::Zero; -use pallet_bridge_parachains::{RelayBlockHash, RelayBlockHasher, RelayBlockNumber}; -use parachains_relay::parachains_loop::{AvailableHeader, SourceClient, TargetClient}; -use relay_substrate_client::{ - is_ancient_block, AccountIdOf, AccountKeyPairOf, BlockNumberOf, CallOf, Chain, Client, - Error as SubstrateError, HashOf, HeaderIdOf, ParachainBase, -}; -use relay_utils::{ - metrics::MetricsParams, relay_loop::Client as RelayClient, BlockNumberBase, FailedClient, - HeaderId, UniqueSaturatedInto, -}; -use std::fmt::Debug; - -/// On-demand Substrate <-> Substrate parachain finality relay. -/// -/// This relay may be requested to sync more parachain headers, whenever some other relay -/// (e.g. messages relay) needs it to continue its regular work. When enough parachain headers -/// are relayed, on-demand stops syncing headers. -#[derive(Clone)] -pub struct OnDemandParachainsRelay { - /// Relay task name. - relay_task_name: String, - /// Channel used to communicate with background task and ask for relay of parachain heads. - required_header_number_sender: Sender>, - /// Source relay chain client. - source_relay_client: Client, - /// Target chain client. - target_client: Client, - /// On-demand relay chain relay. - on_demand_source_relay_to_target_headers: - Arc>, -} - -impl OnDemandParachainsRelay

{ - /// Create new on-demand parachains relay. - /// - /// Note that the argument is the source relay chain client, not the parachain client. - /// That's because parachain finality is determined by the relay chain and we don't - /// need to connect to the parachain itself here. - pub fn new( - source_relay_client: Client, - target_client: Client, - target_transaction_params: TransactionParams>, - on_demand_source_relay_to_target_headers: Arc< - dyn OnDemandRelay, - >, - ) -> Self - where - P::SourceParachain: Chain, - P::SourceRelayChain: - Chain, - AccountIdOf: - From< as sp_core::Pair>::Public>, - { - let (required_header_number_sender, required_header_number_receiver) = unbounded(); - let this = OnDemandParachainsRelay { - relay_task_name: on_demand_parachains_relay_name::( - ), - required_header_number_sender, - source_relay_client: source_relay_client.clone(), - target_client: target_client.clone(), - on_demand_source_relay_to_target_headers: on_demand_source_relay_to_target_headers - .clone(), - }; - async_std::task::spawn(async move { - background_task::

( - source_relay_client, - target_client, - target_transaction_params, - on_demand_source_relay_to_target_headers, - required_header_number_receiver, - ) - .await; - }); - - this - } -} - -#[async_trait] -impl OnDemandRelay - for OnDemandParachainsRelay

-where - P::SourceParachain: Chain, -{ - async fn reconnect(&self) -> Result<(), SubstrateError> { - // using clone is fine here (to avoid mut requirement), because clone on Client clones - // internal references - self.source_relay_client.clone().reconnect().await?; - self.target_client.clone().reconnect().await?; - // we'll probably need to reconnect relay chain relayer clients also - self.on_demand_source_relay_to_target_headers.reconnect().await - } - - async fn require_more_headers(&self, required_header: BlockNumberOf) { - if let Err(e) = self.required_header_number_sender.send(required_header).await { - log::trace!( - target: "bridge", - "[{}] Failed to request {} header {:?}: {:?}", - self.relay_task_name, - P::SourceParachain::NAME, - required_header, - e, - ); - } - } - - /// Ask relay to prove source `required_header` to the `TargetChain`. - async fn prove_header( - &self, - required_parachain_header: BlockNumberOf, - ) -> Result<(HeaderIdOf, Vec>), SubstrateError> { - // select headers to prove - let parachains_source = ParachainsSource::

::new( - self.source_relay_client.clone(), - Arc::new(Mutex::new(AvailableHeader::Missing)), - ); - let env = (self, ¶chains_source); - let (need_to_prove_relay_block, selected_relay_block, selected_parachain_block) = - select_headers_to_prove(env, required_parachain_header).await?; - - log::debug!( - target: "bridge", - "[{}] Requested to prove {} head {:?}. Selected to prove {} head {:?} and {} head {:?}", - self.relay_task_name, - P::SourceParachain::NAME, - required_parachain_header, - P::SourceParachain::NAME, - selected_parachain_block, - P::SourceRelayChain::NAME, - if need_to_prove_relay_block { - Some(selected_relay_block) - } else { - None - }, - ); - - // now let's prove relay chain block (if needed) - let mut calls = Vec::new(); - let mut proved_relay_block = selected_relay_block; - if need_to_prove_relay_block { - let (relay_block, relay_prove_call) = self - .on_demand_source_relay_to_target_headers - .prove_header(selected_relay_block.number()) - .await?; - proved_relay_block = relay_block; - calls.extend(relay_prove_call); - } - - // despite what we've selected before (in `select_headers_to_prove` call), if headers relay - // have chose the different header (e.g. because there's no GRANDPA jusstification for it), - // we need to prove parachain head available at this header - let para_id = ParaId(P::SourceParachain::PARACHAIN_ID); - let mut proved_parachain_block = selected_parachain_block; - if proved_relay_block != selected_relay_block { - proved_parachain_block = parachains_source - .on_chain_para_head_id(proved_relay_block) - .await? - // this could happen e.g. if parachain has been offboarded? - .ok_or_else(|| { - SubstrateError::MissingRequiredParachainHead( - para_id, - proved_relay_block.number().unique_saturated_into(), - ) - })?; - - log::debug!( - target: "bridge", - "[{}] Selected to prove {} head {:?} and {} head {:?}. 
Instead proved {} head {:?} and {} head {:?}", - self.relay_task_name, - P::SourceParachain::NAME, - selected_parachain_block, - P::SourceRelayChain::NAME, - selected_relay_block, - P::SourceParachain::NAME, - proved_parachain_block, - P::SourceRelayChain::NAME, - proved_relay_block, - ); - } - - // and finally - prove parachain head - let (para_proof, para_hash) = - parachains_source.prove_parachain_head(proved_relay_block).await?; - calls.push(P::SubmitParachainHeadsCallBuilder::build_submit_parachain_heads_call( - proved_relay_block, - vec![(para_id, para_hash)], - para_proof, - )); - - Ok((proved_parachain_block, calls)) - } -} - -/// Background task that is responsible for starting parachain headers relay. -async fn background_task( - source_relay_client: Client, - target_client: Client, - target_transaction_params: TransactionParams>, - on_demand_source_relay_to_target_headers: Arc< - dyn OnDemandRelay, - >, - required_parachain_header_number_receiver: Receiver>, -) where - P::SourceParachain: Chain, - P::SourceRelayChain: - Chain, - AccountIdOf: From< as sp_core::Pair>::Public>, -{ - let relay_task_name = on_demand_parachains_relay_name::(); - let target_transactions_mortality = target_transaction_params.mortality; - - let mut relay_state = RelayState::Idle; - let mut required_parachain_header_number = Zero::zero(); - let required_para_header_ref = Arc::new(Mutex::new(AvailableHeader::Unavailable)); - - let mut restart_relay = true; - let parachains_relay_task = futures::future::Fuse::terminated(); - futures::pin_mut!(parachains_relay_task); - - let mut parachains_source = - ParachainsSource::

::new(source_relay_client.clone(), required_para_header_ref.clone()); - let mut parachains_target = - ParachainsTarget::

::new(target_client.clone(), target_transaction_params.clone()); - - loop { - select! { - new_required_parachain_header_number = required_parachain_header_number_receiver.recv().fuse() => { - let new_required_parachain_header_number = match new_required_parachain_header_number { - Ok(new_required_parachain_header_number) => new_required_parachain_header_number, - Err(e) => { - log::error!( - target: "bridge", - "[{}] Background task has exited with error: {:?}", - relay_task_name, - e, - ); - - return; - }, - }; - - // keep in mind that we are not updating `required_para_header_ref` here, because - // then we'll be submitting all previous headers as well (while required relay headers are - // delivered) and we want to avoid that (to reduce cost) - if new_required_parachain_header_number > required_parachain_header_number { - log::trace!( - target: "bridge", - "[{}] More {} headers required. Going to sync up to the {}", - relay_task_name, - P::SourceParachain::NAME, - new_required_parachain_header_number, - ); - - required_parachain_header_number = new_required_parachain_header_number; - } - }, - _ = async_std::task::sleep(P::TargetChain::AVERAGE_BLOCK_INTERVAL).fuse() => {}, - _ = parachains_relay_task => { - // this should never happen in practice given the current code - restart_relay = true; - }, - } - - // the workflow of the on-demand parachains relay is: - // - // 1) message relay (or any other dependent relay) sees new message at parachain header - // `PH`; - // - // 2) it sees that the target chain does not know `PH`; - // - // 3) it asks on-demand parachains relay to relay `PH` to the target chain; - // - // Phase#1: relaying relay chain header - // - // 4) on-demand parachains relay waits for GRANDPA-finalized block of the source relay chain - // `RH` that is storing `PH` or its descendant. 
Let it be `PH'`; - // 5) it asks on-demand headers relay to relay `RH` to the target chain; - // 6) it waits until `RH` (or its descendant) is relayed to the target chain; - // - // Phase#2: relaying parachain header - // - // 7) on-demand parachains relay sets `ParachainsSource::maximal_header_number` to the - // `PH'.number()`. - // 8) parachains finality relay sees that the parachain head has been updated and relays - // `PH'` to the target chain. - - // select headers to relay - let relay_data = read_relay_data( - ¶chains_source, - ¶chains_target, - required_parachain_header_number, - ) - .await; - match relay_data { - Ok(relay_data) => { - let prev_relay_state = relay_state; - relay_state = select_headers_to_relay(&relay_data, relay_state); - log::trace!( - target: "bridge", - "[{}] Selected new relay state: {:?} using old state {:?} and data {:?}", - relay_task_name, - relay_state, - prev_relay_state, - relay_data, - ); - }, - Err(failed_client) => { - relay_utils::relay_loop::reconnect_failed_client( - failed_client, - relay_utils::relay_loop::RECONNECT_DELAY, - &mut parachains_source, - &mut parachains_target, - ) - .await; - continue - }, - } - - // we have selected our new 'state' => let's notify our source clients about our new - // requirements - match relay_state { - RelayState::Idle => (), - RelayState::RelayingRelayHeader(required_relay_header) => { - on_demand_source_relay_to_target_headers - .require_more_headers(required_relay_header) - .await; - }, - RelayState::RelayingParaHeader(required_para_header) => { - *required_para_header_ref.lock().await = - AvailableHeader::Available(required_para_header); - }, - } - - // start/restart relay - if restart_relay { - let stall_timeout = relay_substrate_client::transaction_stall_timeout( - target_transactions_mortality, - P::TargetChain::AVERAGE_BLOCK_INTERVAL, - relay_utils::STALL_TIMEOUT, - ); - - log::info!( - target: "bridge", - "[{}] Starting on-demand-parachains relay task\n\t\ - Tx mortality: {:?} 
(~{}m)\n\t\ - Stall timeout: {:?}", - relay_task_name, - target_transactions_mortality, - stall_timeout.as_secs_f64() / 60.0f64, - stall_timeout, - ); - - parachains_relay_task.set( - parachains_relay::parachains_loop::run( - parachains_source.clone(), - parachains_target.clone(), - MetricsParams::disabled(), - futures::future::pending(), - ) - .fuse(), - ); - - restart_relay = false; - } - } -} - -/// On-demand parachains relay task name. -fn on_demand_parachains_relay_name() -> String { - format!("{}-to-{}-on-demand-parachain", SourceChain::NAME, TargetChain::NAME) -} - -/// On-demand relay state. -#[derive(Clone, Copy, Debug, PartialEq)] -enum RelayState { - /// On-demand relay is not doing anything. - Idle, - /// Relaying given relay header to relay given parachain header later. - RelayingRelayHeader(RelayNumber), - /// Relaying given parachain header. - RelayingParaHeader(HeaderId), -} - -/// Data gathered from source and target clients, used by on-demand relay. -#[derive(Debug)] -struct RelayData { - /// Parachain header number that is required at the target chain. - pub required_para_header: ParaNumber, - /// Parachain header number, known to the target chain. - pub para_header_at_target: Option, - /// Parachain header id, known to the source (relay) chain. - pub para_header_at_source: Option>, - /// Parachain header, that is available at the source relay chain at `relay_header_at_target` - /// block. - /// - /// May be `None` if there's no `relay_header_at_target` yet, or if the - /// `relay_header_at_target` is too old and we think its state has been pruned. - pub para_header_at_relay_header_at_target: Option>, - /// Relay header number at the source chain. - pub relay_header_at_source: RelayNumber, - /// Relay header number at the target chain. - pub relay_header_at_target: Option, -} - -/// Read required data from source and target clients. -async fn read_relay_data( - source: &ParachainsSource

, - target: &ParachainsTarget

, - required_header_number: BlockNumberOf, -) -> Result< - RelayData< - HashOf, - BlockNumberOf, - BlockNumberOf, - >, - FailedClient, -> -where - ParachainsTarget

: - TargetClient> + RelayClient, -{ - let map_target_err = |e| { - log::error!( - target: "bridge", - "[{}] Failed to read relay data from {} client: {:?}", - on_demand_parachains_relay_name::(), - P::TargetChain::NAME, - e, - ); - FailedClient::Target - }; - let map_source_err = |e| { - log::error!( - target: "bridge", - "[{}] Failed to read relay data from {} client: {:?}", - on_demand_parachains_relay_name::(), - P::SourceRelayChain::NAME, - e, - ); - FailedClient::Source - }; - - let best_target_block_hash = target.best_block().await.map_err(map_target_err)?.1; - let para_header_at_target = best_finalized_peer_header_at_self::< - P::TargetChain, - P::SourceParachain, - >(target.client(), best_target_block_hash) - .await; - // if there are no parachain heads at the target (`NoParachainHeadAtTarget`), we'll need to - // submit at least one. Otherwise the pallet will be treated as uninitialized and messages - // sync will stall. - let para_header_at_target = match para_header_at_target { - Ok(Some(para_header_at_target)) => Some(para_header_at_target.0), - Ok(None) => None, - Err(e) => return Err(map_target_err(e)), - }; - - let best_finalized_relay_header = - source.client().best_finalized_header().await.map_err(map_source_err)?; - let best_finalized_relay_block_id = best_finalized_relay_header.id(); - let para_header_at_source = source - .on_chain_para_head_id(best_finalized_relay_block_id) - .await - .map_err(map_source_err)?; - - let relay_header_at_source = best_finalized_relay_block_id.0; - let relay_header_at_target = best_finalized_peer_header_at_self::< - P::TargetChain, - P::SourceRelayChain, - >(target.client(), best_target_block_hash) - .await - .map_err(map_target_err)?; - - // if relay header at target is too old then its state may already be discarded at the source - // => just use `None` in this case - // - // the same is for case when there's no relay header at target at all - let available_relay_header_at_target = - 
relay_header_at_target.filter(|relay_header_at_target| { - !is_ancient_block(relay_header_at_target.number(), relay_header_at_source) - }); - let para_header_at_relay_header_at_target = - if let Some(available_relay_header_at_target) = available_relay_header_at_target { - source - .on_chain_para_head_id(available_relay_header_at_target) - .await - .map_err(map_source_err)? - } else { - None - }; - - Ok(RelayData { - required_para_header: required_header_number, - para_header_at_target, - para_header_at_source, - relay_header_at_source, - relay_header_at_target: relay_header_at_target - .map(|relay_header_at_target| relay_header_at_target.0), - para_header_at_relay_header_at_target, - }) -} - -/// Select relay and parachain headers that need to be relayed. -fn select_headers_to_relay( - data: &RelayData, - state: RelayState, -) -> RelayState -where - ParaHash: Clone, - ParaNumber: Copy + PartialOrd + Zero, - RelayNumber: Copy + Debug + Ord, -{ - // we can't do anything until **relay chain** bridge GRANDPA pallet is not initialized at the - // target chain - let relay_header_at_target = match data.relay_header_at_target { - Some(relay_header_at_target) => relay_header_at_target, - None => return RelayState::Idle, - }; - - // Process the `RelayingRelayHeader` state. - if let &RelayState::RelayingRelayHeader(relay_header_number) = &state { - if relay_header_at_target < relay_header_number { - // The required relay header hasn't yet been relayed. Ask / wait for it. - return state - } - - // We may switch to `RelayingParaHeader` if parachain head is available. - if let Some(para_header_at_relay_header_at_target) = - data.para_header_at_relay_header_at_target.as_ref() - { - return RelayState::RelayingParaHeader(para_header_at_relay_header_at_target.clone()) - } - - // else use the regular process - e.g. we may require to deliver new relay header first - } - - // Process the `RelayingParaHeader` state. 
- if let RelayState::RelayingParaHeader(para_header_id) = &state { - let para_header_at_target_or_zero = data.para_header_at_target.unwrap_or_else(Zero::zero); - if para_header_at_target_or_zero < para_header_id.0 { - // The required parachain header hasn't yet been relayed. Ask / wait for it. - return state - } - } - - // if we haven't read para head from the source, we can't yet do anything - let para_header_at_source = match data.para_header_at_source { - Some(ref para_header_at_source) => para_header_at_source.clone(), - None => return RelayState::Idle, - }; - - // if we have parachain head at the source, but no parachain heads at the target, we'll need - // to deliver at least one parachain head - let (required_para_header, para_header_at_target) = match data.para_header_at_target { - Some(para_header_at_target) => (data.required_para_header, para_header_at_target), - None => (para_header_at_source.0, Zero::zero()), - }; - - // if we have already satisfied our "customer", do nothing - if required_para_header <= para_header_at_target { - return RelayState::Idle - } - - // if required header is not available even at the source chain, let's wait - if required_para_header > para_header_at_source.0 { - return RelayState::Idle - } - - // we will always try to sync latest parachain/relay header, even if we've been asked for some - // its ancestor - - // we need relay chain header first - if relay_header_at_target < data.relay_header_at_source { - return RelayState::RelayingRelayHeader(data.relay_header_at_source) - } - - // if all relay headers synced, we may start directly with parachain header - RelayState::RelayingParaHeader(para_header_at_source) -} - -/// Environment for the `select_headers_to_prove` call. -#[async_trait] -trait SelectHeadersToProveEnvironment { - /// Returns associated parachain id. - fn parachain_id(&self) -> ParaId; - /// Returns best finalized relay block. 
- async fn best_finalized_relay_block_at_source( - &self, - ) -> Result, SubstrateError>; - /// Returns best finalized relay block that is known at `P::TargetChain`. - async fn best_finalized_relay_block_at_target( - &self, - ) -> Result, SubstrateError>; - /// Returns best finalized parachain block at given source relay chain block. - async fn best_finalized_para_block_at_source( - &self, - at_relay_block: HeaderId, - ) -> Result>, SubstrateError>; -} - -#[async_trait] -impl<'a, P: SubstrateParachainsPipeline> - SelectHeadersToProveEnvironment< - BlockNumberOf, - HashOf, - BlockNumberOf, - HashOf, - > for (&'a OnDemandParachainsRelay

, &'a ParachainsSource

) -{ - fn parachain_id(&self) -> ParaId { - ParaId(P::SourceParachain::PARACHAIN_ID) - } - - async fn best_finalized_relay_block_at_source( - &self, - ) -> Result, SubstrateError> { - Ok(self.0.source_relay_client.best_finalized_header().await?.id()) - } - - async fn best_finalized_relay_block_at_target( - &self, - ) -> Result, SubstrateError> { - Ok(crate::messages_source::read_client_state::( - &self.0.target_client, - None, - ) - .await? - .best_finalized_peer_at_best_self - .ok_or(SubstrateError::BridgePalletIsNotInitialized)?) - } - - async fn best_finalized_para_block_at_source( - &self, - at_relay_block: HeaderIdOf, - ) -> Result>, SubstrateError> { - self.1.on_chain_para_head_id(at_relay_block).await - } -} - -/// Given request to prove `required_parachain_header`, select actual headers that need to be -/// proved. -async fn select_headers_to_prove( - env: impl SelectHeadersToProveEnvironment, - required_parachain_header: PBN, -) -> Result<(bool, HeaderId, HeaderId), SubstrateError> -where - RBH: Copy, - RBN: BlockNumberBase, - PBH: Copy, - PBN: BlockNumberBase, -{ - // parachains proof also requires relay header proof. Let's first select relay block - // number that we'll be dealing with - let best_finalized_relay_block_at_source = env.best_finalized_relay_block_at_source().await?; - let best_finalized_relay_block_at_target = env.best_finalized_relay_block_at_target().await?; - - // if we can't prove `required_header` even using `best_finalized_relay_block_at_source`, we - // can't do anything here - // (this shall not actually happen, given current code, because we only require finalized - // headers) - let best_possible_parachain_block = env - .best_finalized_para_block_at_source(best_finalized_relay_block_at_source) - .await? 
- .filter(|best_possible_parachain_block| { - best_possible_parachain_block.number() >= required_parachain_header - }) - .ok_or(SubstrateError::MissingRequiredParachainHead( - env.parachain_id(), - required_parachain_header.unique_saturated_into(), - ))?; - - // we don't require source node to be archive, so we can't craft storage proofs using - // ancient headers. So if the `best_finalized_relay_block_at_target` is too ancient, we - // can't craft storage proofs using it - let may_use_state_at_best_finalized_relay_block_at_target = !is_ancient_block( - best_finalized_relay_block_at_target.number(), - best_finalized_relay_block_at_source.number(), - ); - - // now let's check if `required_header` may be proved using - // `best_finalized_relay_block_at_target` - let selection = if may_use_state_at_best_finalized_relay_block_at_target { - env.best_finalized_para_block_at_source(best_finalized_relay_block_at_target) - .await? - .filter(|best_finalized_para_block_at_target| { - best_finalized_para_block_at_target.number() >= required_parachain_header - }) - .map(|best_finalized_para_block_at_target| { - (false, best_finalized_relay_block_at_target, best_finalized_para_block_at_target) - }) - } else { - None - }; - - Ok(selection.unwrap_or(( - true, - best_finalized_relay_block_at_source, - best_possible_parachain_block, - ))) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn relay_waits_for_relay_header_to_be_delivered() { - assert_eq!( - select_headers_to_relay( - &RelayData { - required_para_header: 90, - para_header_at_target: Some(50), - para_header_at_source: Some(HeaderId(110, 110)), - relay_header_at_source: 800, - relay_header_at_target: Some(700), - para_header_at_relay_header_at_target: Some(HeaderId(100, 100)), - }, - RelayState::RelayingRelayHeader(750), - ), - RelayState::RelayingRelayHeader(750), - ); - } - - #[test] - fn relay_starts_relaying_requested_para_header_after_relay_header_is_delivered() { - assert_eq!( - select_headers_to_relay( 
- &RelayData { - required_para_header: 90, - para_header_at_target: Some(50), - para_header_at_source: Some(HeaderId(110, 110)), - relay_header_at_source: 800, - relay_header_at_target: Some(750), - para_header_at_relay_header_at_target: Some(HeaderId(100, 100)), - }, - RelayState::RelayingRelayHeader(750), - ), - RelayState::RelayingParaHeader(HeaderId(100, 100)), - ); - } - - #[test] - fn relay_selects_better_para_header_after_better_relay_header_is_delivered() { - assert_eq!( - select_headers_to_relay( - &RelayData { - required_para_header: 90, - para_header_at_target: Some(50), - para_header_at_source: Some(HeaderId(110, 110)), - relay_header_at_source: 800, - relay_header_at_target: Some(780), - para_header_at_relay_header_at_target: Some(HeaderId(105, 105)), - }, - RelayState::RelayingRelayHeader(750), - ), - RelayState::RelayingParaHeader(HeaderId(105, 105)), - ); - } - #[test] - fn relay_waits_for_para_header_to_be_delivered() { - assert_eq!( - select_headers_to_relay( - &RelayData { - required_para_header: 90, - para_header_at_target: Some(50), - para_header_at_source: Some(HeaderId(110, 110)), - relay_header_at_source: 800, - relay_header_at_target: Some(780), - para_header_at_relay_header_at_target: Some(HeaderId(105, 105)), - }, - RelayState::RelayingParaHeader(HeaderId(105, 105)), - ), - RelayState::RelayingParaHeader(HeaderId(105, 105)), - ); - } - - #[test] - fn relay_stays_idle_if_required_para_header_is_already_delivered() { - assert_eq!( - select_headers_to_relay( - &RelayData { - required_para_header: 90, - para_header_at_target: Some(105), - para_header_at_source: Some(HeaderId(110, 110)), - relay_header_at_source: 800, - relay_header_at_target: Some(780), - para_header_at_relay_header_at_target: Some(HeaderId(105, 105)), - }, - RelayState::Idle, - ), - RelayState::Idle, - ); - } - - #[test] - fn relay_waits_for_required_para_header_to_appear_at_source_1() { - assert_eq!( - select_headers_to_relay( - &RelayData { - required_para_header: 120, - 
para_header_at_target: Some(105), - para_header_at_source: None, - relay_header_at_source: 800, - relay_header_at_target: Some(780), - para_header_at_relay_header_at_target: Some(HeaderId(105, 105)), - }, - RelayState::Idle, - ), - RelayState::Idle, - ); - } - - #[test] - fn relay_waits_for_required_para_header_to_appear_at_source_2() { - assert_eq!( - select_headers_to_relay( - &RelayData { - required_para_header: 120, - para_header_at_target: Some(105), - para_header_at_source: Some(HeaderId(110, 110)), - relay_header_at_source: 800, - relay_header_at_target: Some(780), - para_header_at_relay_header_at_target: Some(HeaderId(105, 105)), - }, - RelayState::Idle, - ), - RelayState::Idle, - ); - } - - #[test] - fn relay_starts_relaying_relay_header_when_new_para_header_is_requested() { - assert_eq!( - select_headers_to_relay( - &RelayData { - required_para_header: 120, - para_header_at_target: Some(105), - para_header_at_source: Some(HeaderId(125, 125)), - relay_header_at_source: 800, - relay_header_at_target: Some(780), - para_header_at_relay_header_at_target: Some(HeaderId(105, 105)), - }, - RelayState::Idle, - ), - RelayState::RelayingRelayHeader(800), - ); - } - - #[test] - fn relay_starts_relaying_para_header_when_new_para_header_is_requested() { - assert_eq!( - select_headers_to_relay( - &RelayData { - required_para_header: 120, - para_header_at_target: Some(105), - para_header_at_source: Some(HeaderId(125, 125)), - relay_header_at_source: 800, - relay_header_at_target: Some(800), - para_header_at_relay_header_at_target: Some(HeaderId(125, 125)), - }, - RelayState::Idle, - ), - RelayState::RelayingParaHeader(HeaderId(125, 125)), - ); - } - - #[test] - fn relay_goes_idle_when_parachain_is_deregistered() { - assert_eq!( - select_headers_to_relay::( - &RelayData { - required_para_header: 120, - para_header_at_target: Some(105), - para_header_at_source: None, - relay_header_at_source: 800, - relay_header_at_target: Some(800), - 
para_header_at_relay_header_at_target: None, - }, - RelayState::RelayingRelayHeader(800), - ), - RelayState::Idle, - ); - } - - #[test] - fn relay_starts_relaying_first_parachain_header() { - assert_eq!( - select_headers_to_relay::( - &RelayData { - required_para_header: 0, - para_header_at_target: None, - para_header_at_source: Some(HeaderId(125, 125)), - relay_header_at_source: 800, - relay_header_at_target: Some(800), - para_header_at_relay_header_at_target: Some(HeaderId(125, 125)), - }, - RelayState::Idle, - ), - RelayState::RelayingParaHeader(HeaderId(125, 125)), - ); - } - - #[test] - fn relay_starts_relaying_relay_header_to_relay_first_parachain_header() { - assert_eq!( - select_headers_to_relay::( - &RelayData { - required_para_header: 0, - para_header_at_target: None, - para_header_at_source: Some(HeaderId(125, 125)), - relay_header_at_source: 800, - relay_header_at_target: Some(700), - para_header_at_relay_header_at_target: Some(HeaderId(125, 125)), - }, - RelayState::Idle, - ), - RelayState::RelayingRelayHeader(800), - ); - } - - // tuple is: - // - // - best_finalized_relay_block_at_source - // - best_finalized_relay_block_at_target - // - best_finalized_para_block_at_source at best_finalized_relay_block_at_source - // - best_finalized_para_block_at_source at best_finalized_relay_block_at_target - #[async_trait] - impl SelectHeadersToProveEnvironment for (u32, u32, u32, u32) { - fn parachain_id(&self) -> ParaId { - ParaId(0) - } - - async fn best_finalized_relay_block_at_source( - &self, - ) -> Result, SubstrateError> { - Ok(HeaderId(self.0, self.0)) - } - - async fn best_finalized_relay_block_at_target( - &self, - ) -> Result, SubstrateError> { - Ok(HeaderId(self.1, self.1)) - } - - async fn best_finalized_para_block_at_source( - &self, - at_relay_block: HeaderId, - ) -> Result>, SubstrateError> { - if at_relay_block.0 == self.0 { - Ok(Some(HeaderId(self.2, self.2))) - } else if at_relay_block.0 == self.1 { - Ok(Some(HeaderId(self.3, self.3))) - } 
else { - Ok(None) - } - } - } - - #[async_std::test] - async fn select_headers_to_prove_returns_err_if_required_para_block_is_missing_at_source() { - assert!(matches!( - select_headers_to_prove((20_u32, 10_u32, 200_u32, 100_u32), 300_u32,).await, - Err(SubstrateError::MissingRequiredParachainHead(ParaId(0), 300_u64)), - )); - } - - #[async_std::test] - async fn select_headers_to_prove_fails_to_use_existing_ancient_relay_block() { - assert_eq!( - select_headers_to_prove((220_u32, 10_u32, 200_u32, 100_u32), 100_u32,) - .await - .map_err(drop), - Ok((true, HeaderId(220, 220), HeaderId(200, 200))), - ); - } - - #[async_std::test] - async fn select_headers_to_prove_is_able_to_use_existing_recent_relay_block() { - assert_eq!( - select_headers_to_prove((40_u32, 10_u32, 200_u32, 100_u32), 100_u32,) - .await - .map_err(drop), - Ok((false, HeaderId(10, 10), HeaderId(100, 100))), - ); - } - - #[async_std::test] - async fn select_headers_to_prove_uses_new_relay_block() { - assert_eq!( - select_headers_to_prove((20_u32, 10_u32, 200_u32, 100_u32), 200_u32,) - .await - .map_err(drop), - Ok((true, HeaderId(20, 20), HeaderId(200, 200))), - ); - } -} diff --git a/relays/lib-substrate-relay/src/parachains/mod.rs b/relays/lib-substrate-relay/src/parachains/mod.rs deleted file mode 100644 index 722f9b61f..000000000 --- a/relays/lib-substrate-relay/src/parachains/mod.rs +++ /dev/null @@ -1,108 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Types and functions intended to ease adding of new Substrate -> Substrate -//! parachain finality proofs synchronization pipelines. - -use async_trait::async_trait; -use bp_polkadot_core::parachains::{ParaHash, ParaHeadsProof, ParaId}; -use pallet_bridge_parachains::{ - Call as BridgeParachainsCall, Config as BridgeParachainsConfig, RelayBlockHash, - RelayBlockHasher, RelayBlockNumber, -}; -use parachains_relay::ParachainsPipeline; -use relay_substrate_client::{ - CallOf, Chain, ChainWithTransactions, HeaderIdOf, Parachain, RelayChain, -}; -use std::{fmt::Debug, marker::PhantomData}; - -pub mod source; -pub mod target; - -/// Substrate -> Substrate parachain finality proofs synchronization pipeline. -/// -/// This is currently restricted to the single parachain, because it is how it -/// will be used (at least) initially. -#[async_trait] -pub trait SubstrateParachainsPipeline: 'static + Clone + Debug + Send + Sync { - /// Headers of this parachain are submitted to the `Self::TargetChain`. - type SourceParachain: Parachain; - /// Relay chain that is storing headers of `Self::SourceParachain`. - type SourceRelayChain: RelayChain; - /// Target chain where `Self::SourceParachain` headers are submitted. - type TargetChain: ChainWithTransactions; - - /// How submit parachains heads call is built? - type SubmitParachainHeadsCallBuilder: SubmitParachainHeadsCallBuilder; -} - -/// Adapter that allows all `SubstrateParachainsPipeline` to act as `ParachainsPipeline`. -#[derive(Clone, Debug)] -pub struct ParachainsPipelineAdapter { - _phantom: PhantomData

, -} - -impl ParachainsPipeline for ParachainsPipelineAdapter

{ - type SourceParachain = P::SourceParachain; - type SourceRelayChain = P::SourceRelayChain; - type TargetChain = P::TargetChain; -} - -/// Different ways of building `submit_parachain_heads` calls. -pub trait SubmitParachainHeadsCallBuilder: - 'static + Send + Sync -{ - /// Given parachains and their heads proof, build call of `submit_parachain_heads` - /// function of bridge parachains module at the target chain. - fn build_submit_parachain_heads_call( - at_relay_block: HeaderIdOf, - parachains: Vec<(ParaId, ParaHash)>, - parachain_heads_proof: ParaHeadsProof, - ) -> CallOf; -} - -/// Building `submit_parachain_heads` call when you have direct access to the target -/// chain runtime. -pub struct DirectSubmitParachainHeadsCallBuilder { - _phantom: PhantomData<(P, R, I)>, -} - -impl SubmitParachainHeadsCallBuilder

for DirectSubmitParachainHeadsCallBuilder -where - P: SubstrateParachainsPipeline, - P::SourceRelayChain: Chain, - R: BridgeParachainsConfig + Send + Sync, - I: 'static + Send + Sync, - R::BridgedChain: bp_runtime::Chain< - BlockNumber = RelayBlockNumber, - Hash = RelayBlockHash, - Hasher = RelayBlockHasher, - >, - CallOf: From>, -{ - fn build_submit_parachain_heads_call( - at_relay_block: HeaderIdOf, - parachains: Vec<(ParaId, ParaHash)>, - parachain_heads_proof: ParaHeadsProof, - ) -> CallOf { - BridgeParachainsCall::::submit_parachain_heads { - at_relay_block: (at_relay_block.0, at_relay_block.1), - parachains, - parachain_heads_proof, - } - .into() - } -} diff --git a/relays/lib-substrate-relay/src/parachains/source.rs b/relays/lib-substrate-relay/src/parachains/source.rs deleted file mode 100644 index 4cc512b9d..000000000 --- a/relays/lib-substrate-relay/src/parachains/source.rs +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Parachain heads source. 
- -use crate::parachains::{ParachainsPipelineAdapter, SubstrateParachainsPipeline}; - -use async_std::sync::{Arc, Mutex}; -use async_trait::async_trait; -use bp_parachains::parachain_head_storage_key_at_source; -use bp_polkadot_core::parachains::{ParaHash, ParaHead, ParaHeadsProof, ParaId}; -use bp_runtime::HeaderIdProvider; -use codec::Decode; -use parachains_relay::parachains_loop::{AvailableHeader, SourceClient}; -use relay_substrate_client::{ - is_ancient_block, Chain, Client, Error as SubstrateError, HeaderIdOf, HeaderOf, ParachainBase, - RelayChain, -}; -use relay_utils::relay_loop::Client as RelayClient; - -/// Shared updatable reference to the maximal parachain header id that we want to sync from the -/// source. -pub type RequiredHeaderIdRef = Arc>>>; - -/// Substrate client as parachain heads source. -#[derive(Clone)] -pub struct ParachainsSource { - client: Client, - max_head_id: RequiredHeaderIdRef, -} - -impl ParachainsSource

{ - /// Creates new parachains source client. - pub fn new( - client: Client, - max_head_id: RequiredHeaderIdRef, - ) -> Self { - ParachainsSource { client, max_head_id } - } - - /// Returns reference to the underlying RPC client. - pub fn client(&self) -> &Client { - &self.client - } - - /// Return decoded head of given parachain. - pub async fn on_chain_para_head_id( - &self, - at_block: HeaderIdOf, - ) -> Result>, SubstrateError> { - let para_id = ParaId(P::SourceParachain::PARACHAIN_ID); - let storage_key = - parachain_head_storage_key_at_source(P::SourceRelayChain::PARAS_PALLET_NAME, para_id); - let para_head = self.client.raw_storage_value(storage_key, Some(at_block.1)).await?; - let para_head = para_head.map(|h| ParaHead::decode(&mut &h.0[..])).transpose()?; - let para_head = match para_head { - Some(para_head) => para_head, - None => return Ok(None), - }; - let para_head: HeaderOf = Decode::decode(&mut ¶_head.0[..])?; - Ok(Some(para_head.id())) - } -} - -#[async_trait] -impl RelayClient for ParachainsSource

{ - type Error = SubstrateError; - - async fn reconnect(&mut self) -> Result<(), SubstrateError> { - self.client.reconnect().await - } -} - -#[async_trait] -impl SourceClient> - for ParachainsSource

-where - P::SourceParachain: Chain, -{ - async fn ensure_synced(&self) -> Result { - match self.client.ensure_synced().await { - Ok(_) => Ok(true), - Err(SubstrateError::ClientNotSynced(_)) => Ok(false), - Err(e) => Err(e), - } - } - - async fn parachain_head( - &self, - at_block: HeaderIdOf, - ) -> Result>, Self::Error> { - // if requested relay header is ancient, then we don't even want to try to read the - // parachain head - we simply return `Unavailable` - let best_block_number = self.client.best_finalized_header_number().await?; - if is_ancient_block(at_block.number(), best_block_number) { - log::trace!( - target: "bridge", - "{} block {:?} is ancient. Cannot prove the {} header there", - P::SourceRelayChain::NAME, - at_block, - P::SourceParachain::NAME, - ); - return Ok(AvailableHeader::Unavailable) - } - - // else - try to read head from the source client - let mut para_head_id = AvailableHeader::Missing; - if let Some(on_chain_para_head_id) = self.on_chain_para_head_id(at_block).await? { - // Never return head that is larger than requested. This way we'll never sync - // headers past `max_header_id`. - para_head_id = match *self.max_head_id.lock().await { - AvailableHeader::Unavailable => AvailableHeader::Unavailable, - AvailableHeader::Missing => { - // `max_header_id` is not set. There is no limit. - AvailableHeader::Available(on_chain_para_head_id) - }, - AvailableHeader::Available(max_head_id) if on_chain_para_head_id >= max_head_id => { - // We report at most `max_header_id`. 
- AvailableHeader::Available(std::cmp::min(on_chain_para_head_id, max_head_id)) - }, - AvailableHeader::Available(_) => { - // the `max_head_id` is not yet available at the source chain => wait and avoid - // syncing extra headers - AvailableHeader::Unavailable - }, - } - } - - Ok(para_head_id) - } - - async fn prove_parachain_head( - &self, - at_block: HeaderIdOf, - ) -> Result<(ParaHeadsProof, ParaHash), Self::Error> { - let parachain = ParaId(P::SourceParachain::PARACHAIN_ID); - let storage_key = - parachain_head_storage_key_at_source(P::SourceRelayChain::PARAS_PALLET_NAME, parachain); - let parachain_heads_proof = self - .client - .prove_storage(vec![storage_key.clone()], at_block.1) - .await? - .into_iter_nodes() - .collect(); - - // why we're reading parachain head here once again (it has already been read at the - // `parachain_head`)? that's because `parachain_head` sometimes returns obsolete parachain - // head and loop sometimes asks to prove this obsolete head and gets other (actual) head - // instead - // - // => since we want to provide proper hashes in our `submit_parachain_heads` call, we're - // rereading actual value here - let parachain_head = self - .client - .raw_storage_value(storage_key, Some(at_block.1)) - .await? - .map(|h| ParaHead::decode(&mut &h.0[..])) - .transpose()? - .ok_or_else(|| { - SubstrateError::Custom(format!( - "Failed to read expected parachain {parachain:?} head at {at_block:?}" - )) - })?; - let parachain_head_hash = parachain_head.hash(); - - Ok((ParaHeadsProof { storage_proof: parachain_heads_proof }, parachain_head_hash)) - } -} diff --git a/relays/lib-substrate-relay/src/parachains/target.rs b/relays/lib-substrate-relay/src/parachains/target.rs deleted file mode 100644 index 6df7bc0a7..000000000 --- a/relays/lib-substrate-relay/src/parachains/target.rs +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Parachain heads target. - -use crate::{ - parachains::{ - ParachainsPipelineAdapter, SubmitParachainHeadsCallBuilder, SubstrateParachainsPipeline, - }, - TransactionParams, -}; - -use async_trait::async_trait; -use bp_polkadot_core::parachains::{ParaHash, ParaHeadsProof, ParaId}; -use bp_runtime::HeaderIdProvider; -use codec::Decode; -use parachains_relay::parachains_loop::TargetClient; -use relay_substrate_client::{ - AccountIdOf, AccountKeyPairOf, Chain, Client, Error as SubstrateError, HeaderIdOf, - ParachainBase, TransactionEra, TransactionTracker, UnsignedTransaction, -}; -use relay_utils::relay_loop::Client as RelayClient; -use sp_core::{Bytes, Pair}; - -/// Substrate client as parachain heads source. -pub struct ParachainsTarget { - client: Client, - transaction_params: TransactionParams>, -} - -impl ParachainsTarget

{ - /// Creates new parachains target client. - pub fn new( - client: Client, - transaction_params: TransactionParams>, - ) -> Self { - ParachainsTarget { client, transaction_params } - } - - /// Returns reference to the underlying RPC client. - pub fn client(&self) -> &Client { - &self.client - } -} - -impl Clone for ParachainsTarget

{ - fn clone(&self) -> Self { - ParachainsTarget { - client: self.client.clone(), - transaction_params: self.transaction_params.clone(), - } - } -} - -#[async_trait] -impl RelayClient for ParachainsTarget

{ - type Error = SubstrateError; - - async fn reconnect(&mut self) -> Result<(), SubstrateError> { - self.client.reconnect().await - } -} - -#[async_trait] -impl

TargetClient> for ParachainsTarget

-where - P: SubstrateParachainsPipeline, - AccountIdOf: From< as Pair>::Public>, -{ - type TransactionTracker = TransactionTracker>; - - async fn best_block(&self) -> Result, Self::Error> { - let best_header = self.client.best_header().await?; - let best_id = best_header.id(); - - Ok(best_id) - } - - async fn best_finalized_source_relay_chain_block( - &self, - at_block: &HeaderIdOf, - ) -> Result, Self::Error> { - self.client - .typed_state_call::<_, Option>>( - P::SourceRelayChain::BEST_FINALIZED_HEADER_ID_METHOD.into(), - (), - Some(at_block.1), - ) - .await? - .map(Ok) - .unwrap_or(Err(SubstrateError::BridgePalletIsNotInitialized)) - } - - async fn parachain_head( - &self, - at_block: HeaderIdOf, - ) -> Result>, Self::Error> { - let encoded_best_finalized_source_para_block = self - .client - .state_call( - P::SourceParachain::BEST_FINALIZED_HEADER_ID_METHOD.into(), - Bytes(Vec::new()), - Some(at_block.1), - ) - .await?; - - Ok(Option::>::decode( - &mut &encoded_best_finalized_source_para_block.0[..], - ) - .map_err(SubstrateError::ResponseParseFailed)?) 
- } - - async fn submit_parachain_head_proof( - &self, - at_relay_block: HeaderIdOf, - updated_head_hash: ParaHash, - proof: ParaHeadsProof, - ) -> Result { - let transaction_params = self.transaction_params.clone(); - let call = P::SubmitParachainHeadsCallBuilder::build_submit_parachain_heads_call( - at_relay_block, - vec![(ParaId(P::SourceParachain::PARACHAIN_ID), updated_head_hash)], - proof, - ); - self.client - .submit_and_watch_signed_extrinsic( - &transaction_params.signer, - move |best_block_id, transaction_nonce| { - Ok(UnsignedTransaction::new(call.into(), transaction_nonce) - .era(TransactionEra::new(best_block_id, transaction_params.mortality))) - }, - ) - .await - } -} diff --git a/relays/messages/Cargo.toml b/relays/messages/Cargo.toml deleted file mode 100644 index 8a411e508..000000000 --- a/relays/messages/Cargo.toml +++ /dev/null @@ -1,29 +0,0 @@ -[package] -name = "messages-relay" -version = "0.1.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true -publish = false - -[lints] -workspace = true - -[dependencies] -async-std = { version = "1.9.0", features = ["attributes"] } -async-trait = "0.1.79" -env_logger = "0.11" -futures = "0.3.30" -hex = "0.4" -log = { workspace = true } -num-traits = "0.2" -parking_lot = "0.12.1" - -# Bridge Dependencies - -bp-messages = { path = "../../primitives/messages" } -finality-relay = { path = "../finality" } -relay-utils = { path = "../utils" } - -sp-arithmetic = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } diff --git a/relays/messages/src/lib.rs b/relays/messages/src/lib.rs deleted file mode 100644 index 9c62cee5e..000000000 --- a/relays/messages/src/lib.rs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Relaying [`pallet-bridge-messages`](../pallet_bridge_messages/index.html) application specific -//! data. Message lane allows sending arbitrary messages between bridged chains. This -//! module provides entrypoint that starts reading messages from given message lane -//! of source chain and submits proof-of-message-at-source-chain transactions to the -//! target chain. Additionally, proofs-of-messages-delivery are sent back from the -//! target chain to the source chain. - -// required for futures::select! -#![recursion_limit = "1024"] -#![warn(missing_docs)] - -mod metrics; - -pub mod message_lane; -pub mod message_lane_loop; - -mod message_race_delivery; -mod message_race_limits; -mod message_race_loop; -mod message_race_receiving; -mod message_race_strategy; diff --git a/relays/messages/src/message_lane.rs b/relays/messages/src/message_lane.rs deleted file mode 100644 index 5c9728ad9..000000000 --- a/relays/messages/src/message_lane.rs +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. 
- -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! One-way message lane types. Within single one-way lane we have three 'races' where we try to: -//! -//! 1) relay new messages from source to target node; -//! 2) relay proof-of-delivery from target to source node. - -use num_traits::{SaturatingAdd, Zero}; -use relay_utils::{BlockNumberBase, HeaderId}; -use sp_arithmetic::traits::AtLeast32BitUnsigned; -use std::{fmt::Debug, ops::Sub}; - -/// One-way message lane. -pub trait MessageLane: 'static + Clone + Send + Sync { - /// Name of the messages source. - const SOURCE_NAME: &'static str; - /// Name of the messages target. - const TARGET_NAME: &'static str; - - /// Messages proof. - type MessagesProof: Clone + Debug + Send + Sync; - /// Messages receiving proof. - type MessagesReceivingProof: Clone + Debug + Send + Sync; - - /// The type of the source chain token balance, that is used to: - /// - /// 1) pay transaction fees; - /// 2) pay message delivery and dispatch fee; - /// 3) pay relayer rewards. - type SourceChainBalance: AtLeast32BitUnsigned - + Clone - + Copy - + Debug - + PartialOrd - + Sub - + SaturatingAdd - + Zero - + Send - + Sync; - /// Number of the source header. - type SourceHeaderNumber: BlockNumberBase; - /// Hash of the source header. - type SourceHeaderHash: Clone + Debug + Default + PartialEq + Send + Sync; - - /// Number of the target header. 
- type TargetHeaderNumber: BlockNumberBase; - /// Hash of the target header. - type TargetHeaderHash: Clone + Debug + Default + PartialEq + Send + Sync; -} - -/// Source header id within given one-way message lane. -pub type SourceHeaderIdOf

= - HeaderId<

::SourceHeaderHash,

::SourceHeaderNumber>; - -/// Target header id within given one-way message lane. -pub type TargetHeaderIdOf

= - HeaderId<

::TargetHeaderHash,

::TargetHeaderNumber>; diff --git a/relays/messages/src/message_lane_loop.rs b/relays/messages/src/message_lane_loop.rs deleted file mode 100644 index b681d86d2..000000000 --- a/relays/messages/src/message_lane_loop.rs +++ /dev/null @@ -1,1277 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Message delivery loop. Designed to work with messages pallet. -//! -//! Single relay instance delivers messages of single lane in single direction. -//! To serve two-way lane, you would need two instances of relay. -//! To serve N two-way lanes, you would need N*2 instances of relay. -//! -//! Please keep in mind that the best header in this file is actually best -//! finalized header. I.e. when talking about headers in lane context, we -//! only care about finalized headers. 
- -use std::{collections::BTreeMap, fmt::Debug, future::Future, ops::RangeInclusive, time::Duration}; - -use async_trait::async_trait; -use futures::{channel::mpsc::unbounded, future::FutureExt, stream::StreamExt}; - -use bp_messages::{LaneId, MessageNonce, UnrewardedRelayersState, Weight}; -use relay_utils::{ - interval, metrics::MetricsParams, process_future_result, relay_loop::Client as RelayClient, - retry_backoff, FailedClient, TransactionTracker, -}; - -use crate::{ - message_lane::{MessageLane, SourceHeaderIdOf, TargetHeaderIdOf}, - message_race_delivery::run as run_message_delivery_race, - message_race_receiving::run as run_message_receiving_race, - metrics::MessageLaneLoopMetrics, -}; - -/// Message lane loop configuration params. -#[derive(Debug, Clone)] -pub struct Params { - /// Id of lane this loop is servicing. - pub lane: LaneId, - /// Interval at which we ask target node about its updates. - pub source_tick: Duration, - /// Interval at which we ask target node about its updates. - pub target_tick: Duration, - /// Delay between moments when connection error happens and our reconnect attempt. - pub reconnect_delay: Duration, - /// Message delivery race parameters. - pub delivery_params: MessageDeliveryParams, -} - -/// Message delivery race parameters. -#[derive(Debug, Clone)] -pub struct MessageDeliveryParams { - /// Maximal number of unconfirmed relayer entries at the inbound lane. If there's that number - /// of entries in the `InboundLaneData::relayers` set, all new messages will be rejected until - /// reward payment will be proved (by including outbound lane state to the message delivery - /// transaction). - pub max_unrewarded_relayer_entries_at_target: MessageNonce, - /// Message delivery race will stop delivering messages if there are - /// `max_unconfirmed_nonces_at_target` unconfirmed nonces on the target node. The race would - /// continue once they're confirmed by the receiving race. 
- pub max_unconfirmed_nonces_at_target: MessageNonce, - /// Maximal number of relayed messages in single delivery transaction. - pub max_messages_in_single_batch: MessageNonce, - /// Maximal cumulative dispatch weight of relayed messages in single delivery transaction. - pub max_messages_weight_in_single_batch: Weight, - /// Maximal cumulative size of relayed messages in single delivery transaction. - pub max_messages_size_in_single_batch: u32, -} - -/// Message details. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub struct MessageDetails { - /// Message dispatch weight. - pub dispatch_weight: Weight, - /// Message size (number of bytes in encoded payload). - pub size: u32, - /// The relayer reward paid in the source chain tokens. - pub reward: SourceChainBalance, -} - -/// Messages details map. -pub type MessageDetailsMap = - BTreeMap>; - -/// Message delivery race proof parameters. -#[derive(Debug, PartialEq, Eq)] -pub struct MessageProofParameters { - /// Include outbound lane state proof? - pub outbound_state_proof_required: bool, - /// Cumulative dispatch weight of messages that we're building proof for. - pub dispatch_weight: Weight, -} - -/// Artifacts of submitting nonces proof. -pub struct NoncesSubmitArtifacts { - /// Submitted nonces range. - pub nonces: RangeInclusive, - /// Submitted transaction tracker. - pub tx_tracker: T, -} - -/// Batch transaction that already submit some headers and needs to be extended with -/// messages/delivery proof before sending. -pub trait BatchTransaction: Debug + Send + Sync { - /// Header that was required in the original call and which is bundled within this - /// batch transaction. - fn required_header_id(&self) -> HeaderId; -} - -/// Source client trait. -#[async_trait] -pub trait SourceClient: RelayClient { - /// Type of batch transaction that submits finality and message receiving proof. - type BatchTransaction: BatchTransaction> + Clone; - /// Transaction tracker to track submitted transactions. 
- type TransactionTracker: TransactionTracker>; - - /// Returns state of the client. - async fn state(&self) -> Result, Self::Error>; - - /// Get nonce of instance of latest generated message. - async fn latest_generated_nonce( - &self, - id: SourceHeaderIdOf

, - ) -> Result<(SourceHeaderIdOf

, MessageNonce), Self::Error>; - - /// Get nonce of the latest message, which receiving has been confirmed by the target chain. - async fn latest_confirmed_received_nonce( - &self, - id: SourceHeaderIdOf

, - ) -> Result<(SourceHeaderIdOf

, MessageNonce), Self::Error>; - - /// Returns mapping of message nonces, generated on this client, to their weights. - /// - /// Some messages may be missing from returned map, if corresponding messages were pruned at - /// the source chain. - async fn generated_message_details( - &self, - id: SourceHeaderIdOf

, - nonces: RangeInclusive, - ) -> Result, Self::Error>; - - /// Prove messages in inclusive range [begin; end]. - async fn prove_messages( - &self, - id: SourceHeaderIdOf

, - nonces: RangeInclusive, - proof_parameters: MessageProofParameters, - ) -> Result<(SourceHeaderIdOf

, RangeInclusive, P::MessagesProof), Self::Error>; - - /// Submit messages receiving proof. - async fn submit_messages_receiving_proof( - &self, - maybe_batch_tx: Option, - generated_at_block: TargetHeaderIdOf

, - proof: P::MessagesReceivingProof, - ) -> Result; - - /// We need given finalized target header on source to continue synchronization. - /// - /// We assume that the absence of header `id` has already been checked by caller. - /// - /// The client may return `Some(_)`, which means that nothing has happened yet and - /// the caller must generate and append message receiving proof to the batch transaction - /// to actually send it (along with required header) to the node. - /// - /// If function has returned `None`, it means that the caller now must wait for the - /// appearance of the target header `id` at the source client. - async fn require_target_header_on_source( - &self, - id: TargetHeaderIdOf

, - ) -> Result, Self::Error>; -} - -/// Target client trait. -#[async_trait] -pub trait TargetClient: RelayClient { - /// Type of batch transaction that submits finality and messages proof. - type BatchTransaction: BatchTransaction> + Clone; - /// Transaction tracker to track submitted transactions. - type TransactionTracker: TransactionTracker>; - - /// Returns state of the client. - async fn state(&self) -> Result, Self::Error>; - - /// Get nonce of latest received message. - async fn latest_received_nonce( - &self, - id: TargetHeaderIdOf

, - ) -> Result<(TargetHeaderIdOf

, MessageNonce), Self::Error>; - - /// Get nonce of the latest confirmed message. - async fn latest_confirmed_received_nonce( - &self, - id: TargetHeaderIdOf

, - ) -> Result<(TargetHeaderIdOf

, MessageNonce), Self::Error>; - - /// Get state of unrewarded relayers set at the inbound lane. - async fn unrewarded_relayers_state( - &self, - id: TargetHeaderIdOf

, - ) -> Result<(TargetHeaderIdOf

, UnrewardedRelayersState), Self::Error>; - - /// Prove messages receiving at given block. - async fn prove_messages_receiving( - &self, - id: TargetHeaderIdOf

, - ) -> Result<(TargetHeaderIdOf

, P::MessagesReceivingProof), Self::Error>; - - /// Submit messages proof. - async fn submit_messages_proof( - &self, - maybe_batch_tx: Option, - generated_at_header: SourceHeaderIdOf

, - nonces: RangeInclusive, - proof: P::MessagesProof, - ) -> Result, Self::Error>; - - /// We need given finalized source header on target to continue synchronization. - /// - /// The client may return `Some(_)`, which means that nothing has happened yet and - /// the caller must generate and append messages proof to the batch transaction - /// to actually send it (along with required header) to the node. - /// - /// If function has returned `None`, it means that the caller now must wait for the - /// appearance of the source header `id` at the target client. - async fn require_source_header_on_target( - &self, - id: SourceHeaderIdOf

, - ) -> Result, Self::Error>; -} - -/// State of the client. -#[derive(Clone, Debug, Default, PartialEq, Eq)] -pub struct ClientState { - /// The best header id of this chain. - pub best_self: SelfHeaderId, - /// Best finalized header id of this chain. - pub best_finalized_self: SelfHeaderId, - /// Best finalized header id of the peer chain read at the best block of this chain (at - /// `best_finalized_self`). - /// - /// It may be `None` e,g. if peer is a parachain and we haven't yet relayed any parachain - /// heads. - pub best_finalized_peer_at_best_self: Option, - /// Header id of the peer chain with the number, matching the - /// `best_finalized_peer_at_best_self`. - pub actual_best_finalized_peer_at_best_self: Option, -} - -/// State of source client in one-way message lane. -pub type SourceClientState

= ClientState, TargetHeaderIdOf

>; - -/// State of target client in one-way message lane. -pub type TargetClientState

= ClientState, SourceHeaderIdOf

>; - -/// Both clients state. -#[derive(Debug, Default)] -pub struct ClientsState { - /// Source client state. - pub source: Option>, - /// Target client state. - pub target: Option>, -} - -/// Return prefix that will be used by default to expose Prometheus metrics of the finality proofs -/// sync loop. -pub fn metrics_prefix(lane: &LaneId) -> String { - format!("{}_to_{}_MessageLane_{}", P::SOURCE_NAME, P::TARGET_NAME, hex::encode(lane)) -} - -/// Run message lane service loop. -pub async fn run( - params: Params, - source_client: impl SourceClient

, - target_client: impl TargetClient

, - metrics_params: MetricsParams, - exit_signal: impl Future + Send + 'static, -) -> Result<(), relay_utils::Error> { - let exit_signal = exit_signal.shared(); - relay_utils::relay_loop(source_client, target_client) - .reconnect_delay(params.reconnect_delay) - .with_metrics(metrics_params) - .loop_metric(MessageLaneLoopMetrics::new(Some(&metrics_prefix::

(¶ms.lane)))?)? - .expose() - .await? - .run(metrics_prefix::

(¶ms.lane), move |source_client, target_client, metrics| { - run_until_connection_lost( - params.clone(), - source_client, - target_client, - metrics, - exit_signal.clone(), - ) - }) - .await -} - -/// Run one-way message delivery loop until connection with target or source node is lost, or exit -/// signal is received. -async fn run_until_connection_lost, TC: TargetClient

>( - params: Params, - source_client: SC, - target_client: TC, - metrics_msg: Option, - exit_signal: impl Future, -) -> Result<(), FailedClient> { - let mut source_retry_backoff = retry_backoff(); - let mut source_client_is_online = false; - let mut source_state_required = true; - let source_state = source_client.state().fuse(); - let source_go_offline_future = futures::future::Fuse::terminated(); - let source_tick_stream = interval(params.source_tick).fuse(); - - let mut target_retry_backoff = retry_backoff(); - let mut target_client_is_online = false; - let mut target_state_required = true; - let target_state = target_client.state().fuse(); - let target_go_offline_future = futures::future::Fuse::terminated(); - let target_tick_stream = interval(params.target_tick).fuse(); - - let ( - (delivery_source_state_sender, delivery_source_state_receiver), - (delivery_target_state_sender, delivery_target_state_receiver), - ) = (unbounded(), unbounded()); - let delivery_race_loop = run_message_delivery_race( - source_client.clone(), - delivery_source_state_receiver, - target_client.clone(), - delivery_target_state_receiver, - metrics_msg.clone(), - params.delivery_params, - ) - .fuse(); - - let ( - (receiving_source_state_sender, receiving_source_state_receiver), - (receiving_target_state_sender, receiving_target_state_receiver), - ) = (unbounded(), unbounded()); - let receiving_race_loop = run_message_receiving_race( - source_client.clone(), - receiving_source_state_receiver, - target_client.clone(), - receiving_target_state_receiver, - metrics_msg.clone(), - ) - .fuse(); - - let exit_signal = exit_signal.fuse(); - - futures::pin_mut!( - source_state, - source_go_offline_future, - source_tick_stream, - target_state, - target_go_offline_future, - target_tick_stream, - delivery_race_loop, - receiving_race_loop, - exit_signal - ); - - loop { - futures::select! 
{ - new_source_state = source_state => { - source_state_required = false; - - source_client_is_online = process_future_result( - new_source_state, - &mut source_retry_backoff, - |new_source_state| { - log::debug!( - target: "bridge", - "Received state from {} node: {:?}", - P::SOURCE_NAME, - new_source_state, - ); - let _ = delivery_source_state_sender.unbounded_send(new_source_state.clone()); - let _ = receiving_source_state_sender.unbounded_send(new_source_state.clone()); - - if let Some(metrics_msg) = metrics_msg.as_ref() { - metrics_msg.update_source_state::

(new_source_state); - } - }, - &mut source_go_offline_future, - async_std::task::sleep, - || format!("Error retrieving state from {} node", P::SOURCE_NAME), - ).fail_if_connection_error(FailedClient::Source)?; - }, - _ = source_go_offline_future => { - source_client_is_online = true; - }, - _ = source_tick_stream.next() => { - source_state_required = true; - }, - new_target_state = target_state => { - target_state_required = false; - - target_client_is_online = process_future_result( - new_target_state, - &mut target_retry_backoff, - |new_target_state| { - log::debug!( - target: "bridge", - "Received state from {} node: {:?}", - P::TARGET_NAME, - new_target_state, - ); - let _ = delivery_target_state_sender.unbounded_send(new_target_state.clone()); - let _ = receiving_target_state_sender.unbounded_send(new_target_state.clone()); - - if let Some(metrics_msg) = metrics_msg.as_ref() { - metrics_msg.update_target_state::

(new_target_state); - } - }, - &mut target_go_offline_future, - async_std::task::sleep, - || format!("Error retrieving state from {} node", P::TARGET_NAME), - ).fail_if_connection_error(FailedClient::Target)?; - }, - _ = target_go_offline_future => { - target_client_is_online = true; - }, - _ = target_tick_stream.next() => { - target_state_required = true; - }, - - delivery_error = delivery_race_loop => { - match delivery_error { - Ok(_) => unreachable!("only ends with error; qed"), - Err(err) => return Err(err), - } - }, - receiving_error = receiving_race_loop => { - match receiving_error { - Ok(_) => unreachable!("only ends with error; qed"), - Err(err) => return Err(err), - } - }, - - () = exit_signal => { - return Ok(()); - } - } - - if source_client_is_online && source_state_required { - log::debug!(target: "bridge", "Asking {} node about its state", P::SOURCE_NAME); - source_state.set(source_client.state().fuse()); - source_client_is_online = false; - } - - if target_client_is_online && target_state_required { - log::debug!(target: "bridge", "Asking {} node about its state", P::TARGET_NAME); - target_state.set(target_client.state().fuse()); - target_client_is_online = false; - } - } -} - -#[cfg(test)] -pub(crate) mod tests { - use std::sync::Arc; - - use futures::stream::StreamExt; - use parking_lot::Mutex; - - use relay_utils::{HeaderId, MaybeConnectionError, TrackedTransactionStatus}; - - use super::*; - - pub fn header_id(number: TestSourceHeaderNumber) -> TestSourceHeaderId { - HeaderId(number, number) - } - - pub type TestSourceChainBalance = u64; - pub type TestSourceHeaderId = HeaderId; - pub type TestTargetHeaderId = HeaderId; - - pub type TestMessagesProof = (RangeInclusive, Option); - pub type TestMessagesReceivingProof = MessageNonce; - - pub type TestSourceHeaderNumber = u64; - pub type TestSourceHeaderHash = u64; - - pub type TestTargetHeaderNumber = u64; - pub type TestTargetHeaderHash = u64; - - #[derive(Debug)] - pub struct TestError; - - impl 
MaybeConnectionError for TestError { - fn is_connection_error(&self) -> bool { - true - } - } - - #[derive(Clone)] - pub struct TestMessageLane; - - impl MessageLane for TestMessageLane { - const SOURCE_NAME: &'static str = "TestSource"; - const TARGET_NAME: &'static str = "TestTarget"; - - type MessagesProof = TestMessagesProof; - type MessagesReceivingProof = TestMessagesReceivingProof; - - type SourceChainBalance = TestSourceChainBalance; - type SourceHeaderNumber = TestSourceHeaderNumber; - type SourceHeaderHash = TestSourceHeaderHash; - - type TargetHeaderNumber = TestTargetHeaderNumber; - type TargetHeaderHash = TestTargetHeaderHash; - } - - #[derive(Clone, Debug)] - pub struct TestMessagesBatchTransaction { - required_header_id: TestSourceHeaderId, - } - - #[async_trait] - impl BatchTransaction for TestMessagesBatchTransaction { - fn required_header_id(&self) -> TestSourceHeaderId { - self.required_header_id - } - } - - #[derive(Clone, Debug)] - pub struct TestConfirmationBatchTransaction { - required_header_id: TestTargetHeaderId, - } - - #[async_trait] - impl BatchTransaction for TestConfirmationBatchTransaction { - fn required_header_id(&self) -> TestTargetHeaderId { - self.required_header_id - } - } - - #[derive(Clone, Debug)] - pub struct TestTransactionTracker(TrackedTransactionStatus); - - impl Default for TestTransactionTracker { - fn default() -> TestTransactionTracker { - TestTransactionTracker(TrackedTransactionStatus::Finalized(Default::default())) - } - } - - #[async_trait] - impl TransactionTracker for TestTransactionTracker { - type HeaderId = TestTargetHeaderId; - - async fn wait(self) -> TrackedTransactionStatus { - self.0 - } - } - - #[derive(Debug, Clone)] - pub struct TestClientData { - is_source_fails: bool, - is_source_reconnected: bool, - source_state: SourceClientState, - source_latest_generated_nonce: MessageNonce, - source_latest_confirmed_received_nonce: MessageNonce, - source_tracked_transaction_status: TrackedTransactionStatus, - 
submitted_messages_receiving_proofs: Vec, - is_target_fails: bool, - is_target_reconnected: bool, - target_state: SourceClientState, - target_latest_received_nonce: MessageNonce, - target_latest_confirmed_received_nonce: MessageNonce, - target_tracked_transaction_status: TrackedTransactionStatus, - submitted_messages_proofs: Vec, - target_to_source_batch_transaction: Option, - target_to_source_header_required: Option, - target_to_source_header_requirements: Vec, - source_to_target_batch_transaction: Option, - source_to_target_header_required: Option, - source_to_target_header_requirements: Vec, - } - - impl Default for TestClientData { - fn default() -> TestClientData { - TestClientData { - is_source_fails: false, - is_source_reconnected: false, - source_state: Default::default(), - source_latest_generated_nonce: 0, - source_latest_confirmed_received_nonce: 0, - source_tracked_transaction_status: TrackedTransactionStatus::Finalized(HeaderId( - 0, - Default::default(), - )), - submitted_messages_receiving_proofs: Vec::new(), - is_target_fails: false, - is_target_reconnected: false, - target_state: Default::default(), - target_latest_received_nonce: 0, - target_latest_confirmed_received_nonce: 0, - target_tracked_transaction_status: TrackedTransactionStatus::Finalized(HeaderId( - 0, - Default::default(), - )), - submitted_messages_proofs: Vec::new(), - target_to_source_batch_transaction: None, - target_to_source_header_required: None, - target_to_source_header_requirements: Vec::new(), - source_to_target_batch_transaction: None, - source_to_target_header_required: None, - source_to_target_header_requirements: Vec::new(), - } - } - } - - impl TestClientData { - fn receive_messages( - &mut self, - maybe_batch_tx: Option, - proof: TestMessagesProof, - ) { - self.target_state.best_self = - HeaderId(self.target_state.best_self.0 + 1, self.target_state.best_self.1 + 1); - self.target_state.best_finalized_self = self.target_state.best_self; - 
self.target_latest_received_nonce = *proof.0.end(); - if let Some(maybe_batch_tx) = maybe_batch_tx { - self.target_state.best_finalized_peer_at_best_self = - Some(maybe_batch_tx.required_header_id()); - } - if let Some(target_latest_confirmed_received_nonce) = proof.1 { - self.target_latest_confirmed_received_nonce = - target_latest_confirmed_received_nonce; - } - self.submitted_messages_proofs.push(proof); - } - - fn receive_messages_delivery_proof( - &mut self, - maybe_batch_tx: Option, - proof: TestMessagesReceivingProof, - ) { - self.source_state.best_self = - HeaderId(self.source_state.best_self.0 + 1, self.source_state.best_self.1 + 1); - self.source_state.best_finalized_self = self.source_state.best_self; - if let Some(maybe_batch_tx) = maybe_batch_tx { - self.source_state.best_finalized_peer_at_best_self = - Some(maybe_batch_tx.required_header_id()); - } - self.submitted_messages_receiving_proofs.push(proof); - self.source_latest_confirmed_received_nonce = proof; - } - } - - #[derive(Clone)] - pub struct TestSourceClient { - data: Arc>, - tick: Arc, - post_tick: Arc, - } - - impl Default for TestSourceClient { - fn default() -> Self { - TestSourceClient { - data: Arc::new(Mutex::new(TestClientData::default())), - tick: Arc::new(|_| {}), - post_tick: Arc::new(|_| {}), - } - } - } - - #[async_trait] - impl RelayClient for TestSourceClient { - type Error = TestError; - - async fn reconnect(&mut self) -> Result<(), TestError> { - { - let mut data = self.data.lock(); - (self.tick)(&mut data); - data.is_source_reconnected = true; - (self.post_tick)(&mut data); - } - Ok(()) - } - } - - #[async_trait] - impl SourceClient for TestSourceClient { - type BatchTransaction = TestConfirmationBatchTransaction; - type TransactionTracker = TestTransactionTracker; - - async fn state(&self) -> Result, TestError> { - let mut data = self.data.lock(); - (self.tick)(&mut data); - if data.is_source_fails { - return Err(TestError) - } - (self.post_tick)(&mut data); - 
Ok(data.source_state.clone()) - } - - async fn latest_generated_nonce( - &self, - id: SourceHeaderIdOf, - ) -> Result<(SourceHeaderIdOf, MessageNonce), TestError> { - let mut data = self.data.lock(); - (self.tick)(&mut data); - if data.is_source_fails { - return Err(TestError) - } - (self.post_tick)(&mut data); - Ok((id, data.source_latest_generated_nonce)) - } - - async fn latest_confirmed_received_nonce( - &self, - id: SourceHeaderIdOf, - ) -> Result<(SourceHeaderIdOf, MessageNonce), TestError> { - let mut data = self.data.lock(); - (self.tick)(&mut data); - (self.post_tick)(&mut data); - Ok((id, data.source_latest_confirmed_received_nonce)) - } - - async fn generated_message_details( - &self, - _id: SourceHeaderIdOf, - nonces: RangeInclusive, - ) -> Result, TestError> { - Ok(nonces - .map(|nonce| { - ( - nonce, - MessageDetails { - dispatch_weight: Weight::from_parts(1, 0), - size: 1, - reward: 1, - }, - ) - }) - .collect()) - } - - async fn prove_messages( - &self, - id: SourceHeaderIdOf, - nonces: RangeInclusive, - proof_parameters: MessageProofParameters, - ) -> Result< - (SourceHeaderIdOf, RangeInclusive, TestMessagesProof), - TestError, - > { - let mut data = self.data.lock(); - (self.tick)(&mut data); - (self.post_tick)(&mut data); - Ok(( - id, - nonces.clone(), - ( - nonces, - if proof_parameters.outbound_state_proof_required { - Some(data.source_latest_confirmed_received_nonce) - } else { - None - }, - ), - )) - } - - async fn submit_messages_receiving_proof( - &self, - maybe_batch_tx: Option, - _generated_at_block: TargetHeaderIdOf, - proof: TestMessagesReceivingProof, - ) -> Result { - let mut data = self.data.lock(); - (self.tick)(&mut data); - data.receive_messages_delivery_proof(maybe_batch_tx, proof); - (self.post_tick)(&mut data); - Ok(TestTransactionTracker(data.source_tracked_transaction_status)) - } - - async fn require_target_header_on_source( - &self, - id: TargetHeaderIdOf, - ) -> Result, Self::Error> { - let mut data = self.data.lock(); - 
data.target_to_source_header_required = Some(id); - data.target_to_source_header_requirements.push(id); - (self.tick)(&mut data); - (self.post_tick)(&mut data); - - Ok(data.target_to_source_batch_transaction.take().map(|mut tx| { - tx.required_header_id = id; - tx - })) - } - } - - #[derive(Clone)] - pub struct TestTargetClient { - data: Arc>, - tick: Arc, - post_tick: Arc, - } - - impl Default for TestTargetClient { - fn default() -> Self { - TestTargetClient { - data: Arc::new(Mutex::new(TestClientData::default())), - tick: Arc::new(|_| {}), - post_tick: Arc::new(|_| {}), - } - } - } - - #[async_trait] - impl RelayClient for TestTargetClient { - type Error = TestError; - - async fn reconnect(&mut self) -> Result<(), TestError> { - { - let mut data = self.data.lock(); - (self.tick)(&mut data); - data.is_target_reconnected = true; - (self.post_tick)(&mut data); - } - Ok(()) - } - } - - #[async_trait] - impl TargetClient for TestTargetClient { - type BatchTransaction = TestMessagesBatchTransaction; - type TransactionTracker = TestTransactionTracker; - - async fn state(&self) -> Result, TestError> { - let mut data = self.data.lock(); - (self.tick)(&mut data); - if data.is_target_fails { - return Err(TestError) - } - (self.post_tick)(&mut data); - Ok(data.target_state.clone()) - } - - async fn latest_received_nonce( - &self, - id: TargetHeaderIdOf, - ) -> Result<(TargetHeaderIdOf, MessageNonce), TestError> { - let mut data = self.data.lock(); - (self.tick)(&mut data); - if data.is_target_fails { - return Err(TestError) - } - (self.post_tick)(&mut data); - Ok((id, data.target_latest_received_nonce)) - } - - async fn unrewarded_relayers_state( - &self, - id: TargetHeaderIdOf, - ) -> Result<(TargetHeaderIdOf, UnrewardedRelayersState), TestError> { - Ok(( - id, - UnrewardedRelayersState { - unrewarded_relayer_entries: 0, - messages_in_oldest_entry: 0, - total_messages: 0, - last_delivered_nonce: 0, - }, - )) - } - - async fn latest_confirmed_received_nonce( - &self, - id: 
TargetHeaderIdOf, - ) -> Result<(TargetHeaderIdOf, MessageNonce), TestError> { - let mut data = self.data.lock(); - (self.tick)(&mut data); - if data.is_target_fails { - return Err(TestError) - } - (self.post_tick)(&mut data); - Ok((id, data.target_latest_confirmed_received_nonce)) - } - - async fn prove_messages_receiving( - &self, - id: TargetHeaderIdOf, - ) -> Result<(TargetHeaderIdOf, TestMessagesReceivingProof), TestError> { - Ok((id, self.data.lock().target_latest_received_nonce)) - } - - async fn submit_messages_proof( - &self, - maybe_batch_tx: Option, - _generated_at_header: SourceHeaderIdOf, - nonces: RangeInclusive, - proof: TestMessagesProof, - ) -> Result, TestError> { - let mut data = self.data.lock(); - (self.tick)(&mut data); - if data.is_target_fails { - return Err(TestError) - } - data.receive_messages(maybe_batch_tx, proof); - (self.post_tick)(&mut data); - Ok(NoncesSubmitArtifacts { - nonces, - tx_tracker: TestTransactionTracker(data.target_tracked_transaction_status), - }) - } - - async fn require_source_header_on_target( - &self, - id: SourceHeaderIdOf, - ) -> Result, Self::Error> { - let mut data = self.data.lock(); - data.source_to_target_header_required = Some(id); - data.source_to_target_header_requirements.push(id); - (self.tick)(&mut data); - (self.post_tick)(&mut data); - - Ok(data.source_to_target_batch_transaction.take().map(|mut tx| { - tx.required_header_id = id; - tx - })) - } - } - - fn run_loop_test( - data: Arc>, - source_tick: Arc, - source_post_tick: Arc, - target_tick: Arc, - target_post_tick: Arc, - exit_signal: impl Future + 'static + Send, - ) -> TestClientData { - async_std::task::block_on(async { - let source_client = TestSourceClient { - data: data.clone(), - tick: source_tick, - post_tick: source_post_tick, - }; - let target_client = TestTargetClient { - data: data.clone(), - tick: target_tick, - post_tick: target_post_tick, - }; - let _ = run( - Params { - lane: LaneId([0, 0, 0, 0]), - source_tick: 
Duration::from_millis(100), - target_tick: Duration::from_millis(100), - reconnect_delay: Duration::from_millis(0), - delivery_params: MessageDeliveryParams { - max_unrewarded_relayer_entries_at_target: 4, - max_unconfirmed_nonces_at_target: 4, - max_messages_in_single_batch: 4, - max_messages_weight_in_single_batch: Weight::from_parts(4, 0), - max_messages_size_in_single_batch: 4, - }, - }, - source_client, - target_client, - MetricsParams::disabled(), - exit_signal, - ) - .await; - let result = data.lock().clone(); - result - }) - } - - #[test] - fn message_lane_loop_is_able_to_recover_from_connection_errors() { - // with this configuration, source client will return Err, making source client - // reconnect. Then the target client will fail with Err + reconnect. Then we finally - // able to deliver messages. - let (exit_sender, exit_receiver) = unbounded(); - let result = run_loop_test( - Arc::new(Mutex::new(TestClientData { - is_source_fails: true, - source_state: ClientState { - best_self: HeaderId(0, 0), - best_finalized_self: HeaderId(0, 0), - best_finalized_peer_at_best_self: Some(HeaderId(0, 0)), - actual_best_finalized_peer_at_best_self: Some(HeaderId(0, 0)), - }, - source_latest_generated_nonce: 1, - target_state: ClientState { - best_self: HeaderId(0, 0), - best_finalized_self: HeaderId(0, 0), - best_finalized_peer_at_best_self: Some(HeaderId(0, 0)), - actual_best_finalized_peer_at_best_self: Some(HeaderId(0, 0)), - }, - target_latest_received_nonce: 0, - ..Default::default() - })), - Arc::new(|data: &mut TestClientData| { - if data.is_source_reconnected { - data.is_source_fails = false; - data.is_target_fails = true; - } - }), - Arc::new(|_| {}), - Arc::new(move |data: &mut TestClientData| { - if data.is_target_reconnected { - data.is_target_fails = false; - } - if data.target_state.best_finalized_peer_at_best_self.unwrap().0 < 10 { - data.target_state.best_finalized_peer_at_best_self = Some(HeaderId( - 
data.target_state.best_finalized_peer_at_best_self.unwrap().0 + 1, - data.target_state.best_finalized_peer_at_best_self.unwrap().0 + 1, - )); - } - if !data.submitted_messages_proofs.is_empty() { - exit_sender.unbounded_send(()).unwrap(); - } - }), - Arc::new(|_| {}), - exit_receiver.into_future().map(|(_, _)| ()), - ); - - assert_eq!(result.submitted_messages_proofs, vec![(1..=1, None)],); - } - - #[test] - fn message_lane_loop_is_able_to_recover_from_unsuccessful_transaction() { - // with this configuration, both source and target clients will mine their transactions, but - // their corresponding nonce won't be udpated => reconnect will happen - let (exit_sender, exit_receiver) = unbounded(); - let result = run_loop_test( - Arc::new(Mutex::new(TestClientData { - source_state: ClientState { - best_self: HeaderId(0, 0), - best_finalized_self: HeaderId(0, 0), - best_finalized_peer_at_best_self: Some(HeaderId(0, 0)), - actual_best_finalized_peer_at_best_self: Some(HeaderId(0, 0)), - }, - source_latest_generated_nonce: 1, - target_state: ClientState { - best_self: HeaderId(0, 0), - best_finalized_self: HeaderId(0, 0), - best_finalized_peer_at_best_self: Some(HeaderId(0, 0)), - actual_best_finalized_peer_at_best_self: Some(HeaderId(0, 0)), - }, - target_latest_received_nonce: 0, - ..Default::default() - })), - Arc::new(move |data: &mut TestClientData| { - // blocks are produced on every tick - data.source_state.best_self = - HeaderId(data.source_state.best_self.0 + 1, data.source_state.best_self.1 + 1); - data.source_state.best_finalized_self = data.source_state.best_self; - // syncing target headers -> source chain - if let Some(last_requirement) = data.target_to_source_header_requirements.last() { - if *last_requirement != - data.source_state.best_finalized_peer_at_best_self.unwrap() - { - data.source_state.best_finalized_peer_at_best_self = - Some(*last_requirement); - } - } - }), - Arc::new(move |data: &mut TestClientData| { - // if it is the first time we're 
submitting delivery proof, let's revert changes - // to source status => then the delivery confirmation transaction is "finalized", - // but the state is not altered - if data.submitted_messages_receiving_proofs.len() == 1 { - data.source_latest_confirmed_received_nonce = 0; - } - }), - Arc::new(move |data: &mut TestClientData| { - // blocks are produced on every tick - data.target_state.best_self = - HeaderId(data.target_state.best_self.0 + 1, data.target_state.best_self.1 + 1); - data.target_state.best_finalized_self = data.target_state.best_self; - // syncing source headers -> target chain - if let Some(last_requirement) = data.source_to_target_header_requirements.last() { - if *last_requirement != - data.target_state.best_finalized_peer_at_best_self.unwrap() - { - data.target_state.best_finalized_peer_at_best_self = - Some(*last_requirement); - } - } - // if source has received all messages receiving confirmations => stop - if data.source_latest_confirmed_received_nonce == 1 { - exit_sender.unbounded_send(()).unwrap(); - } - }), - Arc::new(move |data: &mut TestClientData| { - // if it is the first time we're submitting messages proof, let's revert changes - // to target status => then the messages delivery transaction is "finalized", but - // the state is not altered - if data.submitted_messages_proofs.len() == 1 { - data.target_latest_received_nonce = 0; - data.target_latest_confirmed_received_nonce = 0; - } - }), - exit_receiver.into_future().map(|(_, _)| ()), - ); - - assert_eq!(result.submitted_messages_proofs.len(), 2); - assert_eq!(result.submitted_messages_receiving_proofs.len(), 2); - } - - #[test] - fn message_lane_loop_works() { - let (exit_sender, exit_receiver) = unbounded(); - let result = run_loop_test( - Arc::new(Mutex::new(TestClientData { - source_state: ClientState { - best_self: HeaderId(10, 10), - best_finalized_self: HeaderId(10, 10), - best_finalized_peer_at_best_self: Some(HeaderId(0, 0)), - actual_best_finalized_peer_at_best_self: 
Some(HeaderId(0, 0)), - }, - source_latest_generated_nonce: 10, - target_state: ClientState { - best_self: HeaderId(0, 0), - best_finalized_self: HeaderId(0, 0), - best_finalized_peer_at_best_self: Some(HeaderId(0, 0)), - actual_best_finalized_peer_at_best_self: Some(HeaderId(0, 0)), - }, - target_latest_received_nonce: 0, - ..Default::default() - })), - Arc::new(|data: &mut TestClientData| { - // blocks are produced on every tick - data.source_state.best_self = - HeaderId(data.source_state.best_self.0 + 1, data.source_state.best_self.1 + 1); - data.source_state.best_finalized_self = data.source_state.best_self; - // headers relay must only be started when we need new target headers at source node - if data.target_to_source_header_required.is_some() { - assert!( - data.source_state.best_finalized_peer_at_best_self.unwrap().0 < - data.target_state.best_self.0 - ); - data.target_to_source_header_required = None; - } - // syncing target headers -> source chain - if let Some(last_requirement) = data.target_to_source_header_requirements.last() { - if *last_requirement != - data.source_state.best_finalized_peer_at_best_self.unwrap() - { - data.source_state.best_finalized_peer_at_best_self = - Some(*last_requirement); - } - } - }), - Arc::new(|_| {}), - Arc::new(move |data: &mut TestClientData| { - // blocks are produced on every tick - data.target_state.best_self = - HeaderId(data.target_state.best_self.0 + 1, data.target_state.best_self.1 + 1); - data.target_state.best_finalized_self = data.target_state.best_self; - // headers relay must only be started when we need new source headers at target node - if data.source_to_target_header_required.is_some() { - assert!( - data.target_state.best_finalized_peer_at_best_self.unwrap().0 < - data.source_state.best_self.0 - ); - data.source_to_target_header_required = None; - } - // syncing source headers -> target chain - if let Some(last_requirement) = data.source_to_target_header_requirements.last() { - if *last_requirement != - 
data.target_state.best_finalized_peer_at_best_self.unwrap() - { - data.target_state.best_finalized_peer_at_best_self = - Some(*last_requirement); - } - } - // if source has received all messages receiving confirmations => stop - if data.source_latest_confirmed_received_nonce == 10 { - exit_sender.unbounded_send(()).unwrap(); - } - }), - Arc::new(|_| {}), - exit_receiver.into_future().map(|(_, _)| ()), - ); - - // there are no strict restrictions on when reward confirmation should come - // (because `max_unconfirmed_nonces_at_target` is `100` in tests and this confirmation - // depends on the state of both clients) - // => we do not check it here - assert_eq!(result.submitted_messages_proofs[0].0, 1..=4); - assert_eq!(result.submitted_messages_proofs[1].0, 5..=8); - assert_eq!(result.submitted_messages_proofs[2].0, 9..=10); - assert!(!result.submitted_messages_receiving_proofs.is_empty()); - - // check that we have at least once required new source->target or target->source headers - assert!(!result.target_to_source_header_requirements.is_empty()); - assert!(!result.source_to_target_header_requirements.is_empty()); - } - - #[test] - fn message_lane_loop_works_with_batch_transactions() { - let (exit_sender, exit_receiver) = unbounded(); - let original_data = Arc::new(Mutex::new(TestClientData { - source_state: ClientState { - best_self: HeaderId(10, 10), - best_finalized_self: HeaderId(10, 10), - best_finalized_peer_at_best_self: Some(HeaderId(0, 0)), - actual_best_finalized_peer_at_best_self: Some(HeaderId(0, 0)), - }, - source_latest_generated_nonce: 10, - target_state: ClientState { - best_self: HeaderId(0, 0), - best_finalized_self: HeaderId(0, 0), - best_finalized_peer_at_best_self: Some(HeaderId(0, 0)), - actual_best_finalized_peer_at_best_self: Some(HeaderId(0, 0)), - }, - target_latest_received_nonce: 0, - ..Default::default() - })); - let result = run_loop_test( - original_data, - Arc::new(|_| {}), - Arc::new(move |data: &mut TestClientData| { - 
data.source_state.best_self = - HeaderId(data.source_state.best_self.0 + 1, data.source_state.best_self.1 + 1); - data.source_state.best_finalized_self = data.source_state.best_self; - if let Some(target_to_source_header_required) = - data.target_to_source_header_required.take() - { - data.target_to_source_batch_transaction = - Some(TestConfirmationBatchTransaction { - required_header_id: target_to_source_header_required, - }) - } - }), - Arc::new(|_| {}), - Arc::new(move |data: &mut TestClientData| { - data.target_state.best_self = - HeaderId(data.target_state.best_self.0 + 1, data.target_state.best_self.1 + 1); - data.target_state.best_finalized_self = data.target_state.best_self; - - if let Some(source_to_target_header_required) = - data.source_to_target_header_required.take() - { - data.source_to_target_batch_transaction = Some(TestMessagesBatchTransaction { - required_header_id: source_to_target_header_required, - }) - } - - if data.source_latest_confirmed_received_nonce == 10 { - exit_sender.unbounded_send(()).unwrap(); - } - }), - exit_receiver.into_future().map(|(_, _)| ()), - ); - - // there are no strict restrictions on when reward confirmation should come - // (because `max_unconfirmed_nonces_at_target` is `100` in tests and this confirmation - // depends on the state of both clients) - // => we do not check it here - assert_eq!(result.submitted_messages_proofs[0].0, 1..=4); - assert_eq!(result.submitted_messages_proofs[1].0, 5..=8); - assert_eq!(result.submitted_messages_proofs[2].0, 9..=10); - assert!(!result.submitted_messages_receiving_proofs.is_empty()); - - // check that we have at least once required new source->target or target->source headers - assert!(!result.target_to_source_header_requirements.is_empty()); - assert!(!result.source_to_target_header_requirements.is_empty()); - } -} diff --git a/relays/messages/src/message_race_delivery.rs b/relays/messages/src/message_race_delivery.rs deleted file mode 100644 index 137deb5b7..000000000 --- 
a/relays/messages/src/message_race_delivery.rs +++ /dev/null @@ -1,1405 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -//! Message delivery race delivers proof-of-messages from "lane.source" to "lane.target". - -use std::{collections::VecDeque, marker::PhantomData, ops::RangeInclusive}; - -use async_trait::async_trait; -use futures::stream::FusedStream; - -use bp_messages::{MessageNonce, UnrewardedRelayersState, Weight}; -use relay_utils::FailedClient; - -use crate::{ - message_lane::{MessageLane, SourceHeaderIdOf, TargetHeaderIdOf}, - message_lane_loop::{ - MessageDeliveryParams, MessageDetailsMap, MessageProofParameters, NoncesSubmitArtifacts, - SourceClient as MessageLaneSourceClient, SourceClientState, - TargetClient as MessageLaneTargetClient, TargetClientState, - }, - message_race_limits::{MessageRaceLimits, RelayMessagesBatchReference}, - message_race_loop::{ - MessageRace, NoncesRange, RaceState, RaceStrategy, SourceClient, SourceClientNonces, - TargetClient, TargetClientNonces, - }, - message_race_strategy::BasicStrategy, - metrics::MessageLaneLoopMetrics, -}; - -/// Run message delivery race. -pub async fn run( - source_client: impl MessageLaneSourceClient

, - source_state_updates: impl FusedStream>, - target_client: impl MessageLaneTargetClient

, - target_state_updates: impl FusedStream>, - metrics_msg: Option, - params: MessageDeliveryParams, -) -> Result<(), FailedClient> { - crate::message_race_loop::run( - MessageDeliveryRaceSource { - client: source_client.clone(), - metrics_msg: metrics_msg.clone(), - _phantom: Default::default(), - }, - source_state_updates, - MessageDeliveryRaceTarget { - client: target_client.clone(), - metrics_msg: metrics_msg.clone(), - _phantom: Default::default(), - }, - target_state_updates, - MessageDeliveryStrategy:: { - lane_source_client: source_client, - lane_target_client: target_client, - max_unrewarded_relayer_entries_at_target: params - .max_unrewarded_relayer_entries_at_target, - max_unconfirmed_nonces_at_target: params.max_unconfirmed_nonces_at_target, - max_messages_in_single_batch: params.max_messages_in_single_batch, - max_messages_weight_in_single_batch: params.max_messages_weight_in_single_batch, - max_messages_size_in_single_batch: params.max_messages_size_in_single_batch, - latest_confirmed_nonces_at_source: VecDeque::new(), - target_nonces: None, - strategy: BasicStrategy::new(), - metrics_msg, - }, - ) - .await -} - -/// Message delivery race. -struct MessageDeliveryRace

(std::marker::PhantomData

); - -impl MessageRace for MessageDeliveryRace

{ - type SourceHeaderId = SourceHeaderIdOf

; - type TargetHeaderId = TargetHeaderIdOf

; - - type MessageNonce = MessageNonce; - type Proof = P::MessagesProof; - - fn source_name() -> String { - format!("{}::MessagesDelivery", P::SOURCE_NAME) - } - - fn target_name() -> String { - format!("{}::MessagesDelivery", P::TARGET_NAME) - } -} - -/// Message delivery race source, which is a source of the lane. -struct MessageDeliveryRaceSource { - client: C, - metrics_msg: Option, - _phantom: PhantomData

, -} - -#[async_trait] -impl SourceClient> for MessageDeliveryRaceSource -where - P: MessageLane, - C: MessageLaneSourceClient

, -{ - type Error = C::Error; - type NoncesRange = MessageDetailsMap; - type ProofParameters = MessageProofParameters; - - async fn nonces( - &self, - at_block: SourceHeaderIdOf

, - prev_latest_nonce: MessageNonce, - ) -> Result<(SourceHeaderIdOf

, SourceClientNonces), Self::Error> { - let (at_block, latest_generated_nonce) = - self.client.latest_generated_nonce(at_block).await?; - let (at_block, latest_confirmed_nonce) = - self.client.latest_confirmed_received_nonce(at_block).await?; - - if let Some(metrics_msg) = self.metrics_msg.as_ref() { - metrics_msg.update_source_latest_generated_nonce(latest_generated_nonce); - metrics_msg.update_source_latest_confirmed_nonce(latest_confirmed_nonce); - } - - let new_nonces = if latest_generated_nonce > prev_latest_nonce { - self.client - .generated_message_details( - at_block.clone(), - prev_latest_nonce + 1..=latest_generated_nonce, - ) - .await? - } else { - MessageDetailsMap::new() - }; - - Ok(( - at_block, - SourceClientNonces { new_nonces, confirmed_nonce: Some(latest_confirmed_nonce) }, - )) - } - - async fn generate_proof( - &self, - at_block: SourceHeaderIdOf

, - nonces: RangeInclusive, - proof_parameters: Self::ProofParameters, - ) -> Result<(SourceHeaderIdOf

, RangeInclusive, P::MessagesProof), Self::Error> - { - self.client.prove_messages(at_block, nonces, proof_parameters).await - } -} - -/// Message delivery race target, which is a target of the lane. -struct MessageDeliveryRaceTarget { - client: C, - metrics_msg: Option, - _phantom: PhantomData

, -} - -#[async_trait] -impl TargetClient> for MessageDeliveryRaceTarget -where - P: MessageLane, - C: MessageLaneTargetClient

, -{ - type Error = C::Error; - type TargetNoncesData = DeliveryRaceTargetNoncesData; - type BatchTransaction = C::BatchTransaction; - type TransactionTracker = C::TransactionTracker; - - async fn require_source_header( - &self, - id: SourceHeaderIdOf

, - ) -> Result, Self::Error> { - self.client.require_source_header_on_target(id).await - } - - async fn nonces( - &self, - at_block: TargetHeaderIdOf

, - update_metrics: bool, - ) -> Result<(TargetHeaderIdOf

, TargetClientNonces), Self::Error> - { - let (at_block, latest_received_nonce) = self.client.latest_received_nonce(at_block).await?; - let (at_block, latest_confirmed_nonce) = - self.client.latest_confirmed_received_nonce(at_block).await?; - let (at_block, unrewarded_relayers) = - self.client.unrewarded_relayers_state(at_block).await?; - - if update_metrics { - if let Some(metrics_msg) = self.metrics_msg.as_ref() { - metrics_msg.update_target_latest_received_nonce(latest_received_nonce); - metrics_msg.update_target_latest_confirmed_nonce(latest_confirmed_nonce); - } - } - - Ok(( - at_block, - TargetClientNonces { - latest_nonce: latest_received_nonce, - nonces_data: DeliveryRaceTargetNoncesData { - confirmed_nonce: latest_confirmed_nonce, - unrewarded_relayers, - }, - }, - )) - } - - async fn submit_proof( - &self, - maybe_batch_tx: Option, - generated_at_block: SourceHeaderIdOf

, - nonces: RangeInclusive, - proof: P::MessagesProof, - ) -> Result, Self::Error> { - self.client - .submit_messages_proof(maybe_batch_tx, generated_at_block, nonces, proof) - .await - } -} - -/// Additional nonces data from the target client used by message delivery race. -#[derive(Debug, Clone)] -struct DeliveryRaceTargetNoncesData { - /// The latest nonce that we know: (1) has been delivered to us (2) has been confirmed - /// back to the source node (by confirmations race) and (3) relayer has received - /// reward for (and this has been confirmed by the message delivery race). - confirmed_nonce: MessageNonce, - /// State of the unrewarded relayers set at the target node. - unrewarded_relayers: UnrewardedRelayersState, -} - -/// Messages delivery strategy. -struct MessageDeliveryStrategy { - /// The client that is connected to the message lane source node. - lane_source_client: SC, - /// The client that is connected to the message lane target node. - lane_target_client: TC, - /// Maximal unrewarded relayer entries at target client. - max_unrewarded_relayer_entries_at_target: MessageNonce, - /// Maximal unconfirmed nonces at target client. - max_unconfirmed_nonces_at_target: MessageNonce, - /// Maximal number of messages in the single delivery transaction. - max_messages_in_single_batch: MessageNonce, - /// Maximal cumulative messages weight in the single delivery transaction. - max_messages_weight_in_single_batch: Weight, - /// Maximal messages size in the single delivery transaction. - max_messages_size_in_single_batch: u32, - /// Latest confirmed nonces at the source client + the header id where we have first met this - /// nonce. - latest_confirmed_nonces_at_source: VecDeque<(SourceHeaderIdOf

, MessageNonce)>, - /// Target nonces available at the **best** block of the target chain. - target_nonces: Option>, - /// Basic delivery strategy. - strategy: MessageDeliveryStrategyBase

, - /// Message lane metrics. - metrics_msg: Option, -} - -type MessageDeliveryStrategyBase

= BasicStrategy< -

::SourceHeaderNumber, -

::SourceHeaderHash, -

::TargetHeaderNumber, -

::TargetHeaderHash, - MessageDetailsMap<

::SourceChainBalance>, -

::MessagesProof, ->; - -impl std::fmt::Debug for MessageDeliveryStrategy { - fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { - fmt.debug_struct("MessageDeliveryStrategy") - .field( - "max_unrewarded_relayer_entries_at_target", - &self.max_unrewarded_relayer_entries_at_target, - ) - .field("max_unconfirmed_nonces_at_target", &self.max_unconfirmed_nonces_at_target) - .field("max_messages_in_single_batch", &self.max_messages_in_single_batch) - .field("max_messages_weight_in_single_batch", &self.max_messages_weight_in_single_batch) - .field("max_messages_size_in_single_batch", &self.max_messages_size_in_single_batch) - .field("latest_confirmed_nonces_at_source", &self.latest_confirmed_nonces_at_source) - .field("target_nonces", &self.target_nonces) - .field("strategy", &self.strategy) - .finish() - } -} - -impl MessageDeliveryStrategy -where - P: MessageLane, - SC: MessageLaneSourceClient

, - TC: MessageLaneTargetClient

, -{ - /// Returns true if some race action can be selected (with `select_race_action`) at given - /// `best_finalized_source_header_id_at_best_target` source header at target. - async fn can_submit_transaction_with< - RS: RaceState, TargetHeaderIdOf

>, - >( - &self, - mut race_state: RS, - maybe_best_finalized_source_header_id_at_best_target: Option>, - ) -> bool { - if let Some(best_finalized_source_header_id_at_best_target) = - maybe_best_finalized_source_header_id_at_best_target - { - race_state.set_best_finalized_source_header_id_at_best_target( - best_finalized_source_header_id_at_best_target, - ); - - return self.select_race_action(race_state).await.is_some() - } - - false - } - - async fn select_race_action, TargetHeaderIdOf

>>( - &self, - race_state: RS, - ) -> Option<(RangeInclusive, MessageProofParameters)> { - // if we have already selected nonces that we want to submit, do nothing - if race_state.nonces_to_submit().is_some() { - return None - } - - // if we already submitted some nonces, do nothing - if race_state.nonces_submitted().is_some() { - return None - } - - let best_target_nonce = self.strategy.best_at_target()?; - let best_finalized_source_header_id_at_best_target = - race_state.best_finalized_source_header_id_at_best_target()?; - let target_nonces = self.target_nonces.as_ref()?; - let latest_confirmed_nonce_at_source = self - .latest_confirmed_nonce_at_source(&best_finalized_source_header_id_at_best_target) - .unwrap_or(target_nonces.nonces_data.confirmed_nonce); - - // There's additional condition in the message delivery race: target would reject messages - // if there are too much unconfirmed messages at the inbound lane. - - // Ok - we may have new nonces to deliver. But target may still reject new messages, because - // we haven't notified it that (some) messages have been confirmed. So we may want to - // include updated `source.latest_confirmed` in the proof. - // - // Important note: we're including outbound state lane proof whenever there are unconfirmed - // nonces on the target chain. Other strategy is to include it only if it's absolutely - // necessary. - let latest_received_nonce_at_target = target_nonces.latest_nonce; - let latest_confirmed_nonce_at_target = target_nonces.nonces_data.confirmed_nonce; - let outbound_state_proof_required = - latest_confirmed_nonce_at_target < latest_confirmed_nonce_at_source; - - // The target node would also reject messages if there are too many entries in the - // "unrewarded relayers" set. If we are unable to prove new rewards to the target node, then - // we should wait for confirmations race. 
- let unrewarded_limit_reached = - target_nonces.nonces_data.unrewarded_relayers.unrewarded_relayer_entries >= - self.max_unrewarded_relayer_entries_at_target || - target_nonces.nonces_data.unrewarded_relayers.total_messages >= - self.max_unconfirmed_nonces_at_target; - if unrewarded_limit_reached { - // so there are already too many unrewarded relayer entries in the set - // - // => check if we can prove enough rewards. If not, we should wait for more rewards to - // be paid - let number_of_rewards_being_proved = - latest_confirmed_nonce_at_source.saturating_sub(latest_confirmed_nonce_at_target); - let enough_rewards_being_proved = number_of_rewards_being_proved >= - target_nonces.nonces_data.unrewarded_relayers.messages_in_oldest_entry; - if !enough_rewards_being_proved { - return None - } - } - - // If we're here, then the confirmations race did its job && sending side now knows that - // messages have been delivered. Now let's select nonces that we want to deliver. - // - // We may deliver at most: - // - // max_unconfirmed_nonces_at_target - (latest_received_nonce_at_target - - // latest_confirmed_nonce_at_target) - // - // messages in the batch. 
But since we're including outbound state proof in the batch, then - // it may be increased to: - // - // max_unconfirmed_nonces_at_target - (latest_received_nonce_at_target - - // latest_confirmed_nonce_at_source) - let future_confirmed_nonce_at_target = if outbound_state_proof_required { - latest_confirmed_nonce_at_source - } else { - latest_confirmed_nonce_at_target - }; - let max_nonces = latest_received_nonce_at_target - .checked_sub(future_confirmed_nonce_at_target) - .and_then(|diff| self.max_unconfirmed_nonces_at_target.checked_sub(diff)) - .unwrap_or_default(); - let max_nonces = std::cmp::min(max_nonces, self.max_messages_in_single_batch); - let max_messages_weight_in_single_batch = self.max_messages_weight_in_single_batch; - let max_messages_size_in_single_batch = self.max_messages_size_in_single_batch; - let lane_source_client = self.lane_source_client.clone(); - let lane_target_client = self.lane_target_client.clone(); - - // select nonces from nonces, available for delivery - let selected_nonces = match self.strategy.available_source_queue_indices(race_state) { - Some(available_source_queue_indices) => { - let source_queue = self.strategy.source_queue(); - let reference = RelayMessagesBatchReference { - max_messages_in_this_batch: max_nonces, - max_messages_weight_in_single_batch, - max_messages_size_in_single_batch, - lane_source_client: lane_source_client.clone(), - lane_target_client: lane_target_client.clone(), - best_target_nonce, - nonces_queue: source_queue.clone(), - nonces_queue_range: available_source_queue_indices, - metrics: self.metrics_msg.clone(), - }; - - MessageRaceLimits::decide(reference).await - }, - None => { - // we still may need to submit delivery transaction with zero messages to - // unblock the lane. But it'll only be accepted if the lane is blocked - // (i.e. 
when `unrewarded_limit_reached` is `true`) - None - }, - }; - - // check if we need unblocking transaction and we may submit it - #[allow(clippy::reversed_empty_ranges)] - let selected_nonces = match selected_nonces { - Some(selected_nonces) => selected_nonces, - None if unrewarded_limit_reached && outbound_state_proof_required => 1..=0, - _ => return None, - }; - - let dispatch_weight = self.dispatch_weight_for_range(&selected_nonces); - Some(( - selected_nonces, - MessageProofParameters { outbound_state_proof_required, dispatch_weight }, - )) - } - - /// Returns lastest confirmed message at source chain, given source block. - fn latest_confirmed_nonce_at_source(&self, at: &SourceHeaderIdOf

) -> Option { - self.latest_confirmed_nonces_at_source - .iter() - .take_while(|(id, _)| id.0 <= at.0) - .last() - .map(|(_, nonce)| *nonce) - } - - /// Returns total weight of all undelivered messages. - fn dispatch_weight_for_range(&self, range: &RangeInclusive) -> Weight { - self.strategy - .source_queue() - .iter() - .flat_map(|(_, subrange)| { - subrange - .iter() - .filter(|(nonce, _)| range.contains(nonce)) - .map(|(_, details)| details.dispatch_weight) - }) - .fold(Weight::zero(), |total, weight| total.saturating_add(weight)) - } -} - -#[async_trait] -impl RaceStrategy, TargetHeaderIdOf

, P::MessagesProof> - for MessageDeliveryStrategy -where - P: MessageLane, - SC: MessageLaneSourceClient

, - TC: MessageLaneTargetClient

, -{ - type SourceNoncesRange = MessageDetailsMap; - type ProofParameters = MessageProofParameters; - type TargetNoncesData = DeliveryRaceTargetNoncesData; - - fn is_empty(&self) -> bool { - self.strategy.is_empty() - } - - async fn required_source_header_at_target< - RS: RaceState, TargetHeaderIdOf

>, - >( - &self, - race_state: RS, - ) -> Option> { - // we have already submitted something - let's wait until it is mined - if race_state.nonces_submitted().is_some() { - return None - } - - // if we can deliver something using current race state, go on - let selected_nonces = self.select_race_action(race_state.clone()).await; - if selected_nonces.is_some() { - return None - } - - // check if we may deliver some messages if we'll relay require source header - // to target first - let maybe_source_header_for_delivery = - self.strategy.source_queue().back().map(|(id, _)| id.clone()); - if self - .can_submit_transaction_with( - race_state.clone(), - maybe_source_header_for_delivery.clone(), - ) - .await - { - return maybe_source_header_for_delivery - } - - // ok, we can't delivery anything even if we relay some source blocks first. But maybe - // the lane is blocked and we need to submit unblock transaction? - let maybe_source_header_for_reward_confirmation = - self.latest_confirmed_nonces_at_source.back().map(|(id, _)| id.clone()); - if self - .can_submit_transaction_with( - race_state.clone(), - maybe_source_header_for_reward_confirmation.clone(), - ) - .await - { - return maybe_source_header_for_reward_confirmation - } - - None - } - - fn best_at_source(&self) -> Option { - self.strategy.best_at_source() - } - - fn best_at_target(&self) -> Option { - self.strategy.best_at_target() - } - - fn source_nonces_updated( - &mut self, - at_block: SourceHeaderIdOf

, - nonces: SourceClientNonces, - ) { - if let Some(confirmed_nonce) = nonces.confirmed_nonce { - let is_confirmed_nonce_updated = self - .latest_confirmed_nonces_at_source - .back() - .map(|(_, prev_nonce)| *prev_nonce != confirmed_nonce) - .unwrap_or(true); - if is_confirmed_nonce_updated { - self.latest_confirmed_nonces_at_source - .push_back((at_block.clone(), confirmed_nonce)); - } - } - self.strategy.source_nonces_updated(at_block, nonces) - } - - fn reset_best_target_nonces(&mut self) { - self.target_nonces = None; - self.strategy.reset_best_target_nonces(); - } - - fn best_target_nonces_updated, TargetHeaderIdOf

>>( - &mut self, - nonces: TargetClientNonces, - race_state: &mut RS, - ) { - // best target nonces must always be ge than finalized target nonces - let latest_nonce = nonces.latest_nonce; - self.target_nonces = Some(nonces); - - self.strategy.best_target_nonces_updated( - TargetClientNonces { latest_nonce, nonces_data: () }, - race_state, - ) - } - - fn finalized_target_nonces_updated, TargetHeaderIdOf

>>( - &mut self, - nonces: TargetClientNonces, - race_state: &mut RS, - ) { - if let Some(ref best_finalized_source_header_id_at_best_target) = - race_state.best_finalized_source_header_id_at_best_target() - { - let oldest_header_number_to_keep = best_finalized_source_header_id_at_best_target.0; - while self - .latest_confirmed_nonces_at_source - .front() - .map(|(id, _)| id.0 < oldest_header_number_to_keep) - .unwrap_or(false) - { - self.latest_confirmed_nonces_at_source.pop_front(); - } - } - - if let Some(ref mut target_nonces) = self.target_nonces { - target_nonces.latest_nonce = - std::cmp::max(target_nonces.latest_nonce, nonces.latest_nonce); - } - - self.strategy.finalized_target_nonces_updated( - TargetClientNonces { latest_nonce: nonces.latest_nonce, nonces_data: () }, - race_state, - ) - } - - async fn select_nonces_to_deliver, TargetHeaderIdOf

>>( - &self, - race_state: RS, - ) -> Option<(RangeInclusive, Self::ProofParameters)> { - self.select_race_action(race_state).await - } -} - -impl NoncesRange for MessageDetailsMap { - fn begin(&self) -> MessageNonce { - self.keys().next().cloned().unwrap_or_default() - } - - fn end(&self) -> MessageNonce { - self.keys().next_back().cloned().unwrap_or_default() - } - - fn greater_than(mut self, nonce: MessageNonce) -> Option { - let gte = self.split_off(&(nonce + 1)); - if gte.is_empty() { - None - } else { - Some(gte) - } - } -} - -#[cfg(test)] -mod tests { - use crate::{ - message_lane_loop::{ - tests::{ - header_id, TestMessageLane, TestMessagesBatchTransaction, TestMessagesProof, - TestSourceChainBalance, TestSourceClient, TestSourceHeaderId, TestTargetClient, - TestTargetHeaderId, - }, - MessageDetails, - }, - message_race_loop::RaceStateImpl, - }; - - use super::*; - - const DEFAULT_DISPATCH_WEIGHT: Weight = Weight::from_parts(1, 0); - const DEFAULT_SIZE: u32 = 1; - - type TestRaceState = RaceStateImpl< - TestSourceHeaderId, - TestTargetHeaderId, - TestMessagesProof, - TestMessagesBatchTransaction, - >; - type TestStrategy = - MessageDeliveryStrategy; - - fn source_nonces( - new_nonces: RangeInclusive, - confirmed_nonce: MessageNonce, - reward: TestSourceChainBalance, - ) -> SourceClientNonces> { - SourceClientNonces { - new_nonces: new_nonces - .into_iter() - .map(|nonce| { - ( - nonce, - MessageDetails { - dispatch_weight: DEFAULT_DISPATCH_WEIGHT, - size: DEFAULT_SIZE, - reward, - }, - ) - }) - .collect(), - confirmed_nonce: Some(confirmed_nonce), - } - } - - fn prepare_strategy() -> (TestRaceState, TestStrategy) { - let mut race_state = RaceStateImpl { - best_finalized_source_header_id_at_source: Some(header_id(1)), - best_finalized_source_header_id_at_best_target: Some(header_id(1)), - best_target_header_id: Some(header_id(1)), - best_finalized_target_header_id: Some(header_id(1)), - nonces_to_submit: None, - nonces_to_submit_batch: None, - 
nonces_submitted: None, - }; - - let mut race_strategy = TestStrategy { - max_unrewarded_relayer_entries_at_target: 4, - max_unconfirmed_nonces_at_target: 4, - max_messages_in_single_batch: 4, - max_messages_weight_in_single_batch: Weight::from_parts(4, 0), - max_messages_size_in_single_batch: 4, - latest_confirmed_nonces_at_source: vec![(header_id(1), 19)].into_iter().collect(), - lane_source_client: TestSourceClient::default(), - lane_target_client: TestTargetClient::default(), - metrics_msg: None, - target_nonces: Some(TargetClientNonces { - latest_nonce: 19, - nonces_data: DeliveryRaceTargetNoncesData { - confirmed_nonce: 19, - unrewarded_relayers: UnrewardedRelayersState { - unrewarded_relayer_entries: 0, - messages_in_oldest_entry: 0, - total_messages: 0, - last_delivered_nonce: 0, - }, - }, - }), - strategy: BasicStrategy::new(), - }; - - race_strategy - .strategy - .source_nonces_updated(header_id(1), source_nonces(20..=23, 19, 0)); - - let target_nonces = TargetClientNonces { latest_nonce: 19, nonces_data: () }; - race_strategy - .strategy - .best_target_nonces_updated(target_nonces.clone(), &mut race_state); - race_strategy - .strategy - .finalized_target_nonces_updated(target_nonces, &mut race_state); - - (race_state, race_strategy) - } - - fn proof_parameters(state_required: bool, weight: u32) -> MessageProofParameters { - MessageProofParameters { - outbound_state_proof_required: state_required, - dispatch_weight: Weight::from_parts(weight as u64, 0), - } - } - - #[test] - fn weights_map_works_as_nonces_range() { - fn build_map( - range: RangeInclusive, - ) -> MessageDetailsMap { - range - .map(|idx| { - ( - idx, - MessageDetails { - dispatch_weight: Weight::from_parts(idx, 0), - size: idx as _, - reward: idx as _, - }, - ) - }) - .collect() - } - - let map = build_map(20..=30); - - assert_eq!(map.begin(), 20); - assert_eq!(map.end(), 30); - assert_eq!(map.clone().greater_than(10), Some(build_map(20..=30))); - assert_eq!(map.clone().greater_than(19), 
Some(build_map(20..=30))); - assert_eq!(map.clone().greater_than(20), Some(build_map(21..=30))); - assert_eq!(map.clone().greater_than(25), Some(build_map(26..=30))); - assert_eq!(map.clone().greater_than(29), Some(build_map(30..=30))); - assert_eq!(map.greater_than(30), None); - } - - #[async_std::test] - async fn message_delivery_strategy_selects_messages_to_deliver() { - let (state, strategy) = prepare_strategy(); - - // both sides are ready to relay new messages - assert_eq!( - strategy.select_nonces_to_deliver(state).await, - Some(((20..=23), proof_parameters(false, 4))) - ); - } - - #[async_std::test] - async fn message_delivery_strategy_includes_outbound_state_proof_when_new_nonces_are_available() - { - let (state, mut strategy) = prepare_strategy(); - - // if there are new confirmed nonces on source, we want to relay this information - // to target to prune rewards queue - let prev_confirmed_nonce_at_source = - strategy.latest_confirmed_nonces_at_source.back().unwrap().1; - strategy.target_nonces.as_mut().unwrap().nonces_data.confirmed_nonce = - prev_confirmed_nonce_at_source - 1; - assert_eq!( - strategy.select_nonces_to_deliver(state).await, - Some(((20..=23), proof_parameters(true, 4))) - ); - } - - #[async_std::test] - async fn message_delivery_strategy_selects_nothing_if_there_are_too_many_unrewarded_relayers() { - let (state, mut strategy) = prepare_strategy(); - - // if there are already `max_unrewarded_relayer_entries_at_target` entries at target, - // we need to wait until rewards will be paid - { - let unrewarded_relayers = - &mut strategy.target_nonces.as_mut().unwrap().nonces_data.unrewarded_relayers; - unrewarded_relayers.unrewarded_relayer_entries = - strategy.max_unrewarded_relayer_entries_at_target; - unrewarded_relayers.messages_in_oldest_entry = 4; - } - assert_eq!(strategy.select_nonces_to_deliver(state).await, None); - } - - #[async_std::test] - async fn 
message_delivery_strategy_selects_nothing_if_proved_rewards_is_not_enough_to_remove_oldest_unrewarded_entry( - ) { - let (state, mut strategy) = prepare_strategy(); - - // if there are already `max_unrewarded_relayer_entries_at_target` entries at target, - // we need to prove at least `messages_in_oldest_entry` rewards - let prev_confirmed_nonce_at_source = - strategy.latest_confirmed_nonces_at_source.back().unwrap().1; - { - let nonces_data = &mut strategy.target_nonces.as_mut().unwrap().nonces_data; - nonces_data.confirmed_nonce = prev_confirmed_nonce_at_source - 1; - let unrewarded_relayers = &mut nonces_data.unrewarded_relayers; - unrewarded_relayers.unrewarded_relayer_entries = - strategy.max_unrewarded_relayer_entries_at_target; - unrewarded_relayers.messages_in_oldest_entry = 4; - } - assert_eq!(strategy.select_nonces_to_deliver(state).await, None); - } - - #[async_std::test] - async fn message_delivery_strategy_includes_outbound_state_proof_if_proved_rewards_is_enough() { - let (state, mut strategy) = prepare_strategy(); - - // if there are already `max_unrewarded_relayer_entries_at_target` entries at target, - // we need to prove at least `messages_in_oldest_entry` rewards - let prev_confirmed_nonce_at_source = - strategy.latest_confirmed_nonces_at_source.back().unwrap().1; - { - let nonces_data = &mut strategy.target_nonces.as_mut().unwrap().nonces_data; - nonces_data.confirmed_nonce = prev_confirmed_nonce_at_source - 3; - let unrewarded_relayers = &mut nonces_data.unrewarded_relayers; - unrewarded_relayers.unrewarded_relayer_entries = - strategy.max_unrewarded_relayer_entries_at_target; - unrewarded_relayers.messages_in_oldest_entry = 3; - } - assert_eq!( - strategy.select_nonces_to_deliver(state).await, - Some(((20..=23), proof_parameters(true, 4))) - ); - } - - #[async_std::test] - async fn message_delivery_strategy_limits_batch_by_messages_weight() { - let (state, mut strategy) = prepare_strategy(); - - // not all queued messages may fit in the batch, 
because batch has max weight - strategy.max_messages_weight_in_single_batch = Weight::from_parts(3, 0); - assert_eq!( - strategy.select_nonces_to_deliver(state).await, - Some(((20..=22), proof_parameters(false, 3))) - ); - } - - #[async_std::test] - async fn message_delivery_strategy_accepts_single_message_even_if_its_weight_overflows_maximal_weight( - ) { - let (state, mut strategy) = prepare_strategy(); - - // first message doesn't fit in the batch, because it has weight (10) that overflows max - // weight (4) - strategy.strategy.source_queue_mut()[0].1.get_mut(&20).unwrap().dispatch_weight = - Weight::from_parts(10, 0); - assert_eq!( - strategy.select_nonces_to_deliver(state).await, - Some(((20..=20), proof_parameters(false, 10))) - ); - } - - #[async_std::test] - async fn message_delivery_strategy_limits_batch_by_messages_size() { - let (state, mut strategy) = prepare_strategy(); - - // not all queued messages may fit in the batch, because batch has max weight - strategy.max_messages_size_in_single_batch = 3; - assert_eq!( - strategy.select_nonces_to_deliver(state).await, - Some(((20..=22), proof_parameters(false, 3))) - ); - } - - #[async_std::test] - async fn message_delivery_strategy_accepts_single_message_even_if_its_weight_overflows_maximal_size( - ) { - let (state, mut strategy) = prepare_strategy(); - - // first message doesn't fit in the batch, because it has weight (10) that overflows max - // weight (4) - strategy.strategy.source_queue_mut()[0].1.get_mut(&20).unwrap().size = 10; - assert_eq!( - strategy.select_nonces_to_deliver(state).await, - Some(((20..=20), proof_parameters(false, 1))) - ); - } - - #[async_std::test] - async fn message_delivery_strategy_limits_batch_by_messages_count_when_there_is_upper_limit() { - let (state, mut strategy) = prepare_strategy(); - - // not all queued messages may fit in the batch, because batch has max number of messages - // limit - strategy.max_messages_in_single_batch = 3; - assert_eq!( - 
strategy.select_nonces_to_deliver(state).await, - Some(((20..=22), proof_parameters(false, 3))) - ); - } - - #[async_std::test] - async fn message_delivery_strategy_limits_batch_by_messages_count_when_there_are_unconfirmed_nonces( - ) { - let (state, mut strategy) = prepare_strategy(); - - // 1 delivery confirmation from target to source is still missing, so we may only - // relay 3 new messages - let prev_confirmed_nonce_at_source = - strategy.latest_confirmed_nonces_at_source.back().unwrap().1; - strategy.latest_confirmed_nonces_at_source = - vec![(header_id(1), prev_confirmed_nonce_at_source - 1)].into_iter().collect(); - strategy.target_nonces.as_mut().unwrap().nonces_data.confirmed_nonce = - prev_confirmed_nonce_at_source - 1; - assert_eq!( - strategy.select_nonces_to_deliver(state).await, - Some(((20..=22), proof_parameters(false, 3))) - ); - } - - #[async_std::test] - async fn message_delivery_strategy_waits_for_confirmed_nonce_header_to_appear_on_target() { - // 1 delivery confirmation from target to source is still missing, so we may deliver - // reward confirmation with our message delivery transaction. But the problem is that - // the reward has been paid at header 2 && this header is still unknown to target node. 
- // - // => so we can't deliver more than 3 messages - let (mut state, mut strategy) = prepare_strategy(); - let prev_confirmed_nonce_at_source = - strategy.latest_confirmed_nonces_at_source.back().unwrap().1; - strategy.latest_confirmed_nonces_at_source = vec![ - (header_id(1), prev_confirmed_nonce_at_source - 1), - (header_id(2), prev_confirmed_nonce_at_source), - ] - .into_iter() - .collect(); - strategy.target_nonces.as_mut().unwrap().nonces_data.confirmed_nonce = - prev_confirmed_nonce_at_source - 1; - state.best_finalized_source_header_id_at_best_target = Some(header_id(1)); - assert_eq!( - strategy.select_nonces_to_deliver(state).await, - Some(((20..=22), proof_parameters(false, 3))) - ); - - // the same situation, but the header 2 is known to the target node, so we may deliver - // reward confirmation - let (mut state, mut strategy) = prepare_strategy(); - let prev_confirmed_nonce_at_source = - strategy.latest_confirmed_nonces_at_source.back().unwrap().1; - strategy.latest_confirmed_nonces_at_source = vec![ - (header_id(1), prev_confirmed_nonce_at_source - 1), - (header_id(2), prev_confirmed_nonce_at_source), - ] - .into_iter() - .collect(); - strategy.target_nonces.as_mut().unwrap().nonces_data.confirmed_nonce = - prev_confirmed_nonce_at_source - 1; - state.best_finalized_source_header_id_at_source = Some(header_id(2)); - state.best_finalized_source_header_id_at_best_target = Some(header_id(2)); - assert_eq!( - strategy.select_nonces_to_deliver(state).await, - Some(((20..=23), proof_parameters(true, 4))) - ); - } - - #[async_std::test] - async fn source_header_is_required_when_confirmations_are_required() { - // let's prepare situation when: - // - all messages [20; 23] have been generated at source block#1; - let (mut state, mut strategy) = prepare_strategy(); - // - // - messages [20; 23] have been delivered - assert_eq!( - strategy.select_nonces_to_deliver(state.clone()).await, - Some(((20..=23), proof_parameters(false, 4))) - ); - 
strategy.finalized_target_nonces_updated( - TargetClientNonces { - latest_nonce: 23, - nonces_data: DeliveryRaceTargetNoncesData { - confirmed_nonce: 19, - unrewarded_relayers: UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - messages_in_oldest_entry: 4, - total_messages: 4, - last_delivered_nonce: 23, - }, - }, - }, - &mut state, - ); - // nothing needs to be delivered now and we don't need any new headers - assert_eq!(strategy.select_nonces_to_deliver(state.clone()).await, None); - assert_eq!(strategy.required_source_header_at_target(state.clone()).await, None); - - // block#2 is generated - state.best_finalized_source_header_id_at_source = Some(header_id(2)); - state.best_finalized_source_header_id_at_best_target = Some(header_id(2)); - state.best_target_header_id = Some(header_id(2)); - state.best_finalized_target_header_id = Some(header_id(2)); - - // now let's generate two more nonces [24; 25] at the source; - strategy.source_nonces_updated(header_id(2), source_nonces(24..=25, 19, 0)); - // - // we don't need to relay more headers to target, because messages [20; 23] have - // not confirmed to source yet - assert_eq!(strategy.select_nonces_to_deliver(state.clone()).await, None); - assert_eq!(strategy.required_source_header_at_target(state.clone()).await, None); - - // let's relay source block#3 - state.best_finalized_source_header_id_at_source = Some(header_id(3)); - state.best_finalized_source_header_id_at_best_target = Some(header_id(3)); - state.best_target_header_id = Some(header_id(3)); - state.best_finalized_target_header_id = Some(header_id(3)); - - // and ask strategy again => still nothing to deliver, because parallel confirmations - // race need to be pushed further - assert_eq!(strategy.select_nonces_to_deliver(state.clone()).await, None); - assert_eq!(strategy.required_source_header_at_target(state.clone()).await, None); - - // let's relay source block#3 - state.best_finalized_source_header_id_at_source = Some(header_id(4)); - 
state.best_finalized_source_header_id_at_best_target = Some(header_id(4)); - state.best_target_header_id = Some(header_id(4)); - state.best_finalized_target_header_id = Some(header_id(4)); - - // let's confirm messages [20; 23] - strategy.source_nonces_updated(header_id(4), source_nonces(24..=25, 23, 0)); - - // and ask strategy again => now we have everything required to deliver remaining - // [24; 25] nonces and proof of [20; 23] confirmation - assert_eq!( - strategy.select_nonces_to_deliver(state.clone()).await, - Some(((24..=25), proof_parameters(true, 2))), - ); - assert_eq!(strategy.required_source_header_at_target(state).await, None); - } - - #[async_std::test] - async fn relayer_uses_flattened_view_of_the_source_queue_to_select_nonces() { - // Real scenario that has happened on test deployments: - // 1) relayer witnessed M1 at block 1 => it has separate entry in the `source_queue` - // 2) relayer witnessed M2 at block 2 => it has separate entry in the `source_queue` - // 3) if block 2 is known to the target node, then both M1 and M2 are selected for single - // delivery, even though weight(M1+M2) > larger than largest allowed weight - // - // This was happening because selector (`select_nonces_for_delivery_transaction`) has been - // called for every `source_queue` entry separately without preserving any context. 
- let (mut state, mut strategy) = prepare_strategy(); - let nonces = source_nonces(24..=25, 19, 0); - strategy.strategy.source_nonces_updated(header_id(2), nonces); - strategy.max_unrewarded_relayer_entries_at_target = 100; - strategy.max_unconfirmed_nonces_at_target = 100; - strategy.max_messages_in_single_batch = 5; - strategy.max_messages_weight_in_single_batch = Weight::from_parts(100, 0); - strategy.max_messages_size_in_single_batch = 100; - state.best_finalized_source_header_id_at_best_target = Some(header_id(2)); - - assert_eq!( - strategy.select_nonces_to_deliver(state).await, - Some(((20..=24), proof_parameters(false, 5))) - ); - } - - #[async_std::test] - #[allow(clippy::reversed_empty_ranges)] - async fn no_source_headers_required_at_target_if_lanes_are_empty() { - let (state, _) = prepare_strategy(); - let mut strategy = TestStrategy { - max_unrewarded_relayer_entries_at_target: 4, - max_unconfirmed_nonces_at_target: 4, - max_messages_in_single_batch: 4, - max_messages_weight_in_single_batch: Weight::from_parts(4, 0), - max_messages_size_in_single_batch: 4, - latest_confirmed_nonces_at_source: VecDeque::new(), - lane_source_client: TestSourceClient::default(), - lane_target_client: TestTargetClient::default(), - metrics_msg: None, - target_nonces: None, - strategy: BasicStrategy::new(), - }; - - let source_header_id = header_id(10); - strategy.source_nonces_updated( - source_header_id, - // MessageDeliveryRaceSource::nonces returns Some(0), because that's how it is - // represented in memory (there's no Options in OutboundLaneState) - source_nonces(1u64..=0u64, 0, 0), - ); - - // even though `latest_confirmed_nonces_at_source` is not empty, new headers are not - // requested - assert_eq!( - strategy.latest_confirmed_nonces_at_source, - VecDeque::from([(source_header_id, 0)]) - ); - assert_eq!(strategy.required_source_header_at_target(state).await, None); - } - - #[async_std::test] - async fn 
previous_nonces_are_selected_if_reorg_happens_at_target_chain() { - // this is the copy of the similar test in the `mesage_race_strategy.rs`, but it also tests - // that the `MessageDeliveryStrategy` acts properly in the similar scenario - - // tune parameters to allow 5 nonces per delivery transaction - let (mut state, mut strategy) = prepare_strategy(); - strategy.max_unrewarded_relayer_entries_at_target = 5; - strategy.max_unconfirmed_nonces_at_target = 5; - strategy.max_messages_in_single_batch = 5; - strategy.max_messages_weight_in_single_batch = Weight::from_parts(5, 0); - strategy.max_messages_size_in_single_batch = 5; - - // in this state we have 4 available nonces for delivery - assert_eq!( - strategy.select_nonces_to_deliver(state.clone()).await, - Some(( - 20..=23, - MessageProofParameters { - outbound_state_proof_required: false, - dispatch_weight: Weight::from_parts(4, 0), - } - )), - ); - - // let's say we have submitted 20..=23 - state.nonces_submitted = Some(20..=23); - - // then new nonce 24 appear at the source block 2 - let new_nonce_24 = vec![( - 24, - MessageDetails { dispatch_weight: Weight::from_parts(1, 0), size: 0, reward: 0 }, - )] - .into_iter() - .collect(); - let source_header_2 = header_id(2); - state.best_finalized_source_header_id_at_source = Some(source_header_2); - strategy.source_nonces_updated( - source_header_2, - SourceClientNonces { new_nonces: new_nonce_24, confirmed_nonce: None }, - ); - // and nonce 23 appear at the best block of the target node (best finalized still has 0 - // nonces) - let target_nonces_data = DeliveryRaceTargetNoncesData { - confirmed_nonce: 19, - unrewarded_relayers: UnrewardedRelayersState::default(), - }; - let target_header_2 = header_id(2); - state.best_target_header_id = Some(target_header_2); - strategy.best_target_nonces_updated( - TargetClientNonces { latest_nonce: 23, nonces_data: target_nonces_data.clone() }, - &mut state, - ); - - // then best target header is retracted - 
strategy.best_target_nonces_updated( - TargetClientNonces { latest_nonce: 19, nonces_data: target_nonces_data.clone() }, - &mut state, - ); - - // ... and some fork with 19 delivered nonces is finalized - let target_header_2_fork = header_id(2_1); - state.best_finalized_source_header_id_at_source = Some(source_header_2); - state.best_finalized_source_header_id_at_best_target = Some(source_header_2); - state.best_target_header_id = Some(target_header_2_fork); - state.best_finalized_target_header_id = Some(target_header_2_fork); - strategy.finalized_target_nonces_updated( - TargetClientNonces { latest_nonce: 19, nonces_data: target_nonces_data.clone() }, - &mut state, - ); - - // now we have to select nonces 20..=23 for delivery again - assert_eq!( - strategy.select_nonces_to_deliver(state.clone()).await, - Some(( - 20..=24, - MessageProofParameters { - outbound_state_proof_required: false, - dispatch_weight: Weight::from_parts(5, 0), - } - )), - ); - } - - #[async_std::test] - #[allow(clippy::reversed_empty_ranges)] - async fn delivery_race_is_able_to_unblock_lane() { - // step 1: messages 20..=23 are delivered from source to target at target block 2 - fn at_target_block_2_deliver_messages( - strategy: &mut TestStrategy, - state: &mut TestRaceState, - occupied_relayer_slots: MessageNonce, - occupied_message_slots: MessageNonce, - ) { - let nonces_at_target = TargetClientNonces { - latest_nonce: 23, - nonces_data: DeliveryRaceTargetNoncesData { - confirmed_nonce: 19, - unrewarded_relayers: UnrewardedRelayersState { - unrewarded_relayer_entries: occupied_relayer_slots, - total_messages: occupied_message_slots, - ..Default::default() - }, - }, - }; - - state.best_target_header_id = Some(header_id(2)); - state.best_finalized_target_header_id = Some(header_id(2)); - - strategy.best_target_nonces_updated(nonces_at_target.clone(), state); - strategy.finalized_target_nonces_updated(nonces_at_target, state); - } - - // step 2: delivery of messages 20..=23 is confirmed to the 
source node at source block 2 - fn at_source_block_2_deliver_confirmations( - strategy: &mut TestStrategy, - state: &mut TestRaceState, - ) { - state.best_finalized_source_header_id_at_source = Some(header_id(2)); - - strategy.source_nonces_updated( - header_id(2), - SourceClientNonces { new_nonces: Default::default(), confirmed_nonce: Some(23) }, - ); - } - - // step 3: finalize source block 2 at target block 3 and select nonces to deliver - async fn at_target_block_3_select_nonces_to_deliver( - strategy: &TestStrategy, - mut state: TestRaceState, - ) -> Option<(RangeInclusive, MessageProofParameters)> { - state.best_finalized_source_header_id_at_best_target = Some(header_id(2)); - state.best_target_header_id = Some(header_id(3)); - state.best_finalized_target_header_id = Some(header_id(3)); - - strategy.select_nonces_to_deliver(state).await - } - - let max_unrewarded_relayer_entries_at_target = 4; - let max_unconfirmed_nonces_at_target = 4; - let expected_rewards_proof = Some(( - 1..=0, - MessageProofParameters { - outbound_state_proof_required: true, - dispatch_weight: Weight::zero(), - }, - )); - - // when lane is NOT blocked - let (mut state, mut strategy) = prepare_strategy(); - at_target_block_2_deliver_messages( - &mut strategy, - &mut state, - max_unrewarded_relayer_entries_at_target - 1, - max_unconfirmed_nonces_at_target - 1, - ); - at_source_block_2_deliver_confirmations(&mut strategy, &mut state); - assert_eq!(strategy.required_source_header_at_target(state.clone()).await, None); - assert_eq!(at_target_block_3_select_nonces_to_deliver(&strategy, state).await, None); - - // when lane is blocked by no-relayer-slots in unrewarded relayers vector - let (mut state, mut strategy) = prepare_strategy(); - at_target_block_2_deliver_messages( - &mut strategy, - &mut state, - max_unrewarded_relayer_entries_at_target, - max_unconfirmed_nonces_at_target - 1, - ); - at_source_block_2_deliver_confirmations(&mut strategy, &mut state); - assert_eq!( - 
strategy.required_source_header_at_target(state.clone()).await, - Some(header_id(2)) - ); - assert_eq!( - at_target_block_3_select_nonces_to_deliver(&strategy, state).await, - expected_rewards_proof - ); - - // when lane is blocked by no-message-slots in unrewarded relayers vector - let (mut state, mut strategy) = prepare_strategy(); - at_target_block_2_deliver_messages( - &mut strategy, - &mut state, - max_unrewarded_relayer_entries_at_target - 1, - max_unconfirmed_nonces_at_target, - ); - at_source_block_2_deliver_confirmations(&mut strategy, &mut state); - assert_eq!( - strategy.required_source_header_at_target(state.clone()).await, - Some(header_id(2)) - ); - assert_eq!( - at_target_block_3_select_nonces_to_deliver(&strategy, state).await, - expected_rewards_proof - ); - - // when lane is blocked by no-message-slots and no-message-slots in unrewarded relayers - // vector - let (mut state, mut strategy) = prepare_strategy(); - at_target_block_2_deliver_messages( - &mut strategy, - &mut state, - max_unrewarded_relayer_entries_at_target - 1, - max_unconfirmed_nonces_at_target, - ); - at_source_block_2_deliver_confirmations(&mut strategy, &mut state); - assert_eq!( - strategy.required_source_header_at_target(state.clone()).await, - Some(header_id(2)) - ); - assert_eq!( - at_target_block_3_select_nonces_to_deliver(&strategy, state).await, - expected_rewards_proof - ); - - // when we have already selected some nonces to deliver, we don't need to select anything - let (mut state, mut strategy) = prepare_strategy(); - at_target_block_2_deliver_messages( - &mut strategy, - &mut state, - max_unrewarded_relayer_entries_at_target - 1, - max_unconfirmed_nonces_at_target, - ); - at_source_block_2_deliver_confirmations(&mut strategy, &mut state); - state.nonces_to_submit = Some((header_id(2), 1..=0, (1..=0, None))); - assert_eq!(strategy.required_source_header_at_target(state.clone()).await, None); - assert_eq!(at_target_block_3_select_nonces_to_deliver(&strategy, 
state).await, None); - - // when we have already submitted some nonces, we don't need to select anything - let (mut state, mut strategy) = prepare_strategy(); - at_target_block_2_deliver_messages( - &mut strategy, - &mut state, - max_unrewarded_relayer_entries_at_target - 1, - max_unconfirmed_nonces_at_target, - ); - at_source_block_2_deliver_confirmations(&mut strategy, &mut state); - state.nonces_submitted = Some(1..=0); - assert_eq!(strategy.required_source_header_at_target(state.clone()).await, None); - assert_eq!(at_target_block_3_select_nonces_to_deliver(&strategy, state).await, None); - } - - #[async_std::test] - async fn outbound_state_proof_is_not_required_when_we_have_no_new_confirmations() { - let (mut state, mut strategy) = prepare_strategy(); - - // pretend that we haven't seen any confirmations yet (or they're at the future target chain - // blocks) - strategy.latest_confirmed_nonces_at_source.clear(); - - // emulate delivery of some nonces (20..=23 are generated, but we only deliver 20..=21) - let nonces_at_target = TargetClientNonces { - latest_nonce: 21, - nonces_data: DeliveryRaceTargetNoncesData { - confirmed_nonce: 19, - unrewarded_relayers: UnrewardedRelayersState { - unrewarded_relayer_entries: 1, - total_messages: 2, - ..Default::default() - }, - }, - }; - state.best_target_header_id = Some(header_id(2)); - state.best_finalized_target_header_id = Some(header_id(2)); - strategy.best_target_nonces_updated(nonces_at_target.clone(), &mut state); - strategy.finalized_target_nonces_updated(nonces_at_target, &mut state); - - // we won't include outbound lane state proof into 22..=23 delivery transaction - // because it brings no new reward confirmations - assert_eq!( - strategy.select_nonces_to_deliver(state).await, - Some(((22..=23), proof_parameters(false, 2))) - ); - } -} diff --git a/relays/messages/src/message_race_limits.rs b/relays/messages/src/message_race_limits.rs deleted file mode 100644 index 873bb6aad..000000000 --- 
a/relays/messages/src/message_race_limits.rs +++ /dev/null @@ -1,206 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! enforcement strategy - -use num_traits::Zero; -use std::ops::RangeInclusive; - -use bp_messages::{MessageNonce, Weight}; - -use crate::{ - message_lane::MessageLane, - message_lane_loop::{ - MessageDetails, MessageDetailsMap, SourceClient as MessageLaneSourceClient, - TargetClient as MessageLaneTargetClient, - }, - message_race_loop::NoncesRange, - message_race_strategy::SourceRangesQueue, - metrics::MessageLaneLoopMetrics, -}; - -/// Reference data for participating in relay -pub struct RelayReference< - P: MessageLane, - SourceClient: MessageLaneSourceClient

, - TargetClient: MessageLaneTargetClient

, -> { - /// The client that is connected to the message lane source node. - pub lane_source_client: SourceClient, - /// The client that is connected to the message lane target node. - pub lane_target_client: TargetClient, - /// Metrics reference. - pub metrics: Option, - /// Messages size summary - pub selected_size: u32, - - /// Hard check begin nonce - pub hard_selected_begin_nonce: MessageNonce, - - /// Index by all ready nonces - pub index: usize, - /// Current nonce - pub nonce: MessageNonce, - /// Current nonce details - pub details: MessageDetails, -} - -/// Relay reference data -pub struct RelayMessagesBatchReference< - P: MessageLane, - SourceClient: MessageLaneSourceClient

, - TargetClient: MessageLaneTargetClient

, -> { - /// Maximal number of relayed messages in single delivery transaction. - pub max_messages_in_this_batch: MessageNonce, - /// Maximal cumulative dispatch weight of relayed messages in single delivery transaction. - pub max_messages_weight_in_single_batch: Weight, - /// Maximal cumulative size of relayed messages in single delivery transaction. - pub max_messages_size_in_single_batch: u32, - /// The client that is connected to the message lane source node. - pub lane_source_client: SourceClient, - /// The client that is connected to the message lane target node. - pub lane_target_client: TargetClient, - /// Metrics reference. - pub metrics: Option, - /// Best available nonce at the **best** target block. We do not want to deliver nonces - /// less than this nonce, even though the block may be retracted. - pub best_target_nonce: MessageNonce, - /// Source queue. - pub nonces_queue: SourceRangesQueue< - P::SourceHeaderHash, - P::SourceHeaderNumber, - MessageDetailsMap, - >, - /// Range of indices within the `nonces_queue` that are available for selection. - pub nonces_queue_range: RangeInclusive, -} - -/// Limits of the message race transactions. -#[derive(Clone)] -pub struct MessageRaceLimits; - -impl MessageRaceLimits { - pub async fn decide< - P: MessageLane, - SourceClient: MessageLaneSourceClient

, - TargetClient: MessageLaneTargetClient

, - >( - reference: RelayMessagesBatchReference, - ) -> Option> { - let mut hard_selected_count = 0; - - let mut selected_weight = Weight::zero(); - let mut selected_count: MessageNonce = 0; - - let hard_selected_begin_nonce = std::cmp::max( - reference.best_target_nonce + 1, - reference.nonces_queue[*reference.nonces_queue_range.start()].1.begin(), - ); - - // relay reference - let mut relay_reference = RelayReference { - lane_source_client: reference.lane_source_client.clone(), - lane_target_client: reference.lane_target_client.clone(), - metrics: reference.metrics.clone(), - - selected_size: 0, - - hard_selected_begin_nonce, - - index: 0, - nonce: 0, - details: MessageDetails { - dispatch_weight: Weight::zero(), - size: 0, - reward: P::SourceChainBalance::zero(), - }, - }; - - let all_ready_nonces = reference - .nonces_queue - .range(reference.nonces_queue_range.clone()) - .flat_map(|(_, ready_nonces)| ready_nonces.iter()) - .filter(|(nonce, _)| **nonce >= hard_selected_begin_nonce) - .enumerate(); - for (index, (nonce, details)) in all_ready_nonces { - relay_reference.index = index; - relay_reference.nonce = *nonce; - relay_reference.details = *details; - - // Since we (hopefully) have some reserves in `max_messages_weight_in_single_batch` - // and `max_messages_size_in_single_batch`, we may still try to submit transaction - // with single message if message overflows these limits. The worst case would be if - // transaction will be rejected by the target runtime, but at least we have tried. 
- - // limit messages in the batch by weight - let new_selected_weight = match selected_weight.checked_add(&details.dispatch_weight) { - Some(new_selected_weight) - if new_selected_weight - .all_lte(reference.max_messages_weight_in_single_batch) => - new_selected_weight, - new_selected_weight if selected_count == 0 => { - log::warn!( - target: "bridge", - "Going to submit message delivery transaction with declared dispatch \ - weight {:?} that overflows maximal configured weight {}", - new_selected_weight, - reference.max_messages_weight_in_single_batch, - ); - new_selected_weight.unwrap_or(Weight::MAX) - }, - _ => break, - }; - - // limit messages in the batch by size - let new_selected_size = match relay_reference.selected_size.checked_add(details.size) { - Some(new_selected_size) - if new_selected_size <= reference.max_messages_size_in_single_batch => - new_selected_size, - new_selected_size if selected_count == 0 => { - log::warn!( - target: "bridge", - "Going to submit message delivery transaction with message \ - size {:?} that overflows maximal configured size {}", - new_selected_size, - reference.max_messages_size_in_single_batch, - ); - new_selected_size.unwrap_or(u32::MAX) - }, - _ => break, - }; - - // limit number of messages in the batch - let new_selected_count = selected_count + 1; - if new_selected_count > reference.max_messages_in_this_batch { - break - } - relay_reference.selected_size = new_selected_size; - - hard_selected_count = index + 1; - selected_weight = new_selected_weight; - selected_count = new_selected_count; - } - - if hard_selected_count != 0 { - let selected_max_nonce = - hard_selected_begin_nonce + hard_selected_count as MessageNonce - 1; - Some(hard_selected_begin_nonce..=selected_max_nonce) - } else { - None - } - } -} diff --git a/relays/messages/src/message_race_loop.rs b/relays/messages/src/message_race_loop.rs deleted file mode 100644 index 31341a9a0..000000000 --- a/relays/messages/src/message_race_loop.rs +++ /dev/null @@ 
-1,835 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -//! Loop that is serving single race within message lane. This could be -//! message delivery race, receiving confirmations race or processing -//! confirmations race. -//! -//! The idea of the race is simple - we have `nonce`-s on source and target -//! nodes. We're trying to prove that the source node has this nonce (and -//! associated data - like messages, lane state, etc) to the target node by -//! generating and submitting proof. - -use crate::message_lane_loop::{BatchTransaction, ClientState, NoncesSubmitArtifacts}; - -use async_trait::async_trait; -use bp_messages::MessageNonce; -use futures::{ - future::{FutureExt, TryFutureExt}, - stream::{FusedStream, StreamExt}, -}; -use relay_utils::{ - process_future_result, retry_backoff, FailedClient, MaybeConnectionError, - TrackedTransactionStatus, TransactionTracker, -}; -use std::{ - fmt::Debug, - ops::RangeInclusive, - time::{Duration, Instant}, -}; - -/// One of races within lane. -pub trait MessageRace { - /// Header id of the race source. - type SourceHeaderId: Debug + Clone + PartialEq + Send + Sync; - /// Header id of the race source. - type TargetHeaderId: Debug + Clone + PartialEq + Send + Sync; - - /// Message nonce used in the race. - type MessageNonce: Debug + Clone; - /// Proof that is generated and delivered in this race. 
- type Proof: Debug + Clone + Send + Sync; - - /// Name of the race source. - fn source_name() -> String; - /// Name of the race target. - fn target_name() -> String; -} - -/// State of race source client. -type SourceClientState

= - ClientState<

::SourceHeaderId,

::TargetHeaderId>; - -/// State of race target client. -type TargetClientState

= - ClientState<

::TargetHeaderId,

::SourceHeaderId>; - -/// Inclusive nonces range. -pub trait NoncesRange: Debug + Sized { - /// Get begin of the range. - fn begin(&self) -> MessageNonce; - /// Get end of the range. - fn end(&self) -> MessageNonce; - /// Returns new range with current range nonces that are greater than the passed `nonce`. - /// If there are no such nonces, `None` is returned. - fn greater_than(self, nonce: MessageNonce) -> Option; -} - -/// Nonces on the race source client. -#[derive(Debug, Clone)] -pub struct SourceClientNonces { - /// New nonces range known to the client. `New` here means all nonces generated after - /// `prev_latest_nonce` passed to the `SourceClient::nonces` method. - pub new_nonces: NoncesRange, - /// The latest nonce that is confirmed to the bridged client. This nonce only makes - /// sense in some races. In other races it is `None`. - pub confirmed_nonce: Option, -} - -/// Nonces on the race target client. -#[derive(Debug, Clone)] -pub struct TargetClientNonces { - /// The latest nonce that is known to the target client. - pub latest_nonce: MessageNonce, - /// Additional data from target node that may be used by the race. - pub nonces_data: TargetNoncesData, -} - -/// One of message lane clients, which is source client for the race. -#[async_trait] -pub trait SourceClient { - /// Type of error these clients returns. - type Error: std::fmt::Debug + MaybeConnectionError; - /// Type of nonces range returned by the source client. - type NoncesRange: NoncesRange; - /// Additional proof parameters required to generate proof. - type ProofParameters; - - /// Return nonces that are known to the source client. - async fn nonces( - &self, - at_block: P::SourceHeaderId, - prev_latest_nonce: MessageNonce, - ) -> Result<(P::SourceHeaderId, SourceClientNonces), Self::Error>; - /// Generate proof for delivering to the target client. 
- async fn generate_proof( - &self, - at_block: P::SourceHeaderId, - nonces: RangeInclusive, - proof_parameters: Self::ProofParameters, - ) -> Result<(P::SourceHeaderId, RangeInclusive, P::Proof), Self::Error>; -} - -/// One of message lane clients, which is target client for the race. -#[async_trait] -pub trait TargetClient { - /// Type of error these clients returns. - type Error: std::fmt::Debug + MaybeConnectionError; - /// Type of the additional data from the target client, used by the race. - type TargetNoncesData: std::fmt::Debug; - /// Type of batch transaction that submits finality and proof to the target node. - type BatchTransaction: BatchTransaction + Clone; - /// Transaction tracker to track submitted transactions. - type TransactionTracker: TransactionTracker; - - /// Ask headers relay to relay finalized headers up to (and including) given header - /// from race source to race target. - /// - /// The client may return `Some(_)`, which means that nothing has happened yet and - /// the caller must generate and append proof to the batch transaction - /// to actually send it (along with required header) to the node. - /// - /// If function has returned `None`, it means that the caller now must wait for the - /// appearance of the required header `id` at the target client. - async fn require_source_header( - &self, - id: P::SourceHeaderId, - ) -> Result, Self::Error>; - - /// Return nonces that are known to the target client. - async fn nonces( - &self, - at_block: P::TargetHeaderId, - update_metrics: bool, - ) -> Result<(P::TargetHeaderId, TargetClientNonces), Self::Error>; - /// Submit proof to the target client. - async fn submit_proof( - &self, - maybe_batch_tx: Option, - generated_at_block: P::SourceHeaderId, - nonces: RangeInclusive, - proof: P::Proof, - ) -> Result, Self::Error>; -} - -/// Race strategy. -#[async_trait] -pub trait RaceStrategy: Debug { - /// Type of nonces range expected from the source client. 
- type SourceNoncesRange: NoncesRange; - /// Additional proof parameters required to generate proof. - type ProofParameters; - /// Additional data expected from the target client. - type TargetNoncesData; - - /// Should return true if nothing has to be synced. - fn is_empty(&self) -> bool; - /// Return id of source header that is required to be on target to continue synchronization. - async fn required_source_header_at_target>( - &self, - race_state: RS, - ) -> Option; - /// Return the best nonce at source node. - /// - /// `Some` is returned only if we are sure that the value is greater or equal - /// than the result of `best_at_target`. - fn best_at_source(&self) -> Option; - /// Return the best nonce at target node. - /// - /// May return `None` if value is yet unknown. - fn best_at_target(&self) -> Option; - - /// Called when nonces are updated at source node of the race. - fn source_nonces_updated( - &mut self, - at_block: SourceHeaderId, - nonces: SourceClientNonces, - ); - /// Called when we want to wait until next `best_target_nonces_updated` before selecting - /// any nonces for delivery. - fn reset_best_target_nonces(&mut self); - /// Called when best nonces are updated at target node of the race. - fn best_target_nonces_updated>( - &mut self, - nonces: TargetClientNonces, - race_state: &mut RS, - ); - /// Called when finalized nonces are updated at target node of the race. - fn finalized_target_nonces_updated>( - &mut self, - nonces: TargetClientNonces, - race_state: &mut RS, - ); - /// Should return `Some(nonces)` if we need to deliver proof of `nonces` (and associated - /// data) from source to target node. - /// Additionally, parameters required to generate proof are returned. - async fn select_nonces_to_deliver>( - &self, - race_state: RS, - ) -> Option<(RangeInclusive, Self::ProofParameters)>; -} - -/// State of the race. 
-pub trait RaceState: Clone + Send + Sync { - /// Set best finalized source header id at the best block on the target - /// client (at the `best_finalized_source_header_id_at_best_target`). - fn set_best_finalized_source_header_id_at_best_target(&mut self, id: SourceHeaderId); - - /// Best finalized source header id at the source client. - fn best_finalized_source_header_id_at_source(&self) -> Option; - /// Best finalized source header id at the best block on the target - /// client (at the `best_finalized_source_header_id_at_best_target`). - fn best_finalized_source_header_id_at_best_target(&self) -> Option; - /// The best header id at the target client. - fn best_target_header_id(&self) -> Option; - /// Best finalized header id at the target client. - fn best_finalized_target_header_id(&self) -> Option; - - /// Returns `true` if we have selected nonces to submit to the target node. - fn nonces_to_submit(&self) -> Option>; - /// Reset our nonces selection. - fn reset_nonces_to_submit(&mut self); - - /// Returns `true` if we have submitted some nonces to the target node and are - /// waiting for them to appear there. - fn nonces_submitted(&self) -> Option>; - /// Reset our nonces submission. - fn reset_nonces_submitted(&mut self); -} - -/// State of the race and prepared batch transaction (if available). -#[derive(Debug, Clone)] -pub(crate) struct RaceStateImpl { - /// Best finalized source header id at the source client. - pub best_finalized_source_header_id_at_source: Option, - /// Best finalized source header id at the best block on the target - /// client (at the `best_finalized_source_header_id_at_best_target`). - pub best_finalized_source_header_id_at_best_target: Option, - /// The best header id at the target client. - pub best_target_header_id: Option, - /// Best finalized header id at the target client. - pub best_finalized_target_header_id: Option, - /// Range of nonces that we have selected to submit. 
- pub nonces_to_submit: Option<(SourceHeaderId, RangeInclusive, Proof)>, - /// Batch transaction ready to include and deliver selected `nonces_to_submit` from the - /// `state`. - pub nonces_to_submit_batch: Option, - /// Range of nonces that is currently submitted. - pub nonces_submitted: Option>, -} - -impl Default - for RaceStateImpl -{ - fn default() -> Self { - RaceStateImpl { - best_finalized_source_header_id_at_source: None, - best_finalized_source_header_id_at_best_target: None, - best_target_header_id: None, - best_finalized_target_header_id: None, - nonces_to_submit: None, - nonces_to_submit_batch: None, - nonces_submitted: None, - } - } -} - -impl RaceState - for RaceStateImpl -where - SourceHeaderId: Clone + Send + Sync, - TargetHeaderId: Clone + Send + Sync, - Proof: Clone + Send + Sync, - BatchTx: Clone + Send + Sync, -{ - fn set_best_finalized_source_header_id_at_best_target(&mut self, id: SourceHeaderId) { - self.best_finalized_source_header_id_at_best_target = Some(id); - } - - fn best_finalized_source_header_id_at_source(&self) -> Option { - self.best_finalized_source_header_id_at_source.clone() - } - - fn best_finalized_source_header_id_at_best_target(&self) -> Option { - self.best_finalized_source_header_id_at_best_target.clone() - } - - fn best_target_header_id(&self) -> Option { - self.best_target_header_id.clone() - } - - fn best_finalized_target_header_id(&self) -> Option { - self.best_finalized_target_header_id.clone() - } - - fn nonces_to_submit(&self) -> Option> { - self.nonces_to_submit.clone().map(|(_, nonces, _)| nonces) - } - - fn reset_nonces_to_submit(&mut self) { - self.nonces_to_submit = None; - self.nonces_to_submit_batch = None; - } - - fn nonces_submitted(&self) -> Option> { - self.nonces_submitted.clone() - } - - fn reset_nonces_submitted(&mut self) { - self.nonces_submitted = None; - } -} - -/// Run race loop until connection with target or source node is lost. -pub async fn run, TC: TargetClient

>( - race_source: SC, - race_source_updated: impl FusedStream>, - race_target: TC, - race_target_updated: impl FusedStream>, - mut strategy: impl RaceStrategy< - P::SourceHeaderId, - P::TargetHeaderId, - P::Proof, - SourceNoncesRange = SC::NoncesRange, - ProofParameters = SC::ProofParameters, - TargetNoncesData = TC::TargetNoncesData, - >, -) -> Result<(), FailedClient> { - let mut progress_context = Instant::now(); - let mut race_state = RaceStateImpl::default(); - - let mut source_retry_backoff = retry_backoff(); - let mut source_client_is_online = true; - let mut source_nonces_required = false; - let mut source_required_header = None; - let source_nonces = futures::future::Fuse::terminated(); - let source_generate_proof = futures::future::Fuse::terminated(); - let source_go_offline_future = futures::future::Fuse::terminated(); - - let mut target_retry_backoff = retry_backoff(); - let mut target_client_is_online = true; - let mut target_best_nonces_required = false; - let mut target_finalized_nonces_required = false; - let mut target_batch_transaction = None; - let target_require_source_header = futures::future::Fuse::terminated(); - let target_best_nonces = futures::future::Fuse::terminated(); - let target_finalized_nonces = futures::future::Fuse::terminated(); - let target_submit_proof = futures::future::Fuse::terminated(); - let target_tx_tracker = futures::future::Fuse::terminated(); - let target_go_offline_future = futures::future::Fuse::terminated(); - - futures::pin_mut!( - race_source_updated, - source_nonces, - source_generate_proof, - source_go_offline_future, - race_target_updated, - target_require_source_header, - target_best_nonces, - target_finalized_nonces, - target_submit_proof, - target_tx_tracker, - target_go_offline_future, - ); - - loop { - futures::select! 
{ - // when headers ids are updated - source_state = race_source_updated.next() => { - if let Some(source_state) = source_state { - let is_source_state_updated = race_state.best_finalized_source_header_id_at_source.as_ref() - != Some(&source_state.best_finalized_self); - if is_source_state_updated { - source_nonces_required = true; - race_state.best_finalized_source_header_id_at_source - = Some(source_state.best_finalized_self); - } - } - }, - target_state = race_target_updated.next() => { - if let Some(target_state) = target_state { - let is_target_best_state_updated = race_state.best_target_header_id.as_ref() - != Some(&target_state.best_self); - - if is_target_best_state_updated { - target_best_nonces_required = true; - race_state.best_target_header_id = Some(target_state.best_self); - race_state.best_finalized_source_header_id_at_best_target - = target_state.best_finalized_peer_at_best_self; - } - - let is_target_finalized_state_updated = race_state.best_finalized_target_header_id.as_ref() - != Some(&target_state.best_finalized_self); - if is_target_finalized_state_updated { - target_finalized_nonces_required = true; - race_state.best_finalized_target_header_id = Some(target_state.best_finalized_self); - } - } - }, - - // when nonces are updated - nonces = source_nonces => { - source_nonces_required = false; - - source_client_is_online = process_future_result( - nonces, - &mut source_retry_backoff, - |(at_block, nonces)| { - log::debug!( - target: "bridge", - "Received nonces from {}: {:?}", - P::source_name(), - nonces, - ); - - strategy.source_nonces_updated(at_block, nonces); - }, - &mut source_go_offline_future, - async_std::task::sleep, - || format!("Error retrieving nonces from {}", P::source_name()), - ).fail_if_connection_error(FailedClient::Source)?; - - // ask for more headers if we have nonces to deliver and required headers are missing - source_required_header = strategy - .required_source_header_at_target(race_state.clone()) - .await; - }, - nonces 
= target_best_nonces => { - target_best_nonces_required = false; - - target_client_is_online = process_future_result( - nonces, - &mut target_retry_backoff, - |(_, nonces)| { - log::debug!( - target: "bridge", - "Received best nonces from {}: {:?}", - P::target_name(), - nonces, - ); - - strategy.best_target_nonces_updated(nonces, &mut race_state); - }, - &mut target_go_offline_future, - async_std::task::sleep, - || format!("Error retrieving best nonces from {}", P::target_name()), - ).fail_if_connection_error(FailedClient::Target)?; - }, - nonces = target_finalized_nonces => { - target_finalized_nonces_required = false; - - target_client_is_online = process_future_result( - nonces, - &mut target_retry_backoff, - |(_, nonces)| { - log::debug!( - target: "bridge", - "Received finalized nonces from {}: {:?}", - P::target_name(), - nonces, - ); - - strategy.finalized_target_nonces_updated(nonces, &mut race_state); - }, - &mut target_go_offline_future, - async_std::task::sleep, - || format!("Error retrieving finalized nonces from {}", P::target_name()), - ).fail_if_connection_error(FailedClient::Target)?; - }, - - // proof generation and submission - maybe_batch_transaction = target_require_source_header => { - source_required_header = None; - - target_client_is_online = process_future_result( - maybe_batch_transaction, - &mut target_retry_backoff, - |maybe_batch_transaction: Option| { - log::debug!( - target: "bridge", - "Target {} client has been asked for more {} headers. 
Batch tx: {}", - P::target_name(), - P::source_name(), - maybe_batch_transaction - .as_ref() - .map(|bt| format!("yes ({:?})", bt.required_header_id())) - .unwrap_or_else(|| "no".into()), - ); - - target_batch_transaction = maybe_batch_transaction; - }, - &mut target_go_offline_future, - async_std::task::sleep, - || format!("Error asking for source headers at {}", P::target_name()), - ).fail_if_connection_error(FailedClient::Target)?; - }, - proof = source_generate_proof => { - source_client_is_online = process_future_result( - proof, - &mut source_retry_backoff, - |(at_block, nonces_range, proof, batch_transaction)| { - log::debug!( - target: "bridge", - "Received proof for nonces in range {:?} from {}", - nonces_range, - P::source_name(), - ); - - race_state.nonces_to_submit = Some((at_block, nonces_range, proof)); - race_state.nonces_to_submit_batch = batch_transaction; - }, - &mut source_go_offline_future, - async_std::task::sleep, - || format!("Error generating proof at {}", P::source_name()), - ).fail_if_error(FailedClient::Source).map(|_| true)?; - }, - proof_submit_result = target_submit_proof => { - target_client_is_online = process_future_result( - proof_submit_result, - &mut target_retry_backoff, - |artifacts: NoncesSubmitArtifacts| { - log::debug!( - target: "bridge", - "Successfully submitted proof of nonces {:?} to {}", - artifacts.nonces, - P::target_name(), - ); - - race_state.nonces_submitted = Some(artifacts.nonces); - target_tx_tracker.set(artifacts.tx_tracker.wait().fuse()); - }, - &mut target_go_offline_future, - async_std::task::sleep, - || format!("Error submitting proof {}", P::target_name()), - ).fail_if_connection_error(FailedClient::Target)?; - - // in any case - we don't need to retry submitting the same nonces again until - // we read nonces from the target client - race_state.reset_nonces_to_submit(); - // if we have failed to submit transaction AND that is not the connection issue, - // then we need to read best target nonces before 
selecting nonces again - if !target_client_is_online { - strategy.reset_best_target_nonces(); - } - }, - target_transaction_status = target_tx_tracker => { - match (target_transaction_status, race_state.nonces_submitted.as_ref()) { - (TrackedTransactionStatus::Finalized(at_block), Some(nonces_submitted)) => { - // our transaction has been mined, but was it successful or not? let's check the best - // nonce at the target node. - let _ = race_target.nonces(at_block, false) - .await - .map_err(|e| format!("failed to read nonces from target node: {e:?}")) - .and_then(|(_, nonces_at_target)| { - if nonces_at_target.latest_nonce < *nonces_submitted.end() { - Err(format!( - "best nonce at target after tx is {:?} and we've submitted {:?}", - nonces_at_target.latest_nonce, - nonces_submitted.end(), - )) - } else { - Ok(()) - } - }) - .map_err(|e| { - log::error!( - target: "bridge", - "{} -> {} race transaction failed: {}", - P::source_name(), - P::target_name(), - e, - ); - - race_state.reset_nonces_submitted(); - }); - }, - (TrackedTransactionStatus::Lost, _) => { - log::warn!( - target: "bridge", - "{} -> {} race transaction has been lost. State: {:?}. Strategy: {:?}", - P::source_name(), - P::target_name(), - race_state, - strategy, - ); - - race_state.reset_nonces_submitted(); - }, - _ => (), - } - }, - - // when we're ready to retry request - _ = source_go_offline_future => { - source_client_is_online = true; - }, - _ = target_go_offline_future => { - target_client_is_online = true; - }, - } - - progress_context = print_race_progress::(progress_context, &strategy); - - if source_client_is_online { - source_client_is_online = false; - - // if we've started to submit batch transaction, let's prioritize it - // - // we're using `take` here, because we don't need batch transaction (i.e. some - // underlying finality proof) anymore for our future calls - we were unable to - // use it for our current state, so why would we need to keep an obsolete proof - // for the future? 
- let target_batch_transaction = target_batch_transaction.take(); - let expected_race_state = - if let Some(ref target_batch_transaction) = target_batch_transaction { - // when selecting nonces for the batch transaction, we assume that the required - // source header is already at the target chain - let required_source_header_at_target = - target_batch_transaction.required_header_id(); - let mut expected_race_state = race_state.clone(); - expected_race_state.best_finalized_source_header_id_at_best_target = - Some(required_source_header_at_target); - expected_race_state - } else { - race_state.clone() - }; - - let nonces_to_deliver = select_nonces_to_deliver(expected_race_state, &strategy).await; - let best_at_source = strategy.best_at_source(); - - if let Some((at_block, nonces_range, proof_parameters)) = nonces_to_deliver { - log::debug!( - target: "bridge", - "Asking {} to prove nonces in range {:?} at block {:?}", - P::source_name(), - nonces_range, - at_block, - ); - - source_generate_proof.set( - race_source - .generate_proof(at_block, nonces_range, proof_parameters) - .and_then(|(at_source_block, nonces, proof)| async { - Ok((at_source_block, nonces, proof, target_batch_transaction)) - }) - .fuse(), - ); - } else if let (true, Some(best_at_source)) = (source_nonces_required, best_at_source) { - log::debug!(target: "bridge", "Asking {} about message nonces", P::source_name()); - let at_block = race_state - .best_finalized_source_header_id_at_source - .as_ref() - .expect( - "source_nonces_required is only true when\ - best_finalized_source_header_id_at_source is Some; qed", - ) - .clone(); - source_nonces.set(race_source.nonces(at_block, best_at_source).fuse()); - } else { - source_client_is_online = true; - } - } - - if target_client_is_online { - target_client_is_online = false; - - if let Some((at_block, nonces_range, proof)) = race_state.nonces_to_submit.as_ref() { - log::debug!( - target: "bridge", - "Going to submit proof of messages in range {:?} to {} 
node{}", - nonces_range, - P::target_name(), - race_state.nonces_to_submit_batch.as_ref().map(|tx| format!( - ". This transaction is batched with sending the proof for header {:?}.", - tx.required_header_id()) - ).unwrap_or_default(), - ); - - target_submit_proof.set( - race_target - .submit_proof( - race_state.nonces_to_submit_batch.clone(), - at_block.clone(), - nonces_range.clone(), - proof.clone(), - ) - .fuse(), - ); - } else if let Some(source_required_header) = source_required_header.clone() { - log::debug!( - target: "bridge", - "Going to require {} header {:?} at {}", - P::source_name(), - source_required_header, - P::target_name(), - ); - target_require_source_header - .set(race_target.require_source_header(source_required_header).fuse()); - } else if target_best_nonces_required { - log::debug!(target: "bridge", "Asking {} about best message nonces", P::target_name()); - let at_block = race_state - .best_target_header_id - .as_ref() - .expect("target_best_nonces_required is only true when best_target_header_id is Some; qed") - .clone(); - target_best_nonces.set(race_target.nonces(at_block, false).fuse()); - } else if target_finalized_nonces_required { - log::debug!(target: "bridge", "Asking {} about finalized message nonces", P::target_name()); - let at_block = race_state - .best_finalized_target_header_id - .as_ref() - .expect( - "target_finalized_nonces_required is only true when\ - best_finalized_target_header_id is Some; qed", - ) - .clone(); - target_finalized_nonces.set(race_target.nonces(at_block, true).fuse()); - } else { - target_client_is_online = true; - } - } - } -} - -/// Print race progress. 
-fn print_race_progress(prev_time: Instant, strategy: &S) -> Instant -where - P: MessageRace, - S: RaceStrategy, -{ - let now_time = Instant::now(); - - let need_update = now_time.saturating_duration_since(prev_time) > Duration::from_secs(10); - if !need_update { - return prev_time - } - - let now_best_nonce_at_source = strategy.best_at_source(); - let now_best_nonce_at_target = strategy.best_at_target(); - log::info!( - target: "bridge", - "Synced {:?} of {:?} nonces in {} -> {} race", - now_best_nonce_at_target, - now_best_nonce_at_source, - P::source_name(), - P::target_name(), - ); - now_time -} - -async fn select_nonces_to_deliver( - race_state: impl RaceState, - strategy: &Strategy, -) -> Option<(SourceHeaderId, RangeInclusive, Strategy::ProofParameters)> -where - SourceHeaderId: Clone, - Strategy: RaceStrategy, -{ - let best_finalized_source_header_id_at_best_target = - race_state.best_finalized_source_header_id_at_best_target()?; - strategy - .select_nonces_to_deliver(race_state) - .await - .map(|(nonces_range, proof_parameters)| { - (best_finalized_source_header_id_at_best_target, nonces_range, proof_parameters) - }) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::message_race_strategy::BasicStrategy; - use relay_utils::HeaderId; - - #[async_std::test] - async fn proof_is_generated_at_best_block_known_to_target_node() { - const GENERATED_AT: u64 = 6; - const BEST_AT_SOURCE: u64 = 10; - const BEST_AT_TARGET: u64 = 8; - - // target node only knows about source' BEST_AT_TARGET block - // source node has BEST_AT_SOURCE > BEST_AT_TARGET block - let mut race_state = RaceStateImpl::<_, _, (), ()> { - best_finalized_source_header_id_at_source: Some(HeaderId( - BEST_AT_SOURCE, - BEST_AT_SOURCE, - )), - best_finalized_source_header_id_at_best_target: Some(HeaderId( - BEST_AT_TARGET, - BEST_AT_TARGET, - )), - best_target_header_id: Some(HeaderId(0, 0)), - best_finalized_target_header_id: Some(HeaderId(0, 0)), - nonces_to_submit: None, - 
nonces_to_submit_batch: None, - nonces_submitted: None, - }; - - // we have some nonces to deliver and they're generated at GENERATED_AT < BEST_AT_SOURCE - let mut strategy = BasicStrategy::<_, _, _, _, _, ()>::new(); - strategy.source_nonces_updated( - HeaderId(GENERATED_AT, GENERATED_AT), - SourceClientNonces { new_nonces: 0..=10, confirmed_nonce: None }, - ); - strategy.best_target_nonces_updated( - TargetClientNonces { latest_nonce: 5u64, nonces_data: () }, - &mut race_state, - ); - - // the proof will be generated on source, but using BEST_AT_TARGET block - assert_eq!( - select_nonces_to_deliver(race_state, &strategy).await, - Some((HeaderId(BEST_AT_TARGET, BEST_AT_TARGET), 6..=10, (),)) - ); - } -} diff --git a/relays/messages/src/message_race_receiving.rs b/relays/messages/src/message_race_receiving.rs deleted file mode 100644 index e6497a1b7..000000000 --- a/relays/messages/src/message_race_receiving.rs +++ /dev/null @@ -1,235 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -//! Message receiving race delivers proof-of-messages-delivery from "lane.target" to "lane.source". 
- -use crate::{ - message_lane::{MessageLane, SourceHeaderIdOf, TargetHeaderIdOf}, - message_lane_loop::{ - NoncesSubmitArtifacts, SourceClient as MessageLaneSourceClient, SourceClientState, - TargetClient as MessageLaneTargetClient, TargetClientState, - }, - message_race_loop::{ - MessageRace, NoncesRange, SourceClient, SourceClientNonces, TargetClient, - TargetClientNonces, - }, - message_race_strategy::BasicStrategy, - metrics::MessageLaneLoopMetrics, -}; - -use async_trait::async_trait; -use bp_messages::MessageNonce; -use futures::stream::FusedStream; -use relay_utils::FailedClient; -use std::{marker::PhantomData, ops::RangeInclusive}; - -/// Message receiving confirmations delivery strategy. -type ReceivingConfirmationsBasicStrategy

= BasicStrategy< -

::TargetHeaderNumber, -

::TargetHeaderHash, -

::SourceHeaderNumber, -

::SourceHeaderHash, - RangeInclusive, -

::MessagesReceivingProof, ->; - -/// Run receiving confirmations race. -pub async fn run( - source_client: impl MessageLaneSourceClient

, - source_state_updates: impl FusedStream>, - target_client: impl MessageLaneTargetClient

, - target_state_updates: impl FusedStream>, - metrics_msg: Option, -) -> Result<(), FailedClient> { - crate::message_race_loop::run( - ReceivingConfirmationsRaceSource { - client: target_client, - metrics_msg: metrics_msg.clone(), - _phantom: Default::default(), - }, - target_state_updates, - ReceivingConfirmationsRaceTarget { - client: source_client, - metrics_msg, - _phantom: Default::default(), - }, - source_state_updates, - ReceivingConfirmationsBasicStrategy::

::new(), - ) - .await -} - -/// Messages receiving confirmations race. -struct ReceivingConfirmationsRace

(std::marker::PhantomData

); - -impl MessageRace for ReceivingConfirmationsRace

{ - type SourceHeaderId = TargetHeaderIdOf

; - type TargetHeaderId = SourceHeaderIdOf

; - - type MessageNonce = MessageNonce; - type Proof = P::MessagesReceivingProof; - - fn source_name() -> String { - format!("{}::ReceivingConfirmationsDelivery", P::TARGET_NAME) - } - - fn target_name() -> String { - format!("{}::ReceivingConfirmationsDelivery", P::SOURCE_NAME) - } -} - -/// Message receiving confirmations race source, which is a target of the lane. -struct ReceivingConfirmationsRaceSource { - client: C, - metrics_msg: Option, - _phantom: PhantomData

, -} - -#[async_trait] -impl SourceClient> for ReceivingConfirmationsRaceSource -where - P: MessageLane, - C: MessageLaneTargetClient

, -{ - type Error = C::Error; - type NoncesRange = RangeInclusive; - type ProofParameters = (); - - async fn nonces( - &self, - at_block: TargetHeaderIdOf

, - prev_latest_nonce: MessageNonce, - ) -> Result<(TargetHeaderIdOf

, SourceClientNonces), Self::Error> { - let (at_block, latest_received_nonce) = self.client.latest_received_nonce(at_block).await?; - if let Some(metrics_msg) = self.metrics_msg.as_ref() { - metrics_msg.update_target_latest_received_nonce(latest_received_nonce); - } - Ok(( - at_block, - SourceClientNonces { - new_nonces: prev_latest_nonce + 1..=latest_received_nonce, - confirmed_nonce: None, - }, - )) - } - - #[allow(clippy::unit_arg)] - async fn generate_proof( - &self, - at_block: TargetHeaderIdOf

, - nonces: RangeInclusive, - _proof_parameters: Self::ProofParameters, - ) -> Result< - (TargetHeaderIdOf

, RangeInclusive, P::MessagesReceivingProof), - Self::Error, - > { - self.client - .prove_messages_receiving(at_block) - .await - .map(|(at_block, proof)| (at_block, nonces, proof)) - } -} - -/// Message receiving confirmations race target, which is a source of the lane. -struct ReceivingConfirmationsRaceTarget { - client: C, - metrics_msg: Option, - _phantom: PhantomData

, -} - -#[async_trait] -impl TargetClient> for ReceivingConfirmationsRaceTarget -where - P: MessageLane, - C: MessageLaneSourceClient

, -{ - type Error = C::Error; - type TargetNoncesData = (); - type BatchTransaction = C::BatchTransaction; - type TransactionTracker = C::TransactionTracker; - - async fn require_source_header( - &self, - id: TargetHeaderIdOf

, - ) -> Result, Self::Error> { - self.client.require_target_header_on_source(id).await - } - - async fn nonces( - &self, - at_block: SourceHeaderIdOf

, - update_metrics: bool, - ) -> Result<(SourceHeaderIdOf

, TargetClientNonces<()>), Self::Error> { - let (at_block, latest_confirmed_nonce) = - self.client.latest_confirmed_received_nonce(at_block).await?; - if update_metrics { - if let Some(metrics_msg) = self.metrics_msg.as_ref() { - metrics_msg.update_source_latest_confirmed_nonce(latest_confirmed_nonce); - } - } - Ok((at_block, TargetClientNonces { latest_nonce: latest_confirmed_nonce, nonces_data: () })) - } - - async fn submit_proof( - &self, - maybe_batch_tx: Option, - generated_at_block: TargetHeaderIdOf

, - nonces: RangeInclusive, - proof: P::MessagesReceivingProof, - ) -> Result, Self::Error> { - let tx_tracker = self - .client - .submit_messages_receiving_proof(maybe_batch_tx, generated_at_block, proof) - .await?; - Ok(NoncesSubmitArtifacts { nonces, tx_tracker }) - } -} - -impl NoncesRange for RangeInclusive { - fn begin(&self) -> MessageNonce { - *RangeInclusive::::start(self) - } - - fn end(&self) -> MessageNonce { - *RangeInclusive::::end(self) - } - - fn greater_than(self, nonce: MessageNonce) -> Option { - let next_nonce = nonce + 1; - let end = *self.end(); - if next_nonce > end { - None - } else { - Some(std::cmp::max(self.begin(), next_nonce)..=end) - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn range_inclusive_works_as_nonces_range() { - let range = 20..=30; - - assert_eq!(NoncesRange::begin(&range), 20); - assert_eq!(NoncesRange::end(&range), 30); - assert_eq!(range.clone().greater_than(10), Some(20..=30)); - assert_eq!(range.clone().greater_than(19), Some(20..=30)); - assert_eq!(range.clone().greater_than(20), Some(21..=30)); - assert_eq!(range.clone().greater_than(25), Some(26..=30)); - assert_eq!(range.clone().greater_than(29), Some(30..=30)); - assert_eq!(range.greater_than(30), None); - } -} diff --git a/relays/messages/src/message_race_strategy.rs b/relays/messages/src/message_race_strategy.rs deleted file mode 100644 index 93d178e55..000000000 --- a/relays/messages/src/message_race_strategy.rs +++ /dev/null @@ -1,628 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -//! Basic delivery strategy. The strategy selects nonces if: -//! -//! 1) there are more nonces on the source side than on the target side; -//! 2) new nonces may be proved to target node (i.e. they have appeared at the block, which is known -//! to the target node). - -use crate::message_race_loop::{ - NoncesRange, RaceState, RaceStrategy, SourceClientNonces, TargetClientNonces, -}; - -use async_trait::async_trait; -use bp_messages::MessageNonce; -use relay_utils::HeaderId; -use std::{collections::VecDeque, fmt::Debug, marker::PhantomData, ops::RangeInclusive}; - -/// Queue of nonces known to the source node. -pub type SourceRangesQueue = - VecDeque<(HeaderId, SourceNoncesRange)>; - -/// Nonces delivery strategy. -#[derive(Debug)] -pub struct BasicStrategy< - SourceHeaderNumber, - SourceHeaderHash, - TargetHeaderNumber, - TargetHeaderHash, - SourceNoncesRange, - Proof, -> { - /// All queued nonces. - /// - /// The queue may contain already delivered nonces. We only remove entries from this - /// queue after corresponding nonces are finalized by the target chain. - source_queue: SourceRangesQueue, - /// The best nonce known to target node at its best block. `None` if it has not been received - /// yet. - best_target_nonce: Option, - /// Unused generic types dump. 
- _phantom: PhantomData<(TargetHeaderNumber, TargetHeaderHash, Proof)>, -} - -impl< - SourceHeaderNumber, - SourceHeaderHash, - TargetHeaderNumber, - TargetHeaderHash, - SourceNoncesRange, - Proof, - > - BasicStrategy< - SourceHeaderNumber, - SourceHeaderHash, - TargetHeaderNumber, - TargetHeaderHash, - SourceNoncesRange, - Proof, - > where - SourceHeaderHash: Clone, - SourceHeaderNumber: Clone + Ord, - SourceNoncesRange: NoncesRange, -{ - /// Create new delivery strategy. - pub fn new() -> Self { - BasicStrategy { - source_queue: VecDeque::new(), - best_target_nonce: None, - _phantom: Default::default(), - } - } - - /// Reference to source queue. - pub(crate) fn source_queue( - &self, - ) -> &VecDeque<(HeaderId, SourceNoncesRange)> { - &self.source_queue - } - - /// Mutable reference to source queue to use in tests. - #[cfg(test)] - pub(crate) fn source_queue_mut( - &mut self, - ) -> &mut VecDeque<(HeaderId, SourceNoncesRange)> { - &mut self.source_queue - } - - /// Returns indices of source queue entries, which may be delivered to the target node. - /// - /// The function may skip some nonces from the queue front if nonces from this entry are - /// already available at the **best** target block. After this block is finalized, the entry - /// will be removed from the queue. - /// - /// All entries before and including the range end index, are guaranteed to be witnessed - /// at source blocks that are known to be finalized at the target node. - /// - /// Returns `None` if no entries may be delivered. 
- pub fn available_source_queue_indices< - RS: RaceState< - HeaderId, - HeaderId, - >, - >( - &self, - race_state: RS, - ) -> Option> { - // if we do not know best nonce at target node, we can't select anything - let best_target_nonce = self.best_target_nonce?; - - // if we have already selected nonces that we want to submit, do nothing - if race_state.nonces_to_submit().is_some() { - return None - } - - // if we already submitted some nonces, do nothing - if race_state.nonces_submitted().is_some() { - return None - } - - // find first entry that may be delivered to the target node - let begin_index = self - .source_queue - .iter() - .enumerate() - .skip_while(|(_, (_, nonces))| nonces.end() <= best_target_nonce) - .map(|(index, _)| index) - .next()?; - - // 1) we want to deliver all nonces, starting from `target_nonce + 1` - // 2) we can't deliver new nonce until header, that has emitted this nonce, is finalized - // by target client - // 3) selector is used for more complicated logic - // - // => let's first select range of entries inside deque that are already finalized at - // the target client and pass this range to the selector - let best_header_at_target = race_state.best_finalized_source_header_id_at_best_target()?; - let end_index = self - .source_queue - .iter() - .enumerate() - .skip(begin_index) - .take_while(|(_, (queued_at, _))| queued_at.0 <= best_header_at_target.0) - .map(|(index, _)| index) - .last()?; - - Some(begin_index..=end_index) - } - - /// Remove all nonces that are less than or equal to given nonce from the source queue. 
- fn remove_le_nonces_from_source_queue(&mut self, nonce: MessageNonce) { - while let Some((queued_at, queued_range)) = self.source_queue.pop_front() { - if let Some(range_to_requeue) = queued_range.greater_than(nonce) { - self.source_queue.push_front((queued_at, range_to_requeue)); - break - } - } - } -} - -#[async_trait] -impl< - SourceHeaderNumber, - SourceHeaderHash, - TargetHeaderNumber, - TargetHeaderHash, - SourceNoncesRange, - Proof, - > - RaceStrategy< - HeaderId, - HeaderId, - Proof, - > - for BasicStrategy< - SourceHeaderNumber, - SourceHeaderHash, - TargetHeaderNumber, - TargetHeaderHash, - SourceNoncesRange, - Proof, - > where - SourceHeaderHash: Clone + Debug + Send + Sync, - SourceHeaderNumber: Clone + Ord + Debug + Send + Sync, - SourceNoncesRange: NoncesRange + Debug + Send + Sync, - TargetHeaderHash: Debug + Send + Sync, - TargetHeaderNumber: Debug + Send + Sync, - Proof: Debug + Send + Sync, -{ - type SourceNoncesRange = SourceNoncesRange; - type ProofParameters = (); - type TargetNoncesData = (); - - fn is_empty(&self) -> bool { - self.source_queue.is_empty() - } - - async fn required_source_header_at_target< - RS: RaceState< - HeaderId, - HeaderId, - >, - >( - &self, - race_state: RS, - ) -> Option> { - let current_best = race_state.best_finalized_source_header_id_at_best_target()?; - self.source_queue - .back() - .and_then(|(h, _)| if h.0 > current_best.0 { Some(h.clone()) } else { None }) - } - - fn best_at_source(&self) -> Option { - let best_in_queue = self.source_queue.back().map(|(_, range)| range.end()); - match (best_in_queue, self.best_target_nonce) { - (Some(best_in_queue), Some(best_target_nonce)) if best_in_queue > best_target_nonce => - Some(best_in_queue), - (_, Some(best_target_nonce)) => Some(best_target_nonce), - (_, None) => None, - } - } - - fn best_at_target(&self) -> Option { - self.best_target_nonce - } - - fn source_nonces_updated( - &mut self, - at_block: HeaderId, - nonces: SourceClientNonces, - ) { - let best_in_queue 
= self - .source_queue - .back() - .map(|(_, range)| range.end()) - .or(self.best_target_nonce) - .unwrap_or_default(); - self.source_queue.extend( - nonces - .new_nonces - .greater_than(best_in_queue) - .into_iter() - .map(move |range| (at_block.clone(), range)), - ) - } - - fn reset_best_target_nonces(&mut self) { - self.best_target_nonce = None; - } - - fn best_target_nonces_updated< - RS: RaceState< - HeaderId, - HeaderId, - >, - >( - &mut self, - nonces: TargetClientNonces<()>, - race_state: &mut RS, - ) { - let nonce = nonces.latest_nonce; - - // if **some** of nonces that we have selected to submit already present at the - // target chain => select new nonces - let need_to_select_new_nonces = race_state - .nonces_to_submit() - .map(|nonces| nonce >= *nonces.start()) - .unwrap_or(false); - if need_to_select_new_nonces { - log::trace!( - target: "bridge", - "Latest nonce at target is {}. Clearing nonces to submit: {:?}", - nonce, - race_state.nonces_to_submit(), - ); - - race_state.reset_nonces_to_submit(); - } - - // if **some** of nonces that we have submitted already present at the - // target chain => select new nonces - let need_new_nonces_to_submit = race_state - .nonces_submitted() - .map(|nonces| nonce >= *nonces.start()) - .unwrap_or(false); - if need_new_nonces_to_submit { - log::trace!( - target: "bridge", - "Latest nonce at target is {}. 
Clearing submitted nonces: {:?}", - nonce, - race_state.nonces_submitted(), - ); - - race_state.reset_nonces_submitted(); - } - - self.best_target_nonce = Some(nonce); - } - - fn finalized_target_nonces_updated< - RS: RaceState< - HeaderId, - HeaderId, - >, - >( - &mut self, - nonces: TargetClientNonces<()>, - _race_state: &mut RS, - ) { - self.remove_le_nonces_from_source_queue(nonces.latest_nonce); - self.best_target_nonce = Some(std::cmp::max( - self.best_target_nonce.unwrap_or(nonces.latest_nonce), - nonces.latest_nonce, - )); - } - - async fn select_nonces_to_deliver< - RS: RaceState< - HeaderId, - HeaderId, - >, - >( - &self, - race_state: RS, - ) -> Option<(RangeInclusive, Self::ProofParameters)> { - let available_indices = self.available_source_queue_indices(race_state)?; - let range_begin = std::cmp::max( - self.best_target_nonce? + 1, - self.source_queue[*available_indices.start()].1.begin(), - ); - let range_end = self.source_queue[*available_indices.end()].1.end(); - Some((range_begin..=range_end, ())) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{ - message_lane::{MessageLane, SourceHeaderIdOf, TargetHeaderIdOf}, - message_lane_loop::tests::{ - header_id, TestMessageLane, TestMessagesProof, TestSourceHeaderHash, - TestSourceHeaderNumber, - }, - message_race_loop::RaceStateImpl, - }; - - type SourceNoncesRange = RangeInclusive; - - type TestRaceStateImpl = RaceStateImpl< - SourceHeaderIdOf, - TargetHeaderIdOf, - TestMessagesProof, - (), - >; - - type BasicStrategy

= super::BasicStrategy< -

::SourceHeaderNumber, -

::SourceHeaderHash, -

::TargetHeaderNumber, -

::TargetHeaderHash, - SourceNoncesRange, -

::MessagesProof, - >; - - fn source_nonces(new_nonces: SourceNoncesRange) -> SourceClientNonces { - SourceClientNonces { new_nonces, confirmed_nonce: None } - } - - fn target_nonces(latest_nonce: MessageNonce) -> TargetClientNonces<()> { - TargetClientNonces { latest_nonce, nonces_data: () } - } - - #[test] - fn strategy_is_empty_works() { - let mut strategy = BasicStrategy::::new(); - assert!(strategy.is_empty()); - strategy.source_nonces_updated(header_id(1), source_nonces(1..=1)); - assert!(!strategy.is_empty()); - } - - #[test] - fn best_at_source_is_never_lower_than_target_nonce() { - let mut strategy = BasicStrategy::::new(); - assert_eq!(strategy.best_at_source(), None); - strategy.source_nonces_updated(header_id(1), source_nonces(1..=5)); - assert_eq!(strategy.best_at_source(), None); - strategy.best_target_nonces_updated(target_nonces(10), &mut TestRaceStateImpl::default()); - assert_eq!(strategy.source_queue, vec![(header_id(1), 1..=5)]); - assert_eq!(strategy.best_at_source(), Some(10)); - } - - #[test] - fn source_nonce_is_never_lower_than_known_target_nonce() { - let mut strategy = BasicStrategy::::new(); - strategy.best_target_nonces_updated(target_nonces(10), &mut TestRaceStateImpl::default()); - strategy.source_nonces_updated(header_id(1), source_nonces(1..=5)); - assert_eq!(strategy.source_queue, vec![]); - } - - #[test] - fn source_nonce_is_never_lower_than_latest_known_source_nonce() { - let mut strategy = BasicStrategy::::new(); - strategy.source_nonces_updated(header_id(1), source_nonces(1..=5)); - strategy.source_nonces_updated(header_id(2), source_nonces(1..=3)); - strategy.source_nonces_updated(header_id(2), source_nonces(1..=5)); - assert_eq!(strategy.source_queue, vec![(header_id(1), 1..=5)]); - } - - #[test] - fn updated_target_nonce_removes_queued_entries() { - let mut strategy = BasicStrategy::::new(); - strategy.source_nonces_updated(header_id(1), source_nonces(1..=5)); - strategy.source_nonces_updated(header_id(2), 
source_nonces(6..=10)); - strategy.source_nonces_updated(header_id(3), source_nonces(11..=15)); - strategy.source_nonces_updated(header_id(4), source_nonces(16..=20)); - strategy - .finalized_target_nonces_updated(target_nonces(15), &mut TestRaceStateImpl::default()); - assert_eq!(strategy.source_queue, vec![(header_id(4), 16..=20)]); - strategy - .finalized_target_nonces_updated(target_nonces(17), &mut TestRaceStateImpl::default()); - assert_eq!(strategy.source_queue, vec![(header_id(4), 18..=20)]); - } - - #[test] - fn selected_nonces_are_dropped_on_target_nonce_update() { - let mut state = TestRaceStateImpl::default(); - let mut strategy = BasicStrategy::::new(); - state.nonces_to_submit = Some((header_id(1), 5..=10, (5..=10, None))); - // we are going to submit 5..=10, so having latest nonce 4 at target is fine - strategy.best_target_nonces_updated(target_nonces(4), &mut state); - assert!(state.nonces_to_submit.is_some()); - // any nonce larger than 4 invalidates the `nonces_to_submit` - for nonce in 5..=11 { - state.nonces_to_submit = Some((header_id(1), 5..=10, (5..=10, None))); - strategy.best_target_nonces_updated(target_nonces(nonce), &mut state); - assert!(state.nonces_to_submit.is_none()); - } - } - - #[test] - fn submitted_nonces_are_dropped_on_target_nonce_update() { - let mut state = TestRaceStateImpl::default(); - let mut strategy = BasicStrategy::::new(); - state.nonces_submitted = Some(5..=10); - // we have submitted 5..=10, so having latest nonce 4 at target is fine - strategy.best_target_nonces_updated(target_nonces(4), &mut state); - assert!(state.nonces_submitted.is_some()); - // any nonce larger than 4 invalidates the `nonces_submitted` - for nonce in 5..=11 { - state.nonces_submitted = Some(5..=10); - strategy.best_target_nonces_updated(target_nonces(nonce), &mut state); - assert!(state.nonces_submitted.is_none()); - } - } - - #[async_std::test] - async fn nothing_is_selected_if_something_is_already_selected() { - let mut state = 
TestRaceStateImpl::default(); - let mut strategy = BasicStrategy::::new(); - state.nonces_to_submit = Some((header_id(1), 1..=10, (1..=10, None))); - strategy.best_target_nonces_updated(target_nonces(0), &mut state); - strategy.source_nonces_updated(header_id(1), source_nonces(1..=10)); - assert_eq!(strategy.select_nonces_to_deliver(state.clone()).await, None); - } - - #[async_std::test] - async fn nothing_is_selected_if_something_is_already_submitted() { - let mut state = TestRaceStateImpl::default(); - let mut strategy = BasicStrategy::::new(); - state.nonces_submitted = Some(1..=10); - strategy.best_target_nonces_updated(target_nonces(0), &mut state); - strategy.source_nonces_updated(header_id(1), source_nonces(1..=10)); - assert_eq!(strategy.select_nonces_to_deliver(state.clone()).await, None); - } - - #[async_std::test] - async fn select_nonces_to_deliver_works() { - let mut state = TestRaceStateImpl::default(); - let mut strategy = BasicStrategy::::new(); - strategy.best_target_nonces_updated(target_nonces(0), &mut state); - strategy.source_nonces_updated(header_id(1), source_nonces(1..=1)); - strategy.source_nonces_updated(header_id(2), source_nonces(2..=2)); - strategy.source_nonces_updated(header_id(3), source_nonces(3..=6)); - strategy.source_nonces_updated(header_id(5), source_nonces(7..=8)); - - state.best_finalized_source_header_id_at_best_target = Some(header_id(4)); - assert_eq!(strategy.select_nonces_to_deliver(state.clone()).await, Some((1..=6, ()))); - strategy.best_target_nonces_updated(target_nonces(6), &mut state); - assert_eq!(strategy.select_nonces_to_deliver(state.clone()).await, None); - - state.best_finalized_source_header_id_at_best_target = Some(header_id(5)); - assert_eq!(strategy.select_nonces_to_deliver(state.clone()).await, Some((7..=8, ()))); - strategy.best_target_nonces_updated(target_nonces(8), &mut state); - assert_eq!(strategy.select_nonces_to_deliver(state.clone()).await, None); - } - - #[test] - fn 
available_source_queue_indices_works() { - let mut state = TestRaceStateImpl::default(); - let mut strategy = BasicStrategy::::new(); - strategy.best_target_nonces_updated(target_nonces(0), &mut state); - strategy.source_nonces_updated(header_id(1), source_nonces(1..=3)); - strategy.source_nonces_updated(header_id(2), source_nonces(4..=6)); - strategy.source_nonces_updated(header_id(3), source_nonces(7..=9)); - - state.best_finalized_source_header_id_at_best_target = Some(header_id(0)); - assert_eq!(strategy.available_source_queue_indices(state.clone()), None); - - state.best_finalized_source_header_id_at_best_target = Some(header_id(1)); - assert_eq!(strategy.available_source_queue_indices(state.clone()), Some(0..=0)); - - state.best_finalized_source_header_id_at_best_target = Some(header_id(2)); - assert_eq!(strategy.available_source_queue_indices(state.clone()), Some(0..=1)); - - state.best_finalized_source_header_id_at_best_target = Some(header_id(3)); - assert_eq!(strategy.available_source_queue_indices(state.clone()), Some(0..=2)); - - state.best_finalized_source_header_id_at_best_target = Some(header_id(4)); - assert_eq!(strategy.available_source_queue_indices(state), Some(0..=2)); - } - - #[test] - fn remove_le_nonces_from_source_queue_works() { - let mut state = TestRaceStateImpl::default(); - let mut strategy = BasicStrategy::::new(); - strategy.best_target_nonces_updated(target_nonces(0), &mut state); - strategy.source_nonces_updated(header_id(1), source_nonces(1..=3)); - strategy.source_nonces_updated(header_id(2), source_nonces(4..=6)); - strategy.source_nonces_updated(header_id(3), source_nonces(7..=9)); - - fn source_queue_nonces( - source_queue: &SourceRangesQueue< - TestSourceHeaderHash, - TestSourceHeaderNumber, - SourceNoncesRange, - >, - ) -> Vec { - source_queue.iter().flat_map(|(_, range)| range.clone()).collect() - } - - strategy.remove_le_nonces_from_source_queue(1); - assert_eq!(source_queue_nonces(&strategy.source_queue), vec![2, 3, 4, 5, 
6, 7, 8, 9],); - - strategy.remove_le_nonces_from_source_queue(5); - assert_eq!(source_queue_nonces(&strategy.source_queue), vec![6, 7, 8, 9],); - - strategy.remove_le_nonces_from_source_queue(9); - assert_eq!(source_queue_nonces(&strategy.source_queue), Vec::::new(),); - - strategy.remove_le_nonces_from_source_queue(100); - assert_eq!(source_queue_nonces(&strategy.source_queue), Vec::::new(),); - } - - #[async_std::test] - async fn previous_nonces_are_selected_if_reorg_happens_at_target_chain() { - let source_header_1 = header_id(1); - let target_header_1 = header_id(1); - - // we start in perfec sync state - all headers are synced and finalized on both ends - let mut state = TestRaceStateImpl { - best_finalized_source_header_id_at_source: Some(source_header_1), - best_finalized_source_header_id_at_best_target: Some(source_header_1), - best_target_header_id: Some(target_header_1), - best_finalized_target_header_id: Some(target_header_1), - nonces_to_submit: None, - nonces_to_submit_batch: None, - nonces_submitted: None, - }; - - // in this state we have 1 available nonce for delivery - let mut strategy = BasicStrategy:: { - source_queue: vec![(header_id(1), 1..=1)].into_iter().collect(), - best_target_nonce: Some(0), - _phantom: PhantomData, - }; - assert_eq!(strategy.select_nonces_to_deliver(state.clone()).await, Some((1..=1, ())),); - - // let's say we have submitted 1..=1 - state.nonces_submitted = Some(1..=1); - - // then new nonce 2 appear at the source block 2 - let source_header_2 = header_id(2); - state.best_finalized_source_header_id_at_source = Some(source_header_2); - strategy.source_nonces_updated( - source_header_2, - SourceClientNonces { new_nonces: 2..=2, confirmed_nonce: None }, - ); - // and nonce 1 appear at the best block of the target node (best finalized still has 0 - // nonces) - let target_header_2 = header_id(2); - state.best_target_header_id = Some(target_header_2); - strategy.best_target_nonces_updated( - TargetClientNonces { 
latest_nonce: 1, nonces_data: () }, - &mut state, - ); - - // then best target header is retracted - strategy.best_target_nonces_updated( - TargetClientNonces { latest_nonce: 0, nonces_data: () }, - &mut state, - ); - - // ... and some fork with zero delivered nonces is finalized - let target_header_2_fork = header_id(2_1); - state.best_finalized_source_header_id_at_source = Some(source_header_2); - state.best_finalized_source_header_id_at_best_target = Some(source_header_2); - state.best_target_header_id = Some(target_header_2_fork); - state.best_finalized_target_header_id = Some(target_header_2_fork); - strategy.finalized_target_nonces_updated( - TargetClientNonces { latest_nonce: 0, nonces_data: () }, - &mut state, - ); - - // now we have to select nonce 1 for delivery again - assert_eq!(strategy.select_nonces_to_deliver(state.clone()).await, Some((1..=2, ())),); - } -} diff --git a/relays/messages/src/metrics.rs b/relays/messages/src/metrics.rs deleted file mode 100644 index 69d80d178..000000000 --- a/relays/messages/src/metrics.rs +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Metrics for message lane relay loop. 
- -use crate::{ - message_lane::MessageLane, - message_lane_loop::{SourceClientState, TargetClientState}, -}; - -use bp_messages::MessageNonce; -use finality_relay::SyncLoopMetrics; -use relay_utils::metrics::{ - metric_name, register, GaugeVec, Metric, Opts, PrometheusError, Registry, U64, -}; - -/// Message lane relay metrics. -/// -/// Cloning only clones references. -#[derive(Clone)] -pub struct MessageLaneLoopMetrics { - /// Best finalized block numbers - "source", "source_at_target", "target_at_source". - source_to_target_finality_metrics: SyncLoopMetrics, - /// Best finalized block numbers - "source", "target", "source_at_target", "target_at_source". - target_to_source_finality_metrics: SyncLoopMetrics, - /// Lane state nonces: "source_latest_generated", "source_latest_confirmed", - /// "target_latest_received", "target_latest_confirmed". - lane_state_nonces: GaugeVec, -} - -impl MessageLaneLoopMetrics { - /// Create and register messages loop metrics. - pub fn new(prefix: Option<&str>) -> Result { - Ok(MessageLaneLoopMetrics { - source_to_target_finality_metrics: SyncLoopMetrics::new( - prefix, - "source", - "source_at_target", - )?, - target_to_source_finality_metrics: SyncLoopMetrics::new( - prefix, - "target", - "target_at_source", - )?, - lane_state_nonces: GaugeVec::new( - Opts::new(metric_name(prefix, "lane_state_nonces"), "Nonces of the lane state"), - &["type"], - )?, - }) - } - - /// Update source client state metrics. - pub fn update_source_state(&self, source_client_state: SourceClientState

) { - self.source_to_target_finality_metrics - .update_best_block_at_source(source_client_state.best_self.0); - if let Some(best_finalized_peer_at_best_self) = - source_client_state.best_finalized_peer_at_best_self - { - self.target_to_source_finality_metrics - .update_best_block_at_target(best_finalized_peer_at_best_self.0); - if let Some(actual_best_finalized_peer_at_best_self) = - source_client_state.actual_best_finalized_peer_at_best_self - { - self.target_to_source_finality_metrics.update_using_same_fork( - best_finalized_peer_at_best_self.1 == actual_best_finalized_peer_at_best_self.1, - ); - } - } - } - - /// Update target client state metrics. - pub fn update_target_state(&self, target_client_state: TargetClientState

) { - self.target_to_source_finality_metrics - .update_best_block_at_source(target_client_state.best_self.0); - if let Some(best_finalized_peer_at_best_self) = - target_client_state.best_finalized_peer_at_best_self - { - self.source_to_target_finality_metrics - .update_best_block_at_target(best_finalized_peer_at_best_self.0); - if let Some(actual_best_finalized_peer_at_best_self) = - target_client_state.actual_best_finalized_peer_at_best_self - { - self.source_to_target_finality_metrics.update_using_same_fork( - best_finalized_peer_at_best_self.1 == actual_best_finalized_peer_at_best_self.1, - ); - } - } - } - - /// Update latest generated nonce at source. - pub fn update_source_latest_generated_nonce( - &self, - source_latest_generated_nonce: MessageNonce, - ) { - self.lane_state_nonces - .with_label_values(&["source_latest_generated"]) - .set(source_latest_generated_nonce); - } - - /// Update the latest confirmed nonce at source. - pub fn update_source_latest_confirmed_nonce( - &self, - source_latest_confirmed_nonce: MessageNonce, - ) { - self.lane_state_nonces - .with_label_values(&["source_latest_confirmed"]) - .set(source_latest_confirmed_nonce); - } - - /// Update the latest received nonce at target. - pub fn update_target_latest_received_nonce(&self, target_latest_generated_nonce: MessageNonce) { - self.lane_state_nonces - .with_label_values(&["target_latest_received"]) - .set(target_latest_generated_nonce); - } - - /// Update the latest confirmed nonce at target. 
- pub fn update_target_latest_confirmed_nonce( - &self, - target_latest_confirmed_nonce: MessageNonce, - ) { - self.lane_state_nonces - .with_label_values(&["target_latest_confirmed"]) - .set(target_latest_confirmed_nonce); - } -} - -impl Metric for MessageLaneLoopMetrics { - fn register(&self, registry: &Registry) -> Result<(), PrometheusError> { - self.source_to_target_finality_metrics.register(registry)?; - self.target_to_source_finality_metrics.register(registry)?; - register(self.lane_state_nonces.clone(), registry)?; - Ok(()) - } -} diff --git a/relays/parachains/Cargo.toml b/relays/parachains/Cargo.toml deleted file mode 100644 index e691168e7..000000000 --- a/relays/parachains/Cargo.toml +++ /dev/null @@ -1,28 +0,0 @@ -[package] -name = "parachains-relay" -version = "0.1.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true -publish = false - -[lints] -workspace = true - -[dependencies] -async-std = "1.9.0" -async-trait = "0.1.79" -futures = "0.3.30" -log = { workspace = true } -relay-utils = { path = "../utils" } - -# Bridge dependencies - -bp-polkadot-core = { path = "../../primitives/polkadot-core" } -relay-substrate-client = { path = "../client-substrate" } - -[dev-dependencies] -codec = { package = "parity-scale-codec", version = "3.6.1" } -relay-substrate-client = { path = "../client-substrate", features = ["test-helpers"] } -sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } diff --git a/relays/parachains/README.md b/relays/parachains/README.md deleted file mode 100644 index 9043b0b0a..000000000 --- a/relays/parachains/README.md +++ /dev/null @@ -1,50 +0,0 @@ -# Parachains Finality Relay - -The parachains finality relay works with two chains - source relay chain and target chain (which may be standalone -chain, relay chain or a parachain). 
The source chain must have the -[`paras` pallet](https://github.com/paritytech/polkadot/tree/master/runtime/parachains/src/paras) deployed at its -runtime. The target chain must have the [bridge parachains pallet](../../modules/parachains/) deployed at its runtime. - -The relay is configured to submit heads of one or several parachains. It pokes source chain periodically and compares -parachain heads that are known to the source relay chain to heads at the target chain. If there are new heads, -the relay submits them to the target chain. - -More: [Parachains Finality Relay Sequence Diagram](../../docs/parachains-finality-relay.html). - -## How to Use the Parachains Finality Relay - -There are only two traits that need to be implemented. The [`SourceChain`](./src/parachains_loop.rs) implementation -is supposed to connect to the source chain node. It must be able to read parachain heads from the `Heads` map of -the [`paras` pallet](https://github.com/paritytech/polkadot/tree/master/runtime/parachains/src/paras). -It also must create storage proofs of `Heads` map entries, when required. - -The [`TargetChain`](./src/parachains_loop.rs) implementation connects to the target chain node. It must be able -to return the best known head of given parachain. When required, it must be able to craft and submit parachains -finality delivery transaction to the target node. - -The main entrypoint for the crate is the [`run` function](./src/parachains_loop.rs), which takes source and target -clients and [`ParachainSyncParams`](./src/parachains_loop.rs) parameters. The most imporant parameter is the -`parachains` - it is the set of parachains, which relay tracks and updates. The other important parameter that -may affect the relay operational costs is the `strategy`. If it is set to `Any`, then the finality delivery -transaction is submitted if at least one of tracked parachain heads is updated. The other option is `All`. 
Then -the relay waits until all tracked parachain heads are updated and submits them all in a single finality delivery -transaction. - -## Parachain Finality Relay Metrics - -Every parachain in Polkadot is identified by the 32-bit number. All metrics, exposed by the parachains finality -relay have the `parachain` label, which is set to the parachain id. And the metrics are prefixed with the prefix, -that depends on the name of the source relay and target chains. The list below shows metrics names for -Rococo (source relay chain) to BridgeHubWestend (target chain) parachains finality relay. For other chains, simply -change chain names. So the metrics are: - -- `Rococo_to_BridgeHubWestend_Parachains_best_parachain_block_number_at_source` - returns best known parachain block - number, registered in the `paras` pallet at the source relay chain (Rococo in our example); - -- `Rococo_to_BridgeHubWestend_Parachains_best_parachain_block_number_at_target` - returns best known parachain block - number, registered in the bridge parachains pallet at the target chain (BridgeHubWestend in our example). - -If relay operates properly, you should see that -the `Rococo_to_BridgeHubWestend_Parachains_best_parachain_block_number_at_target` tries to reach -the `Rococo_to_BridgeHubWestend_Parachains_best_parachain_block_number_at_source`. -And the latter one always increases. diff --git a/relays/parachains/src/lib.rs b/relays/parachains/src/lib.rs deleted file mode 100644 index 81ea983a6..000000000 --- a/relays/parachains/src/lib.rs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -use std::fmt::Debug; - -use relay_substrate_client::{Chain, Parachain}; - -pub mod parachains_loop; -pub mod parachains_loop_metrics; - -/// Finality proofs synchronization pipeline. -pub trait ParachainsPipeline: 'static + Clone + Debug + Send + Sync { - /// Relay chain which is storing parachain heads in its `paras` module. - type SourceRelayChain: Chain; - /// Parachain which headers we are syncing here. - type SourceParachain: Parachain; - /// Target chain (either relay or para) which wants to know about new parachain heads. - type TargetChain: Chain; -} diff --git a/relays/parachains/src/parachains_loop.rs b/relays/parachains/src/parachains_loop.rs deleted file mode 100644 index 41ebbf5aa..000000000 --- a/relays/parachains/src/parachains_loop.rs +++ /dev/null @@ -1,985 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . 
- -use crate::{parachains_loop_metrics::ParachainsLoopMetrics, ParachainsPipeline}; - -use async_trait::async_trait; -use bp_polkadot_core::{ - parachains::{ParaHash, ParaHeadsProof, ParaId}, - BlockNumber as RelayBlockNumber, -}; -use futures::{ - future::{FutureExt, Shared}, - poll, select_biased, -}; -use relay_substrate_client::{Chain, HeaderIdOf, ParachainBase}; -use relay_utils::{ - metrics::MetricsParams, relay_loop::Client as RelayClient, FailedClient, - TrackedTransactionStatus, TransactionTracker, -}; -use std::{future::Future, pin::Pin, task::Poll}; - -/// Parachain header availability at a certain chain. -#[derive(Clone, Copy, Debug)] -pub enum AvailableHeader { - /// The client can not report actual parachain head at this moment. - /// - /// It is a "mild" error, which may appear when e.g. on-demand parachains relay is used. - /// This variant must be treated as "we don't want to update parachain head value at the - /// target chain at this moment". - Unavailable, - /// There's no parachain header at the relay chain. - /// - /// Normally it means that the parachain is not registered there. - Missing, - /// Parachain head with given hash is available at the source chain. - Available(T), -} - -impl AvailableHeader { - /// Return available header. - pub fn as_available(&self) -> Option<&T> { - match *self { - AvailableHeader::Available(ref header) => Some(header), - _ => None, - } - } -} - -impl From> for AvailableHeader { - fn from(maybe_header: Option) -> AvailableHeader { - match maybe_header { - Some(header) => AvailableHeader::Available(header), - None => AvailableHeader::Missing, - } - } -} - -/// Source client used in parachain heads synchronization loop. -#[async_trait] -pub trait SourceClient: RelayClient { - /// Returns `Ok(true)` if client is in synced state. - async fn ensure_synced(&self) -> Result; - - /// Get parachain head id at given block. 
- async fn parachain_head( - &self, - at_block: HeaderIdOf, - ) -> Result>, Self::Error>; - - /// Get parachain head proof at given block. - async fn prove_parachain_head( - &self, - at_block: HeaderIdOf, - ) -> Result<(ParaHeadsProof, ParaHash), Self::Error>; -} - -/// Target client used in parachain heads synchronization loop. -#[async_trait] -pub trait TargetClient: RelayClient { - /// Transaction tracker to track submitted transactions. - type TransactionTracker: TransactionTracker>; - - /// Get best block id. - async fn best_block(&self) -> Result, Self::Error>; - - /// Get best finalized source relay chain block id. - async fn best_finalized_source_relay_chain_block( - &self, - at_block: &HeaderIdOf, - ) -> Result, Self::Error>; - - /// Get parachain head id at given block. - async fn parachain_head( - &self, - at_block: HeaderIdOf, - ) -> Result>, Self::Error>; - - /// Submit parachain heads proof. - async fn submit_parachain_head_proof( - &self, - at_source_block: HeaderIdOf, - para_head_hash: ParaHash, - proof: ParaHeadsProof, - ) -> Result; -} - -/// Return prefix that will be used by default to expose Prometheus metrics of the parachains -/// sync loop. -pub fn metrics_prefix() -> String { - format!( - "{}_to_{}_Parachains_{}", - P::SourceRelayChain::NAME, - P::TargetChain::NAME, - P::SourceParachain::PARACHAIN_ID - ) -} - -/// Run parachain heads synchronization. -pub async fn run( - source_client: impl SourceClient

, - target_client: impl TargetClient

, - metrics_params: MetricsParams, - exit_signal: impl Future + 'static + Send, -) -> Result<(), relay_utils::Error> -where - P::SourceRelayChain: Chain, -{ - let exit_signal = exit_signal.shared(); - relay_utils::relay_loop(source_client, target_client) - .with_metrics(metrics_params) - .loop_metric(ParachainsLoopMetrics::new(Some(&metrics_prefix::

()))?)? - .expose() - .await? - .run(metrics_prefix::

(), move |source_client, target_client, metrics| { - run_until_connection_lost(source_client, target_client, metrics, exit_signal.clone()) - }) - .await -} - -/// Run parachain heads synchronization. -async fn run_until_connection_lost( - source_client: impl SourceClient

, - target_client: impl TargetClient

, - metrics: Option, - exit_signal: impl Future + Send, -) -> Result<(), FailedClient> -where - P::SourceRelayChain: Chain, -{ - let exit_signal = exit_signal.fuse(); - let min_block_interval = std::cmp::min( - P::SourceRelayChain::AVERAGE_BLOCK_INTERVAL, - P::TargetChain::AVERAGE_BLOCK_INTERVAL, - ); - - let mut submitted_heads_tracker: Option> = None; - - futures::pin_mut!(exit_signal); - - // Note that the internal loop breaks with `FailedClient` error even if error is non-connection. - // It is Ok for now, but it may need to be fixed in the future to use exponential backoff for - // regular errors. - - loop { - // Either wait for new block, or exit signal. - // Please note that we are prioritizing the exit signal since if both events happen at once - // it doesn't make sense to perform one more loop iteration. - select_biased! { - _ = exit_signal => return Ok(()), - _ = async_std::task::sleep(min_block_interval).fuse() => {}, - } - - // if source client is not yet synced, we'll need to sleep. Otherwise we risk submitting too - // much redundant transactions - match source_client.ensure_synced().await { - Ok(true) => (), - Ok(false) => { - log::warn!( - target: "bridge", - "{} client is syncing. 
Won't do anything until it is synced", - P::SourceRelayChain::NAME, - ); - continue - }, - Err(e) => { - log::warn!( - target: "bridge", - "{} client has failed to return its sync status: {:?}", - P::SourceRelayChain::NAME, - e, - ); - return Err(FailedClient::Source) - }, - } - - // if we have active transaction, we'll need to wait until it is mined or dropped - let best_target_block = target_client.best_block().await.map_err(|e| { - log::warn!(target: "bridge", "Failed to read best {} block: {:?}", P::SourceRelayChain::NAME, e); - FailedClient::Target - })?; - let head_at_target = - read_head_at_target(&target_client, metrics.as_ref(), &best_target_block).await?; - - // check if our transaction has been mined - if let Some(tracker) = submitted_heads_tracker.take() { - match tracker.update(&best_target_block, &head_at_target).await { - SubmittedHeadStatus::Waiting(tracker) => { - // no news about our transaction and we shall keep waiting - submitted_heads_tracker = Some(tracker); - continue - }, - SubmittedHeadStatus::Final(TrackedTransactionStatus::Finalized(_)) => { - // all heads have been updated, we don't need this tracker anymore - }, - SubmittedHeadStatus::Final(TrackedTransactionStatus::Lost) => { - log::warn!( - target: "bridge", - "Parachains synchronization from {} to {} has stalled. Going to restart", - P::SourceRelayChain::NAME, - P::TargetChain::NAME, - ); - - return Err(FailedClient::Both) - }, - } - } - - // we have no active transaction and may need to update heads, but do we have something for - // update? 
- let best_finalized_relay_block = target_client - .best_finalized_source_relay_chain_block(&best_target_block) - .await - .map_err(|e| { - log::warn!( - target: "bridge", - "Failed to read best finalized {} block from {}: {:?}", - P::SourceRelayChain::NAME, - P::TargetChain::NAME, - e, - ); - FailedClient::Target - })?; - let head_at_source = - read_head_at_source(&source_client, metrics.as_ref(), &best_finalized_relay_block) - .await?; - let is_update_required = is_update_required::

( - head_at_source, - head_at_target, - best_finalized_relay_block, - best_target_block, - ); - - if is_update_required { - let (head_proof, head_hash) = source_client - .prove_parachain_head(best_finalized_relay_block) - .await - .map_err(|e| { - log::warn!( - target: "bridge", - "Failed to prove {} parachain ParaId({}) heads: {:?}", - P::SourceRelayChain::NAME, - P::SourceParachain::PARACHAIN_ID, - e, - ); - FailedClient::Source - })?; - log::info!( - target: "bridge", - "Submitting {} parachain ParaId({}) head update transaction to {}. Para hash at source relay {:?}: {:?}", - P::SourceRelayChain::NAME, - P::SourceParachain::PARACHAIN_ID, - P::TargetChain::NAME, - best_finalized_relay_block, - head_hash, - ); - - let transaction_tracker = target_client - .submit_parachain_head_proof(best_finalized_relay_block, head_hash, head_proof) - .await - .map_err(|e| { - log::warn!( - target: "bridge", - "Failed to submit {} parachain ParaId({}) heads proof to {}: {:?}", - P::SourceRelayChain::NAME, - P::SourceParachain::PARACHAIN_ID, - P::TargetChain::NAME, - e, - ); - FailedClient::Target - })?; - submitted_heads_tracker = - Some(SubmittedHeadsTracker::

::new(head_at_source, transaction_tracker)); - } - } -} - -/// Returns `true` if we need to submit parachain-head-update transaction. -fn is_update_required( - head_at_source: AvailableHeader>, - head_at_target: Option>, - best_finalized_relay_block_at_source: HeaderIdOf, - best_target_block: HeaderIdOf, -) -> bool -where - P::SourceRelayChain: Chain, -{ - log::trace!( - target: "bridge", - "Checking if {} parachain ParaId({}) needs update at {}:\n\t\ - At {} ({:?}): {:?}\n\t\ - At {} ({:?}): {:?}", - P::SourceRelayChain::NAME, - P::SourceParachain::PARACHAIN_ID, - P::TargetChain::NAME, - P::SourceRelayChain::NAME, - best_finalized_relay_block_at_source, - head_at_source, - P::TargetChain::NAME, - best_target_block, - head_at_target, - ); - - let needs_update = match (head_at_source, head_at_target) { - (AvailableHeader::Unavailable, _) => { - // source client has politely asked us not to update current parachain head - // at the target chain - false - }, - (AvailableHeader::Available(head_at_source), Some(head_at_target)) - if head_at_source.number() > head_at_target.number() => - { - // source client knows head that is better than the head known to the target - // client - true - }, - (AvailableHeader::Available(_), Some(_)) => { - // this is normal case when relay has recently updated heads, when parachain is - // not progressing, or when our source client is still syncing - false - }, - (AvailableHeader::Available(_), None) => { - // parachain is not yet known to the target client. This is true when parachain - // or bridge has been just onboarded/started - true - }, - (AvailableHeader::Missing, Some(_)) => { - // parachain/parathread has been offboarded removed from the system. 
It needs to - // be propageted to the target client - true - }, - (AvailableHeader::Missing, None) => { - // all's good - parachain is unknown to both clients - false - }, - }; - - if needs_update { - log::trace!( - target: "bridge", - "{} parachain ParaId({}) needs update at {}: {:?} vs {:?}", - P::SourceRelayChain::NAME, - P::SourceParachain::PARACHAIN_ID, - P::TargetChain::NAME, - head_at_source, - head_at_target, - ); - } - - needs_update -} - -/// Reads parachain head from the source client. -async fn read_head_at_source( - source_client: &impl SourceClient

, - metrics: Option<&ParachainsLoopMetrics>, - at_relay_block: &HeaderIdOf, -) -> Result>, FailedClient> { - let para_head = source_client.parachain_head(*at_relay_block).await; - match para_head { - Ok(AvailableHeader::Available(para_head)) => { - if let Some(metrics) = metrics { - metrics.update_best_parachain_block_at_source( - ParaId(P::SourceParachain::PARACHAIN_ID), - para_head.number(), - ); - } - Ok(AvailableHeader::Available(para_head)) - }, - Ok(r) => Ok(r), - Err(e) => { - log::warn!( - target: "bridge", - "Failed to read head of {} parachain ParaId({:?}): {:?}", - P::SourceRelayChain::NAME, - P::SourceParachain::PARACHAIN_ID, - e, - ); - Err(FailedClient::Source) - }, - } -} - -/// Reads parachain head from the target client. -async fn read_head_at_target( - target_client: &impl TargetClient

, - metrics: Option<&ParachainsLoopMetrics>, - at_block: &HeaderIdOf, -) -> Result>, FailedClient> { - let para_head_id = target_client.parachain_head(*at_block).await; - match para_head_id { - Ok(Some(para_head_id)) => { - if let Some(metrics) = metrics { - metrics.update_best_parachain_block_at_target( - ParaId(P::SourceParachain::PARACHAIN_ID), - para_head_id.number(), - ); - } - Ok(Some(para_head_id)) - }, - Ok(None) => Ok(None), - Err(e) => { - log::warn!( - target: "bridge", - "Failed to read head of {} parachain ParaId({}) at {}: {:?}", - P::SourceRelayChain::NAME, - P::SourceParachain::PARACHAIN_ID, - P::TargetChain::NAME, - e, - ); - Err(FailedClient::Target) - }, - } -} - -/// Submitted heads status. -enum SubmittedHeadStatus { - /// Heads are not yet updated. - Waiting(SubmittedHeadsTracker

), - /// Heads transaction has either been finalized or lost (i.e. received its "final" status). - Final(TrackedTransactionStatus>), -} - -/// Type of the transaction tracker that the `SubmittedHeadsTracker` is using. -/// -/// It needs to be shared because of `poll` macro and our consuming `update` method. -type SharedTransactionTracker

= Shared< - Pin< - Box< - dyn Future< - Output = TrackedTransactionStatus< - HeaderIdOf<

::TargetChain>, - >, - > + Send, - >, - >, ->; - -/// Submitted parachain heads transaction. -struct SubmittedHeadsTracker { - /// Parachain header id that we have submitted. - submitted_head: AvailableHeader>, - /// Future that waits for submitted transaction finality or loss. - /// - /// It needs to be shared because of `poll` macro and our consuming `update` method. - transaction_tracker: SharedTransactionTracker

, -} - -impl SubmittedHeadsTracker

{ - /// Creates new parachain heads transaction tracker. - pub fn new( - submitted_head: AvailableHeader>, - transaction_tracker: impl TransactionTracker> + 'static, - ) -> Self { - SubmittedHeadsTracker { - submitted_head, - transaction_tracker: transaction_tracker.wait().fuse().boxed().shared(), - } - } - - /// Returns `None` if all submitted parachain heads have been updated. - pub async fn update( - self, - at_target_block: &HeaderIdOf, - head_at_target: &Option>, - ) -> SubmittedHeadStatus

{ - // check if our head has been updated - let is_head_updated = match (self.submitted_head, head_at_target) { - (AvailableHeader::Available(submitted_head), Some(head_at_target)) - if head_at_target.number() >= submitted_head.number() => - true, - (AvailableHeader::Missing, None) => true, - _ => false, - }; - if is_head_updated { - log::trace!( - target: "bridge", - "Head of parachain ParaId({}) has been updated at {}: {:?}", - P::SourceParachain::PARACHAIN_ID, - P::TargetChain::NAME, - head_at_target, - ); - - return SubmittedHeadStatus::Final(TrackedTransactionStatus::Finalized(*at_target_block)) - } - - // if underlying transaction tracker has reported that the transaction is lost, we may - // then restart our sync - let transaction_tracker = self.transaction_tracker.clone(); - match poll!(transaction_tracker) { - Poll::Ready(TrackedTransactionStatus::Lost) => - return SubmittedHeadStatus::Final(TrackedTransactionStatus::Lost), - Poll::Ready(TrackedTransactionStatus::Finalized(_)) => { - // so we are here and our transaction is mined+finalized, but some of heads were not - // updated => we're considering our loop as stalled - return SubmittedHeadStatus::Final(TrackedTransactionStatus::Lost) - }, - _ => (), - } - - SubmittedHeadStatus::Waiting(self) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use async_std::sync::{Arc, Mutex}; - use codec::Encode; - use futures::{SinkExt, StreamExt}; - use relay_substrate_client::test_chain::{TestChain, TestParachain}; - use relay_utils::{HeaderId, MaybeConnectionError}; - use sp_core::H256; - - const PARA_10_HASH: ParaHash = H256([10u8; 32]); - const PARA_20_HASH: ParaHash = H256([20u8; 32]); - - #[derive(Clone, Debug)] - enum TestError { - Error, - } - - impl MaybeConnectionError for TestError { - fn is_connection_error(&self) -> bool { - false - } - } - - #[derive(Clone, Debug, PartialEq, Eq)] - struct TestParachainsPipeline; - - impl ParachainsPipeline for TestParachainsPipeline { - type SourceRelayChain = 
TestChain; - type SourceParachain = TestParachain; - type TargetChain = TestChain; - } - - #[derive(Clone, Debug)] - struct TestClient { - data: Arc>, - } - - #[derive(Clone, Debug)] - struct TestTransactionTracker(Option>>); - - #[async_trait] - impl TransactionTracker for TestTransactionTracker { - type HeaderId = HeaderIdOf; - - async fn wait(self) -> TrackedTransactionStatus> { - match self.0 { - Some(status) => status, - None => futures::future::pending().await, - } - } - } - - #[derive(Clone, Debug)] - struct TestClientData { - source_sync_status: Result, - source_head: Result>, TestError>, - source_proof: Result<(), TestError>, - - target_best_block: Result, TestError>, - target_best_finalized_source_block: Result, TestError>, - target_head: Result>, TestError>, - target_submit_result: Result<(), TestError>, - - exit_signal_sender: Option>>, - } - - impl TestClientData { - pub fn minimal() -> Self { - TestClientData { - source_sync_status: Ok(true), - source_head: Ok(AvailableHeader::Available(HeaderId(0, PARA_20_HASH))), - source_proof: Ok(()), - - target_best_block: Ok(HeaderId(0, Default::default())), - target_best_finalized_source_block: Ok(HeaderId(0, Default::default())), - target_head: Ok(None), - target_submit_result: Ok(()), - - exit_signal_sender: None, - } - } - - pub fn with_exit_signal_sender( - sender: futures::channel::mpsc::UnboundedSender<()>, - ) -> Self { - let mut client = Self::minimal(); - client.exit_signal_sender = Some(Box::new(sender)); - client - } - } - - impl From for TestClient { - fn from(data: TestClientData) -> TestClient { - TestClient { data: Arc::new(Mutex::new(data)) } - } - } - - #[async_trait] - impl RelayClient for TestClient { - type Error = TestError; - - async fn reconnect(&mut self) -> Result<(), TestError> { - unimplemented!() - } - } - - #[async_trait] - impl SourceClient for TestClient { - async fn ensure_synced(&self) -> Result { - self.data.lock().await.source_sync_status.clone() - } - - async fn 
parachain_head( - &self, - _at_block: HeaderIdOf, - ) -> Result>, TestError> { - self.data.lock().await.source_head.clone() - } - - async fn prove_parachain_head( - &self, - _at_block: HeaderIdOf, - ) -> Result<(ParaHeadsProof, ParaHash), TestError> { - let head = *self.data.lock().await.source_head.clone()?.as_available().unwrap(); - let storage_proof = vec![head.hash().encode()]; - let proof = (ParaHeadsProof { storage_proof }, head.hash()); - self.data.lock().await.source_proof.clone().map(|_| proof) - } - } - - #[async_trait] - impl TargetClient for TestClient { - type TransactionTracker = TestTransactionTracker; - - async fn best_block(&self) -> Result, TestError> { - self.data.lock().await.target_best_block.clone() - } - - async fn best_finalized_source_relay_chain_block( - &self, - _at_block: &HeaderIdOf, - ) -> Result, TestError> { - self.data.lock().await.target_best_finalized_source_block.clone() - } - - async fn parachain_head( - &self, - _at_block: HeaderIdOf, - ) -> Result>, TestError> { - self.data.lock().await.target_head.clone() - } - - async fn submit_parachain_head_proof( - &self, - _at_source_block: HeaderIdOf, - _updated_parachain_head: ParaHash, - _proof: ParaHeadsProof, - ) -> Result { - let mut data = self.data.lock().await; - data.target_submit_result.clone()?; - - if let Some(mut exit_signal_sender) = data.exit_signal_sender.take() { - exit_signal_sender.send(()).await.unwrap(); - } - Ok(TestTransactionTracker(Some( - TrackedTransactionStatus::Finalized(Default::default()), - ))) - } - } - - #[test] - fn when_source_client_fails_to_return_sync_state() { - let mut test_source_client = TestClientData::minimal(); - test_source_client.source_sync_status = Err(TestError::Error); - - assert_eq!( - async_std::task::block_on(run_until_connection_lost( - TestClient::from(test_source_client), - TestClient::from(TestClientData::minimal()), - None, - futures::future::pending(), - )), - Err(FailedClient::Source), - ); - } - - #[test] - fn 
when_target_client_fails_to_return_best_block() { - let mut test_target_client = TestClientData::minimal(); - test_target_client.target_best_block = Err(TestError::Error); - - assert_eq!( - async_std::task::block_on(run_until_connection_lost( - TestClient::from(TestClientData::minimal()), - TestClient::from(test_target_client), - None, - futures::future::pending(), - )), - Err(FailedClient::Target), - ); - } - - #[test] - fn when_target_client_fails_to_read_heads() { - let mut test_target_client = TestClientData::minimal(); - test_target_client.target_head = Err(TestError::Error); - - assert_eq!( - async_std::task::block_on(run_until_connection_lost( - TestClient::from(TestClientData::minimal()), - TestClient::from(test_target_client), - None, - futures::future::pending(), - )), - Err(FailedClient::Target), - ); - } - - #[test] - fn when_target_client_fails_to_read_best_finalized_source_block() { - let mut test_target_client = TestClientData::minimal(); - test_target_client.target_best_finalized_source_block = Err(TestError::Error); - - assert_eq!( - async_std::task::block_on(run_until_connection_lost( - TestClient::from(TestClientData::minimal()), - TestClient::from(test_target_client), - None, - futures::future::pending(), - )), - Err(FailedClient::Target), - ); - } - - #[test] - fn when_source_client_fails_to_read_heads() { - let mut test_source_client = TestClientData::minimal(); - test_source_client.source_head = Err(TestError::Error); - - assert_eq!( - async_std::task::block_on(run_until_connection_lost( - TestClient::from(test_source_client), - TestClient::from(TestClientData::minimal()), - None, - futures::future::pending(), - )), - Err(FailedClient::Source), - ); - } - - #[test] - fn when_source_client_fails_to_prove_heads() { - let mut test_source_client = TestClientData::minimal(); - test_source_client.source_proof = Err(TestError::Error); - - assert_eq!( - async_std::task::block_on(run_until_connection_lost( - TestClient::from(test_source_client), - 
TestClient::from(TestClientData::minimal()), - None, - futures::future::pending(), - )), - Err(FailedClient::Source), - ); - } - - #[test] - fn when_target_client_rejects_update_transaction() { - let mut test_target_client = TestClientData::minimal(); - test_target_client.target_submit_result = Err(TestError::Error); - - assert_eq!( - async_std::task::block_on(run_until_connection_lost( - TestClient::from(TestClientData::minimal()), - TestClient::from(test_target_client), - None, - futures::future::pending(), - )), - Err(FailedClient::Target), - ); - } - - #[test] - fn minimal_working_case() { - let (exit_signal_sender, exit_signal) = futures::channel::mpsc::unbounded(); - assert_eq!( - async_std::task::block_on(run_until_connection_lost( - TestClient::from(TestClientData::minimal()), - TestClient::from(TestClientData::with_exit_signal_sender(exit_signal_sender)), - None, - exit_signal.into_future().map(|(_, _)| ()), - )), - Ok(()), - ); - } - - fn test_tx_tracker() -> SubmittedHeadsTracker { - SubmittedHeadsTracker::new( - AvailableHeader::Available(HeaderId(20, PARA_20_HASH)), - TestTransactionTracker(None), - ) - } - - impl From> for Option<()> { - fn from(status: SubmittedHeadStatus) -> Option<()> { - match status { - SubmittedHeadStatus::Waiting(_) => Some(()), - _ => None, - } - } - } - - #[async_std::test] - async fn tx_tracker_update_when_head_at_target_has_none_value() { - assert_eq!( - Some(()), - test_tx_tracker() - .update(&HeaderId(0, Default::default()), &Some(HeaderId(10, PARA_10_HASH))) - .await - .into(), - ); - } - - #[async_std::test] - async fn tx_tracker_update_when_head_at_target_has_old_value() { - assert_eq!( - Some(()), - test_tx_tracker() - .update(&HeaderId(0, Default::default()), &Some(HeaderId(10, PARA_10_HASH))) - .await - .into(), - ); - } - - #[async_std::test] - async fn tx_tracker_update_when_head_at_target_has_same_value() { - assert!(matches!( - test_tx_tracker() - .update(&HeaderId(0, Default::default()), &Some(HeaderId(20, 
PARA_20_HASH))) - .await, - SubmittedHeadStatus::Final(TrackedTransactionStatus::Finalized(_)), - )); - } - - #[async_std::test] - async fn tx_tracker_update_when_head_at_target_has_better_value() { - assert!(matches!( - test_tx_tracker() - .update(&HeaderId(0, Default::default()), &Some(HeaderId(30, PARA_20_HASH))) - .await, - SubmittedHeadStatus::Final(TrackedTransactionStatus::Finalized(_)), - )); - } - - #[async_std::test] - async fn tx_tracker_update_when_tx_is_lost() { - let mut tx_tracker = test_tx_tracker(); - tx_tracker.transaction_tracker = - futures::future::ready(TrackedTransactionStatus::Lost).boxed().shared(); - assert!(matches!( - tx_tracker - .update(&HeaderId(0, Default::default()), &Some(HeaderId(10, PARA_10_HASH))) - .await, - SubmittedHeadStatus::Final(TrackedTransactionStatus::Lost), - )); - } - - #[async_std::test] - async fn tx_tracker_update_when_tx_is_finalized_but_heads_are_not_updated() { - let mut tx_tracker = test_tx_tracker(); - tx_tracker.transaction_tracker = - futures::future::ready(TrackedTransactionStatus::Finalized(Default::default())) - .boxed() - .shared(); - assert!(matches!( - tx_tracker - .update(&HeaderId(0, Default::default()), &Some(HeaderId(10, PARA_10_HASH))) - .await, - SubmittedHeadStatus::Final(TrackedTransactionStatus::Lost), - )); - } - - #[test] - fn parachain_is_not_updated_if_it_is_unavailable() { - assert!(!is_update_required::( - AvailableHeader::Unavailable, - None, - Default::default(), - Default::default(), - )); - assert!(!is_update_required::( - AvailableHeader::Unavailable, - Some(HeaderId(10, PARA_10_HASH)), - Default::default(), - Default::default(), - )); - } - - #[test] - fn parachain_is_not_updated_if_it_is_unknown_to_both_clients() { - assert!(!is_update_required::( - AvailableHeader::Missing, - None, - Default::default(), - Default::default(), - ),); - } - - #[test] - fn parachain_is_not_updated_if_target_has_better_head() { - assert!(!is_update_required::( - 
AvailableHeader::Available(HeaderId(10, Default::default())), - Some(HeaderId(20, Default::default())), - Default::default(), - Default::default(), - ),); - } - - #[test] - fn parachain_is_updated_after_offboarding() { - assert!(is_update_required::( - AvailableHeader::Missing, - Some(HeaderId(20, Default::default())), - Default::default(), - Default::default(), - ),); - } - - #[test] - fn parachain_is_updated_after_onboarding() { - assert!(is_update_required::( - AvailableHeader::Available(HeaderId(30, Default::default())), - None, - Default::default(), - Default::default(), - ),); - } - - #[test] - fn parachain_is_updated_if_newer_head_is_known() { - assert!(is_update_required::( - AvailableHeader::Available(HeaderId(40, Default::default())), - Some(HeaderId(30, Default::default())), - Default::default(), - Default::default(), - ),); - } -} diff --git a/relays/parachains/src/parachains_loop_metrics.rs b/relays/parachains/src/parachains_loop_metrics.rs deleted file mode 100644 index 8138a43b3..000000000 --- a/relays/parachains/src/parachains_loop_metrics.rs +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . 
- -use bp_polkadot_core::parachains::ParaId; -use relay_utils::{ - metrics::{metric_name, register, Gauge, Metric, PrometheusError, Registry, U64}, - UniqueSaturatedInto, -}; - -/// Parachains sync metrics. -#[derive(Clone)] -pub struct ParachainsLoopMetrics { - /// Best parachains header numbers at the source. - best_source_block_numbers: Gauge, - /// Best parachains header numbers at the target. - best_target_block_numbers: Gauge, -} - -impl ParachainsLoopMetrics { - /// Create and register parachains loop metrics. - pub fn new(prefix: Option<&str>) -> Result { - Ok(ParachainsLoopMetrics { - best_source_block_numbers: Gauge::new( - metric_name(prefix, "best_parachain_block_number_at_source"), - "Best parachain block numbers at the source relay chain".to_string(), - )?, - best_target_block_numbers: Gauge::new( - metric_name(prefix, "best_parachain_block_number_at_target"), - "Best parachain block numbers at the target chain".to_string(), - )?, - }) - } - - /// Update best block number at source. - pub fn update_best_parachain_block_at_source>( - &self, - parachain: ParaId, - block_number: Number, - ) { - let block_number = block_number.unique_saturated_into(); - log::trace!( - target: "bridge-metrics", - "Updated value of metric 'best_parachain_block_number_at_source[{:?}]': {:?}", - parachain, - block_number, - ); - self.best_source_block_numbers.set(block_number); - } - - /// Update best block number at target. 
- pub fn update_best_parachain_block_at_target>( - &self, - parachain: ParaId, - block_number: Number, - ) { - let block_number = block_number.unique_saturated_into(); - log::trace!( - target: "bridge-metrics", - "Updated value of metric 'best_parachain_block_number_at_target[{:?}]': {:?}", - parachain, - block_number, - ); - self.best_target_block_numbers.set(block_number); - } -} - -impl Metric for ParachainsLoopMetrics { - fn register(&self, registry: &Registry) -> Result<(), PrometheusError> { - register(self.best_source_block_numbers.clone(), registry)?; - register(self.best_target_block_numbers.clone(), registry)?; - Ok(()) - } -} diff --git a/relays/utils/Cargo.toml b/relays/utils/Cargo.toml deleted file mode 100644 index 8d9addb9b..000000000 --- a/relays/utils/Cargo.toml +++ /dev/null @@ -1,38 +0,0 @@ -[package] -name = "relay-utils" -version = "0.1.0" -authors.workspace = true -edition.workspace = true -license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -repository.workspace = true -publish = false - -[lints] -workspace = true - -[dependencies] -ansi_term = "0.12" -anyhow = "1.0" -async-std = "1.9.0" -async-trait = "0.1.79" -backoff = "0.4" -isahc = "1.2" -env_logger = "0.11.3" -futures = "0.3.30" -jsonpath_lib = "0.3" -log = { workspace = true } -num-traits = "0.2" -serde_json = { workspace = true, default-features = true } -sysinfo = "0.30" -time = { version = "0.3", features = ["formatting", "local-offset", "std"] } -tokio = { version = "1.37", features = ["rt"] } -thiserror = { workspace = true } - -# Bridge dependencies - -bp-runtime = { path = "../../primitives/runtime" } - -# Substrate dependencies - -sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } -substrate-prometheus-endpoint = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } diff --git a/relays/utils/src/error.rs b/relays/utils/src/error.rs deleted file mode 100644 index 26f1d0cac..000000000 --- a/relays/utils/src/error.rs 
+++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -use std::net::AddrParseError; -use thiserror::Error; - -/// Result type used by relay utilities. -pub type Result = std::result::Result; - -/// Relay utilities errors. -#[derive(Error, Debug)] -pub enum Error { - /// Failed to request a float value from HTTP service. - #[error("Failed to fetch token price from remote server: {0}")] - FetchTokenPrice(#[source] anyhow::Error), - /// Failed to parse the response from HTTP service. - #[error("Failed to parse HTTP service response: {0:?}. Response: {1:?}")] - ParseHttp(serde_json::Error, String), - /// Failed to select response value from the Json response. - #[error("Failed to select value from response: {0:?}. Response: {1:?}")] - SelectResponseValue(jsonpath_lib::JsonPathError, String), - /// Failed to parse float value from the selected value. - #[error( - "Failed to parse float value {0:?} from response. It is assumed to be positive and normal" - )] - ParseFloat(f64), - /// Couldn't found value in the JSON response. - #[error("Missing required value from response: {0:?}")] - MissingResponseValue(String), - /// Invalid host address was used for exposing Prometheus metrics. 
- #[error("Invalid host {0} is used to expose Prometheus metrics: {1}")] - ExposingMetricsInvalidHost(String, AddrParseError), - /// Prometheus error. - #[error("{0}")] - Prometheus(#[from] substrate_prometheus_endpoint::prometheus::Error), -} diff --git a/relays/utils/src/initialize.rs b/relays/utils/src/initialize.rs deleted file mode 100644 index 8224c1803..000000000 --- a/relays/utils/src/initialize.rs +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Relayer initialization functions. - -use std::{cell::RefCell, fmt::Display, io::Write}; - -async_std::task_local! { - pub(crate) static LOOP_NAME: RefCell = RefCell::new(String::default()); -} - -/// Initialize relay environment. -pub fn initialize_relay() { - initialize_logger(true); -} - -/// Initialize Relay logger instance. 
-pub fn initialize_logger(with_timestamp: bool) { - let format = time::format_description::parse( - "[year]-[month]-[day] \ - [hour repr:24]:[minute]:[second] [offset_hour sign:mandatory]", - ) - .expect("static format string is valid"); - - let mut builder = env_logger::Builder::new(); - builder.filter_level(log::LevelFilter::Warn); - builder.filter_module("bridge", log::LevelFilter::Info); - builder.parse_default_env(); - if with_timestamp { - builder.format(move |buf, record| { - let timestamp = time::OffsetDateTime::now_local() - .unwrap_or_else(|_| time::OffsetDateTime::now_utc()); - let timestamp = timestamp.format(&format).unwrap_or_else(|_| timestamp.to_string()); - - let log_level = color_level(record.level()); - let log_target = color_target(record.target()); - let timestamp = if cfg!(windows) { - Either::Left(timestamp) - } else { - Either::Right(ansi_term::Colour::Fixed(8).bold().paint(timestamp)) - }; - - writeln!( - buf, - "{}{} {} {} {}", - loop_name_prefix(), - timestamp, - log_level, - log_target, - record.args(), - ) - }); - } else { - builder.format(move |buf, record| { - let log_level = color_level(record.level()); - let log_target = color_target(record.target()); - - writeln!(buf, "{}{log_level} {log_target} {}", loop_name_prefix(), record.args(),) - }); - } - - builder.init(); -} - -/// Initialize relay loop. Must only be called once per every loop task. -pub(crate) fn initialize_loop(loop_name: String) { - LOOP_NAME.with(|g_loop_name| *g_loop_name.borrow_mut() = loop_name); -} - -/// Returns loop name prefix to use in logs. The prefix is initialized with the `initialize_loop` -/// call. 
-fn loop_name_prefix() -> String { - // try_with to avoid panic outside of async-std task context - LOOP_NAME - .try_with(|loop_name| { - // using borrow is ok here, because loop is only initialized once (=> borrow_mut will - // only be called once) - let loop_name = loop_name.borrow(); - if loop_name.is_empty() { - String::new() - } else { - format!("[{loop_name}] ") - } - }) - .unwrap_or_else(|_| String::new()) -} - -enum Either { - Left(A), - Right(B), -} -impl Display for Either { - fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { - match self { - Self::Left(a) => write!(fmt, "{a}"), - Self::Right(b) => write!(fmt, "{b}"), - } - } -} - -fn color_target(target: &str) -> impl Display + '_ { - if cfg!(windows) { - Either::Left(target) - } else { - Either::Right(ansi_term::Colour::Fixed(8).paint(target)) - } -} - -fn color_level(level: log::Level) -> impl Display { - if cfg!(windows) { - Either::Left(level) - } else { - let s = level.to_string(); - use ansi_term::Colour as Color; - Either::Right(match level { - log::Level::Error => Color::Fixed(9).bold().paint(s), - log::Level::Warn => Color::Fixed(11).bold().paint(s), - log::Level::Info => Color::Fixed(10).paint(s), - log::Level::Debug => Color::Fixed(14).paint(s), - log::Level::Trace => Color::Fixed(12).paint(s), - }) - } -} diff --git a/relays/utils/src/lib.rs b/relays/utils/src/lib.rs deleted file mode 100644 index 2776620be..000000000 --- a/relays/utils/src/lib.rs +++ /dev/null @@ -1,318 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Utilities used by different relays. - -pub use bp_runtime::HeaderId; -pub use error::Error; -pub use relay_loop::{relay_loop, relay_metrics}; -pub use sp_runtime::traits::{UniqueSaturatedFrom, UniqueSaturatedInto}; -use std::fmt::Debug; - -use async_trait::async_trait; -use backoff::{backoff::Backoff, ExponentialBackoff}; -use futures::future::{BoxFuture, FutureExt}; -use std::time::Duration; -use thiserror::Error; - -/// Default relay loop stall timeout. If transactions generated by relay are immortal, then -/// this timeout is used. -/// -/// There are no any strict requirements on block time in Substrate. But we assume here that all -/// Substrate-based chains will be designed to produce relatively fast (compared to the slowest -/// blockchains) blocks. So 1 hour seems to be a good guess for (even congested) chains to mine -/// transaction, or remove it from the pool. -pub const STALL_TIMEOUT: Duration = Duration::from_secs(60 * 60); - -/// Max delay after connection-unrelated error happened before we'll try the -/// same request again. -pub const MAX_BACKOFF_INTERVAL: Duration = Duration::from_secs(60); -/// Delay after connection-related error happened before we'll try -/// reconnection again. -pub const CONNECTION_ERROR_DELAY: Duration = Duration::from_secs(10); - -pub mod error; -pub mod initialize; -pub mod metrics; -pub mod relay_loop; - -/// Block number traits shared by all chains that relay is able to serve. 
-pub trait BlockNumberBase: - 'static - + From - + UniqueSaturatedInto - + Ord - + Clone - + Copy - + Default - + Send - + Sync - + std::fmt::Debug - + std::fmt::Display - + std::hash::Hash - + std::ops::Add - + std::ops::Sub - + num_traits::CheckedSub - + num_traits::Saturating - + num_traits::Zero - + num_traits::One -{ -} - -impl BlockNumberBase for T where - T: 'static - + From - + UniqueSaturatedInto - + Ord - + Clone - + Copy - + Default - + Send - + Sync - + std::fmt::Debug - + std::fmt::Display - + std::hash::Hash - + std::ops::Add - + std::ops::Sub - + num_traits::CheckedSub - + num_traits::Saturating - + num_traits::Zero - + num_traits::One -{ -} - -/// Macro that returns (client, Err(error)) tuple from function if result is Err(error). -#[macro_export] -macro_rules! bail_on_error { - ($result: expr) => { - match $result { - (client, Ok(result)) => (client, result), - (client, Err(error)) => return (client, Err(error)), - } - }; -} - -/// Macro that returns (client, Err(error)) tuple from function if result is Err(error). -#[macro_export] -macro_rules! bail_on_arg_error { - ($result: expr, $client: ident) => { - match $result { - Ok(result) => result, - Err(error) => return ($client, Err(error)), - } - }; -} - -/// Error type that can signal connection errors. -pub trait MaybeConnectionError { - /// Returns true if error (maybe) represents connection error. - fn is_connection_error(&self) -> bool; -} - -/// Final status of the tracked transaction. -#[derive(Debug, Clone, Copy, PartialEq)] -pub enum TrackedTransactionStatus { - /// Transaction has been lost. - Lost, - /// Transaction has been mined and finalized at given block. - Finalized(BlockId), -} - -/// Transaction tracker. -#[async_trait] -pub trait TransactionTracker: Send { - /// Header id, used by the chain. - type HeaderId: Clone + Debug + Send; - - /// Wait until transaction is either finalized or invalidated/lost. 
- async fn wait(self) -> TrackedTransactionStatus; -} - -/// Future associated with `TransactionTracker`, monitoring the transaction status. -pub type TrackedTransactionFuture<'a, T> = - BoxFuture<'a, TrackedTransactionStatus<::HeaderId>>; - -/// Stringified error that may be either connection-related or not. -#[derive(Error, Debug)] -pub enum StringifiedMaybeConnectionError { - /// The error is connection-related error. - #[error("{0}")] - Connection(String), - /// The error is connection-unrelated error. - #[error("{0}")] - NonConnection(String), -} - -impl StringifiedMaybeConnectionError { - /// Create new stringified connection error. - pub fn new(is_connection_error: bool, error: String) -> Self { - if is_connection_error { - StringifiedMaybeConnectionError::Connection(error) - } else { - StringifiedMaybeConnectionError::NonConnection(error) - } - } -} - -impl MaybeConnectionError for StringifiedMaybeConnectionError { - fn is_connection_error(&self) -> bool { - match *self { - StringifiedMaybeConnectionError::Connection(_) => true, - StringifiedMaybeConnectionError::NonConnection(_) => false, - } - } -} - -/// Exponential backoff for connection-unrelated errors retries. -pub fn retry_backoff() -> ExponentialBackoff { - ExponentialBackoff { - // we do not want relayer to stop - max_elapsed_time: None, - max_interval: MAX_BACKOFF_INTERVAL, - ..Default::default() - } -} - -/// Compact format of IDs vector. -pub fn format_ids(mut ids: impl ExactSizeIterator) -> String { - const NTH_PROOF: &str = "we have checked len; qed"; - match ids.len() { - 0 => "".into(), - 1 => format!("{:?}", ids.next().expect(NTH_PROOF)), - 2 => { - let id0 = ids.next().expect(NTH_PROOF); - let id1 = ids.next().expect(NTH_PROOF); - format!("[{id0:?}, {id1:?}]") - }, - len => { - let id0 = ids.next().expect(NTH_PROOF); - let id_last = ids.last().expect(NTH_PROOF); - format!("{len}:[{id0:?} ... {id_last:?}]") - }, - } -} - -/// Stream that emits item every `timeout_ms` milliseconds. 
-pub fn interval(timeout: Duration) -> impl futures::Stream { - futures::stream::unfold((), move |_| async move { - async_std::task::sleep(timeout).await; - Some(((), ())) - }) -} - -/// Which client has caused error. -#[derive(Debug, Eq, Clone, Copy, PartialEq)] -pub enum FailedClient { - /// It is the source client who has caused error. - Source, - /// It is the target client who has caused error. - Target, - /// Both clients are failing, or we just encountered some other error that - /// should be treated like that. - Both, -} - -/// Future process result. -#[derive(Debug, Clone, Copy)] -pub enum ProcessFutureResult { - /// Future has been processed successfully. - Success, - /// Future has failed with non-connection error. - Failed, - /// Future has failed with connection error. - ConnectionFailed, -} - -impl ProcessFutureResult { - /// Returns true if result is Success. - pub fn is_ok(self) -> bool { - match self { - ProcessFutureResult::Success => true, - ProcessFutureResult::Failed | ProcessFutureResult::ConnectionFailed => false, - } - } - - /// Returns `Ok(())` if future has succeeded. - /// Returns `Err(failed_client)` otherwise. - pub fn fail_if_error(self, failed_client: FailedClient) -> Result<(), FailedClient> { - if self.is_ok() { - Ok(()) - } else { - Err(failed_client) - } - } - - /// Returns Ok(true) if future has succeeded. - /// Returns Ok(false) if future has failed with non-connection error. - /// Returns Err if future is `ConnectionFailed`. - pub fn fail_if_connection_error( - self, - failed_client: FailedClient, - ) -> Result { - match self { - ProcessFutureResult::Success => Ok(true), - ProcessFutureResult::Failed => Ok(false), - ProcessFutureResult::ConnectionFailed => Err(failed_client), - } - } -} - -/// Process result of the future from a client. 
-pub fn process_future_result( - result: Result, - retry_backoff: &mut ExponentialBackoff, - on_success: impl FnOnce(TResult), - go_offline_future: &mut std::pin::Pin<&mut futures::future::Fuse>, - go_offline: impl FnOnce(Duration) -> TGoOfflineFuture, - error_pattern: impl FnOnce() -> String, -) -> ProcessFutureResult -where - TError: std::fmt::Debug + MaybeConnectionError, - TGoOfflineFuture: FutureExt, -{ - match result { - Ok(result) => { - on_success(result); - retry_backoff.reset(); - ProcessFutureResult::Success - }, - Err(error) if error.is_connection_error() => { - log::error!( - target: "bridge", - "{}: {:?}. Going to restart", - error_pattern(), - error, - ); - - retry_backoff.reset(); - go_offline_future.set(go_offline(CONNECTION_ERROR_DELAY).fuse()); - ProcessFutureResult::ConnectionFailed - }, - Err(error) => { - let retry_delay = retry_backoff.next_backoff().unwrap_or(CONNECTION_ERROR_DELAY); - log::error!( - target: "bridge", - "{}: {:?}. Retrying in {}", - error_pattern(), - error, - retry_delay.as_secs_f64(), - ); - - go_offline_future.set(go_offline(retry_delay).fuse()); - ProcessFutureResult::Failed - }, - } -} diff --git a/relays/utils/src/metrics.rs b/relays/utils/src/metrics.rs deleted file mode 100644 index 2e6c8236d..000000000 --- a/relays/utils/src/metrics.rs +++ /dev/null @@ -1,192 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -pub use float_json_value::FloatJsonValueMetric; -pub use global::GlobalMetrics; -pub use substrate_prometheus_endpoint::{ - prometheus::core::{Atomic, Collector}, - register, Counter, CounterVec, Gauge, GaugeVec, Opts, PrometheusError, Registry, F64, I64, U64, -}; - -use async_std::sync::{Arc, RwLock}; -use async_trait::async_trait; -use std::{fmt::Debug, time::Duration}; - -mod float_json_value; -mod global; - -/// Shared reference to `f64` value that is updated by the metric. -pub type F64SharedRef = Arc>>; -/// Int gauge metric type. -pub type IntGauge = Gauge; - -/// Unparsed address that needs to be used to expose Prometheus metrics. -#[derive(Debug, Clone)] -pub struct MetricsAddress { - /// Serve HTTP requests at given host. - pub host: String, - /// Serve HTTP requests at given port. - pub port: u16, -} - -/// Prometheus endpoint MetricsParams. -#[derive(Debug, Clone)] -pub struct MetricsParams { - /// Interface and TCP port to be used when exposing Prometheus metrics. - pub address: Option, - /// Metrics registry. May be `Some(_)` if several components share the same endpoint. - pub registry: Registry, -} - -/// Metric API. -pub trait Metric: Clone + Send + Sync + 'static { - fn register(&self, registry: &Registry) -> Result<(), PrometheusError>; -} - -/// Standalone metric API. -/// -/// Metrics of this kind know how to update themselves, so we may just spawn and forget the -/// asynchronous self-update task. -#[async_trait] -pub trait StandaloneMetric: Metric { - /// Update metric values. - async fn update(&self); - - /// Metrics update interval. - fn update_interval(&self) -> Duration; - - /// Register and spawn metric. Metric is only spawned if it is registered for the first time. 
- fn register_and_spawn(self, registry: &Registry) -> Result<(), PrometheusError> { - match self.register(registry) { - Ok(()) => { - self.spawn(); - Ok(()) - }, - Err(PrometheusError::AlreadyReg) => Ok(()), - Err(e) => Err(e), - } - } - - /// Spawn the self update task that will keep update metric value at given intervals. - fn spawn(self) { - async_std::task::spawn(async move { - let update_interval = self.update_interval(); - loop { - self.update().await; - async_std::task::sleep(update_interval).await; - } - }); - } -} - -impl Default for MetricsAddress { - fn default() -> Self { - MetricsAddress { host: "127.0.0.1".into(), port: 9616 } - } -} - -impl MetricsParams { - /// Creates metrics params from metrics address. - pub fn new( - address: Option, - relay_version: String, - relay_commit: String, - ) -> Result { - const BUILD_INFO_METRIC: &str = "substrate_relay_build_info"; - - let registry = Registry::new(); - register( - Gauge::::with_opts( - Opts::new( - BUILD_INFO_METRIC, - "A metric with a constant '1' value labeled by version", - ) - .const_label("version", &relay_version) - .const_label("commit", &relay_commit), - )?, - ®istry, - )? - .set(1); - - log::info!( - target: "bridge", - "Exposed {} metric: version={} commit={}", - BUILD_INFO_METRIC, - relay_version, - relay_commit, - ); - - Ok(MetricsParams { address, registry }) - } - - /// Creates metrics params so that metrics are not exposed. - pub fn disabled() -> Self { - MetricsParams { address: None, registry: Registry::new() } - } - - /// Do not expose metrics. - #[must_use] - pub fn disable(mut self) -> Self { - self.address = None; - self - } -} - -/// Returns metric name optionally prefixed with given prefix. -pub fn metric_name(prefix: Option<&str>, name: &str) -> String { - if let Some(prefix) = prefix { - format!("{prefix}_{name}") - } else { - name.into() - } -} - -/// Set value of gauge metric. -/// -/// If value is `Ok(None)` or `Err(_)`, metric would have default value. 
-pub fn set_gauge_value, E: Debug>( - gauge: &Gauge, - value: Result, E>, -) { - gauge.set(match value { - Ok(Some(value)) => { - log::trace!( - target: "bridge-metrics", - "Updated value of metric '{:?}': {:?}", - gauge.desc().first().map(|d| &d.fq_name), - value, - ); - value - }, - Ok(None) => { - log::warn!( - target: "bridge-metrics", - "Failed to update metric '{:?}': value is empty", - gauge.desc().first().map(|d| &d.fq_name), - ); - Default::default() - }, - Err(error) => { - log::warn!( - target: "bridge-metrics", - "Failed to update metric '{:?}': {:?}", - gauge.desc().first().map(|d| &d.fq_name), - error, - ); - Default::default() - }, - }) -} diff --git a/relays/utils/src/metrics/float_json_value.rs b/relays/utils/src/metrics/float_json_value.rs deleted file mode 100644 index 17b09e050..000000000 --- a/relays/utils/src/metrics/float_json_value.rs +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright 2019-2020 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -use crate::{ - error::{self, Error}, - metrics::{ - metric_name, register, F64SharedRef, Gauge, Metric, PrometheusError, Registry, - StandaloneMetric, F64, - }, -}; - -use async_std::sync::{Arc, RwLock}; -use async_trait::async_trait; -use std::time::Duration; - -/// Value update interval. 
-const UPDATE_INTERVAL: Duration = Duration::from_secs(300); - -/// Metric that represents float value received from HTTP service as float gauge. -/// -/// The float value returned by the service is assumed to be normal (`f64::is_normal` -/// should return `true`) and strictly positive. -#[derive(Debug, Clone)] -pub struct FloatJsonValueMetric { - url: String, - json_path: String, - metric: Gauge, - shared_value_ref: F64SharedRef, -} - -impl FloatJsonValueMetric { - /// Create new metric instance with given name and help. - pub fn new( - url: String, - json_path: String, - name: String, - help: String, - ) -> Result { - let shared_value_ref = Arc::new(RwLock::new(None)); - Ok(FloatJsonValueMetric { - url, - json_path, - metric: Gauge::new(metric_name(None, &name), help)?, - shared_value_ref, - }) - } - - /// Get shared reference to metric value. - pub fn shared_value_ref(&self) -> F64SharedRef { - self.shared_value_ref.clone() - } - - /// Request value from HTTP service. - async fn request_value(&self) -> anyhow::Result { - use isahc::{AsyncReadResponseExt, HttpClient, Request}; - - let request = Request::get(&self.url).header("Accept", "application/json").body(())?; - let raw_response = HttpClient::new()?.send_async(request).await?.text().await?; - Ok(raw_response) - } - - /// Read value from HTTP service. 
- async fn read_value(&self) -> error::Result { - let raw_response = self.request_value().await.map_err(Error::FetchTokenPrice)?; - parse_service_response(&self.json_path, &raw_response) - } -} - -impl Metric for FloatJsonValueMetric { - fn register(&self, registry: &Registry) -> Result<(), PrometheusError> { - register(self.metric.clone(), registry).map(drop) - } -} - -#[async_trait] -impl StandaloneMetric for FloatJsonValueMetric { - fn update_interval(&self) -> Duration { - UPDATE_INTERVAL - } - - async fn update(&self) { - let value = self.read_value().await; - let maybe_ok = value.as_ref().ok().copied(); - crate::metrics::set_gauge_value(&self.metric, value.map(Some)); - *self.shared_value_ref.write().await = maybe_ok; - } -} - -/// Parse HTTP service response. -fn parse_service_response(json_path: &str, response: &str) -> error::Result { - let json = - serde_json::from_str(response).map_err(|err| Error::ParseHttp(err, response.to_owned()))?; - - let mut selector = jsonpath_lib::selector(&json); - let maybe_selected_value = - selector(json_path).map_err(|err| Error::SelectResponseValue(err, response.to_owned()))?; - let selected_value = maybe_selected_value - .first() - .and_then(|v| v.as_f64()) - .ok_or_else(|| Error::MissingResponseValue(response.to_owned()))?; - if !selected_value.is_normal() || selected_value < 0.0 { - return Err(Error::ParseFloat(selected_value)) - } - - Ok(selected_value) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn parse_service_response_works() { - assert_eq!( - parse_service_response("$.kusama.usd", r#"{"kusama":{"usd":433.05}}"#).map_err(drop), - Ok(433.05), - ); - } - - #[test] - fn parse_service_response_rejects_negative_numbers() { - assert!(parse_service_response("$.kusama.usd", r#"{"kusama":{"usd":-433.05}}"#).is_err()); - } - - #[test] - fn parse_service_response_rejects_zero_numbers() { - assert!(parse_service_response("$.kusama.usd", r#"{"kusama":{"usd":0.0}}"#).is_err()); - } - - #[test] - fn 
parse_service_response_rejects_nan() { - assert!(parse_service_response("$.kusama.usd", r#"{"kusama":{"usd":NaN}}"#).is_err()); - } -} diff --git a/relays/utils/src/metrics/global.rs b/relays/utils/src/metrics/global.rs deleted file mode 100644 index 9b22fb86e..000000000 --- a/relays/utils/src/metrics/global.rs +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -//! Global system-wide Prometheus metrics exposed by relays. - -use crate::metrics::{ - metric_name, register, Gauge, GaugeVec, Metric, Opts, PrometheusError, Registry, - StandaloneMetric, F64, U64, -}; - -use async_std::sync::{Arc, Mutex}; -use async_trait::async_trait; -use std::time::Duration; -use sysinfo::{RefreshKind, System}; - -/// Global metrics update interval. -const UPDATE_INTERVAL: Duration = Duration::from_secs(10); - -/// Global Prometheus metrics. -#[derive(Debug, Clone)] -pub struct GlobalMetrics { - system: Arc>, - system_average_load: GaugeVec, - process_cpu_usage_percentage: Gauge, - process_memory_usage_bytes: Gauge, -} - -impl GlobalMetrics { - /// Create and register global metrics. 
- pub fn new() -> Result { - Ok(GlobalMetrics { - system: Arc::new(Mutex::new(System::new_with_specifics(RefreshKind::everything()))), - system_average_load: GaugeVec::new( - Opts::new(metric_name(None, "system_average_load"), "System load average"), - &["over"], - )?, - process_cpu_usage_percentage: Gauge::new( - metric_name(None, "process_cpu_usage_percentage"), - "Process CPU usage", - )?, - process_memory_usage_bytes: Gauge::new( - metric_name(None, "process_memory_usage_bytes"), - "Process memory (resident set size) usage", - )?, - }) - } -} - -impl Metric for GlobalMetrics { - fn register(&self, registry: &Registry) -> Result<(), PrometheusError> { - register(self.system_average_load.clone(), registry)?; - register(self.process_cpu_usage_percentage.clone(), registry)?; - register(self.process_memory_usage_bytes.clone(), registry)?; - Ok(()) - } -} - -#[async_trait] -impl StandaloneMetric for GlobalMetrics { - async fn update(&self) { - // update system-wide metrics - let mut system = self.system.lock().await; - let load = sysinfo::System::load_average(); - self.system_average_load.with_label_values(&["1min"]).set(load.one); - self.system_average_load.with_label_values(&["5min"]).set(load.five); - self.system_average_load.with_label_values(&["15min"]).set(load.fifteen); - - // update process-related metrics - let pid = sysinfo::get_current_pid().expect( - "only fails where pid is unavailable (os=unknown || arch=wasm32);\ - relay is not supposed to run in such MetricsParamss;\ - qed", - ); - let is_process_refreshed = system.refresh_process(pid); - match (is_process_refreshed, system.process(pid)) { - (true, Some(process_info)) => { - let cpu_usage = process_info.cpu_usage() as f64; - let memory_usage = process_info.memory() * 1024; - log::trace!( - target: "bridge-metrics", - "Refreshed process metrics: CPU={}, memory={}", - cpu_usage, - memory_usage, - ); - - self.process_cpu_usage_percentage.set(if cpu_usage.is_finite() { - cpu_usage - } else { - 0f64 - }); 
- self.process_memory_usage_bytes.set(memory_usage); - }, - _ => { - log::warn!( - target: "bridge-metrics", - "Failed to refresh process information. Metrics may show obsolete values", - ); - }, - } - } - - fn update_interval(&self) -> Duration { - UPDATE_INTERVAL - } -} diff --git a/relays/utils/src/relay_loop.rs b/relays/utils/src/relay_loop.rs deleted file mode 100644 index 7105190a4..000000000 --- a/relays/utils/src/relay_loop.rs +++ /dev/null @@ -1,262 +0,0 @@ -// Copyright 2019-2021 Parity Technologies (UK) Ltd. -// This file is part of Parity Bridges Common. - -// Parity Bridges Common is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity Bridges Common is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity Bridges Common. If not, see . - -use crate::{ - error::Error, - metrics::{Metric, MetricsAddress, MetricsParams}, - FailedClient, MaybeConnectionError, -}; - -use async_trait::async_trait; -use std::{fmt::Debug, future::Future, net::SocketAddr, time::Duration}; -use substrate_prometheus_endpoint::{init_prometheus, Registry}; - -/// Default pause between reconnect attempts. -pub const RECONNECT_DELAY: Duration = Duration::from_secs(10); - -/// Basic blockchain client from relay perspective. -#[async_trait] -pub trait Client: 'static + Clone + Send + Sync { - /// Type of error these clients returns. - type Error: 'static + Debug + MaybeConnectionError + Send + Sync; - - /// Try to reconnect to source node. 
- async fn reconnect(&mut self) -> Result<(), Self::Error>; - - /// Try to reconnect to the source node in an infinite loop until it succeeds. - async fn reconnect_until_success(&mut self, delay: Duration) { - loop { - match self.reconnect().await { - Ok(()) => break, - Err(error) => { - log::warn!( - target: "bridge", - "Failed to reconnect to client. Going to retry in {}s: {:?}", - delay.as_secs(), - error, - ); - - async_std::task::sleep(delay).await; - }, - } - } - } -} - -#[async_trait] -impl Client for () { - type Error = crate::StringifiedMaybeConnectionError; - - async fn reconnect(&mut self) -> Result<(), Self::Error> { - Ok(()) - } -} - -/// Returns generic loop that may be customized and started. -pub fn relay_loop(source_client: SC, target_client: TC) -> Loop { - Loop { reconnect_delay: RECONNECT_DELAY, source_client, target_client, loop_metric: None } -} - -/// Returns generic relay loop metrics that may be customized and used in one or several relay -/// loops. -pub fn relay_metrics(params: MetricsParams) -> LoopMetrics<(), (), ()> { - LoopMetrics { - relay_loop: Loop { - reconnect_delay: RECONNECT_DELAY, - source_client: (), - target_client: (), - loop_metric: None, - }, - address: params.address, - registry: params.registry, - loop_metric: None, - } -} - -/// Generic relay loop. -pub struct Loop { - reconnect_delay: Duration, - source_client: SC, - target_client: TC, - loop_metric: Option, -} - -/// Relay loop metrics builder. -pub struct LoopMetrics { - relay_loop: Loop, - address: Option, - registry: Registry, - loop_metric: Option, -} - -impl Loop { - /// Customize delay between reconnect attempts. - #[must_use] - pub fn reconnect_delay(mut self, reconnect_delay: Duration) -> Self { - self.reconnect_delay = reconnect_delay; - self - } - - /// Start building loop metrics using given prefix. 
- pub fn with_metrics(self, params: MetricsParams) -> LoopMetrics { - LoopMetrics { - relay_loop: Loop { - reconnect_delay: self.reconnect_delay, - source_client: self.source_client, - target_client: self.target_client, - loop_metric: None, - }, - address: params.address, - registry: params.registry, - loop_metric: None, - } - } - - /// Run relay loop. - /// - /// This function represents an outer loop, which in turn calls provided `run_loop` function to - /// do actual job. When `run_loop` returns, this outer loop reconnects to failed client (source, - /// target or both) and calls `run_loop` again. - pub async fn run(mut self, loop_name: String, run_loop: R) -> Result<(), Error> - where - R: 'static + Send + Fn(SC, TC, Option) -> F, - F: 'static + Send + Future>, - SC: 'static + Client, - TC: 'static + Client, - LM: 'static + Send + Clone, - { - let run_loop_task = async move { - crate::initialize::initialize_loop(loop_name); - - loop { - let loop_metric = self.loop_metric.clone(); - let future_result = - run_loop(self.source_client.clone(), self.target_client.clone(), loop_metric); - let result = future_result.await; - - match result { - Ok(()) => break, - Err(failed_client) => { - log::debug!(target: "bridge", "Restarting relay loop"); - - reconnect_failed_client( - failed_client, - self.reconnect_delay, - &mut self.source_client, - &mut self.target_client, - ) - .await - }, - } - } - Ok(()) - }; - - async_std::task::spawn(run_loop_task).await - } -} - -impl LoopMetrics { - /// Add relay loop metrics. - /// - /// Loop metrics will be passed to the loop callback. - pub fn loop_metric( - self, - metric: NewLM, - ) -> Result, Error> { - metric.register(&self.registry)?; - - Ok(LoopMetrics { - relay_loop: self.relay_loop, - address: self.address, - registry: self.registry, - loop_metric: Some(metric), - }) - } - - /// Convert into `MetricsParams` structure so that metrics registry may be extended later. 
- pub fn into_params(self) -> MetricsParams { - MetricsParams { address: self.address, registry: self.registry } - } - - /// Expose metrics using address passed at creation. - /// - /// If passed `address` is `None`, metrics are not exposed. - pub async fn expose(self) -> Result, Error> { - if let Some(address) = self.address { - let socket_addr = SocketAddr::new( - address - .host - .parse() - .map_err(|err| Error::ExposingMetricsInvalidHost(address.host.clone(), err))?, - address.port, - ); - - let registry = self.registry; - async_std::task::spawn(async move { - let runtime = - match tokio::runtime::Builder::new_current_thread().enable_all().build() { - Ok(runtime) => runtime, - Err(err) => { - log::trace!( - target: "bridge-metrics", - "Failed to create tokio runtime. Prometheus metrics are not available: {:?}", - err, - ); - return - }, - }; - - runtime.block_on(async move { - log::trace!( - target: "bridge-metrics", - "Starting prometheus endpoint at: {:?}", - socket_addr, - ); - let result = init_prometheus(socket_addr, registry).await; - log::trace!( - target: "bridge-metrics", - "Prometheus endpoint has exited with result: {:?}", - result, - ); - }); - }); - } - - Ok(Loop { - reconnect_delay: self.relay_loop.reconnect_delay, - source_client: self.relay_loop.source_client, - target_client: self.relay_loop.target_client, - loop_metric: self.loop_metric, - }) - } -} - -/// Deal with the clients that have returned connection error. 
-pub async fn reconnect_failed_client( - failed_client: FailedClient, - reconnect_delay: Duration, - source_client: &mut impl Client, - target_client: &mut impl Client, -) { - if failed_client == FailedClient::Source || failed_client == FailedClient::Both { - source_client.reconnect_until_success(reconnect_delay).await; - } - - if failed_client == FailedClient::Target || failed_client == FailedClient::Both { - target_client.reconnect_until_success(reconnect_delay).await; - } -} diff --git a/scripts/verify-pallets-build.sh b/scripts/verify-pallets-build.sh deleted file mode 100755 index 9c57a2a3c..000000000 --- a/scripts/verify-pallets-build.sh +++ /dev/null @@ -1,141 +0,0 @@ -#!/bin/bash - -# A script to remove everything from bridges repository/subtree, except: -# -# - modules/grandpa; -# - modules/messages; -# - modules/parachains; -# - modules/relayers; -# - everything required from primitives folder. - -set -eux - -# show CLI help -function show_help() { - set +x - echo " " - echo Error: $1 - echo "Usage:" - echo " ./scripts/verify-pallets-build.sh Exit with code 0 if pallets code is well decoupled from the other code in the repo" - echo "Options:" - echo " --no-revert Leaves only runtime code on exit" - echo " --ignore-git-state Ignores git actual state" - exit 1 -} - -# parse CLI args -NO_REVERT= -IGNORE_GIT_STATE= -for i in "$@" -do - case $i in - --no-revert) - NO_REVERT=true - shift - ;; - --ignore-git-state) - IGNORE_GIT_STATE=true - shift - ;; - *) - show_help "Unknown option: $i" - ;; - esac -done - -# the script is able to work only on clean git copy, unless we want to ignore this check -[[ ! -z "${IGNORE_GIT_STATE}" ]] || [[ -z "$(git status --porcelain)" ]] || { echo >&2 "The git copy must be clean"; exit 1; } - -# let's avoid any restrictions on where this script can be called for - bridges repo may be -# plugged into any other repo folder. 
So the script (and other stuff that needs to be removed) -# may be located either in call dir, or one of it subdirs. -BRIDGES_FOLDER="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )/.." - -# let's leave repository/subtree in its original (clean) state if something fails below -function revert_to_clean_state { - [[ ! -z "${NO_REVERT}" ]] || { echo "Reverting to clean state..."; git checkout .; } -} -trap revert_to_clean_state EXIT - -# remove everything we think is not required for our needs -rm -rf $BRIDGES_FOLDER/.config -rm -rf $BRIDGES_FOLDER/.github -rm -rf $BRIDGES_FOLDER/.maintain -rm -rf $BRIDGES_FOLDER/deployments -rm -f $BRIDGES_FOLDER/docs/dockerhub-* -rm -rf $BRIDGES_FOLDER/fuzz -rm -rf $BRIDGES_FOLDER/modules/beefy -rm -rf $BRIDGES_FOLDER/modules/shift-session-manager -rm -rf $BRIDGES_FOLDER/primitives/beefy -rm -rf $BRIDGES_FOLDER/relays -rm -rf $BRIDGES_FOLDER/relay-clients -rm -rf $BRIDGES_FOLDER/scripts/add_license.sh -rm -rf $BRIDGES_FOLDER/scripts/build-containers.sh -rm -rf $BRIDGES_FOLDER/scripts/ci-cache.sh -rm -rf $BRIDGES_FOLDER/scripts/dump-logs.sh -rm -rf $BRIDGES_FOLDER/scripts/license_header -rm -rf $BRIDGES_FOLDER/scripts/regenerate_runtimes.sh -rm -rf $BRIDGES_FOLDER/scripts/update-weights.sh -rm -rf $BRIDGES_FOLDER/scripts/update-weights-setup.sh -rm -rf $BRIDGES_FOLDER/scripts/update_substrate.sh -rm -rf $BRIDGES_FOLDER/substrate-relay -rm -rf $BRIDGES_FOLDER/tools -rm -f $BRIDGES_FOLDER/.dockerignore -rm -f $BRIDGES_FOLDER/local.Dockerfile.dockerignore -rm -f $BRIDGES_FOLDER/deny.toml -rm -f $BRIDGES_FOLDER/.gitlab-ci.yml -rm -f $BRIDGES_FOLDER/.editorconfig -rm -f $BRIDGES_FOLDER/Cargo.toml -rm -f $BRIDGES_FOLDER/ci.Dockerfile -rm -f $BRIDGES_FOLDER/local.Dockerfile -rm -f $BRIDGES_FOLDER/CODEOWNERS -rm -f $BRIDGES_FOLDER/Dockerfile -rm -f $BRIDGES_FOLDER/rustfmt.toml -rm -f $BRIDGES_FOLDER/RELEASE.md - -# let's fix Cargo.toml a bit (it'll be helpful if we are in the bridges repo) -if [[ ! 
-f "Cargo.toml" ]]; then - cat > Cargo.toml <<-CARGO_TOML - [workspace.package] - authors = ["Parity Technologies "] - edition = "2021" - repository = "https://github.com/paritytech/parity-bridges-common.git" - license = "GPL-3.0-only" - - [workspace] - resolver = "2" - - members = [ - "bin/runtime-common", - "modules/*", - "primitives/*", - ] - CARGO_TOML -fi - -# let's test if everything we need compiles - -cargo check -p pallet-bridge-grandpa -cargo check -p pallet-bridge-grandpa --features runtime-benchmarks -cargo check -p pallet-bridge-grandpa --features try-runtime -cargo check -p pallet-bridge-messages -cargo check -p pallet-bridge-messages --features runtime-benchmarks -cargo check -p pallet-bridge-messages --features try-runtime -cargo check -p pallet-bridge-parachains -cargo check -p pallet-bridge-parachains --features runtime-benchmarks -cargo check -p pallet-bridge-parachains --features try-runtime -cargo check -p pallet-bridge-relayers -cargo check -p pallet-bridge-relayers --features runtime-benchmarks -cargo check -p pallet-bridge-relayers --features try-runtime -cargo check -p pallet-xcm-bridge-hub-router -cargo check -p pallet-xcm-bridge-hub-router --features runtime-benchmarks -cargo check -p pallet-xcm-bridge-hub-router --features try-runtime -cargo check -p bridge-runtime-common -cargo check -p bridge-runtime-common --features runtime-benchmarks -cargo check -p bridge-runtime-common --features integrity-test - -# we're removing lock file after all checks are done. 
Otherwise we may use different -# Substrate/Polkadot/Cumulus commits and our checks will fail -rm -f $BRIDGES_FOLDER/Cargo.lock - -echo "OK" diff --git a/substrate-relay/Cargo.toml b/substrate-relay/Cargo.toml index 3a428099e..7fa48a29f 100644 --- a/substrate-relay/Cargo.toml +++ b/substrate-relay/Cargo.toml @@ -27,19 +27,19 @@ signal-hook-async-std = "0.2.2" strum = { version = "0.26.2", features = ["derive"] } # Bridge dependencies -bp-bridge-hub-polkadot = { path = "../chains/chain-bridge-hub-polkadot" } -bp-bridge-hub-rococo = { path = "../chains/chain-bridge-hub-rococo" } -bp-header-chain = { path = "../primitives/header-chain" } -bp-messages = { path = "../primitives/messages" } -bp-parachains = { path = "../primitives/parachains" } -bp-polkadot-bulletin = { path = "../chains/chain-polkadot-bulletin" } -bp-polkadot = { path = "../chains/chain-polkadot" } -bp-polkadot-core = { path = "../primitives/polkadot-core" } -bp-rococo = { path = "../chains/chain-rococo" } -bp-runtime = { path = "../primitives/runtime" } -bridge-runtime-common = { path = "../bin/runtime-common" } -pallet-bridge-parachains = { path = "../modules/parachains" } -parachains-relay = { path = "../relays/parachains" } +bp-bridge-hub-polkadot = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-bridge-hub-rococo = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-header-chain = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-messages = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-parachains = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-polkadot-bulletin = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-polkadot = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-polkadot-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-rococo = { git = 
"https://github.com/paritytech/polkadot-sdk", branch = "master" } +bp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +bridge-runtime-common = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +pallet-bridge-parachains = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +parachains-relay = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } relay-bridge-hub-kusama-client = { path = "../relay-clients/client-bridge-hub-kusama" } relay-bridge-hub-polkadot-client = { path = "../relay-clients/client-bridge-hub-polkadot" } relay-bridge-hub-rococo-client = { path = "../relay-clients/client-bridge-hub-rococo" } @@ -48,10 +48,10 @@ relay-kusama-client = { path = "../relay-clients/client-kusama" } relay-polkadot-client = { path = "../relay-clients/client-polkadot" } relay-polkadot-bulletin-client = { path = "../relay-clients/client-polkadot-bulletin" } relay-rococo-client = { path = "../relay-clients/client-rococo" } -relay-substrate-client = { path = "../relays/client-substrate" } -relay-utils = { path = "../relays/utils" } +relay-substrate-client = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } +relay-utils = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } relay-westend-client = { path = "../relay-clients/client-westend" } -substrate-relay-helper = { path = "../relays/lib-substrate-relay" } +substrate-relay-helper = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } # Substrate Dependencies @@ -60,7 +60,7 @@ sp-core = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master sp-runtime = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } [dev-dependencies] -bp-test-utils = { path = "../primitives/test-utils" } +bp-test-utils = { git = "https://github.com/paritytech/polkadot-sdk", branch = "master" } hex-literal = "0.4" sp-keyring = { git = 
"https://github.com/paritytech/polkadot-sdk", branch = "master" } tempfile = "3.10" -- GitLab From 11b56b7455a481a4843d3546cacc1afa601e2cd8 Mon Sep 17 00:00:00 2001 From: Serban Iorga Date: Wed, 10 Apr 2024 17:50:51 +0300 Subject: [PATCH 26/39] Delete the testing folder (#2922) The testing folder has also been moved to polkadot-sdk --- testing/README.md | 31 - .../bridge_hub_rococo_local_network.toml | 88 -- .../bridge_hub_westend_local_network.toml | 88 -- .../rococo-westend/bridges_rococo_westend.sh | 401 --------- testing/environments/rococo-westend/helper.sh | 3 - .../rococo-westend/rococo-init.zndsl | 8 - .../environments/rococo-westend/rococo.zndsl | 7 - testing/environments/rococo-westend/spawn.sh | 70 -- .../rococo-westend/start_relayer.sh | 23 - .../rococo-westend/westend-init.zndsl | 7 - .../environments/rococo-westend/westend.zndsl | 6 - .../best-finalized-header-at-bridged-chain.js | 25 - .../js-helpers/chains/rococo-at-westend.js | 6 - .../js-helpers/chains/westend-at-rococo.js | 6 - .../native-assets-balance-increased.js | 21 - ...only-mandatory-headers-synced-when-idle.js | 44 - .../only-required-headers-synced-when-idle.js | 81 -- .../framework/js-helpers/relayer-rewards.js | 28 - testing/framework/js-helpers/utils.js | 103 --- .../js-helpers/wait-hrmp-channel-opened.js | 22 - .../js-helpers/wrapped-assets-balance.js | 26 - testing/framework/utils/bridges.sh | 309 ------- testing/framework/utils/common.sh | 45 -- .../utils/generate_hex_encoded_call/index.js | 165 ---- .../package-lock.json | 759 ------------------ .../generate_hex_encoded_call/package.json | 11 - testing/framework/utils/zombienet.sh | 39 - testing/run-new-test.sh | 48 -- testing/run-tests.sh | 138 ---- testing/scripts/invoke-script.sh | 7 - testing/scripts/start-relayer.sh | 7 - testing/scripts/sync-exit.sh | 14 - .../roc-reaches-westend.zndsl | 12 - testing/tests/0001-asset-transfer/run.sh | 25 - .../wnd-reaches-rococo.zndsl | 12 - .../wroc-reaches-rococo.zndsl | 10 - 
.../wwnd-reaches-westend.zndsl | 10 - .../rococo-to-westend.zndsl | 8 - .../run.sh | 35 - .../westend-to-rococo.zndsl | 7 - ...ynced-while-active-rococo-to-westend.zndsl | 26 - ...ynced-while-active-westend-to-rococo.zndsl | 26 - 42 files changed, 2807 deletions(-) delete mode 100644 testing/README.md delete mode 100644 testing/environments/rococo-westend/bridge_hub_rococo_local_network.toml delete mode 100644 testing/environments/rococo-westend/bridge_hub_westend_local_network.toml delete mode 100755 testing/environments/rococo-westend/bridges_rococo_westend.sh delete mode 100755 testing/environments/rococo-westend/helper.sh delete mode 100644 testing/environments/rococo-westend/rococo-init.zndsl delete mode 100644 testing/environments/rococo-westend/rococo.zndsl delete mode 100755 testing/environments/rococo-westend/spawn.sh delete mode 100755 testing/environments/rococo-westend/start_relayer.sh delete mode 100644 testing/environments/rococo-westend/westend-init.zndsl delete mode 100644 testing/environments/rococo-westend/westend.zndsl delete mode 100644 testing/framework/js-helpers/best-finalized-header-at-bridged-chain.js delete mode 100644 testing/framework/js-helpers/chains/rococo-at-westend.js delete mode 100644 testing/framework/js-helpers/chains/westend-at-rococo.js delete mode 100644 testing/framework/js-helpers/native-assets-balance-increased.js delete mode 100644 testing/framework/js-helpers/only-mandatory-headers-synced-when-idle.js delete mode 100644 testing/framework/js-helpers/only-required-headers-synced-when-idle.js delete mode 100644 testing/framework/js-helpers/relayer-rewards.js delete mode 100644 testing/framework/js-helpers/utils.js delete mode 100644 testing/framework/js-helpers/wait-hrmp-channel-opened.js delete mode 100644 testing/framework/js-helpers/wrapped-assets-balance.js delete mode 100755 testing/framework/utils/bridges.sh delete mode 100644 testing/framework/utils/common.sh delete mode 100644 
testing/framework/utils/generate_hex_encoded_call/index.js delete mode 100644 testing/framework/utils/generate_hex_encoded_call/package-lock.json delete mode 100644 testing/framework/utils/generate_hex_encoded_call/package.json delete mode 100644 testing/framework/utils/zombienet.sh delete mode 100755 testing/run-new-test.sh delete mode 100755 testing/run-tests.sh delete mode 100755 testing/scripts/invoke-script.sh delete mode 100755 testing/scripts/start-relayer.sh delete mode 100755 testing/scripts/sync-exit.sh delete mode 100644 testing/tests/0001-asset-transfer/roc-reaches-westend.zndsl delete mode 100755 testing/tests/0001-asset-transfer/run.sh delete mode 100644 testing/tests/0001-asset-transfer/wnd-reaches-rococo.zndsl delete mode 100644 testing/tests/0001-asset-transfer/wroc-reaches-rococo.zndsl delete mode 100644 testing/tests/0001-asset-transfer/wwnd-reaches-westend.zndsl delete mode 100644 testing/tests/0002-mandatory-headers-synced-while-idle/rococo-to-westend.zndsl delete mode 100755 testing/tests/0002-mandatory-headers-synced-while-idle/run.sh delete mode 100644 testing/tests/0002-mandatory-headers-synced-while-idle/westend-to-rococo.zndsl delete mode 100644 testing/tests/0003-required-headers-synced-while-active-rococo-to-westend.zndsl delete mode 100644 testing/tests/0003-required-headers-synced-while-active-westend-to-rococo.zndsl diff --git a/testing/README.md b/testing/README.md deleted file mode 100644 index bd467a410..000000000 --- a/testing/README.md +++ /dev/null @@ -1,31 +0,0 @@ -# Bridges Tests for Local Rococo <> Westend Bridge - -This folder contains [zombienet](https://github.com/paritytech/zombienet/) based integration tests for both -onchain and offchain bridges code. Due to some -[technical difficulties](https://github.com/paritytech/parity-bridges-common/pull/2649#issue-1965339051), we -are using native zombienet provider, which means that you need to build some binaries locally. 
- -To start those tests, you need to: - -- download latest [zombienet release](https://github.com/paritytech/zombienet/releases); - -- build Polkadot binary by running `cargo build -p polkadot --release --features fast-runtime` command in the -[`polkadot-sdk`](https://github.com/paritytech/polkadot-sdk) repository clone; - -- build Polkadot Parachain binary by running `cargo build -p polkadot-parachain-bin --release` command in the -[`polkadot-sdk`](https://github.com/paritytech/polkadot-sdk) repository clone; - -- ensure that you have [`node`](https://nodejs.org/en) installed. Additionally, we'll need globally installed -`polkadot/api-cli` package (use `npm install -g @polkadot/api-cli@beta` to install it); - -- build Substrate relay by running `cargo build -p substrate-relay --release` command in the -[`parity-bridges-common`](https://github.com/paritytech/parity-bridges-common) repository clone. - -- copy fresh `substrate-relay` binary, built in previous point, to the `~/local_bridge_testing/bin/substrate-relay`; - -- change the `POLKADOT_SDK_PATH` and `ZOMBIENET_BINARY_PATH` (and ensure that the nearby variables -have correct values) in the `./run-tests.sh`. - -After that, you could run tests with the `./run-tests.sh` command. Hopefully, it'll show the -"All tests have completed successfully" message in the end. Otherwise, it'll print paths to zombienet -process logs, which, in turn, may be used to track locations of all spinned relay and parachain nodes. 
diff --git a/testing/environments/rococo-westend/bridge_hub_rococo_local_network.toml b/testing/environments/rococo-westend/bridge_hub_rococo_local_network.toml deleted file mode 100644 index 52271f944..000000000 --- a/testing/environments/rococo-westend/bridge_hub_rococo_local_network.toml +++ /dev/null @@ -1,88 +0,0 @@ -[settings] -node_spawn_timeout = 240 - -[relaychain] -default_command = "{{POLKADOT_BINARY}}" -default_args = [ "-lparachain=debug,xcm=trace" ] -chain = "rococo-local" - - [[relaychain.nodes]] - name = "alice-rococo-validator" - validator = true - rpc_port = 9932 - ws_port = 9942 - balance = 2000000000000 - - [[relaychain.nodes]] - name = "bob-rococo-validator" - validator = true - rpc_port = 9933 - ws_port = 9943 - balance = 2000000000000 - - [[relaychain.nodes]] - name = "charlie-rococo-validator" - validator = true - rpc_port = 9934 - ws_port = 9944 - balance = 2000000000000 - -[[parachains]] -id = 1013 -chain = "bridge-hub-rococo-local" -cumulus_based = true - - # run alice as parachain collator - [[parachains.collators]] - name = "bridge-hub-rococo-collator1" - validator = true - command = "{{POLKADOT_PARACHAIN_BINARY}}" - rpc_port = 8933 - ws_port = 8943 - args = [ - "-lparachain=debug,runtime::bridge-hub=trace,runtime::bridge=trace,runtime::bridge-dispatch=trace,bridge=trace,runtime::bridge-messages=trace,xcm=trace" - ] - - # run bob as parachain collator - [[parachains.collators]] - name = "bridge-hub-rococo-collator2" - validator = true - command = "{{POLKADOT_PARACHAIN_BINARY}}" - rpc_port = 8934 - ws_port = 8944 - args = [ - "-lparachain=trace,runtime::bridge-hub=trace,runtime::bridge=trace,runtime::bridge-dispatch=trace,bridge=trace,runtime::bridge-messages=trace,xcm=trace" - ] - -[[parachains]] -id = 1000 -chain = "asset-hub-rococo-local" -cumulus_based = true - - [[parachains.collators]] - name = "asset-hub-rococo-collator1" - rpc_port = 9911 - ws_port = 9910 - command = "{{POLKADOT_PARACHAIN_BINARY}}" - args = [ - 
"-lparachain=debug,xcm=trace,runtime::bridge-transfer=trace" - ] - - [[parachains.collators]] - name = "asset-hub-rococo-collator2" - command = "{{POLKADOT_PARACHAIN_BINARY}}" - args = [ - "-lparachain=debug,xcm=trace,runtime::bridge-transfer=trace" - ] - -#[[hrmp_channels]] -#sender = 1000 -#recipient = 1013 -#max_capacity = 4 -#max_message_size = 524288 -# -#[[hrmp_channels]] -#sender = 1013 -#recipient = 1000 -#max_capacity = 4 -#max_message_size = 524288 diff --git a/testing/environments/rococo-westend/bridge_hub_westend_local_network.toml b/testing/environments/rococo-westend/bridge_hub_westend_local_network.toml deleted file mode 100644 index f2550bcc9..000000000 --- a/testing/environments/rococo-westend/bridge_hub_westend_local_network.toml +++ /dev/null @@ -1,88 +0,0 @@ -[settings] -node_spawn_timeout = 240 - -[relaychain] -default_command = "{{POLKADOT_BINARY}}" -default_args = [ "-lparachain=debug,xcm=trace" ] -chain = "westend-local" - - [[relaychain.nodes]] - name = "alice-westend-validator" - validator = true - rpc_port = 9935 - ws_port = 9945 - balance = 2000000000000 - - [[relaychain.nodes]] - name = "bob-westend-validator" - validator = true - rpc_port = 9936 - ws_port = 9946 - balance = 2000000000000 - - [[relaychain.nodes]] - name = "charlie-westend-validator" - validator = true - rpc_port = 9937 - ws_port = 9947 - balance = 2000000000000 - -[[parachains]] -id = 1002 -chain = "bridge-hub-westend-local" -cumulus_based = true - - # run alice as parachain collator - [[parachains.collators]] - name = "bridge-hub-westend-collator1" - validator = true - command = "{{POLKADOT_PARACHAIN_BINARY}}" - rpc_port = 8935 - ws_port = 8945 - args = [ - "-lparachain=debug,runtime::mmr=info,substrate=info,runtime=info,runtime::bridge-hub=trace,runtime::bridge=trace,runtime::bridge-dispatch=trace,bridge=trace,runtime::bridge-messages=trace,xcm=trace" - ] - - # run bob as parachain collator - [[parachains.collators]] - name = "bridge-hub-westend-collator2" - validator 
= true - command = "{{POLKADOT_PARACHAIN_BINARY}}" - rpc_port = 8936 - ws_port = 8946 - args = [ - "-lparachain=trace,runtime::mmr=info,substrate=info,runtime=info,runtime::bridge-hub=trace,runtime::bridge=trace,runtime::bridge-dispatch=trace,bridge=trace,runtime::bridge-messages=trace,xcm=trace" - ] - -[[parachains]] -id = 1000 -chain = "asset-hub-westend-local" -cumulus_based = true - - [[parachains.collators]] - name = "asset-hub-westend-collator1" - rpc_port = 9011 - ws_port = 9010 - command = "{{POLKADOT_PARACHAIN_BINARY}}" - args = [ - "-lparachain=debug,xcm=trace,runtime::bridge-transfer=trace" - ] - - [[parachains.collators]] - name = "asset-hub-westend-collator2" - command = "{{POLKADOT_PARACHAIN_BINARY}}" - args = [ - "-lparachain=debug,xcm=trace,runtime::bridge-transfer=trace" - ] - -#[[hrmp_channels]] -#sender = 1000 -#recipient = 1002 -#max_capacity = 4 -#max_message_size = 524288 -# -#[[hrmp_channels]] -#sender = 1002 -#recipient = 1000 -#max_capacity = 4 -#max_message_size = 524288 diff --git a/testing/environments/rococo-westend/bridges_rococo_westend.sh b/testing/environments/rococo-westend/bridges_rococo_westend.sh deleted file mode 100755 index 66c9ddc03..000000000 --- a/testing/environments/rococo-westend/bridges_rococo_westend.sh +++ /dev/null @@ -1,401 +0,0 @@ -#!/bin/bash - -# import common functions -source "$FRAMEWORK_PATH/utils/bridges.sh" - -# Expected sovereign accounts. -# -# Generated by: -# -# #[test] -# fn generate_sovereign_accounts() { -# use sp_core::crypto::Ss58Codec; -# use polkadot_parachain_primitives::primitives::Sibling; -# -# parameter_types! 
{ -# pub UniversalLocationAHR: InteriorMultiLocation = X2(GlobalConsensus(Rococo), Parachain(1000)); -# pub UniversalLocationAHW: InteriorMultiLocation = X2(GlobalConsensus(Westend), Parachain(1000)); -# } -# -# // SS58=42 -# println!("GLOBAL_CONSENSUS_ROCOCO_SOVEREIGN_ACCOUNT=\"{}\"", -# frame_support::sp_runtime::AccountId32::new( -# GlobalConsensusConvertsFor::::convert_location( -# &MultiLocation { parents: 2, interior: X1(GlobalConsensus(Rococo)) }).unwrap() -# ).to_ss58check_with_version(42_u16.into()) -# ); -# println!("ASSET_HUB_WESTEND_SOVEREIGN_ACCOUNT_AT_BRIDGE_HUB_WESTEND=\"{}\"", -# frame_support::sp_runtime::AccountId32::new( -# SiblingParachainConvertsVia::::convert_location( -# &MultiLocation { parents: 1, interior: X1(Parachain(1000)) }).unwrap() -# ).to_ss58check_with_version(42_u16.into()) -# ); -# -# // SS58=42 -# println!("GLOBAL_CONSENSUS_WESTEND_SOVEREIGN_ACCOUNT=\"{}\"", -# frame_support::sp_runtime::AccountId32::new( -# GlobalConsensusConvertsFor::::convert_location( -# &MultiLocation { parents: 2, interior: X1(GlobalConsensus(Westend)) }).unwrap() -# ).to_ss58check_with_version(42_u16.into()) -# ); -# println!("ASSET_HUB_ROCOCO_SOVEREIGN_ACCOUNT_AT_BRIDGE_HUB_ROCOCO=\"{}\"", -# frame_support::sp_runtime::AccountId32::new( -# SiblingParachainConvertsVia::::convert_location( -# &MultiLocation { parents: 1, interior: X1(Parachain(1000)) }).unwrap() -# ).to_ss58check_with_version(42_u16.into()) -# ); -# } -GLOBAL_CONSENSUS_ROCOCO_SOVEREIGN_ACCOUNT="5GxRGwT8bU1JeBPTUXc7LEjZMxNrK8MyL2NJnkWFQJTQ4sii" -ASSET_HUB_WESTEND_SOVEREIGN_ACCOUNT_AT_BRIDGE_HUB_WESTEND="5Eg2fntNprdN3FgH4sfEaaZhYtddZQSQUqvYJ1f2mLtinVhV" -GLOBAL_CONSENSUS_WESTEND_SOVEREIGN_ACCOUNT="5He2Qdztyxxa4GoagY6q1jaiLMmKy1gXS7PdZkhfj8ZG9hk5" -ASSET_HUB_ROCOCO_SOVEREIGN_ACCOUNT_AT_BRIDGE_HUB_ROCOCO="5Eg2fntNprdN3FgH4sfEaaZhYtddZQSQUqvYJ1f2mLtinVhV" - -# Expected sovereign accounts for rewards on BridgeHubs. 
-# -# Generated by: -# #[test] -# fn generate_sovereign_accounts_for_rewards() { -# use bp_messages::LaneId; -# use bp_relayers::{PayRewardFromAccount, RewardsAccountOwner, RewardsAccountParams}; -# use sp_core::crypto::Ss58Codec; -# -# // SS58=42 -# println!( -# "ON_BRIDGE_HUB_ROCOCO_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhwd_ThisChain=\"{}\"", -# frame_support::sp_runtime::AccountId32::new( -# PayRewardFromAccount::<[u8; 32], [u8; 32]>::rewards_account(RewardsAccountParams::new( -# LaneId([0, 0, 0, 2]), -# *b"bhwd", -# RewardsAccountOwner::ThisChain -# )) -# ) -# .to_ss58check_with_version(42_u16.into()) -# ); -# // SS58=42 -# println!( -# "ON_BRIDGE_HUB_ROCOCO_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhwd_BridgedChain=\"{}\"", -# frame_support::sp_runtime::AccountId32::new( -# PayRewardFromAccount::<[u8; 32], [u8; 32]>::rewards_account(RewardsAccountParams::new( -# LaneId([0, 0, 0, 2]), -# *b"bhwd", -# RewardsAccountOwner::BridgedChain -# )) -# ) -# .to_ss58check_with_version(42_u16.into()) -# ); -# -# // SS58=42 -# println!( -# "ON_BRIDGE_HUB_WESTEND_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhro_ThisChain=\"{}\"", -# frame_support::sp_runtime::AccountId32::new( -# PayRewardFromAccount::<[u8; 32], [u8; 32]>::rewards_account(RewardsAccountParams::new( -# LaneId([0, 0, 0, 2]), -# *b"bhro", -# RewardsAccountOwner::ThisChain -# )) -# ) -# .to_ss58check_with_version(42_u16.into()) -# ); -# // SS58=42 -# println!( -# "ON_BRIDGE_HUB_WESTEND_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhro_BridgedChain=\"{}\"", -# frame_support::sp_runtime::AccountId32::new( -# PayRewardFromAccount::<[u8; 32], [u8; 32]>::rewards_account(RewardsAccountParams::new( -# LaneId([0, 0, 0, 2]), -# *b"bhro", -# RewardsAccountOwner::BridgedChain -# )) -# ) -# .to_ss58check_with_version(42_u16.into()) -# ); -# } -ON_BRIDGE_HUB_ROCOCO_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhwd_ThisChain="5EHnXaT5BhiSGP5hbdsoVGtzi2sQVgpDNToTxLYeQvKoMPEm" 
-ON_BRIDGE_HUB_ROCOCO_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhwd_BridgedChain="5EHnXaT5BhiSGP5hbdt5EJSapXYbxEv678jyWHEUskCXcjqo" -ON_BRIDGE_HUB_WESTEND_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhro_ThisChain="5EHnXaT5BhiSGP5h9Rg8sgUJqoLym3iEaWUiboT8S9AT5xFh" -ON_BRIDGE_HUB_WESTEND_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhro_BridgedChain="5EHnXaT5BhiSGP5h9RgQci1txJ2BDbp7KBRE9k8xty3BMUSi" - -LANE_ID="00000002" -XCM_VERSION=3 - -function init_ro_wnd() { - local relayer_path=$(ensure_relayer) - - RUST_LOG=runtime=trace,rpc=trace,bridge=trace \ - $relayer_path init-bridge rococo-to-bridge-hub-westend \ - --source-host localhost \ - --source-port 9942 \ - --source-version-mode Auto \ - --target-host localhost \ - --target-port 8945 \ - --target-version-mode Auto \ - --target-signer //Bob -} - -function init_wnd_ro() { - local relayer_path=$(ensure_relayer) - - RUST_LOG=runtime=trace,rpc=trace,bridge=trace \ - $relayer_path init-bridge westend-to-bridge-hub-rococo \ - --source-host localhost \ - --source-port 9945 \ - --source-version-mode Auto \ - --target-host localhost \ - --target-port 8943 \ - --target-version-mode Auto \ - --target-signer //Bob -} - -function run_relay() { - local relayer_path=$(ensure_relayer) - - RUST_LOG=runtime=trace,rpc=trace,bridge=trace \ - $relayer_path relay-headers-and-messages bridge-hub-rococo-bridge-hub-westend \ - --rococo-host localhost \ - --rococo-port 9942 \ - --rococo-version-mode Auto \ - --bridge-hub-rococo-host localhost \ - --bridge-hub-rococo-port 8943 \ - --bridge-hub-rococo-version-mode Auto \ - --bridge-hub-rococo-signer //Charlie \ - --bridge-hub-rococo-transactions-mortality 4 \ - --westend-host localhost \ - --westend-port 9945 \ - --westend-version-mode Auto \ - --bridge-hub-westend-host localhost \ - --bridge-hub-westend-port 8945 \ - --bridge-hub-westend-version-mode Auto \ - --bridge-hub-westend-signer //Charlie \ - --bridge-hub-westend-transactions-mortality 4 \ - --lane "${LANE_ID}" -} - -case "$1" in - run-relay) - 
init_wnd_ro - init_ro_wnd - run_relay - ;; - init-asset-hub-rococo-local) - ensure_polkadot_js_api - # create foreign assets for native Westend token (governance call on Rococo) - force_create_foreign_asset \ - "ws://127.0.0.1:9942" \ - "//Alice" \ - 1000 \ - "ws://127.0.0.1:9910" \ - "$(jq --null-input '{ "parents": 2, "interior": { "X1": { "GlobalConsensus": "Westend" } } }')" \ - "$GLOBAL_CONSENSUS_WESTEND_SOVEREIGN_ACCOUNT" \ - 10000000000 \ - true - # HRMP - open_hrmp_channels \ - "ws://127.0.0.1:9942" \ - "//Alice" \ - 1000 1013 4 524288 - open_hrmp_channels \ - "ws://127.0.0.1:9942" \ - "//Alice" \ - 1013 1000 4 524288 - # set XCM version of remote AssetHubWestend - force_xcm_version \ - "ws://127.0.0.1:9942" \ - "//Alice" \ - 1000 \ - "ws://127.0.0.1:9910" \ - "$(jq --null-input '{ "parents": 2, "interior": { "X2": [ { "GlobalConsensus": "Westend" }, { "Parachain": 1000 } ] } }')" \ - $XCM_VERSION - ;; - init-bridge-hub-rococo-local) - ensure_polkadot_js_api - # SA of sibling asset hub pays for the execution - transfer_balance \ - "ws://127.0.0.1:8943" \ - "//Alice" \ - "$ASSET_HUB_ROCOCO_SOVEREIGN_ACCOUNT_AT_BRIDGE_HUB_ROCOCO" \ - $((1000000000000 + 50000000000 * 20)) - # drip SA of lane dedicated to asset hub for paying rewards for delivery - transfer_balance \ - "ws://127.0.0.1:8943" \ - "//Alice" \ - "$ON_BRIDGE_HUB_ROCOCO_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhwd_ThisChain" \ - $((1000000000000 + 2000000000000)) - # drip SA of lane dedicated to asset hub for paying rewards for delivery confirmation - transfer_balance \ - "ws://127.0.0.1:8943" \ - "//Alice" \ - "$ON_BRIDGE_HUB_ROCOCO_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhwd_BridgedChain" \ - $((1000000000000 + 2000000000000)) - # set XCM version of remote BridgeHubWestend - force_xcm_version \ - "ws://127.0.0.1:9942" \ - "//Alice" \ - 1013 \ - "ws://127.0.0.1:8943" \ - "$(jq --null-input '{ "parents": 2, "interior": { "X2": [ { "GlobalConsensus": "Westend" }, { "Parachain": 1002 } ] } }')" \ - 
$XCM_VERSION - ;; - init-asset-hub-westend-local) - ensure_polkadot_js_api - # create foreign assets for native Rococo token (governance call on Westend) - force_create_foreign_asset \ - "ws://127.0.0.1:9945" \ - "//Alice" \ - 1000 \ - "ws://127.0.0.1:9010" \ - "$(jq --null-input '{ "parents": 2, "interior": { "X1": { "GlobalConsensus": "Rococo" } } }')" \ - "$GLOBAL_CONSENSUS_ROCOCO_SOVEREIGN_ACCOUNT" \ - 10000000000 \ - true - # HRMP - open_hrmp_channels \ - "ws://127.0.0.1:9945" \ - "//Alice" \ - 1000 1002 4 524288 - open_hrmp_channels \ - "ws://127.0.0.1:9945" \ - "//Alice" \ - 1002 1000 4 524288 - # set XCM version of remote AssetHubRococo - force_xcm_version \ - "ws://127.0.0.1:9945" \ - "//Alice" \ - 1000 \ - "ws://127.0.0.1:9010" \ - "$(jq --null-input '{ "parents": 2, "interior": { "X2": [ { "GlobalConsensus": "Rococo" }, { "Parachain": 1000 } ] } }')" \ - $XCM_VERSION - ;; - init-bridge-hub-westend-local) - # SA of sibling asset hub pays for the execution - transfer_balance \ - "ws://127.0.0.1:8945" \ - "//Alice" \ - "$ASSET_HUB_WESTEND_SOVEREIGN_ACCOUNT_AT_BRIDGE_HUB_WESTEND" \ - $((1000000000000000 + 50000000000 * 20)) - # drip SA of lane dedicated to asset hub for paying rewards for delivery - transfer_balance \ - "ws://127.0.0.1:8945" \ - "//Alice" \ - "$ON_BRIDGE_HUB_WESTEND_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhro_ThisChain" \ - $((1000000000000000 + 2000000000000)) - # drip SA of lane dedicated to asset hub for paying rewards for delivery confirmation - transfer_balance \ - "ws://127.0.0.1:8945" \ - "//Alice" \ - "$ON_BRIDGE_HUB_WESTEND_SOVEREIGN_ACCOUNT_FOR_LANE_00000002_bhro_BridgedChain" \ - $((1000000000000000 + 2000000000000)) - # set XCM version of remote BridgeHubRococo - force_xcm_version \ - "ws://127.0.0.1:9945" \ - "//Alice" \ - 1002 \ - "ws://127.0.0.1:8945" \ - "$(jq --null-input '{ "parents": 2, "interior": { "X2": [ { "GlobalConsensus": "Rococo" }, { "Parachain": 1013 } ] } }')" \ - $XCM_VERSION - ;; - 
reserve-transfer-assets-from-asset-hub-rococo-local) - amount=$2 - ensure_polkadot_js_api - # send ROCs to Alice account on AHW - limited_reserve_transfer_assets \ - "ws://127.0.0.1:9910" \ - "//Alice" \ - "$(jq --null-input '{ "V3": { "parents": 2, "interior": { "X2": [ { "GlobalConsensus": "Westend" }, { "Parachain": 1000 } ] } } }')" \ - "$(jq --null-input '{ "V3": { "parents": 0, "interior": { "X1": { "AccountId32": { "id": [212, 53, 147, 199, 21, 253, 211, 28, 97, 20, 26, 189, 4, 169, 159, 214, 130, 44, 133, 88, 133, 76, 205, 227, 154, 86, 132, 231, 165, 109, 162, 125] } } } } }')" \ - "$(jq --null-input '{ "V3": [ { "id": { "Concrete": { "parents": 1, "interior": "Here" } }, "fun": { "Fungible": '$amount' } } ] }')" \ - 0 \ - "Unlimited" - ;; - withdraw-reserve-assets-from-asset-hub-rococo-local) - amount=$2 - ensure_polkadot_js_api - # send back only 100000000000 wrappedWNDs to Alice account on AHW - limited_reserve_transfer_assets \ - "ws://127.0.0.1:9910" \ - "//Alice" \ - "$(jq --null-input '{ "V3": { "parents": 2, "interior": { "X2": [ { "GlobalConsensus": "Westend" }, { "Parachain": 1000 } ] } } }')" \ - "$(jq --null-input '{ "V3": { "parents": 0, "interior": { "X1": { "AccountId32": { "id": [212, 53, 147, 199, 21, 253, 211, 28, 97, 20, 26, 189, 4, 169, 159, 214, 130, 44, 133, 88, 133, 76, 205, 227, 154, 86, 132, 231, 165, 109, 162, 125] } } } } }')" \ - "$(jq --null-input '{ "V3": [ { "id": { "Concrete": { "parents": 2, "interior": { "X1": { "GlobalConsensus": "Westend" } } } }, "fun": { "Fungible": '$amount' } } ] }')" \ - 0 \ - "Unlimited" - ;; - reserve-transfer-assets-from-asset-hub-westend-local) - amount=$2 - ensure_polkadot_js_api - # send WNDs to Alice account on AHR - limited_reserve_transfer_assets \ - "ws://127.0.0.1:9010" \ - "//Alice" \ - "$(jq --null-input '{ "V3": { "parents": 2, "interior": { "X2": [ { "GlobalConsensus": "Rococo" }, { "Parachain": 1000 } ] } } }')" \ - "$(jq --null-input '{ "V3": { "parents": 0, "interior": { "X1": { 
"AccountId32": { "id": [212, 53, 147, 199, 21, 253, 211, 28, 97, 20, 26, 189, 4, 169, 159, 214, 130, 44, 133, 88, 133, 76, 205, 227, 154, 86, 132, 231, 165, 109, 162, 125] } } } } }')" \ - "$(jq --null-input '{ "V3": [ { "id": { "Concrete": { "parents": 1, "interior": "Here" } }, "fun": { "Fungible": '$amount' } } ] }')" \ - 0 \ - "Unlimited" - ;; - withdraw-reserve-assets-from-asset-hub-westend-local) - amount=$2 - ensure_polkadot_js_api - # send back only 100000000000 wrappedROCs to Alice account on AHR - limited_reserve_transfer_assets \ - "ws://127.0.0.1:9010" \ - "//Alice" \ - "$(jq --null-input '{ "V3": { "parents": 2, "interior": { "X2": [ { "GlobalConsensus": "Rococo" }, { "Parachain": 1000 } ] } } }')" \ - "$(jq --null-input '{ "V3": { "parents": 0, "interior": { "X1": { "AccountId32": { "id": [212, 53, 147, 199, 21, 253, 211, 28, 97, 20, 26, 189, 4, 169, 159, 214, 130, 44, 133, 88, 133, 76, 205, 227, 154, 86, 132, 231, 165, 109, 162, 125] } } } } }')" \ - "$(jq --null-input '{ "V3": [ { "id": { "Concrete": { "parents": 2, "interior": { "X1": { "GlobalConsensus": "Rococo" } } } }, "fun": { "Fungible": '$amount' } } ] }')" \ - 0 \ - "Unlimited" - ;; - claim-rewards-bridge-hub-rococo-local) - ensure_polkadot_js_api - # bhwd -> [62, 68, 77, 64] -> 0x62687764 - claim_rewards \ - "ws://127.0.0.1:8943" \ - "//Charlie" \ - "0x${LANE_ID}" \ - "0x62687764" \ - "ThisChain" - claim_rewards \ - "ws://127.0.0.1:8943" \ - "//Charlie" \ - "0x${LANE_ID}" \ - "0x62687764" \ - "BridgedChain" - ;; - claim-rewards-bridge-hub-westend-local) - # bhro -> [62, 68, 72, 6f] -> 0x6268726f - claim_rewards \ - "ws://127.0.0.1:8945" \ - "//Charlie" \ - "0x${LANE_ID}" \ - "0x6268726f" \ - "ThisChain" - claim_rewards \ - "ws://127.0.0.1:8945" \ - "//Charlie" \ - "0x${LANE_ID}" \ - "0x6268726f" \ - "BridgedChain" - ;; - stop) - pkill -f polkadot - pkill -f parachain - ;; - import) - # to avoid trigger anything here - ;; - *) - echo "A command is require. 
Supported commands for: - Local (zombienet) run: - - run-relay - - init-asset-hub-rococo-local - - init-bridge-hub-rococo-local - - init-asset-hub-westend-local - - init-bridge-hub-westend-local - - reserve-transfer-assets-from-asset-hub-rococo-local - - withdraw-reserve-assets-from-asset-hub-rococo-local - - reserve-transfer-assets-from-asset-hub-westend-local - - withdraw-reserve-assets-from-asset-hub-westend-local - - claim-rewards-bridge-hub-rococo-local - - claim-rewards-bridge-hub-westend-local"; - exit 1 - ;; -esac diff --git a/testing/environments/rococo-westend/helper.sh b/testing/environments/rococo-westend/helper.sh deleted file mode 100755 index 0a13ded21..000000000 --- a/testing/environments/rococo-westend/helper.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -$ENV_PATH/bridges_rococo_westend.sh "$@" diff --git a/testing/environments/rococo-westend/rococo-init.zndsl b/testing/environments/rococo-westend/rococo-init.zndsl deleted file mode 100644 index c913e4db3..000000000 --- a/testing/environments/rococo-westend/rococo-init.zndsl +++ /dev/null @@ -1,8 +0,0 @@ -Description: Check if the HRMP channel between Rococo BH and Rococo AH was opened successfully -Network: ./bridge_hub_rococo_local_network.toml -Creds: config - -# ensure that initialization has completed -asset-hub-rococo-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/wait-hrmp-channel-opened.js with "1013" within 300 seconds - - diff --git a/testing/environments/rococo-westend/rococo.zndsl b/testing/environments/rococo-westend/rococo.zndsl deleted file mode 100644 index a75286445..000000000 --- a/testing/environments/rococo-westend/rococo.zndsl +++ /dev/null @@ -1,7 +0,0 @@ -Description: Check if the with-Westend GRANDPA pallet was initialized at Rococo BH -Network: ./bridge_hub_rococo_local_network.toml -Creds: config - -# relay is already started - let's wait until with-Westend GRANDPA pallet is initialized at Rococo -bridge-hub-rococo-collator1: js-script 
{{FRAMEWORK_PATH}}/js-helpers/best-finalized-header-at-bridged-chain.js with "Westend,0" within 400 seconds - diff --git a/testing/environments/rococo-westend/spawn.sh b/testing/environments/rococo-westend/spawn.sh deleted file mode 100755 index cbd0b1bc6..000000000 --- a/testing/environments/rococo-westend/spawn.sh +++ /dev/null @@ -1,70 +0,0 @@ -#!/bin/bash - -set -e - -trap "trap - SIGTERM && kill -9 -$$" SIGINT SIGTERM EXIT - -source "$FRAMEWORK_PATH/utils/zombienet.sh" - -# whether to init the chains (open HRMP channels, set XCM version, create reserve assets, etc) -init=0 -start_relayer=0 -while [ $# -ne 0 ] -do - arg="$1" - case "$arg" in - --init) - init=1 - ;; - --start-relayer) - start_relayer=1 - ;; - esac - shift -done - -logs_dir=$TEST_DIR/logs -helper_script="${BASH_SOURCE%/*}/helper.sh" - -rococo_def=${BASH_SOURCE%/*}/bridge_hub_rococo_local_network.toml -start_zombienet $TEST_DIR $rococo_def rococo_dir rococo_pid -echo - -westend_def=${BASH_SOURCE%/*}/bridge_hub_westend_local_network.toml -start_zombienet $TEST_DIR $westend_def westend_dir westend_pid -echo - -if [[ $init -eq 1 ]]; then - rococo_init_log=$logs_dir/rococo-init.log - echo -e "Setting up the rococo side of the bridge. Logs available at: $rococo_init_log\n" - - westend_init_log=$logs_dir/westend-init.log - echo -e "Setting up the westend side of the bridge. Logs available at: $westend_init_log\n" - - $helper_script init-asset-hub-rococo-local >> $rococo_init_log 2>&1 & - rococo_init_pid=$! - $helper_script init-asset-hub-westend-local >> $westend_init_log 2>&1 & - westend_init_pid=$! - wait -n $rococo_init_pid $westend_init_pid - - - $helper_script init-bridge-hub-rococo-local >> $rococo_init_log 2>&1 & - rococo_init_pid=$! - $helper_script init-bridge-hub-westend-local >> $westend_init_log 2>&1 & - westend_init_pid=$! 
- wait -n $rococo_init_pid $westend_init_pid - - run_zndsl ${BASH_SOURCE%/*}/rococo-init.zndsl $rococo_dir - run_zndsl ${BASH_SOURCE%/*}/westend-init.zndsl $westend_dir -fi - -if [[ $start_relayer -eq 1 ]]; then - ${BASH_SOURCE%/*}/start_relayer.sh $rococo_dir $westend_dir relayer_pid -fi - -echo $rococo_dir > $TEST_DIR/rococo.env -echo $westend_dir > $TEST_DIR/westend.env -echo - -wait -n $rococo_pid $westend_pid $relayer_pid -kill -9 -$$ diff --git a/testing/environments/rococo-westend/start_relayer.sh b/testing/environments/rococo-westend/start_relayer.sh deleted file mode 100755 index 7ddd312d3..000000000 --- a/testing/environments/rococo-westend/start_relayer.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash - -set -e - -source "$FRAMEWORK_PATH/utils/common.sh" -source "$FRAMEWORK_PATH/utils/zombienet.sh" - -rococo_dir=$1 -westend_dir=$2 -__relayer_pid=$3 - -logs_dir=$TEST_DIR/logs -helper_script="${BASH_SOURCE%/*}/helper.sh" - -relayer_log=$logs_dir/relayer.log -echo -e "Starting rococo-westend relayer. 
Logs available at: $relayer_log\n" -start_background_process "$helper_script run-relay" $relayer_log relayer_pid - -run_zndsl ${BASH_SOURCE%/*}/rococo.zndsl $rococo_dir -run_zndsl ${BASH_SOURCE%/*}/westend.zndsl $westend_dir - -eval $__relayer_pid="'$relayer_pid'" - diff --git a/testing/environments/rococo-westend/westend-init.zndsl b/testing/environments/rococo-westend/westend-init.zndsl deleted file mode 100644 index 0f5428eed..000000000 --- a/testing/environments/rococo-westend/westend-init.zndsl +++ /dev/null @@ -1,7 +0,0 @@ -Description: Check if the HRMP channel between Westend BH and Westend AH was opened successfully -Network: ./bridge_hub_westend_local_network.toml -Creds: config - -# ensure that initialization has completed -asset-hub-westend-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/wait-hrmp-channel-opened.js with "1002" within 600 seconds - diff --git a/testing/environments/rococo-westend/westend.zndsl b/testing/environments/rococo-westend/westend.zndsl deleted file mode 100644 index 21d4ebf3b..000000000 --- a/testing/environments/rococo-westend/westend.zndsl +++ /dev/null @@ -1,6 +0,0 @@ -Description: Check if the with-Rococo GRANDPA pallet was initialized at Westend BH -Network: ./bridge_hub_westend_local_network.toml -Creds: config - -# relay is already started - let's wait until with-Rococo GRANDPA pallet is initialized at Westend -bridge-hub-westend-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/best-finalized-header-at-bridged-chain.js with "Rococo,0" within 400 seconds diff --git a/testing/framework/js-helpers/best-finalized-header-at-bridged-chain.js b/testing/framework/js-helpers/best-finalized-header-at-bridged-chain.js deleted file mode 100644 index af4f18aee..000000000 --- a/testing/framework/js-helpers/best-finalized-header-at-bridged-chain.js +++ /dev/null @@ -1,25 +0,0 @@ -async function run(nodeName, networkInfo, args) { - const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName]; - const api = await 
zombie.connect(wsUri, userDefinedTypes); - - // TODO: could be replaced with https://github.com/polkadot-js/api/issues/4930 (depends on metadata v15) later - const bridgedChainName = args[0]; - const expectedBridgedChainHeaderNumber = Number(args[1]); - const runtimeApiMethod = bridgedChainName + "FinalityApi_best_finalized"; - - while (true) { - const encodedBestFinalizedHeaderId = await api.rpc.state.call(runtimeApiMethod, []); - const bestFinalizedHeaderId = api.createType("Option", encodedBestFinalizedHeaderId); - if (bestFinalizedHeaderId.isSome) { - const bestFinalizedHeaderNumber = Number(bestFinalizedHeaderId.unwrap().toHuman()[0]); - if (bestFinalizedHeaderNumber > expectedBridgedChainHeaderNumber) { - return bestFinalizedHeaderNumber; - } - } - - // else sleep and retry - await new Promise((resolve) => setTimeout(resolve, 6000)); - } -} - -module.exports = { run } diff --git a/testing/framework/js-helpers/chains/rococo-at-westend.js b/testing/framework/js-helpers/chains/rococo-at-westend.js deleted file mode 100644 index bcce3b3a3..000000000 --- a/testing/framework/js-helpers/chains/rococo-at-westend.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = { - grandpaPalletName: "bridgeRococoGrandpa", - parachainsPalletName: "bridgeRococoParachains", - messagesPalletName: "bridgeRococoMessages", - bridgedBridgeHubParaId: 1013, -} diff --git a/testing/framework/js-helpers/chains/westend-at-rococo.js b/testing/framework/js-helpers/chains/westend-at-rococo.js deleted file mode 100644 index 6a15b64a2..000000000 --- a/testing/framework/js-helpers/chains/westend-at-rococo.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = { - grandpaPalletName: "bridgeWestendGrandpa", - parachainsPalletName: "bridgeWestendParachains", - messagesPalletName: "bridgeWestendMessages", - bridgedBridgeHubParaId: 1002, -} diff --git a/testing/framework/js-helpers/native-assets-balance-increased.js b/testing/framework/js-helpers/native-assets-balance-increased.js deleted file mode 100644 
index 749c3e2fe..000000000 --- a/testing/framework/js-helpers/native-assets-balance-increased.js +++ /dev/null @@ -1,21 +0,0 @@ -async function run(nodeName, networkInfo, args) { - const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName]; - const api = await zombie.connect(wsUri, userDefinedTypes); - - const accountAddress = args[0]; - const expectedIncrease = BigInt(args[1]); - const initialAccountData = await api.query.system.account(accountAddress); - const initialAccountBalance = initialAccountData.data['free']; - while (true) { - const accountData = await api.query.system.account(accountAddress); - const accountBalance = accountData.data['free']; - if (accountBalance > initialAccountBalance + expectedIncrease) { - return accountBalance; - } - - // else sleep and retry - await new Promise((resolve) => setTimeout(resolve, 6000)); - } -} - -module.exports = {run} diff --git a/testing/framework/js-helpers/only-mandatory-headers-synced-when-idle.js b/testing/framework/js-helpers/only-mandatory-headers-synced-when-idle.js deleted file mode 100644 index 979179245..000000000 --- a/testing/framework/js-helpers/only-mandatory-headers-synced-when-idle.js +++ /dev/null @@ -1,44 +0,0 @@ -const utils = require("./utils"); - -async function run(nodeName, networkInfo, args) { - const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName]; - const api = await zombie.connect(wsUri, userDefinedTypes); - - // parse arguments - const exitAfterSeconds = Number(args[0]); - const bridgedChain = require("./chains/" + args[1]); - - // start listening to new blocks - let totalGrandpaHeaders = 0; - let initialParachainHeaderImported = false; - api.rpc.chain.subscribeNewHeads(async function (header) { - const apiAtParent = await api.at(header.parentHash); - const apiAtCurrent = await api.at(header.hash); - const currentEvents = await apiAtCurrent.query.system.events(); - - totalGrandpaHeaders += await utils.ensureOnlyMandatoryGrandpaHeadersImported( - bridgedChain, - 
apiAtParent, - apiAtCurrent, - currentEvents, - ); - initialParachainHeaderImported = await utils.ensureOnlyInitialParachainHeaderImported( - bridgedChain, - apiAtParent, - apiAtCurrent, - currentEvents, - ); - }); - - // wait given time - await new Promise(resolve => setTimeout(resolve, exitAfterSeconds * 1000)); - // if we haven't seen any new GRANDPA or parachain headers => fail - if (totalGrandpaHeaders == 0) { - throw new Error("No bridged relay chain headers imported"); - } - if (!initialParachainHeaderImported) { - throw new Error("No bridged parachain headers imported"); - } -} - -module.exports = { run } diff --git a/testing/framework/js-helpers/only-required-headers-synced-when-idle.js b/testing/framework/js-helpers/only-required-headers-synced-when-idle.js deleted file mode 100644 index 8c3130e4f..000000000 --- a/testing/framework/js-helpers/only-required-headers-synced-when-idle.js +++ /dev/null @@ -1,81 +0,0 @@ -const utils = require("./utils"); - -async function run(nodeName, networkInfo, args) { - const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName]; - const api = await zombie.connect(wsUri, userDefinedTypes); - - // parse arguments - const exitAfterSeconds = Number(args[0]); - const bridgedChain = require("./chains/" + args[1]); - - // start listening to new blocks - let atLeastOneMessageReceived = false; - let atLeastOneMessageDelivered = false; - const unsubscribe = await api.rpc.chain.subscribeNewHeads(async function (header) { - const apiAtParent = await api.at(header.parentHash); - const apiAtCurrent = await api.at(header.hash); - const currentEvents = await apiAtCurrent.query.system.events(); - - const messagesReceived = currentEvents.find((record) => { - return record.event.section == bridgedChain.messagesPalletName - && record.event.method == "MessagesReceived"; - }) != undefined; - const messagesDelivered = currentEvents.find((record) => { - return record.event.section == bridgedChain.messagesPalletName && - 
record.event.method == "MessagesDelivered"; - }) != undefined; - const hasMessageUpdates = messagesReceived || messagesDelivered; - atLeastOneMessageReceived = atLeastOneMessageReceived || messagesReceived; - atLeastOneMessageDelivered = atLeastOneMessageDelivered || messagesDelivered; - - if (!hasMessageUpdates) { - // if there are no any message update transactions, we only expect mandatory GRANDPA - // headers and initial parachain headers - await utils.ensureOnlyMandatoryGrandpaHeadersImported( - bridgedChain, - apiAtParent, - apiAtCurrent, - currentEvents, - ); - await utils.ensureOnlyInitialParachainHeaderImported( - bridgedChain, - apiAtParent, - apiAtCurrent, - currentEvents, - ); - } else { - const messageTransactions = (messagesReceived ? 1 : 0) + (messagesDelivered ? 1 : 0); - - // otherwise we only accept at most one GRANDPA header - const newGrandpaHeaders = utils.countGrandpaHeaderImports(bridgedChain, currentEvents); - if (newGrandpaHeaders > 1) { - utils.logEvents(currentEvents); - throw new Error("Unexpected relay chain header import: " + newGrandpaHeaders + " / " + messageTransactions); - } - - // ...and at most one parachain header - const newParachainHeaders = utils.countParachainHeaderImports(bridgedChain, currentEvents); - if (newParachainHeaders > 1) { - utils.logEvents(currentEvents); - throw new Error("Unexpected parachain header import: " + newParachainHeaders + " / " + messageTransactions); - } - } - }); - - // wait until we have received + delivered messages OR until timeout - await utils.pollUntil( - exitAfterSeconds, - () => { return atLeastOneMessageReceived && atLeastOneMessageDelivered; }, - () => { unsubscribe(); }, - () => { - if (!atLeastOneMessageReceived) { - throw new Error("No messages received from bridged chain"); - } - if (!atLeastOneMessageDelivered) { - throw new Error("No messages delivered to bridged chain"); - } - }, - ); -} - -module.exports = { run } diff --git a/testing/framework/js-helpers/relayer-rewards.js 
b/testing/framework/js-helpers/relayer-rewards.js deleted file mode 100644 index 5347c6496..000000000 --- a/testing/framework/js-helpers/relayer-rewards.js +++ /dev/null @@ -1,28 +0,0 @@ -async function run(nodeName, networkInfo, args) { - const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName]; - const api = await zombie.connect(wsUri, userDefinedTypes); - - // TODO: could be replaced with https://github.com/polkadot-js/api/issues/4930 (depends on metadata v15) later - const relayerAccountAddress = args[0]; - const laneId = args[1]; - const bridgedChainId = args[2]; - const relayerFundOwner = args[3]; - const expectedRelayerReward = BigInt(args[4]); - while (true) { - const relayerReward = await api.query.bridgeRelayers.relayerRewards( - relayerAccountAddress, - { laneId: laneId, bridgedChainId: bridgedChainId, owner: relayerFundOwner } - ); - if (relayerReward.isSome) { - const relayerRewardBalance = relayerReward.unwrap().toBigInt(); - if (relayerRewardBalance > expectedRelayerReward) { - return relayerRewardBalance; - } - } - - // else sleep and retry - await new Promise((resolve) => setTimeout(resolve, 6000)); - } -} - -module.exports = { run } diff --git a/testing/framework/js-helpers/utils.js b/testing/framework/js-helpers/utils.js deleted file mode 100644 index f6e9f5623..000000000 --- a/testing/framework/js-helpers/utils.js +++ /dev/null @@ -1,103 +0,0 @@ -module.exports = { - logEvents: function(events) { - let stringifiedEvents = ""; - events.forEach((record) => { - if (stringifiedEvents != "") { - stringifiedEvents += ", "; - } - stringifiedEvents += record.event.section + "::" + record.event.method; - }); - console.log("Block events: " + stringifiedEvents); - }, - countGrandpaHeaderImports: function(bridgedChain, events) { - return events.reduce( - (count, record) => { - const { event } = record; - if (event.section == bridgedChain.grandpaPalletName && event.method == "UpdatedBestFinalizedHeader") { - count += 1; - } - return count; - }, - 
0, - ); - }, - countParachainHeaderImports: function(bridgedChain, events) { - return events.reduce( - (count, record) => { - const { event } = record; - if (event.section == bridgedChain.parachainsPalletName && event.method == "UpdatedParachainHead") { - count += 1; - } - return count; - }, - 0, - ); - }, - pollUntil: async function( - timeoutInSecs, - predicate, - cleanup, - onFailure, - ) { - const begin = new Date().getTime(); - const end = begin + timeoutInSecs * 1000; - while (new Date().getTime() < end) { - if (predicate()) { - cleanup(); - return; - } - await new Promise(resolve => setTimeout(resolve, 100)); - } - - cleanup(); - onFailure(); - }, - ensureOnlyMandatoryGrandpaHeadersImported: async function( - bridgedChain, - apiAtParent, - apiAtCurrent, - currentEvents, - ) { - // remember id of bridged relay chain GRANDPA authorities set at parent block - const authoritySetAtParent = await apiAtParent.query[bridgedChain.grandpaPalletName].currentAuthoritySet(); - const authoritySetIdAtParent = authoritySetAtParent["setId"]; - - // now read the id of bridged relay chain GRANDPA authorities set at current block - const authoritySetAtCurrent = await apiAtCurrent.query[bridgedChain.grandpaPalletName].currentAuthoritySet(); - const authoritySetIdAtCurrent = authoritySetAtCurrent["setId"]; - - // we expect to see no more than `authoritySetIdAtCurrent - authoritySetIdAtParent` new GRANDPA headers - const maxNewGrandpaHeaders = authoritySetIdAtCurrent - authoritySetIdAtParent; - const newGrandpaHeaders = module.exports.countGrandpaHeaderImports(bridgedChain, currentEvents); - - // check that our assumptions are correct - if (newGrandpaHeaders > maxNewGrandpaHeaders) { - module.exports.logEvents(currentEvents); - throw new Error("Unexpected relay chain header import: " + newGrandpaHeaders + " / " + maxNewGrandpaHeaders); - } - - return newGrandpaHeaders; - }, - ensureOnlyInitialParachainHeaderImported: async function( - bridgedChain, - apiAtParent, - apiAtCurrent, - 
currentEvents, - ) { - // remember whether we already know bridged parachain header at a parent block - const bestBridgedParachainHeader = await apiAtParent.query[bridgedChain.parachainsPalletName].parasInfo(bridgedChain.bridgedBridgeHubParaId);; - const hasBestBridgedParachainHeader = bestBridgedParachainHeader.isSome; - - // we expect to see: no more than `1` bridged parachain header if there were no parachain header before. - const maxNewParachainHeaders = hasBestBridgedParachainHeader ? 0 : 1; - const newParachainHeaders = module.exports.countParachainHeaderImports(bridgedChain, currentEvents); - - // check that our assumptions are correct - if (newParachainHeaders > maxNewParachainHeaders) { - module.exports.logEvents(currentEvents); - throw new Error("Unexpected parachain header import: " + newParachainHeaders + " / " + maxNewParachainHeaders); - } - - return hasBestBridgedParachainHeader; - }, -} diff --git a/testing/framework/js-helpers/wait-hrmp-channel-opened.js b/testing/framework/js-helpers/wait-hrmp-channel-opened.js deleted file mode 100644 index 765d48cc4..000000000 --- a/testing/framework/js-helpers/wait-hrmp-channel-opened.js +++ /dev/null @@ -1,22 +0,0 @@ -async function run(nodeName, networkInfo, args) { - const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName]; - const api = await zombie.connect(wsUri, userDefinedTypes); - - const sibling = args[0]; - - while (true) { - const messagingStateAsObj = await api.query.parachainSystem.relevantMessagingState(); - const messagingState = api.createType("Option", messagingStateAsObj); - if (messagingState.isSome) { - const egressChannels = messagingState.unwrap().egressChannels; - if (egressChannels.find(x => x[0] == sibling)) { - return; - } - } - - // else sleep and retry - await new Promise((resolve) => setTimeout(resolve, 6000)); - } -} - -module.exports = { run } diff --git a/testing/framework/js-helpers/wrapped-assets-balance.js b/testing/framework/js-helpers/wrapped-assets-balance.js 
deleted file mode 100644 index 272871185..000000000 --- a/testing/framework/js-helpers/wrapped-assets-balance.js +++ /dev/null @@ -1,26 +0,0 @@ -async function run(nodeName, networkInfo, args) { - const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName]; - const api = await zombie.connect(wsUri, userDefinedTypes); - - // TODO: could be replaced with https://github.com/polkadot-js/api/issues/4930 (depends on metadata v15) later - const accountAddress = args[0]; - const expectedForeignAssetBalance = BigInt(args[1]); - const bridgedNetworkName = args[2]; - while (true) { - const foreignAssetAccount = await api.query.foreignAssets.account( - { parents: 2, interior: { X1: { GlobalConsensus: bridgedNetworkName } } }, - accountAddress - ); - if (foreignAssetAccount.isSome) { - const foreignAssetAccountBalance = foreignAssetAccount.unwrap().balance.toBigInt(); - if (foreignAssetAccountBalance > expectedForeignAssetBalance) { - return foreignAssetAccountBalance; - } - } - - // else sleep and retry - await new Promise((resolve) => setTimeout(resolve, 6000)); - } -} - -module.exports = { run } diff --git a/testing/framework/utils/bridges.sh b/testing/framework/utils/bridges.sh deleted file mode 100755 index 7c8399461..000000000 --- a/testing/framework/utils/bridges.sh +++ /dev/null @@ -1,309 +0,0 @@ -#!/bin/bash - -function relayer_path() { - local default_path=~/local_bridge_testing/bin/substrate-relay - local path="${SUBSTRATE_RELAY_BINARY:-$default_path}" - echo "$path" -} - -function ensure_relayer() { - local path=$(relayer_path) - if [[ ! -f "$path" ]]; then - echo " Required substrate-relay binary '$path' does not exist!" - echo " You need to build it and copy to this location!" - echo " Please, check ./parachains/runtimes/bridge-hubs/README.md (Prepare/Build/Deploy)" - exit 1 - fi - - echo $path -} - -function ensure_polkadot_js_api() { - if ! 
which polkadot-js-api &> /dev/null; then - echo '' - echo 'Required command `polkadot-js-api` not in PATH, please, install, e.g.:' - echo "npm install -g @polkadot/api-cli@beta" - echo " or" - echo "yarn global add @polkadot/api-cli" - echo '' - exit 1 - fi - if ! which jq &> /dev/null; then - echo '' - echo 'Required command `jq` not in PATH, please, install, e.g.:' - echo "apt install -y jq" - echo '' - exit 1 - fi - generate_hex_encoded_call_data "check" "--" - local retVal=$? - if [ $retVal -ne 0 ]; then - echo "" - echo "" - echo "-------------------" - echo "Installing (nodejs) sub module: ${BASH_SOURCE%/*}/generate_hex_encoded_call" - pushd ${BASH_SOURCE%/*}/generate_hex_encoded_call - npm install - popd - fi -} - -function call_polkadot_js_api() { - # --noWait: without that argument `polkadot-js-api` waits until transaction is included into the block. - # With it, it just submits it to the tx pool and exits. - # --nonce -1: means to compute transaction nonce using `system_accountNextIndex` RPC, which includes all - # transaction that are in the tx pool. - polkadot-js-api --noWait --nonce -1 "$@" -} - -function generate_hex_encoded_call_data() { - local type=$1 - local endpoint=$2 - local output=$3 - shift - shift - shift - echo "Input params: $@" - - node ${BASH_SOURCE%/*}/../utils/generate_hex_encoded_call "$type" "$endpoint" "$output" "$@" - local retVal=$? 
- - if [ $type != "check" ]; then - local hex_encoded_data=$(cat $output) - echo "Generated hex-encoded bytes to file '$output': $hex_encoded_data" - fi - - return $retVal -} - -function transfer_balance() { - local runtime_para_endpoint=$1 - local seed=$2 - local target_account=$3 - local amount=$4 - echo " calling transfer_balance:" - echo " runtime_para_endpoint: ${runtime_para_endpoint}" - echo " seed: ${seed}" - echo " target_account: ${target_account}" - echo " amount: ${amount}" - echo "--------------------------------------------------" - - call_polkadot_js_api \ - --ws "${runtime_para_endpoint}" \ - --seed "${seed?}" \ - tx.balances.transferAllowDeath \ - "${target_account}" \ - "${amount}" -} - -function send_governance_transact() { - local relay_url=$1 - local relay_chain_seed=$2 - local para_id=$3 - local hex_encoded_data=$4 - local require_weight_at_most_ref_time=$5 - local require_weight_at_most_proof_size=$6 - echo " calling send_governance_transact:" - echo " relay_url: ${relay_url}" - echo " relay_chain_seed: ${relay_chain_seed}" - echo " para_id: ${para_id}" - echo " hex_encoded_data: ${hex_encoded_data}" - echo " require_weight_at_most_ref_time: ${require_weight_at_most_ref_time}" - echo " require_weight_at_most_proof_size: ${require_weight_at_most_proof_size}" - echo " params:" - - local dest=$(jq --null-input \ - --arg para_id "$para_id" \ - '{ "V3": { "parents": 0, "interior": { "X1": { "Parachain": $para_id } } } }') - - local message=$(jq --null-input \ - --argjson hex_encoded_data $hex_encoded_data \ - --arg require_weight_at_most_ref_time "$require_weight_at_most_ref_time" \ - --arg require_weight_at_most_proof_size "$require_weight_at_most_proof_size" \ - ' - { - "V3": [ - { - "UnpaidExecution": { - "weight_limit": "Unlimited" - } - }, - { - "Transact": { - "origin_kind": "Superuser", - "require_weight_at_most": { - "ref_time": $require_weight_at_most_ref_time, - "proof_size": $require_weight_at_most_proof_size, - }, - "call": { - 
"encoded": $hex_encoded_data - } - } - } - ] - } - ') - - echo "" - echo " dest:" - echo "${dest}" - echo "" - echo " message:" - echo "${message}" - echo "" - echo "--------------------------------------------------" - - call_polkadot_js_api \ - --ws "${relay_url?}" \ - --seed "${relay_chain_seed?}" \ - --sudo \ - tx.xcmPallet.send \ - "${dest}" \ - "${message}" -} - -function open_hrmp_channels() { - local relay_url=$1 - local relay_chain_seed=$2 - local sender_para_id=$3 - local recipient_para_id=$4 - local max_capacity=$5 - local max_message_size=$6 - echo " calling open_hrmp_channels:" - echo " relay_url: ${relay_url}" - echo " relay_chain_seed: ${relay_chain_seed}" - echo " sender_para_id: ${sender_para_id}" - echo " recipient_para_id: ${recipient_para_id}" - echo " max_capacity: ${max_capacity}" - echo " max_message_size: ${max_message_size}" - echo " params:" - echo "--------------------------------------------------" - call_polkadot_js_api \ - --ws "${relay_url?}" \ - --seed "${relay_chain_seed?}" \ - --sudo \ - tx.hrmp.forceOpenHrmpChannel \ - ${sender_para_id} \ - ${recipient_para_id} \ - ${max_capacity} \ - ${max_message_size} -} - -function force_xcm_version() { - local relay_url=$1 - local relay_chain_seed=$2 - local runtime_para_id=$3 - local runtime_para_endpoint=$4 - local dest=$5 - local xcm_version=$6 - echo " calling force_xcm_version:" - echo " relay_url: ${relay_url}" - echo " relay_chain_seed: ${relay_chain_seed}" - echo " runtime_para_id: ${runtime_para_id}" - echo " runtime_para_endpoint: ${runtime_para_endpoint}" - echo " dest: ${dest}" - echo " xcm_version: ${xcm_version}" - echo " params:" - - # 1. generate data for Transact (PolkadotXcm::force_xcm_version) - local tmp_output_file=$(mktemp) - generate_hex_encoded_call_data "force-xcm-version" "${runtime_para_endpoint}" "${tmp_output_file}" "$dest" "$xcm_version" - local hex_encoded_data=$(cat $tmp_output_file) - - # 2. 
trigger governance call - send_governance_transact "${relay_url}" "${relay_chain_seed}" "${runtime_para_id}" "${hex_encoded_data}" 200000000 12000 -} - -function force_create_foreign_asset() { - local relay_url=$1 - local relay_chain_seed=$2 - local runtime_para_id=$3 - local runtime_para_endpoint=$4 - local asset_multilocation=$5 - local asset_owner_account_id=$6 - local min_balance=$7 - local is_sufficient=$8 - echo " calling force_create_foreign_asset:" - echo " relay_url: ${relay_url}" - echo " relay_chain_seed: ${relay_chain_seed}" - echo " runtime_para_id: ${runtime_para_id}" - echo " runtime_para_endpoint: ${runtime_para_endpoint}" - echo " asset_multilocation: ${asset_multilocation}" - echo " asset_owner_account_id: ${asset_owner_account_id}" - echo " min_balance: ${min_balance}" - echo " is_sufficient: ${is_sufficient}" - echo " params:" - - # 1. generate data for Transact (ForeignAssets::force_create) - local tmp_output_file=$(mktemp) - generate_hex_encoded_call_data "force-create-asset" "${runtime_para_endpoint}" "${tmp_output_file}" "$asset_multilocation" "$asset_owner_account_id" $is_sufficient $min_balance - local hex_encoded_data=$(cat $tmp_output_file) - - # 2. 
trigger governance call - send_governance_transact "${relay_url}" "${relay_chain_seed}" "${runtime_para_id}" "${hex_encoded_data}" 200000000 12000 -} - -function limited_reserve_transfer_assets() { - local url=$1 - local seed=$2 - local destination=$3 - local beneficiary=$4 - local assets=$5 - local fee_asset_item=$6 - local weight_limit=$7 - echo " calling limited_reserve_transfer_assets:" - echo " url: ${url}" - echo " seed: ${seed}" - echo " destination: ${destination}" - echo " beneficiary: ${beneficiary}" - echo " assets: ${assets}" - echo " fee_asset_item: ${fee_asset_item}" - echo " weight_limit: ${weight_limit}" - echo "" - echo "--------------------------------------------------" - - call_polkadot_js_api \ - --ws "${url?}" \ - --seed "${seed?}" \ - tx.polkadotXcm.limitedReserveTransferAssets \ - "${destination}" \ - "${beneficiary}" \ - "${assets}" \ - "${fee_asset_item}" \ - "${weight_limit}" -} - -function claim_rewards() { - local runtime_para_endpoint=$1 - local seed=$2 - local lane_id=$3 - local bridged_chain_id=$4 - local owner=$5 - echo " calling claim_rewards:" - echo " runtime_para_endpoint: ${runtime_para_endpoint}" - echo " seed: ${seed}" - echo " lane_id: ${lane_id}" - echo " bridged_chain_id: ${bridged_chain_id}" - echo " owner: ${owner}" - echo "" - - local rewards_account_params=$(jq --null-input \ - --arg lane_id "$lane_id" \ - --arg bridged_chain_id "$bridged_chain_id" \ - --arg owner "$owner" \ - '{ - "laneId": $lane_id, - "bridgedChainId": $bridged_chain_id, - "owner": $owner - }') - - echo " rewards_account_params:" - echo "${rewards_account_params}" - echo "--------------------------------------------------" - - call_polkadot_js_api \ - --ws "${runtime_para_endpoint}" \ - --seed "${seed?}" \ - tx.bridgeRelayers.claimRewards \ - "${rewards_account_params}" -} \ No newline at end of file diff --git a/testing/framework/utils/common.sh b/testing/framework/utils/common.sh deleted file mode 100644 index 06f41320b..000000000 --- 
a/testing/framework/utils/common.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash - -function start_background_process() { - local command=$1 - local log_file=$2 - local __pid=$3 - - $command > $log_file 2>&1 & - eval $__pid="'$!'" -} - -function wait_for_process_file() { - local pid=$1 - local file=$2 - local timeout=$3 - local __found=$4 - - local time=0 - until [ -e $file ]; do - if ! kill -0 $pid; then - echo "Process finished unsuccessfully" - return - fi - if (( time++ >= timeout )); then - echo "Timeout waiting for file $file: $timeout seconds" - eval $__found=0 - return - fi - sleep 1 - done - - echo "File $file found after $time seconds" - eval $__found=1 -} - -function ensure_process_file() { - local pid=$1 - local file=$2 - local timeout=$3 - - wait_for_process_file $pid $file $timeout file_found - if [ "$file_found" != "1" ]; then - exit 1 - fi -} diff --git a/testing/framework/utils/generate_hex_encoded_call/index.js b/testing/framework/utils/generate_hex_encoded_call/index.js deleted file mode 100644 index c8e361b25..000000000 --- a/testing/framework/utils/generate_hex_encoded_call/index.js +++ /dev/null @@ -1,165 +0,0 @@ -const fs = require("fs"); -const { exit } = require("process"); -const { WsProvider, ApiPromise } = require("@polkadot/api"); -const util = require("@polkadot/util"); - -// connect to a substrate chain and return the api object -async function connect(endpoint, types = {}) { - const provider = new WsProvider(endpoint); - const api = await ApiPromise.create({ - provider, - types, - throwOnConnect: false, - }); - return api; -} - -function writeHexEncodedBytesToOutput(method, outputFile) { - console.log("Payload (hex): ", method.toHex()); - console.log("Payload (bytes): ", Array.from(method.toU8a())); - console.log("Payload (plain): ", JSON.stringify(method)); - fs.writeFileSync(outputFile, JSON.stringify(Array.from(method.toU8a()))); -} - -function remarkWithEvent(endpoint, outputFile) { - console.log(`Generating remarkWithEvent from 
RPC endpoint: ${endpoint} to outputFile: ${outputFile}`); - connect(endpoint) - .then((api) => { - const call = api.tx.system.remarkWithEvent("Hello"); - writeHexEncodedBytesToOutput(call.method, outputFile); - exit(0); - }) - .catch((e) => { - console.error(e); - exit(1); - }); -} - -function addExporterConfig(endpoint, outputFile, bridgedNetwork, bridgeConfig) { - console.log(`Generating addExporterConfig from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on bridgedNetwork: ${bridgedNetwork}, bridgeConfig: ${bridgeConfig}`); - connect(endpoint) - .then((api) => { - const call = api.tx.bridgeTransfer.addExporterConfig(bridgedNetwork, JSON.parse(bridgeConfig)); - writeHexEncodedBytesToOutput(call.method, outputFile); - exit(0); - }) - .catch((e) => { - console.error(e); - exit(1); - }); -} - -function addUniversalAlias(endpoint, outputFile, location, junction) { - console.log(`Generating addUniversalAlias from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on location: ${location}, junction: ${junction}`); - connect(endpoint) - .then((api) => { - const call = api.tx.bridgeTransfer.addUniversalAlias(JSON.parse(location), JSON.parse(junction)); - writeHexEncodedBytesToOutput(call.method, outputFile); - exit(0); - }) - .catch((e) => { - console.error(e); - exit(1); - }); -} - -function addReserveLocation(endpoint, outputFile, reserve_location) { - console.log(`Generating addReserveLocation from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on reserve_location: ${reserve_location}`); - connect(endpoint) - .then((api) => { - const call = api.tx.bridgeTransfer.addReserveLocation(JSON.parse(reserve_location)); - writeHexEncodedBytesToOutput(call.method, outputFile); - exit(0); - }) - .catch((e) => { - console.error(e); - exit(1); - }); -} - -function removeExporterConfig(endpoint, outputFile, bridgedNetwork) { - console.log(`Generating removeExporterConfig from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on 
bridgedNetwork: ${bridgedNetwork}`); - connect(endpoint) - .then((api) => { - const call = api.tx.bridgeTransfer.removeExporterConfig(bridgedNetwork); - writeHexEncodedBytesToOutput(call.method, outputFile); - exit(0); - }) - .catch((e) => { - console.error(e); - exit(1); - }); -} - -function forceCreateAsset(endpoint, outputFile, assetId, assetOwnerAccountId, isSufficient, minBalance) { - var isSufficient = isSufficient == "true" ? true : false; - console.log(`Generating forceCreateAsset from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on assetId: ${assetId}, assetOwnerAccountId: ${assetOwnerAccountId}, isSufficient: ${isSufficient}, minBalance: ${minBalance}`); - connect(endpoint) - .then((api) => { - const call = api.tx.foreignAssets.forceCreate(JSON.parse(assetId), assetOwnerAccountId, isSufficient, minBalance); - writeHexEncodedBytesToOutput(call.method, outputFile); - exit(0); - }) - .catch((e) => { - console.error(e); - exit(1); - }); -} - -function forceXcmVersion(endpoint, outputFile, dest, xcm_version) { - console.log(`Generating forceXcmVersion from RPC endpoint: ${endpoint} to outputFile: ${outputFile}, dest: ${dest}, xcm_version: ${xcm_version}`); - connect(endpoint) - .then((api) => { - const call = api.tx.polkadotXcm.forceXcmVersion(JSON.parse(dest), xcm_version); - writeHexEncodedBytesToOutput(call.method, outputFile); - exit(0); - }) - .catch((e) => { - console.error(e); - exit(1); - }); -} - -if (!process.argv[2] || !process.argv[3]) { - console.log("usage: node ./script/generate_hex_encoded_call "); - exit(1); -} - -const type = process.argv[2]; -const rpcEndpoint = process.argv[3]; -const output = process.argv[4]; -const inputArgs = process.argv.slice(5, process.argv.length); -console.log(`Generating hex-encoded call data for:`); -console.log(` type: ${type}`); -console.log(` rpcEndpoint: ${rpcEndpoint}`); -console.log(` output: ${output}`); -console.log(` inputArgs: ${inputArgs}`); - -switch (type) { - case 'remark-with-event': 
- remarkWithEvent(rpcEndpoint, output); - break; - case 'add-exporter-config': - addExporterConfig(rpcEndpoint, output, inputArgs[0], inputArgs[1]); - break; - case 'remove-exporter-config': - removeExporterConfig(rpcEndpoint, output, inputArgs[0], inputArgs[1]); - break; - case 'add-universal-alias': - addUniversalAlias(rpcEndpoint, output, inputArgs[0], inputArgs[1]); - break; - case 'add-reserve-location': - addReserveLocation(rpcEndpoint, output, inputArgs[0]); - break; - case 'force-create-asset': - forceCreateAsset(rpcEndpoint, output, inputArgs[0], inputArgs[1], inputArgs[2], inputArgs[3]); - break; - case 'force-xcm-version': - forceXcmVersion(rpcEndpoint, output, inputArgs[0], inputArgs[1]); - break; - case 'check': - console.log(`Checking nodejs installation, if you see this everything is ready!`); - break; - default: - console.log(`Sorry, we are out of ${type} - not yet supported!`); -} diff --git a/testing/framework/utils/generate_hex_encoded_call/package-lock.json b/testing/framework/utils/generate_hex_encoded_call/package-lock.json deleted file mode 100644 index b2dddaa19..000000000 --- a/testing/framework/utils/generate_hex_encoded_call/package-lock.json +++ /dev/null @@ -1,759 +0,0 @@ -{ - "name": "y", - "version": "y", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "y", - "version": "y", - "license": "MIT", - "dependencies": { - "@polkadot/api": "^10.11", - "@polkadot/util": "^12.6" - } - }, - "node_modules/@noble/curves": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.3.0.tgz", - "integrity": "sha512-t01iSXPuN+Eqzb4eBX0S5oubSqXbK/xXa1Ne18Hj8f9pStxztHCE2gfboSp/dZRLSqfuLpRK2nDXDK+W9puocA==", - "dependencies": { - "@noble/hashes": "1.3.3" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/@noble/hashes": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.3.3.tgz", - "integrity": 
"sha512-V7/fPHgl+jsVPXqqeOzT8egNj2iBIVt+ECeMMG8TdcnTikP3oaBtUVqpT/gYCR68aEBJSF+XbYUxStjbFMqIIA==", - "engines": { - "node": ">= 16" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/@polkadot/api": { - "version": "10.11.2", - "resolved": "https://registry.npmjs.org/@polkadot/api/-/api-10.11.2.tgz", - "integrity": "sha512-AorCZxCWCoTtdbl4DPUZh+ACe/pbLIS1BkdQY0AFJuZllm0x/yWzjgampcPd5jQAA/O3iKShRBkZqj6Mk9yG/A==", - "dependencies": { - "@polkadot/api-augment": "10.11.2", - "@polkadot/api-base": "10.11.2", - "@polkadot/api-derive": "10.11.2", - "@polkadot/keyring": "^12.6.2", - "@polkadot/rpc-augment": "10.11.2", - "@polkadot/rpc-core": "10.11.2", - "@polkadot/rpc-provider": "10.11.2", - "@polkadot/types": "10.11.2", - "@polkadot/types-augment": "10.11.2", - "@polkadot/types-codec": "10.11.2", - "@polkadot/types-create": "10.11.2", - "@polkadot/types-known": "10.11.2", - "@polkadot/util": "^12.6.2", - "@polkadot/util-crypto": "^12.6.2", - "eventemitter3": "^5.0.1", - "rxjs": "^7.8.1", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/api-augment": { - "version": "10.11.2", - "resolved": "https://registry.npmjs.org/@polkadot/api-augment/-/api-augment-10.11.2.tgz", - "integrity": "sha512-PTpnqpezc75qBqUtgrc0GYB8h9UHjfbHSRZamAbecIVAJ2/zc6CqtnldeaBlIu1IKTgBzi3FFtTyYu+ZGbNT2Q==", - "dependencies": { - "@polkadot/api-base": "10.11.2", - "@polkadot/rpc-augment": "10.11.2", - "@polkadot/types": "10.11.2", - "@polkadot/types-augment": "10.11.2", - "@polkadot/types-codec": "10.11.2", - "@polkadot/util": "^12.6.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/api-base": { - "version": "10.11.2", - "resolved": "https://registry.npmjs.org/@polkadot/api-base/-/api-base-10.11.2.tgz", - "integrity": "sha512-4LIjaUfO9nOzilxo7XqzYKCNMtmUypdk8oHPdrRnSjKEsnK7vDsNi+979z2KXNXd2KFSCFHENmI523fYnMnReg==", - "dependencies": { - "@polkadot/rpc-core": "10.11.2", - 
"@polkadot/types": "10.11.2", - "@polkadot/util": "^12.6.2", - "rxjs": "^7.8.1", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/api-derive": { - "version": "10.11.2", - "resolved": "https://registry.npmjs.org/@polkadot/api-derive/-/api-derive-10.11.2.tgz", - "integrity": "sha512-m3BQbPionkd1iSlknddxnL2hDtolPIsT+aRyrtn4zgMRPoLjHFmTmovvg8RaUyYofJtZeYrnjMw0mdxiSXx7eA==", - "dependencies": { - "@polkadot/api": "10.11.2", - "@polkadot/api-augment": "10.11.2", - "@polkadot/api-base": "10.11.2", - "@polkadot/rpc-core": "10.11.2", - "@polkadot/types": "10.11.2", - "@polkadot/types-codec": "10.11.2", - "@polkadot/util": "^12.6.2", - "@polkadot/util-crypto": "^12.6.2", - "rxjs": "^7.8.1", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/keyring": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/keyring/-/keyring-12.6.2.tgz", - "integrity": "sha512-O3Q7GVmRYm8q7HuB3S0+Yf/q/EB2egKRRU3fv9b3B7V+A52tKzA+vIwEmNVaD1g5FKW9oB97rmpggs0zaKFqHw==", - "dependencies": { - "@polkadot/util": "12.6.2", - "@polkadot/util-crypto": "12.6.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@polkadot/util": "12.6.2", - "@polkadot/util-crypto": "12.6.2" - } - }, - "node_modules/@polkadot/networks": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/networks/-/networks-12.6.2.tgz", - "integrity": "sha512-1oWtZm1IvPWqvMrldVH6NI2gBoCndl5GEwx7lAuQWGr7eNL+6Bdc5K3Z9T0MzFvDGoi2/CBqjX9dRKo39pDC/w==", - "dependencies": { - "@polkadot/util": "12.6.2", - "@substrate/ss58-registry": "^1.44.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/rpc-augment": { - "version": "10.11.2", - "resolved": "https://registry.npmjs.org/@polkadot/rpc-augment/-/rpc-augment-10.11.2.tgz", - "integrity": "sha512-9AhT0WW81/8jYbRcAC6PRmuxXqNhJje8OYiulBQHbG1DTCcjAfz+6VQBke9BwTStzPq7d526+yyBKD17O3zlAA==", - 
"dependencies": { - "@polkadot/rpc-core": "10.11.2", - "@polkadot/types": "10.11.2", - "@polkadot/types-codec": "10.11.2", - "@polkadot/util": "^12.6.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/rpc-core": { - "version": "10.11.2", - "resolved": "https://registry.npmjs.org/@polkadot/rpc-core/-/rpc-core-10.11.2.tgz", - "integrity": "sha512-Ot0CFLWx8sZhLZog20WDuniPA01Bk2StNDsdAQgcFKPwZw6ShPaZQCHuKLQK6I6DodOrem9FXX7c1hvoKJP5Ww==", - "dependencies": { - "@polkadot/rpc-augment": "10.11.2", - "@polkadot/rpc-provider": "10.11.2", - "@polkadot/types": "10.11.2", - "@polkadot/util": "^12.6.2", - "rxjs": "^7.8.1", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/rpc-provider": { - "version": "10.11.2", - "resolved": "https://registry.npmjs.org/@polkadot/rpc-provider/-/rpc-provider-10.11.2.tgz", - "integrity": "sha512-he5jWMpDJp7e+vUzTZDzpkB7ps3H8psRally+/ZvZZScPvFEjfczT7I1WWY9h58s8+ImeVP/lkXjL9h/gUOt3Q==", - "dependencies": { - "@polkadot/keyring": "^12.6.2", - "@polkadot/types": "10.11.2", - "@polkadot/types-support": "10.11.2", - "@polkadot/util": "^12.6.2", - "@polkadot/util-crypto": "^12.6.2", - "@polkadot/x-fetch": "^12.6.2", - "@polkadot/x-global": "^12.6.2", - "@polkadot/x-ws": "^12.6.2", - "eventemitter3": "^5.0.1", - "mock-socket": "^9.3.1", - "nock": "^13.4.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - }, - "optionalDependencies": { - "@substrate/connect": "0.7.35" - } - }, - "node_modules/@polkadot/types": { - "version": "10.11.2", - "resolved": "https://registry.npmjs.org/@polkadot/types/-/types-10.11.2.tgz", - "integrity": "sha512-d52j3xXni+C8GdYZVTSfu8ROAnzXFMlyRvXtor0PudUc8UQHOaC4+mYAkTBGA2gKdmL8MHSfRSbhcxHhsikY6Q==", - "dependencies": { - "@polkadot/keyring": "^12.6.2", - "@polkadot/types-augment": "10.11.2", - "@polkadot/types-codec": "10.11.2", - "@polkadot/types-create": "10.11.2", - "@polkadot/util": "^12.6.2", - "@polkadot/util-crypto": 
"^12.6.2", - "rxjs": "^7.8.1", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/types-augment": { - "version": "10.11.2", - "resolved": "https://registry.npmjs.org/@polkadot/types-augment/-/types-augment-10.11.2.tgz", - "integrity": "sha512-8eB8ew04wZiE5GnmFvEFW1euJWmF62SGxb1O+8wL3zoUtB9Xgo1vB6w6xbTrd+HLV6jNSeXXnbbF1BEUvi9cNg==", - "dependencies": { - "@polkadot/types": "10.11.2", - "@polkadot/types-codec": "10.11.2", - "@polkadot/util": "^12.6.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/types-codec": { - "version": "10.11.2", - "resolved": "https://registry.npmjs.org/@polkadot/types-codec/-/types-codec-10.11.2.tgz", - "integrity": "sha512-3xjOQL+LOOMzYqlgP9ROL0FQnzU8lGflgYewzau7AsDlFziSEtb49a9BpYo6zil4koC+QB8zQ9OHGFumG08T8w==", - "dependencies": { - "@polkadot/util": "^12.6.2", - "@polkadot/x-bigint": "^12.6.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/types-create": { - "version": "10.11.2", - "resolved": "https://registry.npmjs.org/@polkadot/types-create/-/types-create-10.11.2.tgz", - "integrity": "sha512-SJt23NxYvefRxVZZm6mT9ed1pR6FDoIGQ3xUpbjhTLfU2wuhpKjekMVorYQ6z/gK2JLMu2kV92Ardsz+6GX5XQ==", - "dependencies": { - "@polkadot/types-codec": "10.11.2", - "@polkadot/util": "^12.6.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/types-known": { - "version": "10.11.2", - "resolved": "https://registry.npmjs.org/@polkadot/types-known/-/types-known-10.11.2.tgz", - "integrity": "sha512-kbEIX7NUQFxpDB0FFGNyXX/odY7jbp56RGD+Z4A731fW2xh/DgAQrI994xTzuh0c0EqPE26oQm3kATSpseqo9w==", - "dependencies": { - "@polkadot/networks": "^12.6.2", - "@polkadot/types": "10.11.2", - "@polkadot/types-codec": "10.11.2", - "@polkadot/types-create": "10.11.2", - "@polkadot/util": "^12.6.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/types-support": { - 
"version": "10.11.2", - "resolved": "https://registry.npmjs.org/@polkadot/types-support/-/types-support-10.11.2.tgz", - "integrity": "sha512-X11hoykFYv/3efg4coZy2hUOUc97JhjQMJLzDhHniFwGLlYU8MeLnPdCVGkXx0xDDjTo4/ptS1XpZ5HYcg+gRw==", - "dependencies": { - "@polkadot/util": "^12.6.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/util": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/util/-/util-12.6.2.tgz", - "integrity": "sha512-l8TubR7CLEY47240uki0TQzFvtnxFIO7uI/0GoWzpYD/O62EIAMRsuY01N4DuwgKq2ZWD59WhzsLYmA5K6ksdw==", - "dependencies": { - "@polkadot/x-bigint": "12.6.2", - "@polkadot/x-global": "12.6.2", - "@polkadot/x-textdecoder": "12.6.2", - "@polkadot/x-textencoder": "12.6.2", - "@types/bn.js": "^5.1.5", - "bn.js": "^5.2.1", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/util-crypto": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/util-crypto/-/util-crypto-12.6.2.tgz", - "integrity": "sha512-FEWI/dJ7wDMNN1WOzZAjQoIcCP/3vz3wvAp5QQm+lOrzOLj0iDmaIGIcBkz8HVm3ErfSe/uKP0KS4jgV/ib+Mg==", - "dependencies": { - "@noble/curves": "^1.3.0", - "@noble/hashes": "^1.3.3", - "@polkadot/networks": "12.6.2", - "@polkadot/util": "12.6.2", - "@polkadot/wasm-crypto": "^7.3.2", - "@polkadot/wasm-util": "^7.3.2", - "@polkadot/x-bigint": "12.6.2", - "@polkadot/x-randomvalues": "12.6.2", - "@scure/base": "^1.1.5", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@polkadot/util": "12.6.2" - } - }, - "node_modules/@polkadot/wasm-bridge": { - "version": "7.3.2", - "resolved": "https://registry.npmjs.org/@polkadot/wasm-bridge/-/wasm-bridge-7.3.2.tgz", - "integrity": "sha512-AJEXChcf/nKXd5Q/YLEV5dXQMle3UNT7jcXYmIffZAo/KI394a+/24PaISyQjoNC0fkzS1Q8T5pnGGHmXiVz2g==", - "dependencies": { - "@polkadot/wasm-util": "7.3.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - 
"@polkadot/util": "*", - "@polkadot/x-randomvalues": "*" - } - }, - "node_modules/@polkadot/wasm-crypto": { - "version": "7.3.2", - "resolved": "https://registry.npmjs.org/@polkadot/wasm-crypto/-/wasm-crypto-7.3.2.tgz", - "integrity": "sha512-+neIDLSJ6jjVXsjyZ5oLSv16oIpwp+PxFqTUaZdZDoA2EyFRQB8pP7+qLsMNk+WJuhuJ4qXil/7XiOnZYZ+wxw==", - "dependencies": { - "@polkadot/wasm-bridge": "7.3.2", - "@polkadot/wasm-crypto-asmjs": "7.3.2", - "@polkadot/wasm-crypto-init": "7.3.2", - "@polkadot/wasm-crypto-wasm": "7.3.2", - "@polkadot/wasm-util": "7.3.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@polkadot/util": "*", - "@polkadot/x-randomvalues": "*" - } - }, - "node_modules/@polkadot/wasm-crypto-asmjs": { - "version": "7.3.2", - "resolved": "https://registry.npmjs.org/@polkadot/wasm-crypto-asmjs/-/wasm-crypto-asmjs-7.3.2.tgz", - "integrity": "sha512-QP5eiUqUFur/2UoF2KKKYJcesc71fXhQFLT3D4ZjG28Mfk2ZPI0QNRUfpcxVQmIUpV5USHg4geCBNuCYsMm20Q==", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@polkadot/util": "*" - } - }, - "node_modules/@polkadot/wasm-crypto-init": { - "version": "7.3.2", - "resolved": "https://registry.npmjs.org/@polkadot/wasm-crypto-init/-/wasm-crypto-init-7.3.2.tgz", - "integrity": "sha512-FPq73zGmvZtnuJaFV44brze3Lkrki3b4PebxCy9Fplw8nTmisKo9Xxtfew08r0njyYh+uiJRAxPCXadkC9sc8g==", - "dependencies": { - "@polkadot/wasm-bridge": "7.3.2", - "@polkadot/wasm-crypto-asmjs": "7.3.2", - "@polkadot/wasm-crypto-wasm": "7.3.2", - "@polkadot/wasm-util": "7.3.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@polkadot/util": "*", - "@polkadot/x-randomvalues": "*" - } - }, - "node_modules/@polkadot/wasm-crypto-wasm": { - "version": "7.3.2", - "resolved": "https://registry.npmjs.org/@polkadot/wasm-crypto-wasm/-/wasm-crypto-wasm-7.3.2.tgz", - "integrity": 
"sha512-15wd0EMv9IXs5Abp1ZKpKKAVyZPhATIAHfKsyoWCEFDLSOA0/K0QGOxzrAlsrdUkiKZOq7uzSIgIDgW8okx2Mw==", - "dependencies": { - "@polkadot/wasm-util": "7.3.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@polkadot/util": "*" - } - }, - "node_modules/@polkadot/wasm-util": { - "version": "7.3.2", - "resolved": "https://registry.npmjs.org/@polkadot/wasm-util/-/wasm-util-7.3.2.tgz", - "integrity": "sha512-bmD+Dxo1lTZyZNxbyPE380wd82QsX+43mgCm40boyKrRppXEyQmWT98v/Poc7chLuskYb6X8IQ6lvvK2bGR4Tg==", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@polkadot/util": "*" - } - }, - "node_modules/@polkadot/x-bigint": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/x-bigint/-/x-bigint-12.6.2.tgz", - "integrity": "sha512-HSIk60uFPX4GOFZSnIF7VYJz7WZA7tpFJsne7SzxOooRwMTWEtw3fUpFy5cYYOeLh17/kHH1Y7SVcuxzVLc74Q==", - "dependencies": { - "@polkadot/x-global": "12.6.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/x-fetch": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/x-fetch/-/x-fetch-12.6.2.tgz", - "integrity": "sha512-8wM/Z9JJPWN1pzSpU7XxTI1ldj/AfC8hKioBlUahZ8gUiJaOF7K9XEFCrCDLis/A1BoOu7Ne6WMx/vsJJIbDWw==", - "dependencies": { - "@polkadot/x-global": "12.6.2", - "node-fetch": "^3.3.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/x-global": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/x-global/-/x-global-12.6.2.tgz", - "integrity": "sha512-a8d6m+PW98jmsYDtAWp88qS4dl8DyqUBsd0S+WgyfSMtpEXu6v9nXDgPZgwF5xdDvXhm+P0ZfVkVTnIGrScb5g==", - "dependencies": { - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/x-randomvalues": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/x-randomvalues/-/x-randomvalues-12.6.2.tgz", - "integrity": 
"sha512-Vr8uG7rH2IcNJwtyf5ebdODMcr0XjoCpUbI91Zv6AlKVYOGKZlKLYJHIwpTaKKB+7KPWyQrk4Mlym/rS7v9feg==", - "dependencies": { - "@polkadot/x-global": "12.6.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "@polkadot/util": "12.6.2", - "@polkadot/wasm-util": "*" - } - }, - "node_modules/@polkadot/x-textdecoder": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/x-textdecoder/-/x-textdecoder-12.6.2.tgz", - "integrity": "sha512-M1Bir7tYvNappfpFWXOJcnxUhBUFWkUFIdJSyH0zs5LmFtFdbKAeiDXxSp2Swp5ddOZdZgPac294/o2TnQKN1w==", - "dependencies": { - "@polkadot/x-global": "12.6.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/x-textencoder": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/x-textencoder/-/x-textencoder-12.6.2.tgz", - "integrity": "sha512-4N+3UVCpI489tUJ6cv3uf0PjOHvgGp9Dl+SZRLgFGt9mvxnvpW/7+XBADRMtlG4xi5gaRK7bgl5bmY6OMDsNdw==", - "dependencies": { - "@polkadot/x-global": "12.6.2", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@polkadot/x-ws": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@polkadot/x-ws/-/x-ws-12.6.2.tgz", - "integrity": "sha512-cGZWo7K5eRRQCRl2LrcyCYsrc3lRbTlixZh3AzgU8uX4wASVGRlNWi/Hf4TtHNe1ExCDmxabJzdIsABIfrr7xw==", - "dependencies": { - "@polkadot/x-global": "12.6.2", - "tslib": "^2.6.2", - "ws": "^8.15.1" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@scure/base": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/@scure/base/-/base-1.1.5.tgz", - "integrity": "sha512-Brj9FiG2W1MRQSTB212YVPRrcbjkv48FoZi/u4l/zds/ieRrqsh7aUf6CLwkAq61oKXr/ZlTzlY66gLIj3TFTQ==", - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/@substrate/connect": { - "version": "0.7.35", - "resolved": "https://registry.npmjs.org/@substrate/connect/-/connect-0.7.35.tgz", - "integrity": 
"sha512-Io8vkalbwaye+7yXfG1Nj52tOOoJln2bMlc7Q9Yy3vEWqZEVkgKmcPVzbwV0CWL3QD+KMPDA2Dnw/X7EdwgoLw==", - "hasInstallScript": true, - "optional": true, - "dependencies": { - "@substrate/connect-extension-protocol": "^1.0.1", - "smoldot": "2.0.7" - } - }, - "node_modules/@substrate/connect-extension-protocol": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@substrate/connect-extension-protocol/-/connect-extension-protocol-1.0.1.tgz", - "integrity": "sha512-161JhCC1csjH3GE5mPLEd7HbWtwNSPJBg3p1Ksz9SFlTzj/bgEwudiRN2y5i0MoLGCIJRYKyKGMxVnd29PzNjg==", - "optional": true - }, - "node_modules/@substrate/ss58-registry": { - "version": "1.44.0", - "resolved": "https://registry.npmjs.org/@substrate/ss58-registry/-/ss58-registry-1.44.0.tgz", - "integrity": "sha512-7lQ/7mMCzVNSEfDS4BCqnRnKCFKpcOaPrxMeGTXHX1YQzM/m2BBHjbK2C3dJvjv7GYxMiaTq/HdWQj1xS6ss+A==" - }, - "node_modules/@types/bn.js": { - "version": "5.1.5", - "resolved": "https://registry.npmjs.org/@types/bn.js/-/bn.js-5.1.5.tgz", - "integrity": "sha512-V46N0zwKRF5Q00AZ6hWtN0T8gGmDUaUzLWQvHFo5yThtVwK/VCenFY3wXVbOvNfajEpsTfQM4IN9k/d6gUVX3A==", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/node": { - "version": "20.10.5", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.10.5.tgz", - "integrity": "sha512-nNPsNE65wjMxEKI93yOP+NPGGBJz/PoN3kZsVLee0XMiJolxSekEVD8wRwBUBqkwc7UWop0edW50yrCQW4CyRw==", - "dependencies": { - "undici-types": "~5.26.4" - } - }, - "node_modules/bn.js": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.2.1.tgz", - "integrity": "sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==" - }, - "node_modules/data-uri-to-buffer": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", - "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", - "engines": { - "node": ">= 
12" - } - }, - "node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/eventemitter3": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", - "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==" - }, - "node_modules/fetch-blob": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", - "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/jimmywarting" - }, - { - "type": "paypal", - "url": "https://paypal.me/jimmywarting" - } - ], - "dependencies": { - "node-domexception": "^1.0.0", - "web-streams-polyfill": "^3.0.3" - }, - "engines": { - "node": "^12.20 || >= 14.13" - } - }, - "node_modules/formdata-polyfill": { - "version": "4.0.10", - "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", - "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", - "dependencies": { - "fetch-blob": "^3.1.2" - }, - "engines": { - "node": ">=12.20.0" - } - }, - "node_modules/json-stringify-safe": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==" - }, - "node_modules/mock-socket": { - "version": "9.3.1", - "resolved": "https://registry.npmjs.org/mock-socket/-/mock-socket-9.3.1.tgz", 
- "integrity": "sha512-qxBgB7Qa2sEQgHFjj0dSigq7fX4k6Saisd5Nelwp2q8mlbAFh5dHV9JTTlF8viYJLSSWgMCZFUom8PJcMNBoJw==", - "engines": { - "node": ">= 8" - } - }, - "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node_modules/nock": { - "version": "13.4.0", - "resolved": "https://registry.npmjs.org/nock/-/nock-13.4.0.tgz", - "integrity": "sha512-W8NVHjO/LCTNA64yxAPHV/K47LpGYcVzgKd3Q0n6owhwvD0Dgoterc25R4rnZbckJEb6Loxz1f5QMuJpJnbSyQ==", - "dependencies": { - "debug": "^4.1.0", - "json-stringify-safe": "^5.0.1", - "propagate": "^2.0.0" - }, - "engines": { - "node": ">= 10.13" - } - }, - "node_modules/node-domexception": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", - "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/jimmywarting" - }, - { - "type": "github", - "url": "https://paypal.me/jimmywarting" - } - ], - "engines": { - "node": ">=10.5.0" - } - }, - "node_modules/node-fetch": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", - "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", - "dependencies": { - "data-uri-to-buffer": "^4.0.0", - "fetch-blob": "^3.1.4", - "formdata-polyfill": "^4.0.10" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/node-fetch" - } - }, - "node_modules/propagate": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz", - "integrity": 
"sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==", - "engines": { - "node": ">= 8" - } - }, - "node_modules/rxjs": { - "version": "7.8.1", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", - "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", - "dependencies": { - "tslib": "^2.1.0" - } - }, - "node_modules/smoldot": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/smoldot/-/smoldot-2.0.7.tgz", - "integrity": "sha512-VAOBqEen6vises36/zgrmAT1GWk2qE3X8AGnO7lmQFdskbKx8EovnwS22rtPAG+Y1Rk23/S22kDJUdPANyPkBA==", - "optional": true, - "dependencies": { - "ws": "^8.8.1" - } - }, - "node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" - }, - "node_modules/undici-types": { - "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" - }, - "node_modules/web-streams-polyfill": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz", - "integrity": "sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==", - "engines": { - "node": ">= 8" - } - }, - "node_modules/ws": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.16.0.tgz", - "integrity": "sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ==", - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - } - 
} -} diff --git a/testing/framework/utils/generate_hex_encoded_call/package.json b/testing/framework/utils/generate_hex_encoded_call/package.json deleted file mode 100644 index ecf0a2483..000000000 --- a/testing/framework/utils/generate_hex_encoded_call/package.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "name": "y", - "version": "y", - "description": "create a scale hex-encoded call values from given message", - "main": "index.js", - "license": "MIT", - "dependencies": { - "@polkadot/api": "^10.11", - "@polkadot/util": "^12.6" - } -} diff --git a/testing/framework/utils/zombienet.sh b/testing/framework/utils/zombienet.sh deleted file mode 100644 index bbcd1a306..000000000 --- a/testing/framework/utils/zombienet.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/bin/bash - -source "${BASH_SOURCE%/*}/common.sh" - -function start_zombienet() { - local test_dir=$1 - local definition_path=$2 - local __zombienet_dir=$3 - local __zombienet_pid=$4 - - local zombienet_name=`basename $definition_path .toml` - local zombienet_dir=$test_dir/$zombienet_name - eval $__zombienet_dir="'$zombienet_dir'" - mkdir -p $zombienet_dir - rm -rf $zombienet_dir - - local logs_dir=$test_dir/logs - mkdir -p $logs_dir - local zombienet_log=$logs_dir/$zombienet_name.log - - echo "Starting $zombienet_name zombienet. Logs available at: $zombienet_log" - start_background_process \ - "$ZOMBIENET_BINARY spawn --dir $zombienet_dir --provider native $definition_path" \ - "$zombienet_log" zombienet_pid - - ensure_process_file $zombienet_pid "$zombienet_dir/zombie.json" 180 - echo "$zombienet_name zombienet started successfully" - - eval $__zombienet_pid="'$zombienet_pid'" -} - -function run_zndsl() { - local zndsl_file=$1 - local zombienet_dir=$2 - - echo "Running $zndsl_file." 
- $ZOMBIENET_BINARY test --dir $zombienet_dir --provider native $zndsl_file $zombienet_dir/zombie.json - echo -} diff --git a/testing/run-new-test.sh b/testing/run-new-test.sh deleted file mode 100755 index 7c84a69aa..000000000 --- a/testing/run-new-test.sh +++ /dev/null @@ -1,48 +0,0 @@ -#!/bin/bash - -set -e - -trap 'kill -9 -$$ || echo "Environment already teared down"' SIGINT SIGTERM EXIT - -test=$1 -shift - -# whether to use paths for zombienet+bridges tests container or for local testing -ZOMBIENET_DOCKER_PATHS=0 -while [ $# -ne 0 ] -do - arg="$1" - case "$arg" in - --docker) - ZOMBIENET_DOCKER_PATHS=1 - ;; - esac - shift -done - -export POLKADOT_SDK_PATH=`realpath ${BASH_SOURCE%/*}/../..` -export FRAMEWORK_PATH=`realpath ${BASH_SOURCE%/*}/framework` - -# set path to binaries -if [ "$ZOMBIENET_DOCKER_PATHS" -eq 1 ]; then - # otherwise zombienet uses some hardcoded paths - unset RUN_IN_CONTAINER - unset ZOMBIENET_IMAGE - - export POLKADOT_BINARY=/usr/local/bin/polkadot - export POLKADOT_PARACHAIN_BINARY=/usr/local/bin/polkadot-parachain - - export ZOMBIENET_BINARY=/usr/local/bin/zombie - export SUBSTRATE_RELAY_BINARY=/usr/local/bin/substrate-relay -else - export POLKADOT_BINARY=$POLKADOT_SDK_PATH/target/release/polkadot - export POLKADOT_PARACHAIN_BINARY=$POLKADOT_SDK_PATH/target/release/polkadot-parachain - - export ZOMBIENET_BINARY=~/local_bridge_testing/bin/zombienet-linux-x64 - export SUBSTRATE_RELAY_BINARY=~/local_bridge_testing/bin/substrate-relay -fi - -export TEST_DIR=`mktemp -d /tmp/bridges-tests-run-XXXXX` -echo -e "Test folder: $TEST_DIR\n" - -${BASH_SOURCE%/*}/tests/$test/run.sh diff --git a/testing/run-tests.sh b/testing/run-tests.sh deleted file mode 100755 index fd12b57f5..000000000 --- a/testing/run-tests.sh +++ /dev/null @@ -1,138 +0,0 @@ -#!/bin/bash -set -x -shopt -s nullglob - -trap "trap - SIGINT SIGTERM EXIT && killall -q -9 substrate-relay && kill -- -$$" SIGINT SIGTERM EXIT - -# run tests in range [TESTS_BEGIN; TESTS_END) -TESTS_BEGIN=1 
-TESTS_END=1000 -# whether to use paths for zombienet+bridges tests container or for local testing -ZOMBIENET_DOCKER_PATHS=0 -while [ $# -ne 0 ] -do - arg="$1" - case "$arg" in - --docker) - ZOMBIENET_DOCKER_PATHS=1 - ;; - --test) - shift - TESTS_BEGIN="$1" - TESTS_END="$1" - ;; - esac - shift -done - -# assuming that we'll be using native provide && all processes will be executing locally -# (we need absolute paths here, because they're used when scripts are called by zombienet from tmp folders) -export POLKADOT_SDK_PATH=`realpath $(dirname "$0")/../..` -export BRIDGE_TESTS_FOLDER=$POLKADOT_SDK_PATH/bridges/testing/tests - -# set path to binaries -if [ "$ZOMBIENET_DOCKER_PATHS" -eq 1 ]; then - export POLKADOT_BINARY=/usr/local/bin/polkadot - export POLKADOT_PARACHAIN_BINARY=/usr/local/bin/polkadot-parachain - - export SUBSTRATE_RELAY_BINARY=/usr/local/bin/substrate-relay - export ZOMBIENET_BINARY_PATH=/usr/local/bin/zombie -else - export POLKADOT_BINARY=$POLKADOT_SDK_PATH/target/release/polkadot - export POLKADOT_PARACHAIN_BINARY=$POLKADOT_SDK_PATH/target/release/polkadot-parachain - - export SUBSTRATE_RELAY_BINARY=~/local_bridge_testing/bin/substrate-relay - export ZOMBIENET_BINARY_PATH=~/local_bridge_testing/bin/zombienet-linux -fi - -# check if `wait` supports -p flag -if [ `printf "$BASH_VERSION\n5.1" | sort -V | head -n 1` = "5.1" ]; then IS_BASH_5_1=1; else IS_BASH_5_1=0; fi - -# bridge configuration -export LANE_ID="00000002" - -# tests configuration -ALL_TESTS_FOLDER=`mktemp -d /tmp/bridges-zombienet-tests.XXXXX` - -function start_coproc() { - local command=$1 - local name=$2 - local logname=`basename $name` - local coproc_log=`mktemp -p $TEST_FOLDER $logname.XXXXX` - coproc COPROC { - # otherwise zombienet uses some hardcoded paths - unset RUN_IN_CONTAINER - unset ZOMBIENET_IMAGE - - $command >$coproc_log 2>&1 - } - TEST_COPROCS[$COPROC_PID, 0]=$name - TEST_COPROCS[$COPROC_PID, 1]=$coproc_log - echo "Spawned $name coprocess. 
StdOut + StdErr: $coproc_log" - - return $COPROC_PID -} - -# execute every test from tests folder -TEST_INDEX=$TESTS_BEGIN -while true -do - declare -A TEST_COPROCS - TEST_COPROCS_COUNT=0 - TEST_PREFIX=$(printf "%04d" $TEST_INDEX) - - # it'll be used by the `sync-exit.sh` script - export TEST_FOLDER=`mktemp -d -p $ALL_TESTS_FOLDER test-$TEST_PREFIX.XXXXX` - - # check if there are no more tests - zndsl_files=($BRIDGE_TESTS_FOLDER/$TEST_PREFIX-*.zndsl) - if [ ${#zndsl_files[@]} -eq 0 ]; then - break - fi - - # start tests - for zndsl_file in "${zndsl_files[@]}"; do - start_coproc "$ZOMBIENET_BINARY_PATH --provider native test $zndsl_file" "$zndsl_file" - echo -n "1">>$TEST_FOLDER/exit-sync - ((TEST_COPROCS_COUNT++)) - done - # wait until all tests are completed - for n in `seq 1 $TEST_COPROCS_COUNT`; do - if [ "$IS_BASH_5_1" -eq 1 ]; then - wait -n -p COPROC_PID - exit_code=$? - coproc_name=${TEST_COPROCS[$COPROC_PID, 0]} - coproc_log=${TEST_COPROCS[$COPROC_PID, 1]} - coproc_stdout=$(cat $coproc_log) - else - wait -n - exit_code=$? - coproc_name="" - coproc_stdout="" - fi - echo "Process $coproc_name has finished with exit code: $exit_code" - - # if exit code is not zero, exit - if [ $exit_code -ne 0 ]; then - echo "=====================================================================" - echo "=== Shutting down. 
Log of failed process below ===" - echo "=====================================================================" - echo "$coproc_stdout" - - exit 1 - fi - done - - # proceed to next index - ((TEST_INDEX++)) - if [ "$TEST_INDEX" -ge "$TESTS_END" ]; then - break - fi - - # kill relay here - it is started manually by tests - killall substrate-relay -done - -echo "=====================================================================" -echo "=== All tests have completed successfully ===" -echo "=====================================================================" diff --git a/testing/scripts/invoke-script.sh b/testing/scripts/invoke-script.sh deleted file mode 100755 index cd0557b07..000000000 --- a/testing/scripts/invoke-script.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -INVOKE_LOG=`mktemp -p $TEST_FOLDER invoke.XXXXX` - -pushd $POLKADOT_SDK_PATH/bridges/testing/environments/rococo-westend -./bridges_rococo_westend.sh $1 >$INVOKE_LOG 2>&1 -popd diff --git a/testing/scripts/start-relayer.sh b/testing/scripts/start-relayer.sh deleted file mode 100755 index 38ea62fad..000000000 --- a/testing/scripts/start-relayer.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -RELAY_LOG=`mktemp -p $TEST_FOLDER relay.XXXXX` - -pushd $POLKADOT_SDK_PATH/bridges/testing/environments/rococo-westend -./bridges_rococo_westend.sh run-relay >$RELAY_LOG 2>&1& -popd diff --git a/testing/scripts/sync-exit.sh b/testing/scripts/sync-exit.sh deleted file mode 100755 index cc20b098e..000000000 --- a/testing/scripts/sync-exit.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash -set -e - -# every network adds a char to the file, let's remove ours -truncate -s -1 $TEST_FOLDER/exit-sync - -# when all chars are removed, then our test is done -while true -do - if [ `stat --printf="%s" $TEST_FOLDER/exit-sync` -eq 0 ]; then - exit - fi - sleep 100 -done diff --git a/testing/tests/0001-asset-transfer/roc-reaches-westend.zndsl b/testing/tests/0001-asset-transfer/roc-reaches-westend.zndsl deleted file mode 100644 
index cdb7d28e9..000000000 --- a/testing/tests/0001-asset-transfer/roc-reaches-westend.zndsl +++ /dev/null @@ -1,12 +0,0 @@ -Description: User is able to transfer ROC from Rococo Asset Hub to Westend Asset Hub and back -Network: {{ENV_PATH}}/bridge_hub_westend_local_network.toml -Creds: config - -# send 5 ROC to //Alice from Rococo AH to Westend AH -asset-hub-westend-collator1: run {{ENV_PATH}}/helper.sh with "reserve-transfer-assets-from-asset-hub-rococo-local 5000000000000" within 120 seconds - -# check that //Alice received at least 4.8 ROC on Westend AH -asset-hub-westend-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/wrapped-assets-balance.js with "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY,4800000000000,Rococo" within 600 seconds - -# check that the relayer //Charlie is rewarded by Westend AH -bridge-hub-westend-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/relayer-rewards.js with "5FLSigC9HGRKVhB9FiEo4Y3koPsNmBmLJbpXg2mp1hXcS59Y,0x00000002,0x6268726F,ThisChain,0" within 30 seconds diff --git a/testing/tests/0001-asset-transfer/run.sh b/testing/tests/0001-asset-transfer/run.sh deleted file mode 100755 index a7bb12291..000000000 --- a/testing/tests/0001-asset-transfer/run.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash - -set -e - -source "${BASH_SOURCE%/*}/../../framework/utils/common.sh" -source "${BASH_SOURCE%/*}/../../framework/utils/zombienet.sh" - -export ENV_PATH=`realpath ${BASH_SOURCE%/*}/../../environments/rococo-westend` - -$ENV_PATH/spawn.sh --init --start-relayer & -env_pid=$! 
- -ensure_process_file $env_pid $TEST_DIR/rococo.env 600 -rococo_dir=`cat $TEST_DIR/rococo.env` -echo - -ensure_process_file $env_pid $TEST_DIR/westend.env 300 -westend_dir=`cat $TEST_DIR/westend.env` -echo - -run_zndsl ${BASH_SOURCE%/*}/roc-reaches-westend.zndsl $westend_dir -run_zndsl ${BASH_SOURCE%/*}/wnd-reaches-rococo.zndsl $rococo_dir - -run_zndsl ${BASH_SOURCE%/*}/wroc-reaches-rococo.zndsl $rococo_dir -run_zndsl ${BASH_SOURCE%/*}/wwnd-reaches-westend.zndsl $westend_dir diff --git a/testing/tests/0001-asset-transfer/wnd-reaches-rococo.zndsl b/testing/tests/0001-asset-transfer/wnd-reaches-rococo.zndsl deleted file mode 100644 index dbc03864e..000000000 --- a/testing/tests/0001-asset-transfer/wnd-reaches-rococo.zndsl +++ /dev/null @@ -1,12 +0,0 @@ -Description: User is able to transfer WND from Westend Asset Hub to Rococo Asset Hub and back -Network: {{ENV_PATH}}/bridge_hub_rococo_local_network.toml -Creds: config - -# send 5 WND to //Alice from Westend AH to Rococo AH -asset-hub-rococo-collator1: run {{ENV_PATH}}/helper.sh with "reserve-transfer-assets-from-asset-hub-westend-local 5000000000000" within 120 seconds - -# check that //Alice received at least 4.8 WND on Rococo AH -asset-hub-rococo-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/wrapped-assets-balance.js with "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY,4800000000000,Westend" within 600 seconds - -# check that the relayer //Charlie is rewarded by Rococo AH -bridge-hub-rococo-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/relayer-rewards.js with "5FLSigC9HGRKVhB9FiEo4Y3koPsNmBmLJbpXg2mp1hXcS59Y,0x00000002,0x62687764,ThisChain,0" within 30 seconds diff --git a/testing/tests/0001-asset-transfer/wroc-reaches-rococo.zndsl b/testing/tests/0001-asset-transfer/wroc-reaches-rococo.zndsl deleted file mode 100644 index 9967732ca..000000000 --- a/testing/tests/0001-asset-transfer/wroc-reaches-rococo.zndsl +++ /dev/null @@ -1,10 +0,0 @@ -Description: User is able to transfer ROC from Rococo Asset 
Hub to Westend Asset Hub and back -Network: {{ENV_PATH}}/bridge_hub_westend_local_network.toml -Creds: config - -# send 3 wROC back to Alice from Westend AH to Rococo AH -asset-hub-rococo-collator1: run {{ENV_PATH}}/helper.sh with "withdraw-reserve-assets-from-asset-hub-westend-local 3000000000000" within 120 seconds - -# check that //Alice received at least 2.8 wROC on Rococo AH -# (we wait until //Alice account increases here - there are no other transactions that may increase it) -asset-hub-rococo-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/native-assets-balance-increased.js with "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY,2800000000000" within 600 seconds diff --git a/testing/tests/0001-asset-transfer/wwnd-reaches-westend.zndsl b/testing/tests/0001-asset-transfer/wwnd-reaches-westend.zndsl deleted file mode 100644 index 2037b0baf..000000000 --- a/testing/tests/0001-asset-transfer/wwnd-reaches-westend.zndsl +++ /dev/null @@ -1,10 +0,0 @@ -Description: User is able to transfer ROC from Rococo Asset Hub to Westend Asset Hub and back -Network: {{ENV_PATH}}/bridge_hub_westend_local_network.toml -Creds: config - -# send 3 wWND back to Alice from Rococo AH to Westend AH -asset-hub-westend-collator1: run {{ENV_PATH}}/helper.sh with "withdraw-reserve-assets-from-asset-hub-rococo-local 3000000000000" within 120 seconds - -# check that //Alice received at least 2.8 wWND on Westend AH -# (we wait until //Alice account increases here - there are no other transactions that may increase it) -asset-hub-westend-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/native-assets-balance-increased.js with "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY,2800000000000" within 600 seconds diff --git a/testing/tests/0002-mandatory-headers-synced-while-idle/rococo-to-westend.zndsl b/testing/tests/0002-mandatory-headers-synced-while-idle/rococo-to-westend.zndsl deleted file mode 100644 index 6e381f537..000000000 --- 
a/testing/tests/0002-mandatory-headers-synced-while-idle/rococo-to-westend.zndsl +++ /dev/null @@ -1,8 +0,0 @@ -Description: While relayer is idle, we only sync mandatory Rococo (and a single Rococo BH) headers to Westend BH. -Network: {{ENV_PATH}}/bridge_hub_westend_local_network.toml -Creds: config - -# ensure that relayer is only syncing mandatory headers while idle. This includes both headers that were -# generated while relay was offline and those in the next 100 seconds while script is active. -bridge-hub-westend-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/only-mandatory-headers-synced-when-idle.js with "300,rococo-at-westend" within 600 seconds - diff --git a/testing/tests/0002-mandatory-headers-synced-while-idle/run.sh b/testing/tests/0002-mandatory-headers-synced-while-idle/run.sh deleted file mode 100755 index 3a604b387..000000000 --- a/testing/tests/0002-mandatory-headers-synced-while-idle/run.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash - -set -e - -source "${BASH_SOURCE%/*}/../../framework/utils/common.sh" -source "${BASH_SOURCE%/*}/../../framework/utils/zombienet.sh" - -export ENV_PATH=`realpath ${BASH_SOURCE%/*}/../../environments/rococo-westend` - -$ENV_PATH/spawn.sh & -env_pid=$! - -ensure_process_file $env_pid $TEST_DIR/rococo.env 600 -rococo_dir=`cat $TEST_DIR/rococo.env` -echo - -ensure_process_file $env_pid $TEST_DIR/westend.env 300 -westend_dir=`cat $TEST_DIR/westend.env` -echo - -# Sleep for some time before starting the relayer. We want to sleep for at least 1 session, -# which is expected to be 60 seconds for the test environment. -echo -e "Sleeping 90s before starting relayer ...\n" -sleep 90 -${BASH_SOURCE%/*}/../../environments/rococo-westend/start_relayer.sh $rococo_dir $westend_dir relayer_pid - -# Sometimes the relayer syncs multiple parachain heads in the beginning leading to test failures. -# See issue: https://github.com/paritytech/parity-bridges-common/issues/2838. -# TODO: Remove this sleep after the issue is fixed. 
-echo -e "Sleeping 180s before runing the tests ...\n" -sleep 180 - -run_zndsl ${BASH_SOURCE%/*}/rococo-to-westend.zndsl $westend_dir -run_zndsl ${BASH_SOURCE%/*}/westend-to-rococo.zndsl $rococo_dir - diff --git a/testing/tests/0002-mandatory-headers-synced-while-idle/westend-to-rococo.zndsl b/testing/tests/0002-mandatory-headers-synced-while-idle/westend-to-rococo.zndsl deleted file mode 100644 index b4b3e4367..000000000 --- a/testing/tests/0002-mandatory-headers-synced-while-idle/westend-to-rococo.zndsl +++ /dev/null @@ -1,7 +0,0 @@ -Description: While relayer is idle, we only sync mandatory Westend (and a single Westend BH) headers to Rococo BH. -Network: {{ENV_PATH}}/bridge_hub_rococo_local_network.toml -Creds: config - -# ensure that relayer is only syncing mandatory headers while idle. This includes both headers that were -# generated while relay was offline and those in the next 100 seconds while script is active. -bridge-hub-rococo-collator1: js-script {{FRAMEWORK_PATH}}/js-helpers/only-mandatory-headers-synced-when-idle.js with "300,westend-at-rococo" within 600 seconds diff --git a/testing/tests/0003-required-headers-synced-while-active-rococo-to-westend.zndsl b/testing/tests/0003-required-headers-synced-while-active-rococo-to-westend.zndsl deleted file mode 100644 index 07b91481d..000000000 --- a/testing/tests/0003-required-headers-synced-while-active-rococo-to-westend.zndsl +++ /dev/null @@ -1,26 +0,0 @@ -Description: While relayer is active, we only sync mandatory and required Rococo (and Rococo BH) headers to Westend BH. 
-Network: ../environments/rococo-westend/bridge_hub_westend_local_network.toml -Creds: config - -# step 1: initialize Westend AH -asset-hub-westend-collator1: run ../scripts/invoke-script.sh with "init-asset-hub-westend-local" within 60 seconds - -# step 2: initialize Westend bridge hub -bridge-hub-westend-collator1: run ../scripts/invoke-script.sh with "init-bridge-hub-westend-local" within 60 seconds - -# step 3: ensure that initialization has completed -asset-hub-westend-collator1: js-script ../js-helpers/wait-hrmp-channel-opened.js with "1002" within 600 seconds - -# step 4: send message from Westend to Rococo -asset-hub-westend-collator1: run ../scripts/invoke-script.sh with "reserve-transfer-assets-from-asset-hub-westend-local" within 60 seconds - -# step 5: start relayer -# (we are starting it after sending the message to be sure that relayer won't relay messages before our js script -# will be started at step 6) -# (it is started by sibling 0003-required-headers-synced-while-active-westend-to-rococo.zndsl) - -# step 6: ensure that relayer won't sync any extra headers while delivering messages and confirmations -bridge-hub-westend-collator1: js-script ../js-helpers/only-required-headers-synced-when-active.js with "500,rococo-at-westend" within 600 seconds - -# wait until other network test has completed OR exit with an error too -asset-hub-westend-collator1: run ../scripts/sync-exit.sh within 600 seconds diff --git a/testing/tests/0003-required-headers-synced-while-active-westend-to-rococo.zndsl b/testing/tests/0003-required-headers-synced-while-active-westend-to-rococo.zndsl deleted file mode 100644 index a6b11fc24..000000000 --- a/testing/tests/0003-required-headers-synced-while-active-westend-to-rococo.zndsl +++ /dev/null @@ -1,26 +0,0 @@ -Description: While relayer is active, we only sync mandatory and required Westend (and Westend BH) headers to Rococo BH. 
-Network: ../environments/rococo-westend/bridge_hub_rococo_local_network.toml -Creds: config - -# step 1: initialize Rococo AH -asset-hub-rococo-collator1: run ../scripts/invoke-script.sh with "init-asset-hub-rococo-local" within 60 seconds - -# step 2: initialize Rococo bridge hub -bridge-hub-rococo-collator1: run ../scripts/invoke-script.sh with "init-bridge-hub-rococo-local" within 60 seconds - -# step 3: ensure that initialization has completed -asset-hub-rococo-collator1: js-script ../js-helpers/wait-hrmp-channel-opened.js with "1013" within 600 seconds - -# step 4: send message from Rococo to Westend -asset-hub-rococo-collator1: run ../scripts/invoke-script.sh with "reserve-transfer-assets-from-asset-hub-rococo-local" within 60 seconds - -# step 5: start relayer -# (we are starting it after sending the message to be sure that relayer won't relay messages before our js script -# will be started at step 6) -bridge-hub-rococo-collator1: run ../scripts/start-relayer.sh within 60 seconds - -# step 6: ensure that relayer won't sync any extra headers while delivering messages and confirmations -bridge-hub-rococo-collator1: js-script ../js-helpers/only-required-headers-synced-when-active.js with "500,westend-at-rococo" within 600 seconds - -# wait until other network test has completed OR exit with an error too -asset-hub-rococo-collator1: run ../scripts/sync-exit.sh within 600 seconds -- GitLab From 92a722ab814766f795b1ac713a4929db054094b1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 11 Apr 2024 06:45:04 +0000 Subject: [PATCH 27/39] Bump quote from 1.0.35 to 1.0.36 Bumps [quote](https://github.com/dtolnay/quote) from 1.0.35 to 1.0.36. - [Release notes](https://github.com/dtolnay/quote/releases) - [Commits](https://github.com/dtolnay/quote/compare/1.0.35...1.0.36) --- updated-dependencies: - dependency-name: quote dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- Cargo.lock | 158 ++++++++++++++++++++++++++--------------------------- Cargo.toml | 2 +- 2 files changed, 80 insertions(+), 80 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3097d7422..0606d47e5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -207,7 +207,7 @@ dependencies = [ "itertools", "proc-macro-error", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -379,7 +379,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" dependencies = [ - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -392,7 +392,7 @@ dependencies = [ "num-bigint", "num-traits", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -470,7 +470,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae3281bc6d0fd7e549af32b52511e1302185bd688fd3359fa36423346ff682ea" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -554,7 +554,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "726535892e8eae7e70657b4c8ea93d26b8553afb1ce617caee529ef96d7dee6c" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", "synstructure", ] @@ -566,7 +566,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2777730b2039ac0f95f093556e61b6d26cebed5393ca6f152717777cec3a42ed" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -576,7 +576,7 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3203e79f4dd9bdda415ed03cf14dae5a2bf775c683a00f94e9cd1faf0f596e5" dependencies = [ - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -790,7 +790,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a507401cad91ec6a857ed5513a2073c82a9b9048762b885bb98655b306964681" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -2055,7 +2055,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -2101,7 +2101,7 @@ dependencies = [ "fnv", "ident_case", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "strsim 0.10.0", "syn 1.0.109", ] @@ -2115,7 +2115,7 @@ dependencies = [ "fnv", "ident_case", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "strsim 0.10.0", "syn 2.0.58", ] @@ -2127,7 +2127,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" dependencies = [ "darling_core 0.14.4", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -2138,7 +2138,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" dependencies = [ "darling_core 0.20.8", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -2208,7 +2208,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -2219,7 +2219,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e79116f119dd1dba1abf1f3405f03b9b0e79a27a3883864bfebded8a3dc768cd" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -2230,7 +2230,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d65d7ce8132b7c0e54497a4d9a55a1c2a0912a0d786cf894472ba818fba45762" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", 
] @@ -2242,7 +2242,7 @@ checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ "convert_case", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "rustc_version", "syn 1.0.109", ] @@ -2311,7 +2311,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -2350,7 +2350,7 @@ dependencies = [ "derive-syn-parse 0.2.0", "once_cell", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "regex", "syn 2.0.58", "termcolor", @@ -2393,7 +2393,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "558e40ea573c374cf53507fd240b7ee2f5477df7cfebdb97323ec61c719399c5" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -2523,7 +2523,7 @@ checksum = "c9720bba047d567ffc8a3cba48bf19126600e249ab7f128e9233e6376976a116" dependencies = [ "heck 0.4.1", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -2535,7 +2535,7 @@ checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" dependencies = [ "heck 0.4.1", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -2709,7 +2709,7 @@ dependencies = [ "fs-err", "prettier-please", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -2981,7 +2981,7 @@ dependencies = [ "macro_magic", "proc-macro-warning", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "sp-crypto-hashing", "syn 2.0.58", ] @@ -2994,7 +2994,7 @@ dependencies = [ "frame-support-procedural-tools-derive", "proc-macro-crate 3.1.0", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -3004,7 +3004,7 @@ version = "11.0.0" source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "proc-macro2 
1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -3127,7 +3127,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -3671,7 +3671,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -3691,7 +3691,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b139284b5cf57ecfa712bcc66950bb635b31aff41c188e8a4cfc758eca374a3f" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", ] [[package]] @@ -4015,7 +4015,7 @@ dependencies = [ "heck 0.4.1", "proc-macro-crate 3.1.0", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -4466,7 +4466,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fba456131824ab6acd4c7bf61e9c0f0a3014b5fc9868ccb8e10d344594cdc4f" dependencies = [ "heck 0.4.1", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -4792,7 +4792,7 @@ checksum = "e03844fc635e92f3a0067e25fa4bf3e3dbf3f2927bf3aa01bb7bc8f1c428949d" dependencies = [ "macro_magic_core", "macro_magic_macros", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -4806,7 +4806,7 @@ dependencies = [ "derive-syn-parse 0.1.5", "macro_magic_core_macros", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -4817,7 +4817,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ea73aa640dc01d62a590d48c0c3521ed739d53b27f919b25c3551e233481654" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -4828,7 +4828,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ef9d79ae96aaba821963320eb2b6e34d17df1e5a83d8a1985c29cc5be59577b3" dependencies = [ "macro_magic_core", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -5016,7 +5016,7 @@ checksum = "22ce75669015c4f47b289fd4d4f56e894e4c96003ffdf3ac51313126f94c6cbb" dependencies = [ "cfg-if", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -5093,7 +5093,7 @@ dependencies = [ "proc-macro-crate 1.1.3", "proc-macro-error", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", "synstructure", ] @@ -5141,7 +5141,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91761aed67d03ad966ef783ae962ef9bbaca728d2dd7ceb7939ec110fffad998" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -5443,7 +5443,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -5750,7 +5750,7 @@ checksum = "be30eaf4b0a9fba5336683b38de57bb86d179a35862ba6bfcf57625d006bde5b" dependencies = [ "proc-macro-crate 2.0.0", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -5923,7 +5923,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -6082,7 +6082,7 @@ checksum = "5c4fdfc49717fb9a196e74a5d28e0bc764eb394a2c803eb11133a31ac996c60c" dependencies = [ "polkavm-common", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -6268,7 +6268,7 @@ checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", "version_check", ] @@ -6280,7 +6280,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "version_check", ] @@ -6291,7 +6291,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "834da187cfe638ae8abb0203f0b33e5ccdb02a28e7199f2f47b3e2754f50edca" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -6346,7 +6346,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -6391,7 +6391,7 @@ dependencies = [ "anyhow", "itertools", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -6512,9 +6512,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.35" +version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" dependencies = [ "proc-macro2 1.0.79", ] @@ -6709,7 +6709,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fddb4f8d99b0a2ebafc65a87a69a7b9875e4b1ae1f00db265d300ef7f28bccc" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -7398,7 +7398,7 @@ source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555 dependencies = [ "proc-macro-crate 3.1.0", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -7737,7 +7737,7 @@ dependencies = [ "darling 0.14.4", "proc-macro-crate 1.1.3", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -7765,7 +7765,7 @@ dependencies = [ "darling 0.14.4", "proc-macro-crate 1.1.3", "proc-macro2 1.0.79", - "quote 
1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -7791,7 +7791,7 @@ checksum = "18cf6c6447f813ef19eb450e985bcce6705f9ce7660db221b59093d15c79c4b7" dependencies = [ "proc-macro-crate 1.1.3", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -7997,7 +7997,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -8417,7 +8417,7 @@ dependencies = [ "expander", "proc-macro-crate 3.1.0", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -8640,7 +8640,7 @@ name = "sp-crypto-hashing-proc-macro" version = "0.1.0" source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ - "quote 1.0.35", + "quote 1.0.36", "sp-crypto-hashing", "syn 2.0.58", ] @@ -8660,7 +8660,7 @@ version = "14.0.0" source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -8670,7 +8670,7 @@ version = "14.0.0" source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -8887,7 +8887,7 @@ dependencies = [ "expander", "proc-macro-crate 3.1.0", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -8900,7 +8900,7 @@ dependencies = [ "expander", "proc-macro-crate 3.1.0", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -9096,7 +9096,7 @@ source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555 dependencies = [ "parity-scale-codec", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -9167,7 +9167,7 @@ dependencies = [ "Inflector", 
"num-format", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "serde", "serde_json", "unicode-xid 0.2.4", @@ -9298,7 +9298,7 @@ dependencies = [ "heck 0.3.3", "proc-macro-error", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", ] @@ -9319,7 +9319,7 @@ checksum = "c6cf59daf282c0a494ba14fd21610a0325f9f90ec9d1231dea26bcb1d696c946" dependencies = [ "heck 0.4.1", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "rustversion", "syn 2.0.58", ] @@ -9508,7 +9508,7 @@ dependencies = [ "jsonrpsee 0.20.3", "parity-scale-codec", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "scale-info", "subxt-metadata", "syn 2.0.58", @@ -9576,7 +9576,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "unicode-ident", ] @@ -9587,7 +9587,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44cfb93f38070beee36b3fef7d4f5a16f27751d94b187b666a5cc5e9b0d30687" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "unicode-ident", ] @@ -9598,7 +9598,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 1.0.109", "unicode-xid 0.2.4", ] @@ -9712,7 +9712,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e4c60d69f36615a077cc7663b9cb8e42275722d23e58a7fa3d2c7f2915d09d04" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -9723,7 +9723,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -9819,7 +9819,7 @@ 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -9997,7 +9997,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -10445,7 +10445,7 @@ dependencies = [ "log", "once_cell", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", "wasm-bindgen-shared", ] @@ -10468,7 +10468,7 @@ version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" dependencies = [ - "quote 1.0.35", + "quote 1.0.36", "wasm-bindgen-macro-support", ] @@ -10479,7 +10479,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", "wasm-bindgen-backend", "wasm-bindgen-shared", @@ -11202,7 +11202,7 @@ source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555 dependencies = [ "Inflector", "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -11251,7 +11251,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] @@ -11271,7 +11271,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2 1.0.79", - "quote 1.0.35", + "quote 1.0.36", "syn 2.0.58", ] diff --git a/Cargo.toml b/Cargo.toml index 
2666706d3..06758ff73 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,7 +52,7 @@ complexity = { level = "deny", priority = 1 } [workspace.dependencies] log = { version = "0.4.20", default-features = false } -quote = { version = "1.0.33" } +quote = { version = "1.0.36" } serde = { version = "1.0.197", default-features = false } serde_json = { version = "1.0.115", default-features = false } thiserror = { version = "1.0.58" } -- GitLab From 581b81dc7c9c2b00d7601a67fcdba78baf4a7123 Mon Sep 17 00:00:00 2001 From: Serban Iorga Date: Thu, 11 Apr 2024 10:13:32 +0300 Subject: [PATCH 28/39] [dependabot] ignore migrated crates (#2943) * [dependabot] ignore migrated crates * ignore more migrated crates --- .github/dependabot.yml | 34 +++++++++++++++++++++++++++++++--- 1 file changed, 31 insertions(+), 3 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index c0c8ea648..8319805c6 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -8,7 +8,35 @@ updates: timezone: Europe/Berlin open-pull-requests-limit: 20 ignore: - # Substrate (+ Polkadot/Cumulus pallets) dependencies + # Bridges polkadot-sdk dependencies + - dependency-name: bp-* + versions: + - ">= 0" + - dependency-name: bridge-runtime-common + versions: + - ">= 0" + - dependency-name: equivocation-detector + versions: + - ">= 0" + - dependency-name: finality-relay + versions: + - ">= 0" + - dependency-name: messages-relay + versions: + - ">= 0" + - dependency-name: parachains-relay + versions: + - ">= 0" + - dependency-name: relay-substrate-client + versions: + - ">= 0" + - dependency-name: relay-utils + versions: + - ">= 0" + - dependency-name: substrate-relay-helper + versions: + - ">= 0" + # Substrate polkadot-sdk (+ Polkadot/Cumulus pallets) dependencies - dependency-name: beefy-* versions: - ">= 0" @@ -42,7 +70,7 @@ updates: - dependency-name: binary-merkle-tree versions: - ">= 0" - # Polkadot dependencies + # Polkadot polkadot-sdk dependencies - dependency-name: kusama-* 
versions: - ">= 0" @@ -52,7 +80,7 @@ updates: - dependency-name: xcm* versions: - ">= 0" - # Cumulus dependencies + # Cumulus polkadot-sdk dependencies - dependency-name: cumulus-* versions: - ">= 0" -- GitLab From df0d367e97488c2c88b5dd7f6e3b7b9664e006c9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 11 Apr 2024 07:01:22 +0000 Subject: [PATCH 29/39] Bump anyhow from 1.0.81 to 1.0.82 Bumps [anyhow](https://github.com/dtolnay/anyhow) from 1.0.81 to 1.0.82. - [Release notes](https://github.com/dtolnay/anyhow/releases) - [Commits](https://github.com/dtolnay/anyhow/compare/1.0.81...1.0.82) --- updated-dependencies: - dependency-name: anyhow dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0606d47e5..8ea52a73b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -184,9 +184,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.81" +version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247" +checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519" [[package]] name = "approx" -- GitLab From 0364a0aa476df9a77cd712bda10c5b425a92f69a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 11 Apr 2024 07:16:19 +0000 Subject: [PATCH 30/39] Bump subxt from 0.32.1 to 0.35.2 Bumps [subxt](https://github.com/paritytech/subxt) from 0.32.1 to 0.35.2. 
- [Release notes](https://github.com/paritytech/subxt/releases) - [Changelog](https://github.com/paritytech/subxt/blob/master/CHANGELOG.md) - [Commits](https://github.com/paritytech/subxt/compare/v0.32.1...v0.35.2) --- updated-dependencies: - dependency-name: subxt dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- Cargo.lock | 485 ++++++++---------- .../client-bridge-hub-kusama/Cargo.toml | 2 +- .../client-bridge-hub-polkadot/Cargo.toml | 2 +- .../client-bridge-hub-rococo/Cargo.toml | 2 +- .../client-bridge-hub-westend/Cargo.toml | 2 +- relay-clients/client-kusama/Cargo.toml | 2 +- .../client-polkadot-bulletin/Cargo.toml | 2 +- relay-clients/client-polkadot/Cargo.toml | 2 +- relay-clients/client-rococo/Cargo.toml | 2 +- relay-clients/client-westend/Cargo.toml | 2 +- 10 files changed, 217 insertions(+), 286 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8ea52a73b..8fa7e4899 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -204,7 +204,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21cc1548309245035eb18aa7f0967da6bc65587005170c56e6ef2788a4cf3f4e" dependencies = [ "include_dir", - "itertools", + "itertools 0.10.5", "proc-macro-error", "proc-macro2 1.0.79", "quote 1.0.36", @@ -297,7 +297,7 @@ dependencies = [ "ark-std", "derivative", "hashbrown 0.13.2", - "itertools", + "itertools 0.10.5", "num-traits", "rayon", "zeroize", @@ -365,7 +365,7 @@ dependencies = [ "ark-std", "derivative", "digest 0.10.7", - "itertools", + "itertools 0.10.5", "num-bigint", "num-traits", "paste", @@ -620,14 +620,13 @@ dependencies = [ [[package]] name = "async-fs" -version = "1.6.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "279cf904654eeebfa37ac9bb1598880884924aab82e290aa65c9e77a0e142e06" +checksum = "bc19683171f287921f2405677dd2ed2549c3b3bda697a563ebc3a121ace2aba1" dependencies = [ - "async-lock 2.8.0", - "autocfg", + "async-lock 
3.3.0", "blocking", - "futures-lite 1.13.0", + "futures-lite 2.3.0", ] [[package]] @@ -706,30 +705,33 @@ dependencies = [ [[package]] name = "async-net" -version = "1.8.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0434b1ed18ce1cf5769b8ac540e33f01fa9471058b5e89da9e06f3c882a8c12f" +checksum = "b948000fad4873c1c9339d60f2623323a0cfd3816e5181033c6a5cb68b2accf7" dependencies = [ - "async-io 1.13.0", + "async-io 2.3.2", "blocking", - "futures-lite 1.13.0", + "futures-lite 2.3.0", ] [[package]] name = "async-process" -version = "1.8.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea6438ba0a08d81529c69b36700fa2f95837bfe3e776ab39cde9c14d9149da88" +checksum = "d999d925640d51b662b7b4e404224dd81de70f4aa4a199383c2c5e5b86885fa3" dependencies = [ - "async-io 1.13.0", - "async-lock 2.8.0", + "async-channel 2.2.0", + "async-io 2.3.2", + "async-lock 3.3.0", "async-signal", + "async-task", "blocking", "cfg-if", - "event-listener 3.1.0", - "futures-lite 1.13.0", + "event-listener 5.3.0", + "futures-lite 2.3.0", "rustix 0.38.32", - "windows-sys 0.48.0", + "tracing", + "windows-sys 0.52.0", ] [[package]] @@ -808,10 +810,10 @@ dependencies = [ ] [[package]] -name = "atomic" -version = "0.5.3" +name = "atomic-take" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c59bdb34bc650a32731b31bd8f0829cc15d24a708ee31559e0bb34f2bc320cba" +checksum = "a8ab6b55fe97976e46f91ddbed8d147d966475dc29b2032757ba47e02376fbc3" [[package]] name = "atomic-waker" @@ -1864,7 +1866,7 @@ dependencies = [ "cranelift-codegen", "cranelift-entity", "cranelift-frontend", - "itertools", + "itertools 0.10.5", "log", "smallvec", "wasmparser", @@ -2059,19 +2061,6 @@ dependencies = [ "syn 2.0.58", ] -[[package]] -name = "curve25519-dalek-ng" -version = "4.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1c359b7249347e46fb28804470d071c921156ad62b3eef5d34e2ba867533dec8" -dependencies = [ - "byteorder", - "digest 0.9.0", - "rand_core 0.6.4", - "subtle-ng", - "zeroize", -] - [[package]] name = "darling" version = "0.14.4" @@ -2480,6 +2469,21 @@ dependencies = [ "zeroize", ] +[[package]] +name = "ed25519-zebra" +version = "4.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d9ce6874da5d4415896cd45ffbc4d1cfc0c4f9c079427bd870742c30f2f65a9" +dependencies = [ + "curve25519-dalek 4.1.1", + "ed25519 2.2.3", + "hashbrown 0.14.3", + "hex", + "rand_core 0.6.4", + "sha2 0.10.8", + "zeroize", +] + [[package]] name = "either" version = "1.10.0" @@ -2646,17 +2650,6 @@ version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" -[[package]] -name = "event-listener" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d93877bcde0eb80ca09131a08d23f0a5c18a620b01db137dba666d18cd9b30c2" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite 0.2.14", -] - [[package]] name = "event-listener" version = "4.0.3" @@ -2977,12 +2970,12 @@ dependencies = [ "derive-syn-parse 0.2.0", "expander", "frame-support-procedural-tools", - "itertools", + "itertools 0.10.5", "macro_magic", "proc-macro-warning", "proc-macro2 1.0.79", "quote 1.0.36", - "sp-crypto-hashing", + "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "syn 2.0.58", ] @@ -3748,12 +3741,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "intx" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f38a50a899dc47a6d0ed5508e7f601a2e34c3a85303514b5d137f3c10a0c75" - [[package]] name = "io-lifetimes" version = "1.0.11" @@ -3836,6 +3823,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.12.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.11" @@ -3871,53 +3867,23 @@ dependencies = [ "serde_json", ] -[[package]] -name = "jsonrpsee" -version = "0.20.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "affdc52f7596ccb2d7645231fc6163bb314630c989b64998f3699a28b4d5d4dc" -dependencies = [ - "jsonrpsee-client-transport 0.20.3", - "jsonrpsee-core 0.20.3", - "jsonrpsee-http-client", - "jsonrpsee-types 0.20.3", -] - [[package]] name = "jsonrpsee" version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4b0e68d9af1f066c06d6e2397583795b912d78537d7d907c561e82c13d69fa1" dependencies = [ - "jsonrpsee-core 0.22.4", + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-http-client", "jsonrpsee-proc-macros", "jsonrpsee-server", - "jsonrpsee-types 0.22.4", + "jsonrpsee-types", "jsonrpsee-ws-client", "tokio", "tracing", ] -[[package]] -name = "jsonrpsee-client-transport" -version = "0.20.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b005c793122d03217da09af68ba9383363caa950b90d3436106df8cabce935" -dependencies = [ - "futures-util", - "http", - "jsonrpsee-core 0.20.3", - "pin-project", - "rustls-native-certs 0.6.3", - "soketto", - "thiserror", - "tokio", - "tokio-rustls 0.24.1", - "tokio-util", - "tracing", - "url", -] - [[package]] name = "jsonrpsee-client-transport" version = "0.22.4" @@ -3926,7 +3892,7 @@ checksum = "92f254f56af1ae84815b9b1325094743dcf05b92abb5e94da2e81a35cff0cada" dependencies = [ "futures-util", "http", - "jsonrpsee-core 0.22.4", + "jsonrpsee-core", "pin-project", "rustls-native-certs 0.7.0", "rustls-pki-types", @@ -3939,28 +3905,6 @@ dependencies = [ "url", ] -[[package]] -name = "jsonrpsee-core" -version = "0.20.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "da2327ba8df2fdbd5e897e2b5ed25ce7f299d345b9736b6828814c3dbd1fd47b" -dependencies = [ - "anyhow", - "async-lock 2.8.0", - "async-trait", - "beef", - "futures-timer", - "futures-util", - "hyper", - "jsonrpsee-types 0.20.3", - "rustc-hash", - "serde", - "serde_json", - "thiserror", - "tokio", - "tracing", -] - [[package]] name = "jsonrpsee-core" version = "0.22.4" @@ -3973,7 +3917,7 @@ dependencies = [ "futures-timer", "futures-util", "hyper", - "jsonrpsee-types 0.22.4", + "jsonrpsee-types", "parking_lot 0.12.1", "pin-project", "rand 0.8.5", @@ -3988,15 +3932,15 @@ dependencies = [ [[package]] name = "jsonrpsee-http-client" -version = "0.20.3" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f80c17f62c7653ce767e3d7288b793dfec920f97067ceb189ebdd3570f2bc20" +checksum = "ac13bc1e44cd00448a5ff485824a128629c945f02077804cb659c07a0ba41395" dependencies = [ "async-trait", "hyper", "hyper-rustls", - "jsonrpsee-core 0.20.3", - "jsonrpsee-types 0.20.3", + "jsonrpsee-core", + "jsonrpsee-types", "serde", "serde_json", "thiserror", @@ -4028,8 +3972,8 @@ dependencies = [ "futures-util", "http", "hyper", - "jsonrpsee-core 0.22.4", - "jsonrpsee-types 0.22.4", + "jsonrpsee-core", + "jsonrpsee-types", "pin-project", "route-recognizer", "serde", @@ -4043,20 +3987,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "jsonrpsee-types" -version = "0.20.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5be0be325642e850ed0bdff426674d2e66b2b7117c9be23a7caef68a2902b7d9" -dependencies = [ - "anyhow", - "beef", - "serde", - "serde_json", - "thiserror", - "tracing", -] - [[package]] name = "jsonrpsee-types" version = "0.22.4" @@ -4077,9 +4007,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32f00abe918bf34b785f87459b9205790e5361a3f7437adb50e928dc243f27eb" dependencies = [ "http", - "jsonrpsee-client-transport 
0.22.4", - "jsonrpsee-core 0.22.4", - "jsonrpsee-types 0.22.4", + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", "url", ] @@ -4766,6 +4696,15 @@ dependencies = [ "hashbrown 0.13.2", ] +[[package]] +name = "lru" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3262e75e648fce39813cb56ac41f3c3e3f65217ebf3844d818d1f9398cfb0dc" +dependencies = [ + "hashbrown 0.14.3", +] + [[package]] name = "lru-cache" version = "0.1.2" @@ -6176,7 +6115,7 @@ checksum = "59230a63c37f3e18569bdb90e4a89cbf5bf8b06fea0b84e65ea10cc4df47addd" dependencies = [ "difflib", "float-cmp", - "itertools", + "itertools 0.10.5", "normalize-line-endings", "predicates-core", "regex", @@ -6368,7 +6307,7 @@ checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" dependencies = [ "bytes", "heck 0.4.1", - "itertools", + "itertools 0.10.5", "lazy_static", "log", "multimap", @@ -6389,7 +6328,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" dependencies = [ "anyhow", - "itertools", + "itertools 0.10.5", "proc-macro2 1.0.79", "quote 1.0.36", "syn 1.0.109", @@ -6946,7 +6885,7 @@ dependencies = [ "frame-support", "frame-system", "futures", - "jsonrpsee 0.22.4", + "jsonrpsee", "log", "num-traits", "pallet-balances", @@ -7309,12 +7248,12 @@ checksum = "80af6f9131f277a45a3fba6ce8e2258037bb0477a67e610d3c1fe046ab31de47" [[package]] name = "ruzstd" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3ffab8f9715a0d455df4bbb9d21e91135aab3cd3ca187af0cd0c3c3f868fdc" +checksum = "58c4eb8a81997cf040a091d1f7e1938aeab6749d3a0dfa73af43cdc32393483d" dependencies = [ "byteorder", - "thiserror-core", + "derive_more", "twox-hash", ] @@ -7383,7 +7322,7 @@ dependencies = [ "serde_json", "sp-blockchain", "sp-core", - "sp-crypto-hashing", + "sp-crypto-hashing 0.1.0 
(git+https://github.com/paritytech/polkadot-sdk?branch=master)", "sp-genesis-builder", "sp-io", "sp-runtime", @@ -7636,7 +7575,7 @@ name = "sc-rpc-api" version = "0.33.0" source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ - "jsonrpsee 0.22.4", + "jsonrpsee", "parity-scale-codec", "sc-chain-spec", "sc-mixnet", @@ -7704,38 +7643,38 @@ dependencies = [ [[package]] name = "scale-bits" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "036575c29af9b6e4866ffb7fa055dbf623fe7a9cc159b33786de6013a6969d89" +checksum = "662d10dcd57b1c2a3c41c9cf68f71fb09747ada1ea932ad961aca7e2ca28315f" dependencies = [ "parity-scale-codec", "scale-info", + "scale-type-resolver", "serde", ] [[package]] name = "scale-decode" -version = "0.9.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7789f5728e4e954aaa20cadcc370b99096fb8645fca3c9333ace44bb18f30095" +checksum = "afc79ba56a1c742f5aeeed1f1801f3edf51f7e818f0a54582cac6f131364ea7b" dependencies = [ "derive_more", "parity-scale-codec", "primitive-types", "scale-bits", "scale-decode-derive", - "scale-info", + "scale-type-resolver", "smallvec", ] [[package]] name = "scale-decode-derive" -version = "0.9.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27873eb6005868f8cc72dcfe109fae664cf51223d35387bc2f28be4c28d94c47" +checksum = "5398fdb3c7bea3cb419bac4983aadacae93fe1a7b5f693f4ebd98c3821aad7a5" dependencies = [ "darling 0.14.4", - "proc-macro-crate 1.1.3", "proc-macro2 1.0.79", "quote 1.0.36", "syn 1.0.109", @@ -7743,24 +7682,24 @@ dependencies = [ [[package]] name = "scale-encode" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d70cb4b29360105483fac1ed567ff95d65224a14dd275b6303ed0a654c78de5" +checksum = 
"628800925a33794fb5387781b883b5e14d130fece9af5a63613867b8de07c5c7" dependencies = [ "derive_more", "parity-scale-codec", "primitive-types", "scale-bits", "scale-encode-derive", - "scale-info", + "scale-type-resolver", "smallvec", ] [[package]] name = "scale-encode-derive" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "995491f110efdc6bea96d6a746140e32bfceb4ea47510750a5467295a4707a25" +checksum = "7a304e1af7cdfbe7a24e08b012721456cc8cecdedadc14b3d10513eada63233c" dependencies = [ "darling 0.14.4", "proc-macro-crate 1.1.3", @@ -7795,11 +7734,34 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "scale-type-resolver" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10b800069bfd43374e0f96f653e0d46882a2cb16d6d961ac43bea80f26c76843" +dependencies = [ + "scale-info", + "smallvec", +] + +[[package]] +name = "scale-typegen" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d470fa75e71b12b3244a4113adc4bc49891f3daba2054703cacd06256066397e" +dependencies = [ + "proc-macro2 1.0.79", + "quote 1.0.36", + "scale-info", + "syn 2.0.58", + "thiserror", +] + [[package]] name = "scale-value" -version = "0.12.0" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6538d1cc1af9c0baf401c57da8a6d4730ef582db0d330d2efa56ec946b5b0283" +checksum = "c07ccfee963104335c971aaf8b7b0e749be8569116322df23f1f75c4ca9e4a28" dependencies = [ "base58", "blake2 0.10.6", @@ -7811,6 +7773,7 @@ dependencies = [ "scale-decode", "scale-encode", "scale-info", + "scale-type-resolver", "serde", "yap", ] @@ -7835,22 +7798,6 @@ dependencies = [ "hashbrown 0.13.2", ] -[[package]] -name = "schnorrkel" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "844b7645371e6ecdf61ff246ba1958c29e802881a749ae3fb1993675d210d28d" -dependencies = [ - "arrayref", - "arrayvec 
0.7.4", - "curve25519-dalek-ng", - "merlin", - "rand_core 0.6.4", - "sha2 0.9.9", - "subtle-ng", - "zeroize", -] - [[package]] name = "schnorrkel" version = "0.11.4" @@ -8197,9 +8144,9 @@ checksum = "620a1d43d70e142b1d46a929af51d44f383db9c7a2ec122de2cd992ccfcf3c18" [[package]] name = "siphasher" -version = "0.3.11" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "slab" @@ -8235,46 +8182,48 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "smol" -version = "1.3.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13f2b548cd8447f8de0fdf1c592929f70f4fc7039a05e47404b0d096ec6987a1" +checksum = "e635339259e51ef85ac7aa29a1cd991b957047507288697a690e80ab97d07cad" dependencies = [ - "async-channel 1.9.0", + "async-channel 2.2.0", "async-executor", "async-fs", - "async-io 1.13.0", - "async-lock 2.8.0", + "async-io 2.3.2", + "async-lock 3.3.0", "async-net", "async-process", "blocking", - "futures-lite 1.13.0", + "futures-lite 2.3.0", ] [[package]] name = "smoldot" -version = "0.8.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cce5e2881b30bad7ef89f383a816ad0b22c45915911f28499026de4a76d20ee" +checksum = "e6d1eaa97d77be4d026a1e7ffad1bb3b78448763b357ea6f8188d3e6f736a9b9" dependencies = [ "arrayvec 0.7.4", - "async-lock 2.8.0", - "atomic", + "async-lock 3.3.0", + "atomic-take", "base64 0.21.7", "bip39", "blake2-rfc", "bs58 0.5.1", + "chacha20", "crossbeam-queue", "derive_more", - "ed25519-zebra", + "ed25519-zebra 4.0.3", "either", - "event-listener 2.5.3", + "event-listener 4.0.3", "fnv", - "futures-channel", + "futures-lite 2.3.0", "futures-util", "hashbrown 0.14.3", "hex", "hmac 0.12.1", - "itertools", + "itertools 
0.12.1", + "libm", "libsecp256k1", "merlin", "no-std-net", @@ -8284,51 +8233,59 @@ dependencies = [ "num-traits", "pbkdf2", "pin-project", + "poly1305", "rand 0.8.5", "rand_chacha 0.3.1", "ruzstd", - "schnorrkel 0.10.2", + "schnorrkel", "serde", "serde_json", "sha2 0.10.8", + "sha3", "siphasher", "slab", "smallvec", - "smol", - "snow", "soketto", - "tiny-keccak", "twox-hash", "wasmi", + "x25519-dalek 2.0.1", + "zeroize", ] [[package]] name = "smoldot-light" -version = "0.6.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2f7b4687b83ff244ef6137735ed5716ad37dcdf3ee16c4eb1a32fb9808fa47" +checksum = "5496f2d116b7019a526b1039ec2247dd172b8670633b1a64a614c9ea12c9d8c7" dependencies = [ - "async-lock 2.8.0", + "async-channel 2.2.0", + "async-lock 3.3.0", + "base64 0.21.7", "blake2-rfc", "derive_more", "either", - "event-listener 2.5.3", + "event-listener 4.0.3", "fnv", "futures-channel", + "futures-lite 2.3.0", "futures-util", "hashbrown 0.14.3", "hex", - "itertools", + "itertools 0.12.1", "log", - "lru 0.10.1", + "lru 0.12.3", + "no-std-net", "parking_lot 0.12.1", + "pin-project", "rand 0.8.5", + "rand_chacha 0.3.1", "serde", "serde_json", "siphasher", "slab", "smol", "smoldot", + "zeroize", ] [[package]] @@ -8552,12 +8509,12 @@ dependencies = [ "bounded-collections", "bs58 0.5.1", "dyn-clonable", - "ed25519-zebra", + "ed25519-zebra 3.1.0", "futures", "hash-db", "hash256-std-hasher", "impl-serde", - "itertools", + "itertools 0.10.5", "k256", "libsecp256k1", "log", @@ -8569,11 +8526,11 @@ dependencies = [ "primitive-types", "rand 0.8.5", "scale-info", - "schnorrkel 0.11.4", + "schnorrkel", "secp256k1", "secrecy", "serde", - "sp-crypto-hashing", + "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "sp-debug-derive 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", 
"sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", @@ -8587,21 +8544,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "sp-core-hashing" -version = "9.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ee599a8399448e65197f9a6cee338ad192e9023e35e31f22382964c3c174c68" -dependencies = [ - "blake2b_simd", - "byteorder", - "digest 0.10.7", - "sha2 0.10.8", - "sha3", - "sp-std 8.0.0", - "twox-hash", -] - [[package]] name = "sp-crypto-ec-utils" version = "0.10.0" @@ -8622,6 +8564,20 @@ dependencies = [ "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk)", ] +[[package]] +name = "sp-crypto-hashing" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc9927a7f81334ed5b8a98a4a978c81324d12bd9713ec76b5c68fd410174c5eb" +dependencies = [ + "blake2b_simd", + "byteorder", + "digest 0.10.7", + "sha2 0.10.8", + "sha3", + "twox-hash", +] + [[package]] name = "sp-crypto-hashing" version = "0.1.0" @@ -8641,7 +8597,7 @@ version = "0.1.0" source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "quote 1.0.36", - "sp-crypto-hashing", + "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "syn 2.0.58", ] @@ -8733,7 +8689,7 @@ dependencies = [ "rustversion", "secp256k1", "sp-core", - "sp-crypto-hashing", + "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "sp-externalities 0.25.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "sp-keystore", "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", @@ -8967,7 +8923,7 @@ dependencies = [ "sp-api", "sp-application-crypto", "sp-core", - "sp-crypto-hashing", + "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", "sp-externalities 0.25.0 
(git+https://github.com/paritytech/polkadot-sdk?branch=master)", "sp-runtime", "sp-runtime-interface 24.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", @@ -8975,12 +8931,6 @@ dependencies = [ "x25519-dalek 2.0.1", ] -[[package]] -name = "sp-std" -version = "8.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53458e3c57df53698b3401ec0934bea8e8cfce034816873c0b0abbd83d7bac0d" - [[package]] name = "sp-std" version = "14.0.0" @@ -9331,7 +9281,7 @@ source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555 dependencies = [ "hmac 0.12.1", "pbkdf2", - "schnorrkel 0.11.4", + "schnorrkel", "sha2 0.10.8", "zeroize", ] @@ -9457,17 +9407,11 @@ version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" -[[package]] -name = "subtle-ng" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "734676eb262c623cec13c3155096e08d1f8f29adce39ba17948b18dad1e54142" - [[package]] name = "subxt" -version = "0.32.1" +version = "0.35.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "588b8ce92699eeb06290f4fb02dad4f7e426c4e6db4d53889c6bcbc808cf24ac" +checksum = "388162448313740aabe675bff00698e72f876b1c6ec85d4d2c34783cfa32a0f7" dependencies = [ "async-trait", "base58", @@ -9478,7 +9422,8 @@ dependencies = [ "futures", "hex", "impl-serde", - "jsonrpsee 0.20.3", + "instant", + "jsonrpsee", "parity-scale-codec", "primitive-types", "scale-bits", @@ -9488,28 +9433,31 @@ dependencies = [ "scale-value", "serde", "serde_json", - "sp-core-hashing", + "sp-crypto-hashing 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "subxt-lightclient", "subxt-macro", "subxt-metadata", "thiserror", + "tokio-util", "tracing", + "url", ] [[package]] name = "subxt-codegen" -version = "0.32.1" +version = "0.35.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "98f5a534c8d475919e9c845d51fc2316da4fcadd04fe17552d932d2106de930e" +checksum = "cd94344feea939a37b919b7381e038dededfd1a88e01bedda67fe40847abfc78" dependencies = [ "frame-metadata 16.0.0", "heck 0.4.1", "hex", - "jsonrpsee 0.20.3", + "jsonrpsee", "parity-scale-codec", "proc-macro2 1.0.79", "quote 1.0.36", "scale-info", + "scale-typegen", "subxt-metadata", "syn 2.0.58", "thiserror", @@ -9518,9 +9466,9 @@ dependencies = [ [[package]] name = "subxt-lightclient" -version = "0.32.1" +version = "0.35.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10fd0ac9b091211f962b6ae19e26cd08e0b86efa064dfb7fac69c8f79f122329" +checksum = "c0dbc6ed49c3c5607fc7423d7ebda4dae858eb3040cdaec602105a240d4f412f" dependencies = [ "futures", "futures-util", @@ -9535,27 +9483,31 @@ dependencies = [ [[package]] name = "subxt-macro" -version = "0.32.1" +version = "0.35.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12e8be9ab6fe88b8c13edbe15911e148482cfb905a8b8d5b8d766a64c54be0bd" +checksum = "4707a920898a7c653210bc946d26904e81ae6fcbb4f91e3a56101d5979f72fe9" dependencies = [ "darling 0.20.8", + "parity-scale-codec", "proc-macro-error", + "quote 1.0.36", + "scale-typegen", "subxt-codegen", "syn 2.0.58", ] [[package]] name = "subxt-metadata" -version = "0.32.1" +version = "0.35.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6898275765d36a37e5ef564358e0341cf41b5f3a91683d7d8b859381b65ac8a" +checksum = "65ffc8b7d246ebd38611f818547ee8e09fd69717cb79aae22e3a54fc423e6e14" dependencies = [ + "derive_more", "frame-metadata 16.0.0", + "hashbrown 0.14.3", "parity-scale-codec", "scale-info", - "sp-core-hashing", - "thiserror", + "sp-crypto-hashing 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -9696,26 +9648,6 @@ dependencies = [ "thiserror-impl", ] -[[package]] -name = "thiserror-core" -version = "1.0.50" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c001ee18b7e5e3f62cbf58c7fe220119e68d902bb7443179c0c8aef30090e999" -dependencies = [ - "thiserror-core-impl", -] - -[[package]] -name = "thiserror-core-impl" -version = "1.0.50" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4c60d69f36615a077cc7663b9cb8e42275722d23e58a7fa3d2c7f2915d09d04" -dependencies = [ - "proc-macro2 1.0.79", - "quote 1.0.36", - "syn 2.0.58", -] - [[package]] name = "thiserror-impl" version = "1.0.58" @@ -10517,11 +10449,10 @@ dependencies = [ [[package]] name = "wasmi" -version = "0.30.0" +version = "0.31.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51fb5c61993e71158abf5bb863df2674ca3ec39ed6471c64f07aeaf751d67b4" +checksum = "77a8281d1d660cdf54c76a3efa9ddd0c270cada1383a995db3ccb43d166456c7" dependencies = [ - "intx", "smallvec", "spin 0.9.8", "wasmi_arena", @@ -10537,9 +10468,9 @@ checksum = "104a7f73be44570cac297b3035d76b169d6599637631cf37a1703326a0727073" [[package]] name = "wasmi_core" -version = "0.12.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "624e6333e861ef49095d2d678b76ebf30b06bf37effca845be7e5b87c90071b7" +checksum = "dcf1a7db34bff95b85c261002720c00c3a6168256dcb93041d3fa2054d19856a" dependencies = [ "downcast-rs", "libm", diff --git a/relay-clients/client-bridge-hub-kusama/Cargo.toml b/relay-clients/client-bridge-hub-kusama/Cargo.toml index ff2754f5a..167d9af41 100644 --- a/relay-clients/client-bridge-hub-kusama/Cargo.toml +++ b/relay-clients/client-bridge-hub-kusama/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.32.1", default-features = false, features = ["native"] } +subxt = { version = "0.35.2", default-features = false, features = 
["native"] } # Bridge dependencies diff --git a/relay-clients/client-bridge-hub-polkadot/Cargo.toml b/relay-clients/client-bridge-hub-polkadot/Cargo.toml index 89874eff4..e9ccadcd2 100644 --- a/relay-clients/client-bridge-hub-polkadot/Cargo.toml +++ b/relay-clients/client-bridge-hub-polkadot/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.32.1", default-features = false, features = ["native"] } +subxt = { version = "0.35.2", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-bridge-hub-rococo/Cargo.toml b/relay-clients/client-bridge-hub-rococo/Cargo.toml index d9d6d5ebd..3ec4a71a2 100644 --- a/relay-clients/client-bridge-hub-rococo/Cargo.toml +++ b/relay-clients/client-bridge-hub-rococo/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.32.1", default-features = false, features = ["native"] } +subxt = { version = "0.35.2", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-bridge-hub-westend/Cargo.toml b/relay-clients/client-bridge-hub-westend/Cargo.toml index f28c97c40..e6edc94ec 100644 --- a/relay-clients/client-bridge-hub-westend/Cargo.toml +++ b/relay-clients/client-bridge-hub-westend/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.32.1", default-features = false, features = ["native"] } +subxt = { version = "0.35.2", default-features = false, 
features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-kusama/Cargo.toml b/relay-clients/client-kusama/Cargo.toml index a19038fe3..40119d9c3 100644 --- a/relay-clients/client-kusama/Cargo.toml +++ b/relay-clients/client-kusama/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.32.1", default-features = false, features = ["native"] } +subxt = { version = "0.35.2", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-polkadot-bulletin/Cargo.toml b/relay-clients/client-polkadot-bulletin/Cargo.toml index c9f3762fe..6029ebff3 100644 --- a/relay-clients/client-polkadot-bulletin/Cargo.toml +++ b/relay-clients/client-polkadot-bulletin/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.32.1", default-features = false, features = ["native"] } +subxt = { version = "0.35.2", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-polkadot/Cargo.toml b/relay-clients/client-polkadot/Cargo.toml index 9233e529e..795004897 100644 --- a/relay-clients/client-polkadot/Cargo.toml +++ b/relay-clients/client-polkadot/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.32.1", default-features = false, features = ["native"] } +subxt = { version = "0.35.2", default-features = false, features = ["native"] } # Bridge dependencies diff --git 
a/relay-clients/client-rococo/Cargo.toml b/relay-clients/client-rococo/Cargo.toml index 3df01015e..6a07655b9 100644 --- a/relay-clients/client-rococo/Cargo.toml +++ b/relay-clients/client-rococo/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.32.1", default-features = false, features = ["native"] } +subxt = { version = "0.35.2", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-westend/Cargo.toml b/relay-clients/client-westend/Cargo.toml index dc252ea47..a57d63634 100644 --- a/relay-clients/client-westend/Cargo.toml +++ b/relay-clients/client-westend/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.32.1", default-features = false, features = ["native"] } +subxt = { version = "0.35.2", default-features = false, features = ["native"] } # Bridge dependencies -- GitLab From c752f16c5c25fb6b6fffef23529703361b84592e Mon Sep 17 00:00:00 2001 From: Serban Iorga Date: Mon, 15 Apr 2024 15:23:05 +0300 Subject: [PATCH 31/39] Update CI image (#2951) * Update CI image * cargo update -p curve25519-dalek@4.1.1 --- .gitlab-ci.yml | 2 +- Cargo.lock | 20 ++++++++++---------- Dockerfile | 2 +- README.md | 4 ++-- 4 files changed, 14 insertions(+), 14 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 3c5b4aa6f..ae6195e58 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -10,7 +10,7 @@ variables: GIT_DEPTH: 100 CARGO_INCREMENTAL: 0 ARCH: "x86_64" - CI_IMAGE: "paritytech/ci-unified:latest" + CI_IMAGE: "paritytech/ci-unified:bullseye-1.77.0-2024-04-10-v20240408" RUST_BACKTRACE: full BUILDAH_IMAGE: 
"quay.io/buildah/stable:v1.29" BUILDAH_COMMAND: "buildah --storage-driver overlay2" diff --git a/Cargo.lock b/Cargo.lock index 8fa7e4899..030ad22e0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2035,9 +2035,9 @@ dependencies = [ [[package]] name = "curve25519-dalek" -version = "4.1.1" +version = "4.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c" +checksum = "0a677b8922c94e01bdbb12126b0bc852f00447528dee1782229af9c720c3f348" dependencies = [ "cfg-if", "cpufeatures", @@ -2446,7 +2446,7 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871" dependencies = [ - "curve25519-dalek 4.1.1", + "curve25519-dalek 4.1.2", "ed25519 2.2.3", "rand_core 0.6.4", "serde", @@ -2475,7 +2475,7 @@ version = "4.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d9ce6874da5d4415896cd45ffbc4d1cfc0c4f9c079427bd870742c30f2f65a9" dependencies = [ - "curve25519-dalek 4.1.1", + "curve25519-dalek 4.1.2", "ed25519 2.2.3", "hashbrown 0.14.3", "hex", @@ -4918,7 +4918,7 @@ dependencies = [ "bitflags 1.3.2", "blake2 0.10.6", "c2-chacha", - "curve25519-dalek 4.1.1", + "curve25519-dalek 4.1.2", "either", "hashlink", "lioness", @@ -5661,7 +5661,7 @@ checksum = "4e69bf016dc406eff7d53a7d3f7cf1c2e72c82b9088aac1118591e36dd2cd3e9" dependencies = [ "bitcoin_hashes 0.13.0", "rand 0.8.5", - "rand_core 0.6.4", + "rand_core 0.5.1", "serde", "unicode-normalization", ] @@ -7807,7 +7807,7 @@ dependencies = [ "aead", "arrayref", "arrayvec 0.7.4", - "curve25519-dalek 4.1.1", + "curve25519-dalek 4.1.2", "getrandom_or_panic", "merlin", "rand_core 0.6.4", @@ -8297,7 +8297,7 @@ dependencies = [ "aes-gcm", "blake2 0.10.6", "chacha20poly1305", - "curve25519-dalek 4.1.1", + "curve25519-dalek 4.1.2", "rand_core 0.6.4", "ring 0.17.8", "rustc_version", @@ -8913,7 +8913,7 @@ version = "10.0.0" 
source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" dependencies = [ "aes-gcm", - "curve25519-dalek 4.1.1", + "curve25519-dalek 4.1.2", "ed25519-dalek 2.1.1", "hkdf", "parity-scale-codec", @@ -11085,7 +11085,7 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7e468321c81fb07fa7f4c636c3972b9100f0346e5b6a9f2bd0603a52f7ed277" dependencies = [ - "curve25519-dalek 4.1.1", + "curve25519-dalek 4.1.2", "rand_core 0.6.4", "serde", "zeroize", diff --git a/Dockerfile b/Dockerfile index ed18e4abe..cb5be7195 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,7 +8,7 @@ # # See the `deployments/README.md` for all the available `PROJECT` values. -FROM docker.io/paritytech/ci-unified:latest as builder +FROM docker.io/paritytech/ci-unified:bullseye-1.77.0-2024-04-10-v20240408 as builder USER root WORKDIR /parity-bridges-common diff --git a/README.md b/README.md index 8bfa39841..466ac3e7d 100644 --- a/README.md +++ b/README.md @@ -41,7 +41,7 @@ Also you can build the repo with [Parity CI Docker image](https://github.com/paritytech/scripts/tree/master/dockerfiles/ci-unified): ```bash -docker pull paritytech/ci-unified:latest +docker pull paritytech/ci-unified:bullseye-1.77.0-2024-04-10-v20240408 mkdir ~/cache chown 1000:1000 ~/cache #processes in the container runs as "nonroot" user with UID 1000 docker run --rm -it -w /shellhere/parity-bridges-common \ @@ -49,7 +49,7 @@ docker run --rm -it -w /shellhere/parity-bridges-common \ -v "$(pwd)":/shellhere/parity-bridges-common \ -e CARGO_HOME=/cache/cargo/ \ -e SCCACHE_DIR=/cache/sccache/ \ - -e CARGO_TARGET_DIR=/cache/target/ paritytech/ci-unified:latest cargo build --all + -e CARGO_TARGET_DIR=/cache/target/ paritytech/ci-unified:bullseye-1.77.0-2024-04-10-v20240408 cargo build --all #artifacts can be found in ~/cache/target ``` -- GitLab From 4dd72be5f030fce7458a3873d9b3657fc6075548 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Apr 2024 12:24:41 +0000 Subject: [PATCH 32/39] Bump async-trait from 0.1.79 to 0.1.80 Bumps [async-trait](https://github.com/dtolnay/async-trait) from 0.1.79 to 0.1.80. - [Release notes](https://github.com/dtolnay/async-trait/releases) - [Commits](https://github.com/dtolnay/async-trait/compare/0.1.79...0.1.80) --- updated-dependencies: - dependency-name: async-trait dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- Cargo.lock | 6 +++--- substrate-relay/Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 030ad22e0..0343a78cd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -787,9 +787,9 @@ checksum = "fbb36e985947064623dbd357f727af08ffd077f93d696782f3c56365fa2e2799" [[package]] name = "async-trait" -version = "0.1.79" +version = "0.1.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507401cad91ec6a857ed5513a2073c82a9b9048762b885bb98655b306964681" +checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.36", @@ -5661,7 +5661,7 @@ checksum = "4e69bf016dc406eff7d53a7d3f7cf1c2e72c82b9088aac1118591e36dd2cd3e9" dependencies = [ "bitcoin_hashes 0.13.0", "rand 0.8.5", - "rand_core 0.5.1", + "rand_core 0.6.4", "serde", "unicode-normalization", ] diff --git a/substrate-relay/Cargo.toml b/substrate-relay/Cargo.toml index 7fa48a29f..620ef7dbe 100644 --- a/substrate-relay/Cargo.toml +++ b/substrate-relay/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] anyhow = "1.0" async-std = "1.9.0" -async-trait = "0.1.79" +async-trait = "0.1.80" codec = { package = "parity-scale-codec", version = "3.6.1" } env_logger = "0.11" futures = "0.3.30" -- GitLab From f80f01d44c967f6c4abc59b9f909fefb46441f59 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Apr 2024 12:24:47 +0000 Subject: [PATCH 33/39] Bump subxt from 0.35.2 to 0.35.3 Bumps [subxt](https://github.com/paritytech/subxt) from 0.35.2 to 0.35.3. - [Release notes](https://github.com/paritytech/subxt/releases) - [Changelog](https://github.com/paritytech/subxt/blob/v0.35.3/CHANGELOG.md) - [Commits](https://github.com/paritytech/subxt/compare/v0.35.2...v0.35.3) --- updated-dependencies: - dependency-name: subxt dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- Cargo.lock | 20 +++++++++---------- .../client-bridge-hub-kusama/Cargo.toml | 2 +- .../client-bridge-hub-polkadot/Cargo.toml | 2 +- .../client-bridge-hub-rococo/Cargo.toml | 2 +- .../client-bridge-hub-westend/Cargo.toml | 2 +- relay-clients/client-kusama/Cargo.toml | 2 +- .../client-polkadot-bulletin/Cargo.toml | 2 +- relay-clients/client-polkadot/Cargo.toml | 2 +- relay-clients/client-rococo/Cargo.toml | 2 +- relay-clients/client-westend/Cargo.toml | 2 +- 10 files changed, 19 insertions(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0343a78cd..d786e7d18 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9409,9 +9409,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "subxt" -version = "0.35.2" +version = "0.35.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "388162448313740aabe675bff00698e72f876b1c6ec85d4d2c34783cfa32a0f7" +checksum = "bd68bef23f4de5e513ab4c29af69053e232b098f9c87ab552d7ea153b4a1fbc5" dependencies = [ "async-trait", "base58", @@ -9445,9 +9445,9 @@ dependencies = [ [[package]] name = "subxt-codegen" -version = "0.35.2" +version = "0.35.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd94344feea939a37b919b7381e038dededfd1a88e01bedda67fe40847abfc78" +checksum = "9d9e2b256b71d31a2629e44eb9cbfd944eb7d577c9e0c8e9802cc3c3943af2d9" 
dependencies = [ "frame-metadata 16.0.0", "heck 0.4.1", @@ -9466,9 +9466,9 @@ dependencies = [ [[package]] name = "subxt-lightclient" -version = "0.35.2" +version = "0.35.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0dbc6ed49c3c5607fc7423d7ebda4dae858eb3040cdaec602105a240d4f412f" +checksum = "1d51f1ac12e3be7aafea4d037730a57da4f22f2e9c73955666081ffa2697c6f1" dependencies = [ "futures", "futures-util", @@ -9483,9 +9483,9 @@ dependencies = [ [[package]] name = "subxt-macro" -version = "0.35.2" +version = "0.35.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4707a920898a7c653210bc946d26904e81ae6fcbb4f91e3a56101d5979f72fe9" +checksum = "98dc84d7e6a0abd7ed407cce0bf60d7d58004f699460cffb979640717d1ab506" dependencies = [ "darling 0.20.8", "parity-scale-codec", @@ -9498,9 +9498,9 @@ dependencies = [ [[package]] name = "subxt-metadata" -version = "0.35.2" +version = "0.35.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65ffc8b7d246ebd38611f818547ee8e09fd69717cb79aae22e3a54fc423e6e14" +checksum = "cc10c54028d079a9f1be65188707cd29e5ffd8d0031a2b1346a0941d57b7ab7e" dependencies = [ "derive_more", "frame-metadata 16.0.0", diff --git a/relay-clients/client-bridge-hub-kusama/Cargo.toml b/relay-clients/client-bridge-hub-kusama/Cargo.toml index 167d9af41..ac3c382ba 100644 --- a/relay-clients/client-bridge-hub-kusama/Cargo.toml +++ b/relay-clients/client-bridge-hub-kusama/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.35.2", default-features = false, features = ["native"] } +subxt = { version = "0.35.3", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-bridge-hub-polkadot/Cargo.toml 
b/relay-clients/client-bridge-hub-polkadot/Cargo.toml index e9ccadcd2..14671ce8f 100644 --- a/relay-clients/client-bridge-hub-polkadot/Cargo.toml +++ b/relay-clients/client-bridge-hub-polkadot/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.35.2", default-features = false, features = ["native"] } +subxt = { version = "0.35.3", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-bridge-hub-rococo/Cargo.toml b/relay-clients/client-bridge-hub-rococo/Cargo.toml index 3ec4a71a2..a872cea3f 100644 --- a/relay-clients/client-bridge-hub-rococo/Cargo.toml +++ b/relay-clients/client-bridge-hub-rococo/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.35.2", default-features = false, features = ["native"] } +subxt = { version = "0.35.3", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-bridge-hub-westend/Cargo.toml b/relay-clients/client-bridge-hub-westend/Cargo.toml index e6edc94ec..2e1e21c26 100644 --- a/relay-clients/client-bridge-hub-westend/Cargo.toml +++ b/relay-clients/client-bridge-hub-westend/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.35.2", default-features = false, features = ["native"] } +subxt = { version = "0.35.3", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-kusama/Cargo.toml 
b/relay-clients/client-kusama/Cargo.toml index 40119d9c3..6872d8117 100644 --- a/relay-clients/client-kusama/Cargo.toml +++ b/relay-clients/client-kusama/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.35.2", default-features = false, features = ["native"] } +subxt = { version = "0.35.3", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-polkadot-bulletin/Cargo.toml b/relay-clients/client-polkadot-bulletin/Cargo.toml index 6029ebff3..0113daff9 100644 --- a/relay-clients/client-polkadot-bulletin/Cargo.toml +++ b/relay-clients/client-polkadot-bulletin/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.35.2", default-features = false, features = ["native"] } +subxt = { version = "0.35.3", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-polkadot/Cargo.toml b/relay-clients/client-polkadot/Cargo.toml index 795004897..66d8cf3b7 100644 --- a/relay-clients/client-polkadot/Cargo.toml +++ b/relay-clients/client-polkadot/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.35.2", default-features = false, features = ["native"] } +subxt = { version = "0.35.3", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-rococo/Cargo.toml b/relay-clients/client-rococo/Cargo.toml index 6a07655b9..9953f9006 100644 --- 
a/relay-clients/client-rococo/Cargo.toml +++ b/relay-clients/client-rococo/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.35.2", default-features = false, features = ["native"] } +subxt = { version = "0.35.3", default-features = false, features = ["native"] } # Bridge dependencies diff --git a/relay-clients/client-westend/Cargo.toml b/relay-clients/client-westend/Cargo.toml index a57d63634..166003517 100644 --- a/relay-clients/client-westend/Cargo.toml +++ b/relay-clients/client-westend/Cargo.toml @@ -12,7 +12,7 @@ workspace = true [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } scale-info = { version = "2.11.1", default-features = false, features = ["derive"] } -subxt = { version = "0.35.2", default-features = false, features = ["native"] } +subxt = { version = "0.35.3", default-features = false, features = ["native"] } # Bridge dependencies -- GitLab From 83193de0bb97f8bafb10896515490fe3660293a9 Mon Sep 17 00:00:00 2001 From: Svyatoslav Nikolsky Date: Thu, 18 Apr 2024 14:16:44 +0300 Subject: [PATCH 34/39] Relayer 1.3.0 (#2959) * updated RELEASE.md * bump relay version * bump BHK version to 1_002_000 --- Cargo.lock | 2 +- RELEASE.md | 20 +- .../src/codegen_runtime.rs | 2555 ++++++++++++++--- .../client-bridge-hub-kusama/src/lib.rs | 2 +- scripts/regenerate_runtimes.sh | 2 + substrate-relay/Cargo.toml | 2 +- 6 files changed, 2096 insertions(+), 487 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d786e7d18..8c7d1a76e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9300,7 +9300,7 @@ dependencies = [ [[package]] name = "substrate-relay" -version = "1.2.1" +version = "1.3.0" dependencies = [ "anyhow", "async-std", diff --git a/RELEASE.md b/RELEASE.md index e45beddde..cb00ceb0d 100644 --- a/RELEASE.md +++ 
b/RELEASE.md @@ -6,16 +6,16 @@ come first and details come in the last sections. ### Making a Release All releases are supposed to be done from the -[`polkadot-staging` branch](https://github.com/paritytech/parity-bridges-common/tree/polkadot-staging). +[`master` branch](https://github.com/paritytech/parity-bridges-common/tree/master). This branch is assumed to contain changes, that are reviewed and audited. To prepare a release: 1. Make sure all required changes are merged to the - [`polkadot-staging` branch](https://github.com/paritytech/parity-bridges-common/tree/polkadot-staging); + [`master` branch](https://github.com/paritytech/parity-bridges-common/tree/master); 2. Select release version: go to the `Cargo.toml` of `substrate-relay` crate - ([here](https://github.com/paritytech/parity-bridges-common/blob/polkadot-staging/relays/bin-substrate/Cargo.toml#L3)) + ([here](https://github.com/paritytech/parity-bridges-common/blob/master/relays/bin-substrate/Cargo.toml#L3)) to look for the latest version. Then increment the minor or major version. **NOTE**: we are not going to properly support [semver](https://semver.org) @@ -28,11 +28,11 @@ To prepare a release: It could be combined with the (1) if changes are not large. Make sure to add the [`A-release`](https://github.com/paritytech/parity-bridges-common/labels/A-release) label to your PR - in the future we'll add workflow to make pre-releases - when such PR is merged to the `polkadot-staging` branch; + when such PR is merged to the `master` branch; 4. Wait for approvals and merge PR, mentioned in (3); -5. Checkout updated `polkadot-staging` branch and do `git pull`; +5. Checkout updated `master` branch and do `git pull`; 6. Make a new git tag with the `substrate-relay` version: ```sh @@ -123,15 +123,15 @@ support it. Normally it means: 1. 
Bumping bundled chain versions in following places: -- for `Rococo` and `RBH`: [here](https://github.com/paritytech/parity-bridges-common/blob/polkadot-staging/relays/bin-substrate/src/chains/rococo.rs); +- for `Rococo` and `RBH`: [here](https://github.com/paritytech/parity-bridges-common/blob/master/relays/bin-substrate/src/chains/rococo.rs); -- for `Westend` and `WBH`: [here](https://github.com/paritytech/parity-bridges-common/blob/polkadot-staging/relays/bin-substrate/src/chains/westend.rs); +- for `Westend` and `WBH`: [here](https://github.com/paritytech/parity-bridges-common/blob/master/relays/bin-substrate/src/chains/westend.rs); -- for `Kusama` and `KBH`: [here](https://github.com/paritytech/parity-bridges-common/blob/polkadot-staging/relays/bin-substrate/src/chains/polkadot.rs) +- for `Kusama` and `KBH`: [here](https://github.com/paritytech/parity-bridges-common/blob/master/relays/bin-substrate/src/chains/polkadot.rs) -- for `Polkadot` and `PBH`: [here](https://github.com/paritytech/parity-bridges-common/blob/polkadot-staging/relays/bin-substrate/src/chains/polkadot.rs); +- for `Polkadot` and `PBH`: [here](https://github.com/paritytech/parity-bridges-common/blob/master/relays/bin-substrate/src/chains/polkadot.rs); -- for `PBC`: [here](https://github.com/paritytech/parity-bridges-common/blob/polkadot-staging/relays/bin-substrate/src/chains/polkadot_bulletin.rs). +- for `PBC`: [here](https://github.com/paritytech/parity-bridges-common/blob/master/relays/bin-substrate/src/chains/polkadot_bulletin.rs). 2. Regenerating bundled runtime wrapper code using `runtime-codegen` binary: diff --git a/relay-clients/client-bridge-hub-kusama/src/codegen_runtime.rs b/relay-clients/client-bridge-hub-kusama/src/codegen_runtime.rs index 2da4c3014..bf2301044 100644 --- a/relay-clients/client-bridge-hub-kusama/src/codegen_runtime.rs +++ b/relay-clients/client-bridge-hub-kusama/src/codegen_runtime.rs @@ -17,7 +17,7 @@ //! Autogenerated runtime API //! 
THIS FILE WAS AUTOGENERATED USING parity-bridges-common::runtime-codegen //! EXECUTED COMMAND: target/debug/runtime-codegen --from-node-url -//! wss://kusama-bridge-hub-rpc.polkadot.io +//! wss://kusama-bridge-hub-rpc.polkadot.io/ #[allow(dead_code, unused_imports, non_camel_case_types)] #[allow(clippy::all)] @@ -31,6 +31,11 @@ pub mod api { use super::runtime_types; pub mod bounded_collections { use super::runtime_types; + pub mod bounded_btree_set { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct BoundedBTreeSet<_0>(pub ::std::vec::Vec<_0>); + } pub mod bounded_vec { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] @@ -244,6 +249,23 @@ pub mod api { #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct StrippableError; } + pub mod bridge_hub_common { + use super::runtime_types; + pub mod message_queue { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum AggregateMessageOrigin { + #[codec(index = 0)] + Here, + #[codec(index = 1)] + Parent, + #[codec(index = 2)] + Sibling(runtime_types::polkadot_parachain_primitives::primitives::Id), + #[codec(index = 3)] + Snowbridge(runtime_types::snowbridge_core::ChannelId), + } + } + } pub mod bridge_hub_kusama_runtime { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] @@ -273,6 +295,8 @@ pub mod api { ParachainSystem(runtime_types::cumulus_pallet_parachain_system::pallet::Call), #[codec(index = 2)] Timestamp(runtime_types::pallet_timestamp::pallet::Call), + #[codec(index = 3)] + ParachainInfo(runtime_types::staging_parachain_info::pallet::Call), #[codec(index = 10)] Balances(runtime_types::pallet_balances::pallet::Call), #[codec(index = 21)] @@ -283,6 +307,8 @@ pub mod api { XcmpQueue(runtime_types::cumulus_pallet_xcmp_queue::pallet::Call), #[codec(index = 31)] 
PolkadotXcm(runtime_types::pallet_xcm::pallet::Call), + #[codec(index = 32)] + CumulusXcm(runtime_types::cumulus_pallet_xcm::pallet::Call), #[codec(index = 33)] DmpQueue(runtime_types::cumulus_pallet_dmp_queue::pallet::Call), #[codec(index = 40)] @@ -297,6 +323,20 @@ pub mod api { BridgePolkadotParachains(runtime_types::pallet_bridge_parachains::pallet::Call), #[codec(index = 53)] BridgePolkadotMessages(runtime_types::pallet_bridge_messages::pallet::Call), + #[codec(index = 80)] + EthereumInboundQueue(runtime_types::snowbridge_pallet_inbound_queue::pallet::Call), + #[codec(index = 81)] + EthereumOutboundQueue( + runtime_types::snowbridge_pallet_outbound_queue::pallet::Call, + ), + #[codec(index = 82)] + EthereumBeaconClient( + runtime_types::snowbridge_pallet_ethereum_client::pallet::Call, + ), + #[codec(index = 83)] + EthereumSystem(runtime_types::snowbridge_pallet_system::pallet::Call), + #[codec(index = 175)] + MessageQueue(runtime_types::pallet_message_queue::pallet::Call), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum RuntimeError { @@ -314,10 +354,6 @@ pub mod api { XcmpQueue(runtime_types::cumulus_pallet_xcmp_queue::pallet::Error), #[codec(index = 31)] PolkadotXcm(runtime_types::pallet_xcm::pallet::Error), - #[codec(index = 32)] - CumulusXcm(runtime_types::cumulus_pallet_xcm::pallet::Error), - #[codec(index = 33)] - DmpQueue(runtime_types::cumulus_pallet_dmp_queue::pallet::Error), #[codec(index = 40)] Utility(runtime_types::pallet_utility::pallet::Error), #[codec(index = 41)] @@ -330,6 +366,20 @@ pub mod api { BridgePolkadotParachains(runtime_types::pallet_bridge_parachains::pallet::Error), #[codec(index = 53)] BridgePolkadotMessages(runtime_types::pallet_bridge_messages::pallet::Error), + #[codec(index = 80)] + EthereumInboundQueue(runtime_types::snowbridge_pallet_inbound_queue::pallet::Error), + #[codec(index = 81)] + EthereumOutboundQueue( + runtime_types::snowbridge_pallet_outbound_queue::pallet::Error, + ), + 
#[codec(index = 82)] + EthereumBeaconClient( + runtime_types::snowbridge_pallet_ethereum_client::pallet::Error, + ), + #[codec(index = 83)] + EthereumSystem(runtime_types::snowbridge_pallet_system::pallet::Error), + #[codec(index = 175)] + MessageQueue(runtime_types::pallet_message_queue::pallet::Error), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum RuntimeEvent { @@ -365,6 +415,20 @@ pub mod api { BridgePolkadotParachains(runtime_types::pallet_bridge_parachains::pallet::Event), #[codec(index = 53)] BridgePolkadotMessages(runtime_types::pallet_bridge_messages::pallet::Event), + #[codec(index = 80)] + EthereumInboundQueue(runtime_types::snowbridge_pallet_inbound_queue::pallet::Event), + #[codec(index = 81)] + EthereumOutboundQueue( + runtime_types::snowbridge_pallet_outbound_queue::pallet::Event, + ), + #[codec(index = 82)] + EthereumBeaconClient( + runtime_types::snowbridge_pallet_ethereum_client::pallet::Event, + ), + #[codec(index = 83)] + EthereumSystem(runtime_types::snowbridge_pallet_system::pallet::Event), + #[codec(index = 175)] + MessageQueue(runtime_types::pallet_message_queue::pallet::Event), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum RuntimeHoldReason {} @@ -392,7 +456,7 @@ pub mod api { #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct RefundBridgedParachainMessages; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct RefundTransactionExtensionAdapter<_0>(pub _0); + pub struct RefundSignedExtensionAdapter<_0>(pub _0); } } pub mod cumulus_pallet_dmp_queue { @@ -400,65 +464,56 @@ pub mod api { pub mod pallet { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum Call { - #[codec(index = 0)] - service_overweight { - index: ::core::primitive::u64, - weight_limit: ::sp_weights::Weight, - }, - } + pub enum Call {} #[derive(:: codec :: Decode, :: 
codec :: Encode, Clone, Debug, PartialEq)] - pub enum Error { + pub enum Event { #[codec(index = 0)] - Unknown, + StartedExport, #[codec(index = 1)] - OverLimit, + Exported { page: ::core::primitive::u32 }, + #[codec(index = 2)] + ExportFailed { page: ::core::primitive::u32 }, + #[codec(index = 3)] + CompletedExport, + #[codec(index = 4)] + StartedOverweightExport, + #[codec(index = 5)] + ExportedOverweight { index: ::core::primitive::u64 }, + #[codec(index = 6)] + ExportOverweightFailed { index: ::core::primitive::u64 }, + #[codec(index = 7)] + CompletedOverweightExport, + #[codec(index = 8)] + StartedCleanup, + #[codec(index = 9)] + CleanedSome { keys_removed: ::core::primitive::u32 }, + #[codec(index = 10)] + Completed { error: ::core::primitive::bool }, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum Event { + pub enum MigrationState { #[codec(index = 0)] - InvalidFormat { message_hash: [::core::primitive::u8; 32usize] }, + NotStarted, #[codec(index = 1)] - UnsupportedVersion { message_hash: [::core::primitive::u8; 32usize] }, + StartedExport { next_begin_used: ::core::primitive::u32 }, #[codec(index = 2)] - ExecutedDownward { - message_hash: [::core::primitive::u8; 32usize], - message_id: [::core::primitive::u8; 32usize], - outcome: runtime_types::xcm::v3::traits::Outcome, - }, + CompletedExport, #[codec(index = 3)] - WeightExhausted { - message_hash: [::core::primitive::u8; 32usize], - message_id: [::core::primitive::u8; 32usize], - remaining_weight: ::sp_weights::Weight, - required_weight: ::sp_weights::Weight, - }, + StartedOverweightExport { next_overweight_index: ::core::primitive::u64 }, #[codec(index = 4)] - OverweightEnqueued { - message_hash: [::core::primitive::u8; 32usize], - message_id: [::core::primitive::u8; 32usize], - overweight_index: ::core::primitive::u64, - required_weight: ::sp_weights::Weight, - }, + CompletedOverweightExport, #[codec(index = 5)] - OverweightServiced { - overweight_index: 
::core::primitive::u64, - weight_used: ::sp_weights::Weight, + StartedCleanup { + cursor: ::core::option::Option< + runtime_types::bounded_collections::bounded_vec::BoundedVec< + ::core::primitive::u8, + >, + >, }, #[codec(index = 6)] - MaxMessagesExhausted { message_hash: [::core::primitive::u8; 32usize] }, + Completed, } } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct ConfigData { - pub max_individual: ::sp_weights::Weight, - } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct PageIndexData { - pub begin_used: ::core::primitive::u32, - pub end_used: ::core::primitive::u32, - pub overweight_count: ::core::primitive::u64, - } } pub mod cumulus_pallet_parachain_system { use super::runtime_types; @@ -495,15 +550,13 @@ pub mod api { #[codec(index = 2)] ValidationFunctionDiscarded, #[codec(index = 3)] - UpgradeAuthorized { code_hash: ::subxt::utils::H256 }, - #[codec(index = 4)] DownwardMessagesReceived { count: ::core::primitive::u32 }, - #[codec(index = 5)] + #[codec(index = 4)] DownwardMessagesProcessed { weight_used: ::sp_weights::Weight, dmq_head: ::subxt::utils::H256, }, - #[codec(index = 6)] + #[codec(index = 5)] UpwardMessageSent { message_hash: ::core::option::Option<[::core::primitive::u8; 32usize]>, }, @@ -533,18 +586,13 @@ pub mod api { #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct UsedBandwidth { pub ump_msg_count : :: core :: primitive :: u32 , pub ump_total_bytes : :: core :: primitive :: u32 , pub hrmp_outgoing : :: subxt :: utils :: KeyedVec < runtime_types :: polkadot_parachain_primitives :: primitives :: Id , runtime_types :: cumulus_pallet_parachain_system :: unincluded_segment :: HrmpChannelUpdate > , } } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct CodeUpgradeAuthorization { - pub code_hash: ::subxt::utils::H256, - pub check_version: ::core::primitive::bool, - } } pub mod 
cumulus_pallet_xcm { use super::runtime_types; pub mod pallet { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum Error {} + pub enum Call {} #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Event { #[codec(index = 0)] @@ -554,7 +602,7 @@ pub mod api { #[codec(index = 2)] ExecutedDownward( [::core::primitive::u8; 32usize], - runtime_types::xcm::v3::traits::Outcome, + runtime_types::staging_xcm::v4::traits::Outcome, ), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] @@ -572,11 +620,6 @@ pub mod api { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Call { - #[codec(index = 0)] - service_overweight { - index: ::core::primitive::u64, - weight_limit: ::sp_weights::Weight, - }, #[codec(index = 1)] suspend_xcm_execution, #[codec(index = 2)] @@ -587,75 +630,23 @@ pub mod api { update_drop_threshold { new: ::core::primitive::u32 }, #[codec(index = 5)] update_resume_threshold { new: ::core::primitive::u32 }, - #[codec(index = 6)] - update_threshold_weight { new: ::sp_weights::Weight }, - #[codec(index = 7)] - update_weight_restrict_decay { new: ::sp_weights::Weight }, - #[codec(index = 8)] - update_xcmp_max_individual_weight { new: ::sp_weights::Weight }, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Error { #[codec(index = 0)] - FailedToSend, + BadQueueConfig, #[codec(index = 1)] - BadXcmOrigin, + AlreadySuspended, #[codec(index = 2)] - BadXcm, - #[codec(index = 3)] - BadOverweightIndex, - #[codec(index = 4)] - WeightOverLimit, + AlreadyResumed, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Event { #[codec(index = 0)] - Success { - message_hash: [::core::primitive::u8; 32usize], - message_id: [::core::primitive::u8; 32usize], - weight: ::sp_weights::Weight, - }, - #[codec(index = 1)] - Fail { - message_hash: 
[::core::primitive::u8; 32usize], - message_id: [::core::primitive::u8; 32usize], - error: runtime_types::xcm::v3::traits::Error, - weight: ::sp_weights::Weight, - }, - #[codec(index = 2)] - BadVersion { message_hash: [::core::primitive::u8; 32usize] }, - #[codec(index = 3)] - BadFormat { message_hash: [::core::primitive::u8; 32usize] }, - #[codec(index = 4)] XcmpMessageSent { message_hash: [::core::primitive::u8; 32usize] }, - #[codec(index = 5)] - OverweightEnqueued { - sender: runtime_types::polkadot_parachain_primitives::primitives::Id, - sent_at: ::core::primitive::u32, - index: ::core::primitive::u64, - required: ::sp_weights::Weight, - }, - #[codec(index = 6)] - OverweightServiced { index: ::core::primitive::u64, used: ::sp_weights::Weight }, } } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct InboundChannelDetails { - pub sender: runtime_types::polkadot_parachain_primitives::primitives::Id, - pub state: runtime_types::cumulus_pallet_xcmp_queue::InboundState, - pub message_metadata: ::std::vec::Vec<( - ::core::primitive::u32, - runtime_types::polkadot_parachain_primitives::primitives::XcmpMessageFormat, - )>, - } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum InboundState { - #[codec(index = 0)] - Ok, - #[codec(index = 1)] - Suspended, - } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct OutboundChannelDetails { pub recipient: runtime_types::polkadot_parachain_primitives::primitives::Id, pub state: runtime_types::cumulus_pallet_xcmp_queue::OutboundState, @@ -675,9 +666,6 @@ pub mod api { pub suspend_threshold: ::core::primitive::u32, pub drop_threshold: ::core::primitive::u32, pub resume_threshold: ::core::primitive::u32, - pub threshold_weight: ::sp_weights::Weight, - pub weight_restrict_decay: ::sp_weights::Weight, - pub xcmp_max_individual_weight: ::sp_weights::Weight, } } pub mod cumulus_primitives_core { @@ -791,6 +779,22 @@ pub mod 
api { } pub mod traits { use super::runtime_types; + pub mod messages { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum ProcessMessageError { + #[codec(index = 0)] + BadFormat, + #[codec(index = 1)] + Corrupt, + #[codec(index = 2)] + Unsupported, + #[codec(index = 3)] + Overweight(::sp_weights::Weight), + #[codec(index = 4)] + Yield, + } + } pub mod tokens { use super::runtime_types; pub mod misc { @@ -900,6 +904,12 @@ pub mod api { }, #[codec(index = 7)] remark_with_event { remark: ::std::vec::Vec<::core::primitive::u8> }, + #[codec(index = 9)] + authorize_upgrade { code_hash: ::subxt::utils::H256 }, + #[codec(index = 10)] + authorize_upgrade_without_checks { code_hash: ::subxt::utils::H256 }, + #[codec(index = 11)] + apply_authorized_upgrade { code: ::std::vec::Vec<::core::primitive::u8> }, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Error { @@ -915,6 +925,10 @@ pub mod api { NonZeroRefCount, #[codec(index = 5)] CallFiltered, + #[codec(index = 6)] + NothingAuthorized, + #[codec(index = 7)] + Unauthorized, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Event { @@ -935,6 +949,11 @@ pub mod api { KilledAccount { account: ::sp_core::crypto::AccountId32 }, #[codec(index = 5)] Remarked { sender: ::sp_core::crypto::AccountId32, hash: ::subxt::utils::H256 }, + #[codec(index = 6)] + UpgradeAuthorized { + code_hash: ::subxt::utils::H256, + check_version: ::core::primitive::bool, + }, } } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] @@ -946,6 +965,11 @@ pub mod api { pub data: _1, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct CodeUpgradeAuthorization { + pub code_hash: ::subxt::utils::H256, + pub check_version: ::core::primitive::bool, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct EventRecord<_0, _1> { pub 
phase: runtime_types::frame_system::Phase, pub event: _0, @@ -1010,6 +1034,12 @@ pub mod api { #[codec(compact)] new_free: ::core::primitive::u128, }, + #[codec(index = 9)] + force_adjust_total_issuance { + direction: runtime_types::pallet_balances::types::AdjustmentDirection, + #[codec(compact)] + delta: ::core::primitive::u128, + }, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Error { @@ -1033,6 +1063,10 @@ pub mod api { TooManyHolds, #[codec(index = 9)] TooManyFreezes, + #[codec(index = 10)] + IssuanceDeactivated, + #[codec(index = 11)] + DeltaZero, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Event { @@ -1115,6 +1149,11 @@ pub mod api { Frozen { who: ::sp_core::crypto::AccountId32, amount: ::core::primitive::u128 }, #[codec(index = 20)] Thawed { who: ::sp_core::crypto::AccountId32, amount: ::core::primitive::u128 }, + #[codec(index = 21)] + TotalIssuanceForced { + old: ::core::primitive::u128, + new: ::core::primitive::u128, + }, } } pub mod types { @@ -1127,6 +1166,13 @@ pub mod api { pub flags: runtime_types::pallet_balances::types::ExtraFlags, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum AdjustmentDirection { + #[codec(index = 0)] + Increase, + #[codec(index = 1)] + Decrease, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct BalanceLock<_0> { pub id: [::core::primitive::u8; 8usize], pub amount: _0, @@ -1271,7 +1317,7 @@ pub mod api { # [codec (index = 0)] set_owner { new_owner : :: core :: option :: Option < :: sp_core :: crypto :: AccountId32 > , } , # [codec (index = 1)] set_operating_mode { operating_mode : runtime_types :: bp_messages :: MessagesOperatingMode , } , # [codec (index = 2)] receive_messages_proof { relayer_id_at_bridged_chain : :: sp_core :: crypto :: AccountId32 , proof : :: bridge_runtime_common :: messages :: target :: FromBridgedChainMessagesProof < :: subxt :: utils :: 
H256 > , messages_count : :: core :: primitive :: u32 , dispatch_weight : :: sp_weights :: Weight , } , # [codec (index = 3)] receive_messages_delivery_proof { proof : :: bridge_runtime_common :: messages :: source :: FromBridgedChainMessagesDeliveryProof < :: subxt :: utils :: H256 > , relayers_state : :: bp_messages :: UnrewardedRelayersState , } , } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Error { - # [codec (index = 0)] NotOperatingNormally , # [codec (index = 1)] InactiveOutboundLane , # [codec (index = 2)] MessageDispatchInactive , # [codec (index = 3)] MessageRejectedByChainVerifier (runtime_types :: bp_messages :: VerificationError ,) , # [codec (index = 4)] MessageRejectedByLaneVerifier (runtime_types :: bp_messages :: VerificationError ,) , # [codec (index = 5)] MessageRejectedByPallet (runtime_types :: bp_messages :: VerificationError ,) , # [codec (index = 6)] FailedToWithdrawMessageFee , # [codec (index = 7)] TooManyMessagesInTheProof , # [codec (index = 8)] InvalidMessagesProof , # [codec (index = 9)] InvalidMessagesDeliveryProof , # [codec (index = 10)] InvalidUnrewardedRelayersState , # [codec (index = 11)] InsufficientDispatchWeight , # [codec (index = 12)] MessageIsNotYetSent , # [codec (index = 13)] ReceivalConfirmation (runtime_types :: pallet_bridge_messages :: outbound_lane :: ReceivalConfirmationError ,) , # [codec (index = 14)] BridgeModule (runtime_types :: bp_runtime :: OwnedBridgeModuleError ,) , } + # [codec (index = 0)] NotOperatingNormally , # [codec (index = 1)] InactiveOutboundLane , # [codec (index = 2)] MessageDispatchInactive , # [codec (index = 3)] MessageRejectedByChainVerifier (runtime_types :: bp_messages :: VerificationError ,) , # [codec (index = 4)] MessageRejectedByPallet (runtime_types :: bp_messages :: VerificationError ,) , # [codec (index = 5)] FailedToWithdrawMessageFee , # [codec (index = 6)] TooManyMessagesInTheProof , # [codec (index = 7)] InvalidMessagesProof , # [codec 
(index = 8)] InvalidMessagesDeliveryProof , # [codec (index = 9)] InvalidUnrewardedRelayersState , # [codec (index = 10)] InsufficientDispatchWeight , # [codec (index = 11)] MessageIsNotYetSent , # [codec (index = 12)] ReceivalConfirmation (runtime_types :: pallet_bridge_messages :: outbound_lane :: ReceivalConfirmationError ,) , # [codec (index = 13)] BridgeModule (runtime_types :: bp_runtime :: OwnedBridgeModuleError ,) , } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Event { # [codec (index = 0)] MessageAccepted { lane_id : runtime_types :: bp_messages :: LaneId , nonce : :: core :: primitive :: u64 , } , # [codec (index = 1)] MessagesReceived (:: std :: vec :: Vec < runtime_types :: bp_messages :: ReceivedMessages < runtime_types :: bridge_runtime_common :: messages_xcm_extension :: XcmBlobMessageDispatchResult > > ,) , # [codec (index = 2)] MessagesDelivered { lane_id : runtime_types :: bp_messages :: LaneId , messages : runtime_types :: bp_messages :: DeliveredMessages , } , } @@ -1378,12 +1424,18 @@ pub mod api { #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Event { #[codec(index = 0)] - RewardPaid { + RewardRegistered { relayer: ::sp_core::crypto::AccountId32, rewards_account_params: runtime_types::bp_relayers::RewardsAccountParams, reward: ::core::primitive::u128, }, #[codec(index = 1)] + RewardPaid { + relayer: ::sp_core::crypto::AccountId32, + rewards_account_params: runtime_types::bp_relayers::RewardsAccountParams, + reward: ::core::primitive::u128, + }, + #[codec(index = 2)] RegistrationUpdated { relayer: ::sp_core::crypto::AccountId32, registration: runtime_types::bp_relayers::registration::Registration< @@ -1391,9 +1443,9 @@ pub mod api { ::core::primitive::u128, >, }, - #[codec(index = 2)] - Deregistered { relayer: ::sp_core::crypto::AccountId32 }, #[codec(index = 3)] + Deregistered { relayer: ::sp_core::crypto::AccountId32 }, + #[codec(index = 4)] SlashedAndDeregistered { 
relayer: ::sp_core::crypto::AccountId32, registration: runtime_types::bp_relayers::registration::Registration< @@ -1424,6 +1476,13 @@ pub mod api { add_invulnerable { who: ::sp_core::crypto::AccountId32 }, #[codec(index = 6)] remove_invulnerable { who: ::sp_core::crypto::AccountId32 }, + #[codec(index = 7)] + update_bond { new_deposit: ::core::primitive::u128 }, + #[codec(index = 8)] + take_candidate_slot { + deposit: ::core::primitive::u128, + target: ::sp_core::crypto::AccountId32, + }, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct CandidateInfo<_0, _1> { @@ -1450,6 +1509,22 @@ pub mod api { NoAssociatedValidatorId, #[codec(index = 8)] ValidatorNotRegistered, + #[codec(index = 9)] + InsertToCandidateListFailed, + #[codec(index = 10)] + RemoveFromCandidateListFailed, + #[codec(index = 11)] + DepositTooLow, + #[codec(index = 12)] + UpdateCandidateListFailed, + #[codec(index = 13)] + InsufficientBond, + #[codec(index = 14)] + TargetIsNotCandidate, + #[codec(index = 15)] + IdenticalDeposit, + #[codec(index = 16)] + InvalidUnreserve, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Event { @@ -1471,12 +1546,125 @@ pub mod api { deposit: ::core::primitive::u128, }, #[codec(index = 6)] - CandidateRemoved { account_id: ::sp_core::crypto::AccountId32 }, + CandidateBondUpdated { + account_id: ::sp_core::crypto::AccountId32, + deposit: ::core::primitive::u128, + }, #[codec(index = 7)] + CandidateRemoved { account_id: ::sp_core::crypto::AccountId32 }, + #[codec(index = 8)] + CandidateReplaced { + old: ::sp_core::crypto::AccountId32, + new: ::sp_core::crypto::AccountId32, + deposit: ::core::primitive::u128, + }, + #[codec(index = 9)] InvalidInvulnerableSkipped { account_id: ::sp_core::crypto::AccountId32 }, } } } + pub mod pallet_message_queue { + use super::runtime_types; + pub mod pallet { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + 
pub enum Call { + #[codec(index = 0)] + reap_page { + message_origin: + runtime_types::bridge_hub_common::message_queue::AggregateMessageOrigin, + page_index: ::core::primitive::u32, + }, + #[codec(index = 1)] + execute_overweight { + message_origin: + runtime_types::bridge_hub_common::message_queue::AggregateMessageOrigin, + page: ::core::primitive::u32, + index: ::core::primitive::u32, + weight_limit: ::sp_weights::Weight, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Error { + #[codec(index = 0)] + NotReapable, + #[codec(index = 1)] + NoPage, + #[codec(index = 2)] + NoMessage, + #[codec(index = 3)] + AlreadyProcessed, + #[codec(index = 4)] + Queued, + #[codec(index = 5)] + InsufficientWeight, + #[codec(index = 6)] + TemporarilyUnprocessable, + #[codec(index = 7)] + QueuePaused, + #[codec(index = 8)] + RecursiveDisallowed, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Event { + #[codec(index = 0)] + ProcessingFailed { + id: ::subxt::utils::H256, + origin: + runtime_types::bridge_hub_common::message_queue::AggregateMessageOrigin, + error: runtime_types::frame_support::traits::messages::ProcessMessageError, + }, + #[codec(index = 1)] + Processed { + id: ::subxt::utils::H256, + origin: + runtime_types::bridge_hub_common::message_queue::AggregateMessageOrigin, + weight_used: ::sp_weights::Weight, + success: ::core::primitive::bool, + }, + #[codec(index = 2)] + OverweightEnqueued { + id: [::core::primitive::u8; 32usize], + origin: + runtime_types::bridge_hub_common::message_queue::AggregateMessageOrigin, + page_index: ::core::primitive::u32, + message_index: ::core::primitive::u32, + }, + #[codec(index = 3)] + PageReaped { + origin: + runtime_types::bridge_hub_common::message_queue::AggregateMessageOrigin, + index: ::core::primitive::u32, + }, + } + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct BookState<_0> { + pub begin: 
::core::primitive::u32, + pub end: ::core::primitive::u32, + pub count: ::core::primitive::u32, + pub ready_neighbours: + ::core::option::Option>, + pub message_count: ::core::primitive::u64, + pub size: ::core::primitive::u64, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Neighbours<_0> { + pub prev: _0, + pub next: _0, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Page<_0> { + pub remaining: _0, + pub remaining_size: _0, + pub first_index: _0, + pub first: _0, + pub last: _0, + pub heap: runtime_types::bounded_collections::bounded_vec::BoundedVec< + ::core::primitive::u8, + >, + } + } pub mod pallet_multisig { use super::runtime_types; pub mod pallet { @@ -1776,21 +1964,21 @@ pub mod api { pub enum Call { #[codec(index = 0)] send { - dest: ::std::boxed::Box, + dest: ::std::boxed::Box, message: ::std::boxed::Box, }, #[codec(index = 1)] teleport_assets { - dest: ::std::boxed::Box, - beneficiary: ::std::boxed::Box, - assets: ::std::boxed::Box, + dest: ::std::boxed::Box, + beneficiary: ::std::boxed::Box, + assets: ::std::boxed::Box, fee_asset_item: ::core::primitive::u32, }, #[codec(index = 2)] reserve_transfer_assets { - dest: ::std::boxed::Box, - beneficiary: ::std::boxed::Box, - assets: ::std::boxed::Box, + dest: ::std::boxed::Box, + beneficiary: ::std::boxed::Box, + assets: ::std::boxed::Box, fee_asset_item: ::core::primitive::u32, }, #[codec(index = 3)] @@ -1800,9 +1988,8 @@ pub mod api { }, #[codec(index = 4)] force_xcm_version { - location: ::std::boxed::Box< - runtime_types::staging_xcm::v3::multilocation::MultiLocation, - >, + location: + ::std::boxed::Box, version: ::core::primitive::u32, }, #[codec(index = 5)] @@ -1811,30 +1998,43 @@ pub mod api { }, #[codec(index = 6)] force_subscribe_version_notify { - location: ::std::boxed::Box, + location: ::std::boxed::Box, }, #[codec(index = 7)] force_unsubscribe_version_notify { - location: ::std::boxed::Box, + location: 
::std::boxed::Box, }, #[codec(index = 8)] limited_reserve_transfer_assets { - dest: ::std::boxed::Box, - beneficiary: ::std::boxed::Box, - assets: ::std::boxed::Box, + dest: ::std::boxed::Box, + beneficiary: ::std::boxed::Box, + assets: ::std::boxed::Box, fee_asset_item: ::core::primitive::u32, weight_limit: runtime_types::xcm::v3::WeightLimit, }, #[codec(index = 9)] limited_teleport_assets { - dest: ::std::boxed::Box, - beneficiary: ::std::boxed::Box, - assets: ::std::boxed::Box, + dest: ::std::boxed::Box, + beneficiary: ::std::boxed::Box, + assets: ::std::boxed::Box, fee_asset_item: ::core::primitive::u32, weight_limit: runtime_types::xcm::v3::WeightLimit, }, #[codec(index = 10)] force_suspension { suspended: ::core::primitive::bool }, + #[codec(index = 11)] + transfer_assets { + dest: ::std::boxed::Box, + beneficiary: ::std::boxed::Box, + assets: ::std::boxed::Box, + fee_asset_item: ::core::primitive::u32, + weight_limit: runtime_types::xcm::v3::WeightLimit, + }, + #[codec(index = 12)] + claim_assets { + assets: ::std::boxed::Box, + beneficiary: ::std::boxed::Box, + }, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Error { @@ -1865,7 +2065,7 @@ pub mod api { #[codec(index = 12)] AlreadySubscribed, #[codec(index = 13)] - InvalidAsset, + CannotCheckOutTeleport, #[codec(index = 14)] LowBalance, #[codec(index = 15)] @@ -1878,27 +2078,37 @@ pub mod api { LockNotFound, #[codec(index = 19)] InUse, + #[codec(index = 20)] + InvalidAssetNotConcrete, + #[codec(index = 21)] + InvalidAssetUnknownReserve, + #[codec(index = 22)] + InvalidAssetUnsupportedReserve, + #[codec(index = 23)] + TooManyReserves, + #[codec(index = 24)] + LocalExecutionIncomplete, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Event { #[codec(index = 0)] - Attempted { outcome: runtime_types::xcm::v3::traits::Outcome }, + Attempted { outcome: runtime_types::staging_xcm::v4::traits::Outcome }, #[codec(index = 1)] Sent { - origin: 
runtime_types::staging_xcm::v3::multilocation::MultiLocation, - destination: runtime_types::staging_xcm::v3::multilocation::MultiLocation, - message: runtime_types::xcm::v3::Xcm, + origin: runtime_types::staging_xcm::v4::location::Location, + destination: runtime_types::staging_xcm::v4::location::Location, + message: runtime_types::staging_xcm::v4::Xcm, message_id: [::core::primitive::u8; 32usize], }, #[codec(index = 2)] UnexpectedResponse { - origin: runtime_types::staging_xcm::v3::multilocation::MultiLocation, + origin: runtime_types::staging_xcm::v4::location::Location, query_id: ::core::primitive::u64, }, #[codec(index = 3)] ResponseReady { query_id: ::core::primitive::u64, - response: runtime_types::xcm::v3::Response, + response: runtime_types::staging_xcm::v4::Response, }, #[codec(index = 4)] Notified { @@ -1928,15 +2138,15 @@ pub mod api { }, #[codec(index = 8)] InvalidResponder { - origin: runtime_types::staging_xcm::v3::multilocation::MultiLocation, + origin: runtime_types::staging_xcm::v4::location::Location, query_id: ::core::primitive::u64, expected_location: ::core::option::Option< - runtime_types::staging_xcm::v3::multilocation::MultiLocation, + runtime_types::staging_xcm::v4::location::Location, >, }, #[codec(index = 9)] InvalidResponderVersion { - origin: runtime_types::staging_xcm::v3::multilocation::MultiLocation, + origin: runtime_types::staging_xcm::v4::location::Location, query_id: ::core::primitive::u64, }, #[codec(index = 10)] @@ -1944,98 +2154,99 @@ pub mod api { #[codec(index = 11)] AssetsTrapped { hash: ::subxt::utils::H256, - origin: runtime_types::staging_xcm::v3::multilocation::MultiLocation, - assets: runtime_types::xcm::VersionedMultiAssets, + origin: runtime_types::staging_xcm::v4::location::Location, + assets: runtime_types::xcm::VersionedAssets, }, #[codec(index = 12)] VersionChangeNotified { - destination: runtime_types::staging_xcm::v3::multilocation::MultiLocation, + destination: 
runtime_types::staging_xcm::v4::location::Location, result: ::core::primitive::u32, - cost: runtime_types::xcm::v3::multiasset::MultiAssets, + cost: runtime_types::staging_xcm::v4::asset::Assets, message_id: [::core::primitive::u8; 32usize], }, #[codec(index = 13)] SupportedVersionChanged { - location: runtime_types::staging_xcm::v3::multilocation::MultiLocation, + location: runtime_types::staging_xcm::v4::location::Location, version: ::core::primitive::u32, }, #[codec(index = 14)] NotifyTargetSendFail { - location: runtime_types::staging_xcm::v3::multilocation::MultiLocation, + location: runtime_types::staging_xcm::v4::location::Location, query_id: ::core::primitive::u64, error: runtime_types::xcm::v3::traits::Error, }, #[codec(index = 15)] NotifyTargetMigrationFail { - location: runtime_types::xcm::VersionedMultiLocation, + location: runtime_types::xcm::VersionedLocation, query_id: ::core::primitive::u64, }, #[codec(index = 16)] InvalidQuerierVersion { - origin: runtime_types::staging_xcm::v3::multilocation::MultiLocation, + origin: runtime_types::staging_xcm::v4::location::Location, query_id: ::core::primitive::u64, }, #[codec(index = 17)] InvalidQuerier { - origin: runtime_types::staging_xcm::v3::multilocation::MultiLocation, + origin: runtime_types::staging_xcm::v4::location::Location, query_id: ::core::primitive::u64, - expected_querier: - runtime_types::staging_xcm::v3::multilocation::MultiLocation, + expected_querier: runtime_types::staging_xcm::v4::location::Location, maybe_actual_querier: ::core::option::Option< - runtime_types::staging_xcm::v3::multilocation::MultiLocation, + runtime_types::staging_xcm::v4::location::Location, >, }, #[codec(index = 18)] VersionNotifyStarted { - destination: runtime_types::staging_xcm::v3::multilocation::MultiLocation, - cost: runtime_types::xcm::v3::multiasset::MultiAssets, + destination: runtime_types::staging_xcm::v4::location::Location, + cost: runtime_types::staging_xcm::v4::asset::Assets, message_id: 
[::core::primitive::u8; 32usize], }, #[codec(index = 19)] VersionNotifyRequested { - destination: runtime_types::staging_xcm::v3::multilocation::MultiLocation, - cost: runtime_types::xcm::v3::multiasset::MultiAssets, + destination: runtime_types::staging_xcm::v4::location::Location, + cost: runtime_types::staging_xcm::v4::asset::Assets, message_id: [::core::primitive::u8; 32usize], }, #[codec(index = 20)] VersionNotifyUnrequested { - destination: runtime_types::staging_xcm::v3::multilocation::MultiLocation, - cost: runtime_types::xcm::v3::multiasset::MultiAssets, + destination: runtime_types::staging_xcm::v4::location::Location, + cost: runtime_types::staging_xcm::v4::asset::Assets, message_id: [::core::primitive::u8; 32usize], }, #[codec(index = 21)] FeesPaid { - paying: runtime_types::staging_xcm::v3::multilocation::MultiLocation, - fees: runtime_types::xcm::v3::multiasset::MultiAssets, + paying: runtime_types::staging_xcm::v4::location::Location, + fees: runtime_types::staging_xcm::v4::asset::Assets, }, #[codec(index = 22)] AssetsClaimed { hash: ::subxt::utils::H256, - origin: runtime_types::staging_xcm::v3::multilocation::MultiLocation, - assets: runtime_types::xcm::VersionedMultiAssets, + origin: runtime_types::staging_xcm::v4::location::Location, + assets: runtime_types::xcm::VersionedAssets, }, + #[codec(index = 23)] + VersionMigrationFinished { version: ::core::primitive::u32 }, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Origin { #[codec(index = 0)] - Xcm(runtime_types::staging_xcm::v3::multilocation::MultiLocation), + Xcm(runtime_types::staging_xcm::v4::location::Location), #[codec(index = 1)] - Response(runtime_types::staging_xcm::v3::multilocation::MultiLocation), + Response(runtime_types::staging_xcm::v4::location::Location), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum QueryStatus<_0> { #[codec(index = 0)] Pending { - responder: 
runtime_types::xcm::VersionedMultiLocation, + responder: runtime_types::xcm::VersionedLocation, maybe_match_querier: - ::core::option::Option, + ::core::option::Option, maybe_notify: ::core::option::Option<(::core::primitive::u8, ::core::primitive::u8)>, timeout: _0, }, #[codec(index = 1)] VersionNotifier { - origin: runtime_types::xcm::VersionedMultiLocation, + origin: runtime_types::xcm::VersionedLocation, is_active: ::core::primitive::bool, }, #[codec(index = 2)] @@ -2044,8 +2255,8 @@ pub mod api { #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct RemoteLockedFungibleRecord<_0> { pub amount: ::core::primitive::u128, - pub owner: runtime_types::xcm::VersionedMultiLocation, - pub locker: runtime_types::xcm::VersionedMultiLocation, + pub owner: runtime_types::xcm::VersionedLocation, + pub locker: runtime_types::xcm::VersionedLocation, pub consumers: runtime_types::bounded_collections::bounded_vec::BoundedVec<( _0, ::core::primitive::u128, @@ -2101,15 +2312,6 @@ pub mod api { pub struct Id(pub ::core::primitive::u32); #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct ValidationCode(pub ::std::vec::Vec<::core::primitive::u8>); - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum XcmpMessageFormat { - #[codec(index = 0)] - ConcatenatedVersionedXcm, - #[codec(index = 1)] - ConcatenatedEncodedBlob, - #[codec(index = 2)] - Signals, - } } } pub mod polkadot_primitives { @@ -2169,339 +2371,1739 @@ pub mod api { } } } - pub mod sp_arithmetic { + pub mod primitive_types { use super::runtime_types; - pub mod fixed_point { - use super::runtime_types; - #[derive( - :: codec :: Decode, - :: codec :: Encode, - :: subxt :: ext :: codec :: CompactAs, - Clone, - Debug, - PartialEq, - )] - pub struct FixedU128(pub ::core::primitive::u128); - } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum ArithmeticError { - #[codec(index = 0)] - 
Underflow, - #[codec(index = 1)] - Overflow, - #[codec(index = 2)] - DivisionByZero, - } + pub struct U256(pub [::core::primitive::u64; 4usize]); } - pub mod sp_consensus_aura { + pub mod snowbridge_amcl { use super::runtime_types; - pub mod sr25519 { + pub mod bls381 { use super::runtime_types; - pub mod app_sr25519 { + pub mod big { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Public(pub runtime_types::sp_core::sr25519::Public); + pub struct Big { + pub w: [::core::primitive::i32; 14usize], + } + } + pub mod ecp { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct ECP { + pub x: runtime_types::snowbridge_amcl::bls381::fp::FP, + pub y: runtime_types::snowbridge_amcl::bls381::fp::FP, + pub z: runtime_types::snowbridge_amcl::bls381::fp::FP, + } + } + pub mod fp { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct FP { + pub x: runtime_types::snowbridge_amcl::bls381::big::Big, + pub xes: ::core::primitive::i32, + } } } } - pub mod sp_consensus_grandpa { + pub mod snowbridge_beacon_primitives { use super::runtime_types; - pub mod app { + pub mod bls { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Public(pub runtime_types::sp_core::ed25519::Public); + pub enum BlsError { + #[codec(index = 0)] + InvalidSignature, + #[codec(index = 1)] + InvalidPublicKey, + #[codec(index = 2)] + InvalidAggregatePublicKeys, + #[codec(index = 3)] + SignatureVerificationFailed, + } + } + pub mod types { + use super::runtime_types; + pub mod deneb { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct ExecutionPayloadHeader { + pub parent_hash: ::subxt::utils::H256, + pub fee_recipient: ::subxt::utils::H160, + pub state_root: ::subxt::utils::H256, + pub 
receipts_root: ::subxt::utils::H256, + pub logs_bloom: ::std::vec::Vec<::core::primitive::u8>, + pub prev_randao: ::subxt::utils::H256, + pub block_number: ::core::primitive::u64, + pub gas_limit: ::core::primitive::u64, + pub gas_used: ::core::primitive::u64, + pub timestamp: ::core::primitive::u64, + pub extra_data: ::std::vec::Vec<::core::primitive::u8>, + pub base_fee_per_gas: runtime_types::primitive_types::U256, + pub block_hash: ::subxt::utils::H256, + pub transactions_root: ::subxt::utils::H256, + pub withdrawals_root: ::subxt::utils::H256, + pub blob_gas_used: ::core::primitive::u64, + pub excess_blob_gas: ::core::primitive::u64, + } + } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Signature(pub runtime_types::sp_core::ed25519::Signature); + pub struct BeaconHeader { + pub slot: ::core::primitive::u64, + pub proposer_index: ::core::primitive::u64, + pub parent_root: ::subxt::utils::H256, + pub state_root: ::subxt::utils::H256, + pub body_root: ::subxt::utils::H256, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct CompactBeaconState { + #[codec(compact)] + pub slot: ::core::primitive::u64, + pub block_roots_root: ::subxt::utils::H256, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct CompactExecutionHeader { + pub parent_hash: ::subxt::utils::H256, + #[codec(compact)] + pub block_number: ::core::primitive::u64, + pub state_root: ::subxt::utils::H256, + pub receipts_root: ::subxt::utils::H256, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct ExecutionHeaderState { + pub beacon_block_root: ::subxt::utils::H256, + pub beacon_slot: ::core::primitive::u64, + pub block_hash: ::subxt::utils::H256, + pub block_number: ::core::primitive::u64, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct ExecutionPayloadHeader { + pub parent_hash: 
::subxt::utils::H256, + pub fee_recipient: ::subxt::utils::H160, + pub state_root: ::subxt::utils::H256, + pub receipts_root: ::subxt::utils::H256, + pub logs_bloom: ::std::vec::Vec<::core::primitive::u8>, + pub prev_randao: ::subxt::utils::H256, + pub block_number: ::core::primitive::u64, + pub gas_limit: ::core::primitive::u64, + pub gas_used: ::core::primitive::u64, + pub timestamp: ::core::primitive::u64, + pub extra_data: ::std::vec::Vec<::core::primitive::u8>, + pub base_fee_per_gas: runtime_types::primitive_types::U256, + pub block_hash: ::subxt::utils::H256, + pub transactions_root: ::subxt::utils::H256, + pub withdrawals_root: ::subxt::utils::H256, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Fork { + pub version: [::core::primitive::u8; 4usize], + pub epoch: ::core::primitive::u64, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct ForkVersions { + pub genesis: runtime_types::snowbridge_beacon_primitives::types::Fork, + pub altair: runtime_types::snowbridge_beacon_primitives::types::Fork, + pub bellatrix: runtime_types::snowbridge_beacon_primitives::types::Fork, + pub capella: runtime_types::snowbridge_beacon_primitives::types::Fork, + pub deneb: runtime_types::snowbridge_beacon_primitives::types::Fork, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct PublicKey(pub [::core::primitive::u8; 48usize]); + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Signature(pub [::core::primitive::u8; 96usize]); + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct SyncAggregate { + pub sync_committee_bits: [::core::primitive::u8; 64usize], + pub sync_committee_signature: + runtime_types::snowbridge_beacon_primitives::types::Signature, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct SyncCommittee { + pub pubkeys: + 
[runtime_types::snowbridge_beacon_primitives::types::PublicKey; 512usize], + pub aggregate_pubkey: + runtime_types::snowbridge_beacon_primitives::types::PublicKey, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct SyncCommitteePrepared { + pub root: ::subxt::utils::H256, + pub pubkeys: ::std::boxed::Box< + [runtime_types::snowbridge_milagro_bls::keys::PublicKey; 512usize], + >, + pub aggregate_pubkey: runtime_types::snowbridge_milagro_bls::keys::PublicKey, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum VersionedExecutionPayloadHeader { + # [codec (index = 0)] Capella (runtime_types :: snowbridge_beacon_primitives :: types :: ExecutionPayloadHeader ,) , # [codec (index = 1)] Deneb (runtime_types :: snowbridge_beacon_primitives :: types :: deneb :: ExecutionPayloadHeader ,) , } + } + pub mod updates { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct AncestryProof { + pub header_branch: ::std::vec::Vec<::subxt::utils::H256>, + pub finalized_block_root: ::subxt::utils::H256, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct CheckpointUpdate { + pub header: runtime_types::snowbridge_beacon_primitives::types::BeaconHeader, + pub current_sync_committee: + runtime_types::snowbridge_beacon_primitives::types::SyncCommittee, + pub current_sync_committee_branch: ::std::vec::Vec<::subxt::utils::H256>, + pub validators_root: ::subxt::utils::H256, + pub block_roots_root: ::subxt::utils::H256, + pub block_roots_branch: ::std::vec::Vec<::subxt::utils::H256>, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct ExecutionHeaderUpdate { pub header : runtime_types :: snowbridge_beacon_primitives :: types :: BeaconHeader , pub ancestry_proof : :: core :: option :: Option < runtime_types :: snowbridge_beacon_primitives :: updates :: AncestryProof > 
, pub execution_header : runtime_types :: snowbridge_beacon_primitives :: types :: VersionedExecutionPayloadHeader , pub execution_branch : :: std :: vec :: Vec < :: subxt :: utils :: H256 > , } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct NextSyncCommitteeUpdate { + pub next_sync_committee: + runtime_types::snowbridge_beacon_primitives::types::SyncCommittee, + pub next_sync_committee_branch: ::std::vec::Vec<::subxt::utils::H256>, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Update { pub attested_header : runtime_types :: snowbridge_beacon_primitives :: types :: BeaconHeader , pub sync_aggregate : runtime_types :: snowbridge_beacon_primitives :: types :: SyncAggregate , pub signature_slot : :: core :: primitive :: u64 , pub next_sync_committee_update : :: core :: option :: Option < runtime_types :: snowbridge_beacon_primitives :: updates :: NextSyncCommitteeUpdate > , pub finalized_header : runtime_types :: snowbridge_beacon_primitives :: types :: BeaconHeader , pub finality_branch : :: std :: vec :: Vec < :: subxt :: utils :: H256 > , pub block_roots_root : :: subxt :: utils :: H256 , pub block_roots_branch : :: std :: vec :: Vec < :: subxt :: utils :: H256 > , } } } - pub mod sp_consensus_slots { - use super::runtime_types; - #[derive( - :: codec :: Decode, - :: codec :: Encode, - :: subxt :: ext :: codec :: CompactAs, - Clone, - Debug, - PartialEq, - )] - pub struct Slot(pub ::core::primitive::u64); - #[derive( - :: codec :: Decode, - :: codec :: Encode, - :: subxt :: ext :: codec :: CompactAs, - Clone, - Debug, - PartialEq, - )] - pub struct SlotDuration(pub ::core::primitive::u64); - } - pub mod sp_core { + pub mod snowbridge_core { use super::runtime_types; - pub mod crypto { + pub mod inbound { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct KeyTypeId(pub [::core::primitive::u8; 4usize]); + pub struct 
Log { + pub address: ::subxt::utils::H160, + pub topics: ::std::vec::Vec<::subxt::utils::H256>, + pub data: ::std::vec::Vec<::core::primitive::u8>, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Message { + pub event_log: runtime_types::snowbridge_core::inbound::Log, + pub proof: runtime_types::snowbridge_core::inbound::Proof, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Proof { + pub block_hash: ::subxt::utils::H256, + pub tx_index: ::core::primitive::u32, + pub data: ( + ::std::vec::Vec<::std::vec::Vec<::core::primitive::u8>>, + ::std::vec::Vec<::std::vec::Vec<::core::primitive::u8>>, + ), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum VerificationError { + #[codec(index = 0)] + HeaderNotFound, + #[codec(index = 1)] + LogNotFound, + #[codec(index = 2)] + InvalidLog, + #[codec(index = 3)] + InvalidProof, + } } - pub mod ecdsa { + pub mod operating_mode { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Signature(pub [::core::primitive::u8; 65usize]); + pub enum BasicOperatingMode { + #[codec(index = 0)] + Normal, + #[codec(index = 1)] + Halted, + } } - pub mod ed25519 { + pub mod outbound { use super::runtime_types; + pub mod v1 { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum AgentExecuteCommand { + #[codec(index = 0)] + TransferToken { + token: ::subxt::utils::H160, + recipient: ::subxt::utils::H160, + amount: ::core::primitive::u128, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Command { + #[codec(index = 0)] + AgentExecute { + agent_id: ::subxt::utils::H256, + command: + runtime_types::snowbridge_core::outbound::v1::AgentExecuteCommand, + }, + #[codec(index = 1)] + Upgrade { + impl_address: ::subxt::utils::H160, + impl_code_hash: 
::subxt::utils::H256, + initializer: ::core::option::Option< + runtime_types::snowbridge_core::outbound::v1::Initializer, + >, + }, + #[codec(index = 2)] + CreateAgent { agent_id: ::subxt::utils::H256 }, + #[codec(index = 3)] + CreateChannel { + channel_id: runtime_types::snowbridge_core::ChannelId, + agent_id: ::subxt::utils::H256, + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 4)] + UpdateChannel { + channel_id: runtime_types::snowbridge_core::ChannelId, + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 5)] + SetOperatingMode { + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 6)] + TransferNativeFromAgent { + agent_id: ::subxt::utils::H256, + recipient: ::subxt::utils::H160, + amount: ::core::primitive::u128, + }, + #[codec(index = 7)] + SetTokenTransferFees { + create_asset_xcm: ::core::primitive::u128, + transfer_asset_xcm: ::core::primitive::u128, + register_token: runtime_types::primitive_types::U256, + }, + #[codec(index = 8)] + SetPricingParameters { + exchange_rate: runtime_types::snowbridge_core::pricing::UD60x18, + delivery_cost: ::core::primitive::u128, + multiplier: runtime_types::snowbridge_core::pricing::UD60x18, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Initializer { + pub params: ::std::vec::Vec<::core::primitive::u8>, + pub maximum_required_gas: ::core::primitive::u64, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum OperatingMode { + #[codec(index = 0)] + Normal, + #[codec(index = 1)] + RejectingOutboundMessages, + } + } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Public(pub [::core::primitive::u8; 32usize]); + pub struct Fee<_0> { + pub local: _0, + pub remote: _0, + } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Signature(pub 
[::core::primitive::u8; 64usize]); + pub enum SendError { + #[codec(index = 0)] + MessageTooLarge, + #[codec(index = 1)] + Halted, + #[codec(index = 2)] + InvalidChannel, + } } - pub mod sr25519 { + pub mod pricing { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Public(pub [::core::primitive::u8; 32usize]); + pub struct PricingParameters<_0> { + pub exchange_rate: runtime_types::sp_arithmetic::fixed_point::FixedU128, + pub rewards: runtime_types::snowbridge_core::pricing::Rewards<_0>, + pub fee_per_gas: runtime_types::primitive_types::U256, + pub multiplier: runtime_types::sp_arithmetic::fixed_point::FixedU128, + } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Signature(pub [::core::primitive::u8; 64usize]); + pub struct Rewards<_0> { + pub local: _0, + pub remote: runtime_types::primitive_types::U256, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct UD60x18(pub runtime_types::primitive_types::U256); } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct OpaqueMetadata(pub ::std::vec::Vec<::core::primitive::u8>); + pub struct Channel { + pub agent_id: ::subxt::utils::H256, + pub para_id: runtime_types::polkadot_parachain_primitives::primitives::Id, + } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum Void {} + pub struct ChannelId(pub [::core::primitive::u8; 32usize]); } - pub mod sp_inherents { + pub mod snowbridge_milagro_bls { use super::runtime_types; - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct CheckInherentsResult { - pub okay: ::core::primitive::bool, - pub fatal_error: ::core::primitive::bool, - pub errors: runtime_types::sp_inherents::InherentData, + pub mod keys { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct 
PublicKey { + pub point: runtime_types::snowbridge_amcl::bls381::ecp::ECP, + } } + } + pub mod snowbridge_outbound_queue_merkle_tree { + use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct InherentData { - pub data: ::subxt::utils::KeyedVec< - [::core::primitive::u8; 8usize], - ::std::vec::Vec<::core::primitive::u8>, - >, + pub struct MerkleProof { + pub root: ::subxt::utils::H256, + pub proof: ::std::vec::Vec<::subxt::utils::H256>, + pub number_of_leaves: ::core::primitive::u64, + pub leaf_index: ::core::primitive::u64, + pub leaf: ::subxt::utils::H256, } } - pub mod sp_runtime { + pub mod snowbridge_pallet_ethereum_client { use super::runtime_types; - pub mod generic { - use super::runtime_types; - pub mod block { - use super::runtime_types; - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Block<_0, _1> { - pub header: _0, - pub extrinsics: ::std::vec::Vec<_1>, - } - } - pub mod digest { - use super::runtime_types; - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum DigestItem { - #[codec(index = 6)] - PreRuntime( - [::core::primitive::u8; 4usize], - ::std::vec::Vec<::core::primitive::u8>, - ), - #[codec(index = 4)] - Consensus( - [::core::primitive::u8; 4usize], - ::std::vec::Vec<::core::primitive::u8>, - ), - #[codec(index = 5)] - Seal( - [::core::primitive::u8; 4usize], - ::std::vec::Vec<::core::primitive::u8>, - ), - #[codec(index = 0)] - Other(::std::vec::Vec<::core::primitive::u8>), - #[codec(index = 8)] - RuntimeEnvironmentUpdated, - } - } - } - pub mod transaction_validity { + pub mod pallet { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum InvalidTransaction { + pub enum Call { + # [codec (index = 0)] force_checkpoint { update : :: std :: boxed :: Box < runtime_types :: snowbridge_beacon_primitives :: updates :: CheckpointUpdate > , } , # [codec (index = 
1)] submit { update : :: std :: boxed :: Box < runtime_types :: snowbridge_beacon_primitives :: updates :: Update > , } , # [codec (index = 2)] submit_execution_header { update : :: std :: boxed :: Box < runtime_types :: snowbridge_beacon_primitives :: updates :: ExecutionHeaderUpdate > , } , # [codec (index = 3)] set_operating_mode { mode : runtime_types :: snowbridge_core :: operating_mode :: BasicOperatingMode , } , } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Error { #[codec(index = 0)] - Call, + SkippedSyncCommitteePeriod, #[codec(index = 1)] - Payment, + IrrelevantUpdate, #[codec(index = 2)] - Future, + NotBootstrapped, #[codec(index = 3)] - Stale, + SyncCommitteeParticipantsNotSupermajority, #[codec(index = 4)] - BadProof, + InvalidHeaderMerkleProof, #[codec(index = 5)] - AncientBirthBlock, + InvalidSyncCommitteeMerkleProof, #[codec(index = 6)] - ExhaustsResources, + InvalidExecutionHeaderProof, #[codec(index = 7)] - Custom(::core::primitive::u8), + InvalidAncestryMerkleProof, #[codec(index = 8)] - BadMandatory, + InvalidBlockRootsRootMerkleProof, #[codec(index = 9)] - MandatoryValidation, + InvalidFinalizedHeaderGap, #[codec(index = 10)] - BadSigner, + HeaderNotFinalized, + #[codec(index = 11)] + BlockBodyHashTreeRootFailed, + #[codec(index = 12)] + HeaderHashTreeRootFailed, + #[codec(index = 13)] + SyncCommitteeHashTreeRootFailed, + #[codec(index = 14)] + SigningRootHashTreeRootFailed, + #[codec(index = 15)] + ForkDataHashTreeRootFailed, + #[codec(index = 16)] + ExpectedFinalizedHeaderNotStored, + #[codec(index = 17)] + BLSPreparePublicKeysFailed, + #[codec(index = 18)] + BLSVerificationFailed( + runtime_types::snowbridge_beacon_primitives::bls::BlsError, + ), + #[codec(index = 19)] + InvalidUpdateSlot, + #[codec(index = 20)] + InvalidSyncCommitteeUpdate, + #[codec(index = 21)] + ExecutionHeaderTooFarBehind, + #[codec(index = 22)] + ExecutionHeaderSkippedBlock, + #[codec(index = 23)] + Halted, + } + #[derive(:: 
codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Event { + #[codec(index = 0)] + BeaconHeaderImported { + block_hash: ::subxt::utils::H256, + slot: ::core::primitive::u64, + }, + #[codec(index = 1)] + ExecutionHeaderImported { + block_hash: ::subxt::utils::H256, + block_number: ::core::primitive::u64, + }, + #[codec(index = 2)] + SyncCommitteeUpdated { period: ::core::primitive::u64 }, + #[codec(index = 3)] + OperatingModeChanged { + mode: runtime_types::snowbridge_core::operating_mode::BasicOperatingMode, + }, + } + } + } + pub mod snowbridge_pallet_inbound_queue { + use super::runtime_types; + pub mod pallet { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Call { + #[codec(index = 0)] + submit { message: runtime_types::snowbridge_core::inbound::Message }, + #[codec(index = 1)] + set_operating_mode { + mode: runtime_types::snowbridge_core::operating_mode::BasicOperatingMode, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Error { + #[codec(index = 0)] + InvalidGateway, + #[codec(index = 1)] + InvalidEnvelope, + #[codec(index = 2)] + InvalidNonce, + #[codec(index = 3)] + InvalidPayload, + #[codec(index = 4)] + InvalidChannel, + #[codec(index = 5)] + MaxNonceReached, + #[codec(index = 6)] + InvalidAccountConversion, + #[codec(index = 7)] + Halted, + #[codec(index = 8)] + Verification(runtime_types::snowbridge_core::inbound::VerificationError), + #[codec(index = 9)] + Send(runtime_types::snowbridge_pallet_inbound_queue::pallet::SendError), + #[codec(index = 10)] + ConvertMessage( + runtime_types::snowbridge_router_primitives::inbound::ConvertMessageError, + ), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Event { + #[codec(index = 0)] + MessageReceived { + channel_id: runtime_types::snowbridge_core::ChannelId, + nonce: ::core::primitive::u64, + message_id: [::core::primitive::u8; 
32usize], + fee_burned: ::core::primitive::u128, + }, + #[codec(index = 1)] + OperatingModeChanged { + mode: runtime_types::snowbridge_core::operating_mode::BasicOperatingMode, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum SendError { + #[codec(index = 0)] + NotApplicable, + #[codec(index = 1)] + NotRoutable, + #[codec(index = 2)] + Transport, + #[codec(index = 3)] + DestinationUnsupported, + #[codec(index = 4)] + ExceedsMaxMessageSize, + #[codec(index = 5)] + MissingArgument, + #[codec(index = 6)] + Fees, + } + } + } + pub mod snowbridge_pallet_outbound_queue { + use super::runtime_types; + pub mod pallet { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Call { + #[codec(index = 0)] + set_operating_mode { + mode: runtime_types::snowbridge_core::operating_mode::BasicOperatingMode, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Error { + #[codec(index = 0)] + MessageTooLarge, + #[codec(index = 1)] + Halted, + #[codec(index = 2)] + InvalidChannel, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Event { + #[codec(index = 0)] + MessageQueued { id: ::subxt::utils::H256 }, + #[codec(index = 1)] + MessageAccepted { id: ::subxt::utils::H256, nonce: ::core::primitive::u64 }, + #[codec(index = 2)] + MessagesCommitted { root: ::subxt::utils::H256, count: ::core::primitive::u64 }, + #[codec(index = 3)] + OperatingModeChanged { + mode: runtime_types::snowbridge_core::operating_mode::BasicOperatingMode, + }, + } + } + pub mod types { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct CommittedMessage { + pub channel_id: runtime_types::snowbridge_core::ChannelId, + #[codec(compact)] + pub nonce: ::core::primitive::u64, + pub command: ::core::primitive::u8, + pub params: 
::std::vec::Vec<::core::primitive::u8>, + #[codec(compact)] + pub max_dispatch_gas: ::core::primitive::u64, + #[codec(compact)] + pub max_fee_per_gas: ::core::primitive::u128, + #[codec(compact)] + pub reward: ::core::primitive::u128, + pub id: ::subxt::utils::H256, + } + } + } + pub mod snowbridge_pallet_system { + use super::runtime_types; + pub mod pallet { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Call { + #[codec(index = 0)] + upgrade { + impl_address: ::subxt::utils::H160, + impl_code_hash: ::subxt::utils::H256, + initializer: ::core::option::Option< + runtime_types::snowbridge_core::outbound::v1::Initializer, + >, + }, + #[codec(index = 1)] + set_operating_mode { + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 2)] + set_pricing_parameters { + params: runtime_types::snowbridge_core::pricing::PricingParameters< + ::core::primitive::u128, + >, + }, + #[codec(index = 3)] + create_agent, + #[codec(index = 4)] + create_channel { + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 5)] + update_channel { + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 6)] + force_update_channel { + channel_id: runtime_types::snowbridge_core::ChannelId, + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 7)] + transfer_native_from_agent { + recipient: ::subxt::utils::H160, + amount: ::core::primitive::u128, + }, + #[codec(index = 8)] + force_transfer_native_from_agent { + location: ::std::boxed::Box, + recipient: ::subxt::utils::H160, + amount: ::core::primitive::u128, + }, + #[codec(index = 9)] + set_token_transfer_fees { + create_asset_xcm: ::core::primitive::u128, + transfer_asset_xcm: ::core::primitive::u128, + register_token: runtime_types::primitive_types::U256, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, 
PartialEq)] + pub enum Error { + #[codec(index = 0)] + LocationConversionFailed, + #[codec(index = 1)] + AgentAlreadyCreated, + #[codec(index = 2)] + NoAgent, + #[codec(index = 3)] + ChannelAlreadyCreated, + #[codec(index = 4)] + NoChannel, + #[codec(index = 5)] + UnsupportedLocationVersion, + #[codec(index = 6)] + InvalidLocation, + #[codec(index = 7)] + Send(runtime_types::snowbridge_core::outbound::SendError), + #[codec(index = 8)] + InvalidTokenTransferFees, + #[codec(index = 9)] + InvalidPricingParameters, + #[codec(index = 10)] + InvalidUpgradeParameters, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Event { + #[codec(index = 0)] + Upgrade { + impl_address: ::subxt::utils::H160, + impl_code_hash: ::subxt::utils::H256, + initializer_params_hash: ::core::option::Option<::subxt::utils::H256>, + }, + #[codec(index = 1)] + CreateAgent { + location: + ::std::boxed::Box, + agent_id: ::subxt::utils::H256, + }, + #[codec(index = 2)] + CreateChannel { + channel_id: runtime_types::snowbridge_core::ChannelId, + agent_id: ::subxt::utils::H256, + }, + #[codec(index = 3)] + UpdateChannel { + channel_id: runtime_types::snowbridge_core::ChannelId, + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 4)] + SetOperatingMode { + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 5)] + TransferNativeFromAgent { + agent_id: ::subxt::utils::H256, + recipient: ::subxt::utils::H160, + amount: ::core::primitive::u128, + }, + #[codec(index = 6)] + SetTokenTransferFees { + create_asset_xcm: ::core::primitive::u128, + transfer_asset_xcm: ::core::primitive::u128, + register_token: runtime_types::primitive_types::U256, + }, + #[codec(index = 7)] + PricingParametersChanged { + params: runtime_types::snowbridge_core::pricing::PricingParameters< + ::core::primitive::u128, + >, + }, + } + } + } + pub mod snowbridge_router_primitives { + use super::runtime_types; + pub mod 
inbound { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum ConvertMessageError { + #[codec(index = 0)] + UnsupportedVersion, + } + } + } + pub mod sp_arithmetic { + use super::runtime_types; + pub mod fixed_point { + use super::runtime_types; + #[derive( + :: codec :: Decode, + :: codec :: Encode, + :: subxt :: ext :: codec :: CompactAs, + Clone, + Debug, + PartialEq, + )] + pub struct FixedU128(pub ::core::primitive::u128); + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum ArithmeticError { + #[codec(index = 0)] + Underflow, + #[codec(index = 1)] + Overflow, + #[codec(index = 2)] + DivisionByZero, + } + } + pub mod sp_consensus_aura { + use super::runtime_types; + pub mod sr25519 { + use super::runtime_types; + pub mod app_sr25519 { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Public(pub runtime_types::sp_core::sr25519::Public); + } + } + } + pub mod sp_consensus_grandpa { + use super::runtime_types; + pub mod app { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Public(pub runtime_types::sp_core::ed25519::Public); + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Signature(pub runtime_types::sp_core::ed25519::Signature); + } + } + pub mod sp_consensus_slots { + use super::runtime_types; + #[derive( + :: codec :: Decode, + :: codec :: Encode, + :: subxt :: ext :: codec :: CompactAs, + Clone, + Debug, + PartialEq, + )] + pub struct Slot(pub ::core::primitive::u64); + #[derive( + :: codec :: Decode, + :: codec :: Encode, + :: subxt :: ext :: codec :: CompactAs, + Clone, + Debug, + PartialEq, + )] + pub struct SlotDuration(pub ::core::primitive::u64); + } + pub mod sp_core { + use super::runtime_types; + pub mod crypto { + use super::runtime_types; + #[derive(:: codec :: Decode, 
:: codec :: Encode, Clone, Debug, PartialEq)] + pub struct KeyTypeId(pub [::core::primitive::u8; 4usize]); + } + pub mod ecdsa { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Signature(pub [::core::primitive::u8; 65usize]); + } + pub mod ed25519 { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Public(pub [::core::primitive::u8; 32usize]); + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Signature(pub [::core::primitive::u8; 64usize]); + } + pub mod sr25519 { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Public(pub [::core::primitive::u8; 32usize]); + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Signature(pub [::core::primitive::u8; 64usize]); + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct OpaqueMetadata(pub ::std::vec::Vec<::core::primitive::u8>); + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Void {} + } + pub mod sp_inherents { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct CheckInherentsResult { + pub okay: ::core::primitive::bool, + pub fatal_error: ::core::primitive::bool, + pub errors: runtime_types::sp_inherents::InherentData, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct InherentData { + pub data: ::subxt::utils::KeyedVec< + [::core::primitive::u8; 8usize], + ::std::vec::Vec<::core::primitive::u8>, + >, + } + } + pub mod sp_runtime { + use super::runtime_types; + pub mod generic { + use super::runtime_types; + pub mod block { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Block<_0, _1> { + 
pub header: _0, + pub extrinsics: ::std::vec::Vec<_1>, + } + } + pub mod digest { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum DigestItem { + #[codec(index = 6)] + PreRuntime( + [::core::primitive::u8; 4usize], + ::std::vec::Vec<::core::primitive::u8>, + ), + #[codec(index = 4)] + Consensus( + [::core::primitive::u8; 4usize], + ::std::vec::Vec<::core::primitive::u8>, + ), + #[codec(index = 5)] + Seal( + [::core::primitive::u8; 4usize], + ::std::vec::Vec<::core::primitive::u8>, + ), + #[codec(index = 0)] + Other(::std::vec::Vec<::core::primitive::u8>), + #[codec(index = 8)] + RuntimeEnvironmentUpdated, + } + } + } + pub mod transaction_validity { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum InvalidTransaction { + #[codec(index = 0)] + Call, + #[codec(index = 1)] + Payment, + #[codec(index = 2)] + Future, + #[codec(index = 3)] + Stale, + #[codec(index = 4)] + BadProof, + #[codec(index = 5)] + AncientBirthBlock, + #[codec(index = 6)] + ExhaustsResources, + #[codec(index = 7)] + Custom(::core::primitive::u8), + #[codec(index = 8)] + BadMandatory, + #[codec(index = 9)] + MandatoryValidation, + #[codec(index = 10)] + BadSigner, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum TransactionSource { + #[codec(index = 0)] + InBlock, + #[codec(index = 1)] + Local, + #[codec(index = 2)] + External, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum TransactionValidityError { + #[codec(index = 0)] + Invalid(runtime_types::sp_runtime::transaction_validity::InvalidTransaction), + #[codec(index = 1)] + Unknown(runtime_types::sp_runtime::transaction_validity::UnknownTransaction), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum UnknownTransaction { + #[codec(index = 0)] + CannotLookup, + #[codec(index = 1)] + 
NoUnsignedValidator, + #[codec(index = 2)] + Custom(::core::primitive::u8), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct ValidTransaction { + pub priority: ::core::primitive::u64, + pub requires: ::std::vec::Vec<::std::vec::Vec<::core::primitive::u8>>, + pub provides: ::std::vec::Vec<::std::vec::Vec<::core::primitive::u8>>, + pub longevity: ::core::primitive::u64, + pub propagate: ::core::primitive::bool, + } + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum DispatchError { + #[codec(index = 0)] + Other, + #[codec(index = 1)] + CannotLookup, + #[codec(index = 2)] + BadOrigin, + #[codec(index = 3)] + Module(runtime_types::sp_runtime::ModuleError), + #[codec(index = 4)] + ConsumerRemaining, + #[codec(index = 5)] + NoProviders, + #[codec(index = 6)] + TooManyConsumers, + #[codec(index = 7)] + Token(runtime_types::sp_runtime::TokenError), + #[codec(index = 8)] + Arithmetic(runtime_types::sp_arithmetic::ArithmeticError), + #[codec(index = 9)] + Transactional(runtime_types::sp_runtime::TransactionalError), + #[codec(index = 10)] + Exhausted, + #[codec(index = 11)] + Corruption, + #[codec(index = 12)] + Unavailable, + #[codec(index = 13)] + RootNotAllowed, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct ModuleError { + pub index: ::core::primitive::u8, + pub error: [::core::primitive::u8; 4usize], + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum MultiSignature { + #[codec(index = 0)] + Ed25519(runtime_types::sp_core::ed25519::Signature), + #[codec(index = 1)] + Sr25519(runtime_types::sp_core::sr25519::Signature), + #[codec(index = 2)] + Ecdsa(runtime_types::sp_core::ecdsa::Signature), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum TokenError { + #[codec(index = 0)] + FundsUnavailable, + #[codec(index = 1)] + OnlyProvider, + #[codec(index = 2)] + 
BelowMinimum, + #[codec(index = 3)] + CannotCreate, + #[codec(index = 4)] + UnknownAsset, + #[codec(index = 5)] + Frozen, + #[codec(index = 6)] + Unsupported, + #[codec(index = 7)] + CannotCreateHold, + #[codec(index = 8)] + NotExpendable, + #[codec(index = 9)] + Blocked, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum TransactionalError { + #[codec(index = 0)] + LimitReached, + #[codec(index = 1)] + NoLayer, + } + } + pub mod sp_trie { + use super::runtime_types; + pub mod storage_proof { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct StorageProof { + pub trie_nodes: ::std::vec::Vec<::std::vec::Vec<::core::primitive::u8>>, + } + } + } + pub mod sp_version { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct RuntimeVersion { + pub spec_name: ::std::string::String, + pub impl_name: ::std::string::String, + pub authoring_version: ::core::primitive::u32, + pub spec_version: ::core::primitive::u32, + pub impl_version: ::core::primitive::u32, + pub apis: + ::std::vec::Vec<([::core::primitive::u8; 8usize], ::core::primitive::u32)>, + pub transaction_version: ::core::primitive::u32, + pub state_version: ::core::primitive::u8, + } + } + pub mod sp_weights { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct RuntimeDbWeight { + pub read: ::core::primitive::u64, + pub write: ::core::primitive::u64, + } + } + pub mod staging_parachain_info { + use super::runtime_types; + pub mod pallet { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Call {} + } + } + pub mod staging_xcm { + use super::runtime_types; + pub mod v3 { + use super::runtime_types; + pub mod multilocation { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, 
PartialEq)] + pub struct MultiLocation { + pub parents: ::core::primitive::u8, + pub interior: runtime_types::xcm::v3::junctions::Junctions, + } + } + } + pub mod v4 { + use super::runtime_types; + pub mod asset { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Asset { + pub id: runtime_types::staging_xcm::v4::asset::AssetId, + pub fun: runtime_types::staging_xcm::v4::asset::Fungibility, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum AssetFilter { + #[codec(index = 0)] + Definite(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 1)] + Wild(runtime_types::staging_xcm::v4::asset::WildAsset), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct AssetId(pub runtime_types::staging_xcm::v4::location::Location); + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum AssetInstance { + #[codec(index = 0)] + Undefined, + #[codec(index = 1)] + Index(#[codec(compact)] ::core::primitive::u128), + #[codec(index = 2)] + Array4([::core::primitive::u8; 4usize]), + #[codec(index = 3)] + Array8([::core::primitive::u8; 8usize]), + #[codec(index = 4)] + Array16([::core::primitive::u8; 16usize]), + #[codec(index = 5)] + Array32([::core::primitive::u8; 32usize]), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Assets( + pub ::std::vec::Vec, + ); + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Fungibility { + #[codec(index = 0)] + Fungible(#[codec(compact)] ::core::primitive::u128), + #[codec(index = 1)] + NonFungible(runtime_types::staging_xcm::v4::asset::AssetInstance), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum WildAsset { + #[codec(index = 0)] + All, + #[codec(index = 1)] + AllOf { + id: runtime_types::staging_xcm::v4::asset::AssetId, + fun: 
runtime_types::staging_xcm::v4::asset::WildFungibility, + }, + #[codec(index = 2)] + AllCounted(#[codec(compact)] ::core::primitive::u32), + #[codec(index = 3)] + AllOfCounted { + id: runtime_types::staging_xcm::v4::asset::AssetId, + fun: runtime_types::staging_xcm::v4::asset::WildFungibility, + #[codec(compact)] + count: ::core::primitive::u32, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum WildFungibility { + #[codec(index = 0)] + Fungible, + #[codec(index = 1)] + NonFungible, + } + } + pub mod junction { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Junction { + #[codec(index = 0)] + Parachain(#[codec(compact)] ::core::primitive::u32), + #[codec(index = 1)] + AccountId32 { + network: ::core::option::Option< + runtime_types::staging_xcm::v4::junction::NetworkId, + >, + id: [::core::primitive::u8; 32usize], + }, + #[codec(index = 2)] + AccountIndex64 { + network: ::core::option::Option< + runtime_types::staging_xcm::v4::junction::NetworkId, + >, + #[codec(compact)] + index: ::core::primitive::u64, + }, + #[codec(index = 3)] + AccountKey20 { + network: ::core::option::Option< + runtime_types::staging_xcm::v4::junction::NetworkId, + >, + key: [::core::primitive::u8; 20usize], + }, + #[codec(index = 4)] + PalletInstance(::core::primitive::u8), + #[codec(index = 5)] + GeneralIndex(#[codec(compact)] ::core::primitive::u128), + #[codec(index = 6)] + GeneralKey { + length: ::core::primitive::u8, + data: [::core::primitive::u8; 32usize], + }, + #[codec(index = 7)] + OnlyChild, + #[codec(index = 8)] + Plurality { + id: runtime_types::xcm::v3::junction::BodyId, + part: runtime_types::xcm::v3::junction::BodyPart, + }, + #[codec(index = 9)] + GlobalConsensus(runtime_types::staging_xcm::v4::junction::NetworkId), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum NetworkId { + #[codec(index = 0)] + 
ByGenesis([::core::primitive::u8; 32usize]), + #[codec(index = 1)] + ByFork { + block_number: ::core::primitive::u64, + block_hash: [::core::primitive::u8; 32usize], + }, + #[codec(index = 2)] + Polkadot, + #[codec(index = 3)] + Kusama, + #[codec(index = 4)] + Westend, + #[codec(index = 5)] + Rococo, + #[codec(index = 6)] + Wococo, + #[codec(index = 7)] + Ethereum { + #[codec(compact)] + chain_id: ::core::primitive::u64, + }, + #[codec(index = 8)] + BitcoinCore, + #[codec(index = 9)] + BitcoinCash, + #[codec(index = 10)] + PolkadotBulletin, + } + } + pub mod junctions { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Junctions { + #[codec(index = 0)] + Here, + #[codec(index = 1)] + X1([runtime_types::staging_xcm::v4::junction::Junction; 1usize]), + #[codec(index = 2)] + X2([runtime_types::staging_xcm::v4::junction::Junction; 2usize]), + #[codec(index = 3)] + X3([runtime_types::staging_xcm::v4::junction::Junction; 3usize]), + #[codec(index = 4)] + X4([runtime_types::staging_xcm::v4::junction::Junction; 4usize]), + #[codec(index = 5)] + X5([runtime_types::staging_xcm::v4::junction::Junction; 5usize]), + #[codec(index = 6)] + X6([runtime_types::staging_xcm::v4::junction::Junction; 6usize]), + #[codec(index = 7)] + X7([runtime_types::staging_xcm::v4::junction::Junction; 7usize]), + #[codec(index = 8)] + X8([runtime_types::staging_xcm::v4::junction::Junction; 8usize]), + } + } + pub mod location { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Location { + pub parents: ::core::primitive::u8, + pub interior: runtime_types::staging_xcm::v4::junctions::Junctions, + } + } + pub mod traits { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Outcome { + #[codec(index = 0)] + Complete { used: ::sp_weights::Weight }, + #[codec(index = 1)] + Incomplete { + used: 
::sp_weights::Weight, + error: runtime_types::xcm::v3::traits::Error, + }, + #[codec(index = 2)] + Error { error: runtime_types::xcm::v3::traits::Error }, + } + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Instruction { + #[codec(index = 0)] + WithdrawAsset(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 1)] + ReserveAssetDeposited(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 2)] + ReceiveTeleportedAsset(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 3)] + QueryResponse { + #[codec(compact)] + query_id: ::core::primitive::u64, + response: runtime_types::staging_xcm::v4::Response, + max_weight: ::sp_weights::Weight, + querier: ::core::option::Option< + runtime_types::staging_xcm::v4::location::Location, + >, + }, + #[codec(index = 4)] + TransferAsset { + assets: runtime_types::staging_xcm::v4::asset::Assets, + beneficiary: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 5)] + TransferReserveAsset { + assets: runtime_types::staging_xcm::v4::asset::Assets, + dest: runtime_types::staging_xcm::v4::location::Location, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 6)] + Transact { + origin_kind: runtime_types::xcm::v2::OriginKind, + require_weight_at_most: ::sp_weights::Weight, + call: runtime_types::xcm::double_encoded::DoubleEncoded, + }, + #[codec(index = 7)] + HrmpNewChannelOpenRequest { + #[codec(compact)] + sender: ::core::primitive::u32, + #[codec(compact)] + max_message_size: ::core::primitive::u32, + #[codec(compact)] + max_capacity: ::core::primitive::u32, + }, + #[codec(index = 8)] + HrmpChannelAccepted { + #[codec(compact)] + recipient: ::core::primitive::u32, + }, + #[codec(index = 9)] + HrmpChannelClosing { + #[codec(compact)] + initiator: ::core::primitive::u32, + #[codec(compact)] + sender: ::core::primitive::u32, + #[codec(compact)] + recipient: ::core::primitive::u32, + }, + #[codec(index = 10)] + 
ClearOrigin, + #[codec(index = 11)] + DescendOrigin(runtime_types::staging_xcm::v4::junctions::Junctions), + #[codec(index = 12)] + ReportError(runtime_types::staging_xcm::v4::QueryResponseInfo), + #[codec(index = 13)] + DepositAsset { + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + beneficiary: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 14)] + DepositReserveAsset { + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + dest: runtime_types::staging_xcm::v4::location::Location, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 15)] + ExchangeAsset { + give: runtime_types::staging_xcm::v4::asset::AssetFilter, + want: runtime_types::staging_xcm::v4::asset::Assets, + maximal: ::core::primitive::bool, + }, + #[codec(index = 16)] + InitiateReserveWithdraw { + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + reserve: runtime_types::staging_xcm::v4::location::Location, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 17)] + InitiateTeleport { + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + dest: runtime_types::staging_xcm::v4::location::Location, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 18)] + ReportHolding { + response_info: runtime_types::staging_xcm::v4::QueryResponseInfo, + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + }, + #[codec(index = 19)] + BuyExecution { + fees: runtime_types::staging_xcm::v4::asset::Asset, + weight_limit: runtime_types::xcm::v3::WeightLimit, + }, + #[codec(index = 20)] + RefundSurplus, + #[codec(index = 21)] + SetErrorHandler(runtime_types::staging_xcm::v4::Xcm), + #[codec(index = 22)] + SetAppendix(runtime_types::staging_xcm::v4::Xcm), + #[codec(index = 23)] + ClearError, + #[codec(index = 24)] + ClaimAsset { + assets: runtime_types::staging_xcm::v4::asset::Assets, + ticket: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 25)] + Trap(#[codec(compact)] 
::core::primitive::u64), + #[codec(index = 26)] + SubscribeVersion { + #[codec(compact)] + query_id: ::core::primitive::u64, + max_response_weight: ::sp_weights::Weight, + }, + #[codec(index = 27)] + UnsubscribeVersion, + #[codec(index = 28)] + BurnAsset(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 29)] + ExpectAsset(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 30)] + ExpectOrigin( + ::core::option::Option, + ), + #[codec(index = 31)] + ExpectError( + ::core::option::Option<( + ::core::primitive::u32, + runtime_types::xcm::v3::traits::Error, + )>, + ), + #[codec(index = 32)] + ExpectTransactStatus(runtime_types::xcm::v3::MaybeErrorCode), + #[codec(index = 33)] + QueryPallet { + module_name: ::std::vec::Vec<::core::primitive::u8>, + response_info: runtime_types::staging_xcm::v4::QueryResponseInfo, + }, + #[codec(index = 34)] + ExpectPallet { + #[codec(compact)] + index: ::core::primitive::u32, + name: ::std::vec::Vec<::core::primitive::u8>, + module_name: ::std::vec::Vec<::core::primitive::u8>, + #[codec(compact)] + crate_major: ::core::primitive::u32, + #[codec(compact)] + min_crate_minor: ::core::primitive::u32, + }, + #[codec(index = 35)] + ReportTransactStatus(runtime_types::staging_xcm::v4::QueryResponseInfo), + #[codec(index = 36)] + ClearTransactStatus, + #[codec(index = 37)] + UniversalOrigin(runtime_types::staging_xcm::v4::junction::Junction), + #[codec(index = 38)] + ExportMessage { + network: runtime_types::staging_xcm::v4::junction::NetworkId, + destination: runtime_types::staging_xcm::v4::junctions::Junctions, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 39)] + LockAsset { + asset: runtime_types::staging_xcm::v4::asset::Asset, + unlocker: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 40)] + UnlockAsset { + asset: runtime_types::staging_xcm::v4::asset::Asset, + target: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 41)] + 
NoteUnlockable { + asset: runtime_types::staging_xcm::v4::asset::Asset, + owner: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 42)] + RequestUnlock { + asset: runtime_types::staging_xcm::v4::asset::Asset, + locker: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 43)] + SetFeesMode { jit_withdraw: ::core::primitive::bool }, + #[codec(index = 44)] + SetTopic([::core::primitive::u8; 32usize]), + #[codec(index = 45)] + ClearTopic, + #[codec(index = 46)] + AliasOrigin(runtime_types::staging_xcm::v4::location::Location), + #[codec(index = 47)] + UnpaidExecution { + weight_limit: runtime_types::xcm::v3::WeightLimit, + check_origin: ::core::option::Option< + runtime_types::staging_xcm::v4::location::Location, + >, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Instruction2 { + #[codec(index = 0)] + WithdrawAsset(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 1)] + ReserveAssetDeposited(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 2)] + ReceiveTeleportedAsset(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 3)] + QueryResponse { + #[codec(compact)] + query_id: ::core::primitive::u64, + response: runtime_types::staging_xcm::v4::Response, + max_weight: ::sp_weights::Weight, + querier: ::core::option::Option< + runtime_types::staging_xcm::v4::location::Location, + >, + }, + #[codec(index = 4)] + TransferAsset { + assets: runtime_types::staging_xcm::v4::asset::Assets, + beneficiary: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 5)] + TransferReserveAsset { + assets: runtime_types::staging_xcm::v4::asset::Assets, + dest: runtime_types::staging_xcm::v4::location::Location, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 6)] + Transact { + origin_kind: runtime_types::xcm::v2::OriginKind, + require_weight_at_most: ::sp_weights::Weight, + call: 
runtime_types::xcm::double_encoded::DoubleEncoded2, + }, + #[codec(index = 7)] + HrmpNewChannelOpenRequest { + #[codec(compact)] + sender: ::core::primitive::u32, + #[codec(compact)] + max_message_size: ::core::primitive::u32, + #[codec(compact)] + max_capacity: ::core::primitive::u32, + }, + #[codec(index = 8)] + HrmpChannelAccepted { + #[codec(compact)] + recipient: ::core::primitive::u32, + }, + #[codec(index = 9)] + HrmpChannelClosing { + #[codec(compact)] + initiator: ::core::primitive::u32, + #[codec(compact)] + sender: ::core::primitive::u32, + #[codec(compact)] + recipient: ::core::primitive::u32, + }, + #[codec(index = 10)] + ClearOrigin, + #[codec(index = 11)] + DescendOrigin(runtime_types::staging_xcm::v4::junctions::Junctions), + #[codec(index = 12)] + ReportError(runtime_types::staging_xcm::v4::QueryResponseInfo), + #[codec(index = 13)] + DepositAsset { + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + beneficiary: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 14)] + DepositReserveAsset { + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + dest: runtime_types::staging_xcm::v4::location::Location, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 15)] + ExchangeAsset { + give: runtime_types::staging_xcm::v4::asset::AssetFilter, + want: runtime_types::staging_xcm::v4::asset::Assets, + maximal: ::core::primitive::bool, + }, + #[codec(index = 16)] + InitiateReserveWithdraw { + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + reserve: runtime_types::staging_xcm::v4::location::Location, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 17)] + InitiateTeleport { + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + dest: runtime_types::staging_xcm::v4::location::Location, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 18)] + ReportHolding { + response_info: runtime_types::staging_xcm::v4::QueryResponseInfo, + assets: 
runtime_types::staging_xcm::v4::asset::AssetFilter, + }, + #[codec(index = 19)] + BuyExecution { + fees: runtime_types::staging_xcm::v4::asset::Asset, + weight_limit: runtime_types::xcm::v3::WeightLimit, + }, + #[codec(index = 20)] + RefundSurplus, + #[codec(index = 21)] + SetErrorHandler(runtime_types::staging_xcm::v4::Xcm2), + #[codec(index = 22)] + SetAppendix(runtime_types::staging_xcm::v4::Xcm2), + #[codec(index = 23)] + ClearError, + #[codec(index = 24)] + ClaimAsset { + assets: runtime_types::staging_xcm::v4::asset::Assets, + ticket: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 25)] + Trap(#[codec(compact)] ::core::primitive::u64), + #[codec(index = 26)] + SubscribeVersion { + #[codec(compact)] + query_id: ::core::primitive::u64, + max_response_weight: ::sp_weights::Weight, + }, + #[codec(index = 27)] + UnsubscribeVersion, + #[codec(index = 28)] + BurnAsset(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 29)] + ExpectAsset(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 30)] + ExpectOrigin( + ::core::option::Option, + ), + #[codec(index = 31)] + ExpectError( + ::core::option::Option<( + ::core::primitive::u32, + runtime_types::xcm::v3::traits::Error, + )>, + ), + #[codec(index = 32)] + ExpectTransactStatus(runtime_types::xcm::v3::MaybeErrorCode), + #[codec(index = 33)] + QueryPallet { + module_name: ::std::vec::Vec<::core::primitive::u8>, + response_info: runtime_types::staging_xcm::v4::QueryResponseInfo, + }, + #[codec(index = 34)] + ExpectPallet { + #[codec(compact)] + index: ::core::primitive::u32, + name: ::std::vec::Vec<::core::primitive::u8>, + module_name: ::std::vec::Vec<::core::primitive::u8>, + #[codec(compact)] + crate_major: ::core::primitive::u32, + #[codec(compact)] + min_crate_minor: ::core::primitive::u32, + }, + #[codec(index = 35)] + ReportTransactStatus(runtime_types::staging_xcm::v4::QueryResponseInfo), + #[codec(index = 36)] + ClearTransactStatus, + #[codec(index = 37)] + 
UniversalOrigin(runtime_types::staging_xcm::v4::junction::Junction), + #[codec(index = 38)] + ExportMessage { + network: runtime_types::staging_xcm::v4::junction::NetworkId, + destination: runtime_types::staging_xcm::v4::junctions::Junctions, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 39)] + LockAsset { + asset: runtime_types::staging_xcm::v4::asset::Asset, + unlocker: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 40)] + UnlockAsset { + asset: runtime_types::staging_xcm::v4::asset::Asset, + target: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 41)] + NoteUnlockable { + asset: runtime_types::staging_xcm::v4::asset::Asset, + owner: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 42)] + RequestUnlock { + asset: runtime_types::staging_xcm::v4::asset::Asset, + locker: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 43)] + SetFeesMode { jit_withdraw: ::core::primitive::bool }, + #[codec(index = 44)] + SetTopic([::core::primitive::u8; 32usize]), + #[codec(index = 45)] + ClearTopic, + #[codec(index = 46)] + AliasOrigin(runtime_types::staging_xcm::v4::location::Location), + #[codec(index = 47)] + UnpaidExecution { + weight_limit: runtime_types::xcm::v3::WeightLimit, + check_origin: ::core::option::Option< + runtime_types::staging_xcm::v4::location::Location, + >, + }, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum TransactionSource { - #[codec(index = 0)] - InBlock, - #[codec(index = 1)] - Local, - #[codec(index = 2)] - External, + pub struct PalletInfo { + #[codec(compact)] + pub index: ::core::primitive::u32, + pub name: runtime_types::bounded_collections::bounded_vec::BoundedVec< + ::core::primitive::u8, + >, + pub module_name: runtime_types::bounded_collections::bounded_vec::BoundedVec< + ::core::primitive::u8, + >, + #[codec(compact)] + pub major: ::core::primitive::u32, + #[codec(compact)] + 
pub minor: ::core::primitive::u32, + #[codec(compact)] + pub patch: ::core::primitive::u32, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum TransactionValidityError { - #[codec(index = 0)] - Invalid(runtime_types::sp_runtime::transaction_validity::InvalidTransaction), - #[codec(index = 1)] - Unknown(runtime_types::sp_runtime::transaction_validity::UnknownTransaction), + pub struct QueryResponseInfo { + pub destination: runtime_types::staging_xcm::v4::location::Location, + #[codec(compact)] + pub query_id: ::core::primitive::u64, + pub max_weight: ::sp_weights::Weight, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum UnknownTransaction { + pub enum Response { #[codec(index = 0)] - CannotLookup, + Null, #[codec(index = 1)] - NoUnsignedValidator, + Assets(runtime_types::staging_xcm::v4::asset::Assets), #[codec(index = 2)] - Custom(::core::primitive::u8), + ExecutionResult( + ::core::option::Option<( + ::core::primitive::u32, + runtime_types::xcm::v3::traits::Error, + )>, + ), + #[codec(index = 3)] + Version(::core::primitive::u32), + #[codec(index = 4)] + PalletsInfo( + runtime_types::bounded_collections::bounded_vec::BoundedVec< + runtime_types::staging_xcm::v4::PalletInfo, + >, + ), + #[codec(index = 5)] + DispatchResult(runtime_types::xcm::v3::MaybeErrorCode), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct ValidTransaction { - pub priority: ::core::primitive::u64, - pub requires: ::std::vec::Vec<::std::vec::Vec<::core::primitive::u8>>, - pub provides: ::std::vec::Vec<::std::vec::Vec<::core::primitive::u8>>, - pub longevity: ::core::primitive::u64, - pub propagate: ::core::primitive::bool, - } - } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum DispatchError { - #[codec(index = 0)] - Other, - #[codec(index = 1)] - CannotLookup, - #[codec(index = 2)] - BadOrigin, - #[codec(index = 3)] - 
Module(runtime_types::sp_runtime::ModuleError), - #[codec(index = 4)] - ConsumerRemaining, - #[codec(index = 5)] - NoProviders, - #[codec(index = 6)] - TooManyConsumers, - #[codec(index = 7)] - Token(runtime_types::sp_runtime::TokenError), - #[codec(index = 8)] - Arithmetic(runtime_types::sp_arithmetic::ArithmeticError), - #[codec(index = 9)] - Transactional(runtime_types::sp_runtime::TransactionalError), - #[codec(index = 10)] - Exhausted, - #[codec(index = 11)] - Corruption, - #[codec(index = 12)] - Unavailable, - #[codec(index = 13)] - RootNotAllowed, - } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct ModuleError { - pub index: ::core::primitive::u8, - pub error: [::core::primitive::u8; 4usize], - } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum MultiSignature { - #[codec(index = 0)] - Ed25519(runtime_types::sp_core::ed25519::Signature), - #[codec(index = 1)] - Sr25519(runtime_types::sp_core::sr25519::Signature), - #[codec(index = 2)] - Ecdsa(runtime_types::sp_core::ecdsa::Signature), - } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum TokenError { - #[codec(index = 0)] - FundsUnavailable, - #[codec(index = 1)] - OnlyProvider, - #[codec(index = 2)] - BelowMinimum, - #[codec(index = 3)] - CannotCreate, - #[codec(index = 4)] - UnknownAsset, - #[codec(index = 5)] - Frozen, - #[codec(index = 6)] - Unsupported, - #[codec(index = 7)] - CannotCreateHold, - #[codec(index = 8)] - NotExpendable, - #[codec(index = 9)] - Blocked, - } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum TransactionalError { - #[codec(index = 0)] - LimitReached, - #[codec(index = 1)] - NoLayer, - } - } - pub mod sp_trie { - use super::runtime_types; - pub mod storage_proof { - use super::runtime_types; + pub struct Xcm(pub ::std::vec::Vec); #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct 
StorageProof { - pub trie_nodes: ::std::vec::Vec<::std::vec::Vec<::core::primitive::u8>>, - } - } - } - pub mod sp_version { - use super::runtime_types; - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct RuntimeVersion { - pub spec_name: ::std::string::String, - pub impl_name: ::std::string::String, - pub authoring_version: ::core::primitive::u32, - pub spec_version: ::core::primitive::u32, - pub impl_version: ::core::primitive::u32, - pub apis: - ::std::vec::Vec<([::core::primitive::u8; 8usize], ::core::primitive::u32)>, - pub transaction_version: ::core::primitive::u32, - pub state_version: ::core::primitive::u8, - } - } - pub mod sp_weights { - use super::runtime_types; - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct RuntimeDbWeight { - pub read: ::core::primitive::u64, - pub write: ::core::primitive::u64, - } - } - pub mod staging_xcm { - use super::runtime_types; - pub mod v3 { - use super::runtime_types; - pub mod multilocation { - use super::runtime_types; - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct MultiLocation { - pub parents: ::core::primitive::u8, - pub interior: runtime_types::xcm::v3::junctions::Junctions, - } - } + pub struct Xcm2(pub ::std::vec::Vec); } } pub mod xcm { @@ -3276,6 +4878,8 @@ pub mod api { BitcoinCore, #[codec(index = 9)] BitcoinCash, + #[codec(index = 10)] + PolkadotBulletin, } } pub mod junctions { @@ -3503,15 +5107,6 @@ pub mod api { #[codec(index = 39)] ExceedsStackLimit, } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum Outcome { - #[codec(index = 0)] - Complete(::sp_weights::Weight), - #[codec(index = 1)] - Incomplete(::sp_weights::Weight, runtime_types::xcm::v3::traits::Error), - #[codec(index = 2)] - Error(runtime_types::xcm::v3::traits::Error), - } } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Instruction { @@ -4012,20 
+5607,26 @@ pub mod api { pub enum VersionedAssetId { #[codec(index = 3)] V3(runtime_types::xcm::v3::multiasset::AssetId), + #[codec(index = 4)] + V4(runtime_types::staging_xcm::v4::asset::AssetId), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum VersionedMultiAssets { + pub enum VersionedAssets { #[codec(index = 1)] V2(runtime_types::xcm::v2::multiasset::MultiAssets), #[codec(index = 3)] V3(runtime_types::xcm::v3::multiasset::MultiAssets), + #[codec(index = 4)] + V4(runtime_types::staging_xcm::v4::asset::Assets), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum VersionedMultiLocation { + pub enum VersionedLocation { #[codec(index = 1)] V2(runtime_types::xcm::v2::multilocation::MultiLocation), #[codec(index = 3)] V3(runtime_types::staging_xcm::v3::multilocation::MultiLocation), + #[codec(index = 4)] + V4(runtime_types::staging_xcm::v4::location::Location), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum VersionedResponse { @@ -4033,6 +5634,8 @@ pub mod api { V2(runtime_types::xcm::v2::Response), #[codec(index = 3)] V3(runtime_types::xcm::v3::Response), + #[codec(index = 4)] + V4(runtime_types::staging_xcm::v4::Response), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum VersionedXcm { @@ -4040,6 +5643,8 @@ pub mod api { V2(runtime_types::xcm::v2::Xcm), #[codec(index = 3)] V3(runtime_types::xcm::v3::Xcm), + #[codec(index = 4)] + V4(runtime_types::staging_xcm::v4::Xcm), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum VersionedXcm2 { @@ -4047,6 +5652,8 @@ pub mod api { V2(runtime_types::xcm::v2::Xcm2), #[codec(index = 3)] V3(runtime_types::xcm::v3::Xcm2), + #[codec(index = 4)] + V4(runtime_types::staging_xcm::v4::Xcm2), } } } diff --git a/relay-clients/client-bridge-hub-kusama/src/lib.rs b/relay-clients/client-bridge-hub-kusama/src/lib.rs index 43dd53d2d..25675d26d 100644 --- 
a/relay-clients/client-bridge-hub-kusama/src/lib.rs +++ b/relay-clients/client-bridge-hub-kusama/src/lib.rs @@ -123,5 +123,5 @@ impl ChainWithMessages for BridgeHubKusama { impl ChainWithRuntimeVersion for BridgeHubKusama { const RUNTIME_VERSION: Option = - Some(SimpleRuntimeVersion { spec_version: 1_001_000, transaction_version: 4 }); + Some(SimpleRuntimeVersion { spec_version: 1_002_000, transaction_version: 4 }); } diff --git a/scripts/regenerate_runtimes.sh b/scripts/regenerate_runtimes.sh index 700f4dc1c..6100c79d7 100755 --- a/scripts/regenerate_runtimes.sh +++ b/scripts/regenerate_runtimes.sh @@ -13,6 +13,8 @@ cargo run --bin runtime-codegen -- --from-node-url "wss://rpc.polkadot.io:443" > # TODO: there is a bug, probably needs to update subxt, generates: `::sp_runtime::generic::Header<::core::primitive::u32>` withtout second `Hash` parameter. # cargo run --bin runtime-codegen -- --from-wasm-file ../../../polkadot-sdk/target/release/wbuild/bridge-hub-rococo-runtime/bridge_hub_rococo_runtime.compact.compressed.wasm > ../../relays/client-bridge-hub-rococo/src/codegen_runtime.rs # cargo run --bin runtime-codegen -- --from-wasm-file ../../../polkadot-sdk/target/release/wbuild/bridge-hub-westend-runtime/bridge_hub_westend_runtime.compact.compressed.wasm > ../../relays/client-bridge-hub-westend/src/codegen_runtime.rs +# OR +# cargo run --bin runtime-codegen -- --from-node-url wss://kusama-bridge-hub-rpc.polkadot.io/ > ../../relay-clients/client-bridge-hub-kusama/src/codegen_runtime.rs cd - cargo fmt --all diff --git a/substrate-relay/Cargo.toml b/substrate-relay/Cargo.toml index 620ef7dbe..3be1033aa 100644 --- a/substrate-relay/Cargo.toml +++ b/substrate-relay/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "substrate-relay" -version = "1.2.1" +version = "1.3.0" authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -- GitLab From bacc6bfd905ddcd7c3e0f755b459d733f27e8feb Mon Sep 17 00:00:00 2001 From: Svyatoslav 
Nikolsky Date: Thu, 18 Apr 2024 15:20:50 +0300 Subject: [PATCH 35/39] exported P<>K dashboards (#2960) --- deployments/bridges/kusama-polkadot/README.md | 23 + .../bridge-kusama-polkadot-alerts.json | 1656 +++++++++++++++++ ...kusama-polkadot-maintenance-dashboard.json | 1026 ++++++++++ ...kusama-to-polkadot-messages-dashboard.json | 982 ++++++++++ ...polkadot-to-kusama-messages-dashboard.json | 970 ++++++++++ 5 files changed, 4657 insertions(+) create mode 100644 deployments/bridges/kusama-polkadot/README.md create mode 100644 deployments/bridges/kusama-polkadot/dashboard/grafana/bridge-kusama-polkadot-alerts.json create mode 100644 deployments/bridges/kusama-polkadot/dashboard/grafana/kusama-polkadot-maintenance-dashboard.json create mode 100644 deployments/bridges/kusama-polkadot/dashboard/grafana/relay-kusama-to-polkadot-messages-dashboard.json create mode 100644 deployments/bridges/kusama-polkadot/dashboard/grafana/relay-polkadot-to-kusama-messages-dashboard.json diff --git a/deployments/bridges/kusama-polkadot/README.md b/deployments/bridges/kusama-polkadot/README.md new file mode 100644 index 000000000..27a55a023 --- /dev/null +++ b/deployments/bridges/kusama-polkadot/README.md @@ -0,0 +1,23 @@ +# Kusama Bridge Hub <> Polkadot Bridge Hub deployments + +This folder contains some information and useful stuff from our other test deployment - between Kusama and Polkadot +bridge hubs. The code and other helpful information can be found in +[this document](https://github.com/paritytech/polkadot-sdk/blob/master/bridges/docs/polkadot-kusama-bridge-overview.md) +and in the [code](https://github.com/polkadot-fellows/runtimes/tree/main/system-parachains/bridge-hubs). + +## Grafana Alerts and Dashboards + +JSON model for Grafana alerts and dashobards that we use, may be found in the [dasboard/grafana](./dashboard/grafana/) +folder. 
+ +**Dashboards:** +- kusama-polkadot-maintenance-dashboard.json +- relay-kusama-to-polkadot-messages-dashboard.json +- relay-polkadot-to-kusama-messages-dashboard.json + +(exported JSON directly from https://grafana.teleport.parity.io/dashboards/f/eblDiw17z/Bridges) + +**Alerts:** +- bridge-kusama-polkadot-alerts.json https://grafana.teleport.parity.io/alerting/list + +_Note: All json files are formatted with `jq . file.json`._ diff --git a/deployments/bridges/kusama-polkadot/dashboard/grafana/bridge-kusama-polkadot-alerts.json b/deployments/bridges/kusama-polkadot/dashboard/grafana/bridge-kusama-polkadot-alerts.json new file mode 100644 index 000000000..eb3b7d339 --- /dev/null +++ b/deployments/bridges/kusama-polkadot/dashboard/grafana/bridge-kusama-polkadot-alerts.json @@ -0,0 +1,1656 @@ +{ + "apiVersion": 1, + "groups": [ + { + "orgId": 1, + "name": "Bridge Kusama <> Polkadot", + "folder": "bridges", + "interval": "1m", + "rules": [ + { + "uid": "adizmaavld2psc", + "title": "Polkadot -> KusamaBridgeHub finality sync lags (00000001)", + "condition": "D", + "data": [ + { + "refId": "A", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "PC96415006F908B67", + "model": { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "max(increase(Polkadot_to_BridgeHubKusama_Sync_best_source_at_target_block_number{domain=\"parity-chains\"}[24h]))", + "instant": false, + "interval": "", + "intervalMs": 30000, + "legendFormat": "At Polkadot", + "maxDataPoints": 43200, + "range": true, + "refId": "A" + } + }, + { + "refId": "C", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "C" + ] + }, + "reducer": { + "params": [], + "type": "last" + }, + "type": "query" + } + ], + "datasource": { + "type": "__expr__", 
+ "uid": "__expr__" + }, + "expression": "A", + "intervalMs": 1000, + "maxDataPoints": 43200, + "reducer": "max", + "refId": "C", + "type": "reduce" + } + }, + { + "refId": "D", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 5000 + ], + "type": "lt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "D" + ] + }, + "reducer": { + "params": [], + "type": "last" + }, + "type": "query" + } + ], + "datasource": { + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "C", + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "D", + "type": "threshold" + } + } + ], + "dasboardUid": "zqjpkXxnk", + "panelId": 2, + "noDataState": "OK", + "execErrState": "OK", + "for": "5m", + "annotations": { + "__dashboardUid__": "zqjpkXxnk", + "__panelId__": "2", + "summary": "Less than 5000 Polkadot headers (~1/2 era) have been synced to KusamaBridgeHub in last 25 hours. Relay is not running?" 
+ }, + "labels": { + "matrix_room": "FqmgUhjOliBGoncGwm" + }, + "isPaused": false + }, + { + "uid": "cdiznm0i2tslca", + "title": "PolkadotBridgeHub -> KusamaBridgeHub delivery lags (00000001)", + "condition": "B", + "data": [ + { + "refId": "A", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "PC96415006F908B67", + "model": { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "((vector(0) and ((BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_generated\"} > on () BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}))) or vector(1)) + on () increase(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}[10m]) * on () ((vector(1) and ((BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_generated\"} > on () BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}))) or vector(0))", + "instant": false, + "interval": "", + "intervalMs": 30000, + "legendFormat": "Undelivered messages", + "maxDataPoints": 43200, + "range": true, + "refId": "A" + } + }, + { + "refId": "B", + "relativeTimeRange": { + "from": 600, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 1, + 0 + ], + "type": "lt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "A" + ] + }, + "reducer": { + "params": [], + "type": "max" + }, + "type": "query" + } + ], + "datasource": { + "name": "Expression", + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "", + "hide": false, + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "B", + "type": 
"classic_conditions" + } + } + ], + "dasboardUid": "zqjpkXxnk", + "panelId": 14, + "noDataState": "OK", + "execErrState": "OK", + "for": "10m", + "annotations": { + "__dashboardUid__": "zqjpkXxnk", + "__panelId__": "14", + "summary": "Messages from PolkadotBridgeHub to KusamaBridgeHub (00000001) are either not delivered, or are delivered with lags" + }, + "labels": { + "matrix_room": "FqmgUhjOliBGoncGwm" + }, + "isPaused": false + }, + { + "uid": "adizouqsgd62od", + "title": "PolkadotBridgeHub -> KusamaBridgeHub confirmation lags (00000001)", + "condition": "B", + "data": [ + { + "refId": "A", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "PC96415006F908B67", + "model": { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "scalar(max_over_time(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}[2m]) OR on() vector(0)) - scalar(max_over_time(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_confirmed\"}[2m]) OR on() vector(0))", + "instant": false, + "interval": "", + "intervalMs": 30000, + "legendFormat": "Unconfirmed messages", + "maxDataPoints": 43200, + "range": true, + "refId": "A" + } + }, + { + "refId": "B", + "relativeTimeRange": { + "from": 0, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 50, + 0 + ], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "A" + ] + }, + "reducer": { + "params": [], + "type": "max" + }, + "type": "query" + } + ], + "datasource": { + "name": "Expression", + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "", + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "B", + "type": "classic_conditions" + } + } + ], + "dasboardUid": "zqjpkXxnk", + "panelId": 16, + "noDataState": 
"OK", + "execErrState": "OK", + "for": "10m", + "annotations": { + "__dashboardUid__": "zqjpkXxnk", + "__panelId__": "16", + "summary": "Messages from PolkadotBridgeHub to KusamaBridgeHub (00000001) are either not confirmed, or are confirmed with lags" + }, + "labels": { + "matrix_room": "FqmgUhjOliBGoncGwm" + }, + "isPaused": false + }, + { + "uid": "fdizp9l7o5rswf", + "title": "PolkadotBridgeHub -> KusamaBridgeHub reward lags (00000002)", + "condition": "B", + "data": [ + { + "refId": "A", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "PC96415006F908B67", + "model": { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "scalar(max_over_time(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_confirmed\"}[2m]) OR on() vector(0)) - scalar(max_over_time(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_confirmed\"}[2m]) OR on() vector(0))", + "instant": false, + "interval": "", + "intervalMs": 30000, + "legendFormat": "Unconfirmed rewards", + "maxDataPoints": 43200, + "range": true, + "refId": "A" + } + }, + { + "refId": "B", + "relativeTimeRange": { + "from": 0, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 10, + 0 + ], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "A" + ] + }, + "reducer": { + "params": [], + "type": "min" + }, + "type": "query" + } + ], + "datasource": { + "name": "Expression", + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "", + "hide": false, + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "B", + "type": "classic_conditions" + } + } + ], + "dasboardUid": "zqjpkXxnk", + "panelId": 18, + "noDataState": "OK", + "execErrState": "OK", + "for": "10m", + "annotations": { + "__dashboardUid__": 
"zqjpkXxnk", + "__panelId__": "18", + "summary": "Rewards for messages from PolkadotBridgeHub to KusamaBridgeHub (00000001) are either not confirmed, or are confirmed with lags" + }, + "labels": { + "matrix_room": "FqmgUhjOliBGoncGwm" + }, + "isPaused": false + }, + { + "uid": "bdizqaq47emf4f", + "title": "Kusama -> PolkadotBridgeHub finality sync lags (00000001)", + "condition": "D", + "data": [ + { + "refId": "A", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "PC96415006F908B67", + "model": { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "max(increase(Kusama_to_BridgeHubPolkadot_Sync_best_source_at_target_block_number{domain=\"parity-chains\"}[24h]))", + "instant": false, + "interval": "", + "intervalMs": 30000, + "legendFormat": "At Kusama", + "maxDataPoints": 43200, + "range": true, + "refId": "A" + } + }, + { + "refId": "C", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "C" + ] + }, + "reducer": { + "params": [], + "type": "last" + }, + "type": "query" + } + ], + "datasource": { + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "A", + "intervalMs": 1000, + "maxDataPoints": 43200, + "reducer": "max", + "refId": "C", + "type": "reduce" + } + }, + { + "refId": "D", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 5000 + ], + "type": "lt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "D" + ] + }, + "reducer": { + "params": [], + "type": "last" + }, + "type": "query" + } + ], + "datasource": { + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "C", + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "D", + "type": 
"threshold" + } + } + ], + "dasboardUid": "tkpc6_bnk", + "panelId": 6, + "noDataState": "OK", + "execErrState": "OK", + "for": "5m", + "annotations": { + "__dashboardUid__": "tkpc6_bnk", + "__panelId__": "6", + "summary": "Less than 5000 Kusama headers (~1/2 era) have been synced to PolkadotBridgeHub in last 25 hours. Relay is not running?" + }, + "labels": { + "matrix_room": "FqmgUhjOliBGoncGwm" + }, + "isPaused": false + }, + { + "uid": "adizvdppi4cu8b", + "title": "KusamaBridgeHub -> PolkadotBridgeHub delivery lags (00000001)", + "condition": "A", + "data": [ + { + "refId": "B", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "PC96415006F908B67", + "model": { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "((vector(0) and ((BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_generated\"} > on () BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}))) or vector(1)) + on () increase(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}[10m]) * on () ((vector(1) and ((BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_generated\"} > on () BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}))) or vector(0))", + "instant": false, + "interval": "", + "intervalMs": 30000, + "legendFormat": "1 if all messages are delivered. 
Otherwise - number of delivered messages in last 10m", + "maxDataPoints": 43200, + "range": true, + "refId": "B" + } + }, + { + "refId": "A", + "relativeTimeRange": { + "from": 0, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 1, + 0 + ], + "type": "lt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "B" + ] + }, + "reducer": { + "params": [], + "type": "max" + }, + "type": "query" + } + ], + "datasource": { + "name": "Expression", + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "", + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "A", + "type": "classic_conditions" + } + } + ], + "dasboardUid": "tkpc6_bnk", + "panelId": 12, + "noDataState": "OK", + "execErrState": "OK", + "for": "10m", + "annotations": { + "__dashboardUid__": "tkpc6_bnk", + "__panelId__": "12", + "summary": "Messages from KusamaBridgeHub to PolkadotBridgeHub (00000001) are either not delivered, or are delivered with lags" + }, + "labels": { + "matrix_room": "FqmgUhjOliBGoncGwm" + }, + "isPaused": false + }, + { + "uid": "ddizvjxnpwa2ob", + "title": "KusamaBridgeHub -> PolkadotBridgeHub confirmation lags (00000001)", + "condition": "B", + "data": [ + { + "refId": "A", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "PC96415006F908B67", + "model": { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "scalar(max_over_time(KusamaBridgeHub_to_PolkadotBridgeHub_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}[2m]) OR on() vector(0)) - scalar(max_over_time(KusamaBridgeHub_to_PolkadotBridgeHub_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_confirmed\"}[2m]) OR on() vector(0))", + "instant": false, + "interval": "", + "intervalMs": 30000, + "legendFormat": "Unconfirmed messages", + "maxDataPoints": 43200, + "range": true, + 
"refId": "A" + } + }, + { + "refId": "B", + "relativeTimeRange": { + "from": 0, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 50, + 0 + ], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "A" + ] + }, + "reducer": { + "params": [], + "type": "min" + }, + "type": "query" + } + ], + "datasource": { + "name": "Expression", + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "", + "hide": false, + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "B", + "type": "classic_conditions" + } + } + ], + "dasboardUid": "tkpc6_bnk", + "panelId": 14, + "noDataState": "OK", + "execErrState": "OK", + "for": "10m", + "annotations": { + "__dashboardUid__": "tkpc6_bnk", + "__panelId__": "14" + }, + "labels": { + "matrix_room": "FqmgUhjOliBGoncGwm" + }, + "isPaused": false + }, + { + "uid": "fdizvp3bz6oe8c", + "title": "KusamaBridgeHub -> PolkadotBridgeHub reward lags (00000002)", + "condition": "B", + "data": [ + { + "refId": "A", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "PC96415006F908B67", + "model": { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "scalar(max_over_time(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_confirmed\"}[2m]) OR on() vector(0)) - scalar(max_over_time(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_confirmed\"}[2m]) OR on() vector(0))", + "interval": "", + "intervalMs": 30000, + "legendFormat": "Unconfirmed rewards", + "maxDataPoints": 43200, + "range": true, + "refId": "A" + } + }, + { + "refId": "B", + "relativeTimeRange": { + "from": 0, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 10, + 0 + ], + "type": "gt" + }, + 
"operator": { + "type": "and" + }, + "query": { + "params": [ + "A" + ] + }, + "reducer": { + "params": [], + "type": "min" + }, + "type": "query" + } + ], + "datasource": { + "name": "Expression", + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "", + "hide": false, + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "B", + "type": "classic_conditions" + } + } + ], + "dasboardUid": "tkpc6_bnk", + "panelId": 15, + "noDataState": "OK", + "execErrState": "OK", + "for": "10m", + "annotations": { + "__dashboardUid__": "tkpc6_bnk", + "__panelId__": "15", + "summary": "Rewards for messages from KusamaBridgeHub to PolkadotBridgeHub (00000001) are either not confirmed, or are confirmed with lags" + }, + "labels": { + "matrix_room": "FqmgUhjOliBGoncGwm" + }, + "isPaused": false + }, + { + "uid": "edizwf9kbhhxcc", + "title": "KusamaBridgeHub <> PolkadotBridgeHub relay (00000001) node is down", + "condition": "C", + "data": [ + { + "refId": "A", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "PC96415006F908B67", + "model": { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "exemplar": false, + "expr": "up{domain=\"parity-chains\",container=\"bridges-common-relay\"}", + "instant": false, + "interval": "", + "intervalMs": 30000, + "legendFormat": "Is relay running", + "maxDataPoints": 43200, + "range": true, + "refId": "A" + } + }, + { + "refId": "B", + "relativeTimeRange": { + "from": 0, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "B" + ] + }, + "reducer": { + "params": [], + "type": "last" + }, + "type": "query" + } + ], + "datasource": { + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "A", + "intervalMs": 1000, + "maxDataPoints": 43200, + "reducer": "min", + "refId": "B", + "type": "reduce" + } + }, + { + 
"refId": "C", + "relativeTimeRange": { + "from": 0, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 1 + ], + "type": "lt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "C" + ] + }, + "reducer": { + "params": [], + "type": "last" + }, + "type": "query" + } + ], + "datasource": { + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "B", + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "C", + "type": "threshold" + } + } + ], + "dasboardUid": "UFsgpJtVz", + "panelId": 16, + "noDataState": "OK", + "execErrState": "OK", + "for": "5m", + "annotations": { + "__dashboardUid__": "UFsgpJtVz", + "__panelId__": "16", + "summary": "KusamaBridgeHub <> PolkadotBridgeHub relay (00000001) node is down" + }, + "labels": { + "matrix_room": "FqmgUhjOliBGoncGwm" + }, + "isPaused": false + }, + { + "uid": "adizwlq6yk83kc", + "title": "Version guard has aborted KusamaBridgeHub <> PolkadotBridgeHub relay (00000001)", + "condition": "C", + "data": [ + { + "refId": "A", + "queryType": "range", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "P7028671862427D8D", + "model": { + "datasource": { + "type": "loki", + "uid": "P7028671862427D8D" + }, + "editorMode": "code", + "expr": "count_over_time({container=\"bridges-common-relay\"} |= `Aborting relay` [1m])", + "intervalMs": 1000, + "legendFormat": "Errors per minute", + "maxDataPoints": 43200, + "queryType": "range", + "refId": "A" + } + }, + { + "refId": "B", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "B" + ] + }, + "reducer": { + "params": [], + "type": "last" + }, + "type": "query" + } + ], + "datasource": { + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "A", + "intervalMs": 1000, + 
"maxDataPoints": 43200, + "reducer": "max", + "refId": "B", + "type": "reduce" + } + }, + { + "refId": "C", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 0 + ], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "C" + ] + }, + "reducer": { + "params": [], + "type": "last" + }, + "type": "query" + } + ], + "datasource": { + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "B", + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "C", + "type": "threshold" + } + } + ], + "dasboardUid": "UFsgpJtVz", + "panelId": 11, + "noDataState": "OK", + "execErrState": "OK", + "for": "0s", + "annotations": { + "__dashboardUid__": "UFsgpJtVz", + "__panelId__": "11", + "summary": "The KusamaBridgeHub <> PolkadotBridgeHub relay (00000001) has been aborted by version guard - i.e. one of chains has been upgraded and relay wasn't redeployed" + }, + "labels": { + "matrix_room": "FqmgUhjOliBGoncGwm" + }, + "isPaused": false + }, + { + "uid": "fdizwsne5dz40b", + "title": "Kusama headers mismatch", + "condition": "C", + "data": [ + { + "refId": "A", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "PC96415006F908B67", + "model": { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "Kusama_to_BridgeHubPolkadot_Sync_is_source_and_source_at_target_using_different_forks{domain=\"parity-chains\"}", + "interval": "", + "intervalMs": 30000, + "legendFormat": "Best BridgeHubKusama header at BridgeHubPolkadot doesn't match the same header of BridgeHubKusama", + "maxDataPoints": 43200, + "range": true, + "refId": "A" + } + }, + { + "refId": "B", + "relativeTimeRange": { + "from": 0, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [], + "type": "gt" + }, + "operator": { + "type": "and" + }, + 
"query": { + "params": [ + "B" + ] + }, + "reducer": { + "params": [], + "type": "last" + }, + "type": "query" + } + ], + "datasource": { + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "A", + "intervalMs": 1000, + "maxDataPoints": 43200, + "reducer": "last", + "refId": "B", + "type": "reduce" + } + }, + { + "refId": "C", + "relativeTimeRange": { + "from": 0, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 0 + ], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "C" + ] + }, + "reducer": { + "params": [], + "type": "last" + }, + "type": "query" + } + ], + "datasource": { + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "B", + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "C", + "type": "threshold" + } + } + ], + "dasboardUid": "UFsgpJtVz", + "panelId": 12, + "noDataState": "OK", + "execErrState": "OK", + "for": "10m", + "annotations": { + "__dashboardUid__": "UFsgpJtVz", + "__panelId__": "12", + "summary": "Best Kusama header at BridgeHubPolkadot (00000001) doesn't match the same header at Kusama" + }, + "labels": { + "matrix_room": "FqmgUhjOliBGoncGwm" + }, + "isPaused": false + }, + { + "uid": "ddizwvw3dlzi8e", + "title": "Polkadot headers mismatch", + "condition": "C", + "data": [ + { + "refId": "A", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "PC96415006F908B67", + "model": { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "Polkadot_to_BridgeHubKusama_Sync_is_source_and_source_at_target_using_different_forks{domain=\"parity-chains\"}", + "interval": "", + "intervalMs": 30000, + "legendFormat": "Best BridgeHubKusama header at BridgeHubPolkadot doesn't match the same header of BridgeHubKusama", + "maxDataPoints": 43200, + "range": true, + "refId": "A" + } + }, + { + "refId": "B", + "relativeTimeRange": { + "from": 0, + "to": 0 + }, 
+ "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "B" + ] + }, + "reducer": { + "params": [], + "type": "last" + }, + "type": "query" + } + ], + "datasource": { + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "A", + "intervalMs": 1000, + "maxDataPoints": 43200, + "reducer": "last", + "refId": "B", + "type": "reduce" + } + }, + { + "refId": "C", + "relativeTimeRange": { + "from": 0, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 0 + ], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "C" + ] + }, + "reducer": { + "params": [], + "type": "last" + }, + "type": "query" + } + ], + "datasource": { + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "B", + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "C", + "type": "threshold" + } + } + ], + "dasboardUid": "UFsgpJtVz", + "panelId": 13, + "noDataState": "NoData", + "execErrState": "Error", + "for": "10m", + "annotations": { + "__dashboardUid__": "UFsgpJtVz", + "__panelId__": "13", + "summary": "Best Polkadot header at BridgeHubKusama (00000001) doesn't match the same header at Polkadot" + }, + "labels": { + "matrix_room": "FqmgUhjOliBGoncGwm" + }, + "isPaused": false + }, + { + "uid": "bdizx0xdiomwwc", + "title": "BridgeHubKusama headers mismatch", + "condition": "B", + "data": [ + { + "refId": "A", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "PC96415006F908B67", + "model": { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_is_source_and_source_at_target_using_different_forks{domain=\"parity-chains\"}", + "interval": "", + "intervalMs": 30000, + "legendFormat": "Best BridgeHubKusama header at 
BridgeHubPolkadot doesn't match the same header of BridgeHubKusama", + "maxDataPoints": 43200, + "range": true, + "refId": "A" + } + }, + { + "refId": "B", + "relativeTimeRange": { + "from": 0, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 0, + 0 + ], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "A" + ] + }, + "reducer": { + "params": [], + "type": "max" + }, + "type": "query" + } + ], + "datasource": { + "name": "Expression", + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "", + "hide": false, + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "B", + "type": "classic_conditions" + } + } + ], + "dasboardUid": "UFsgpJtVz", + "panelId": 2, + "noDataState": "OK", + "execErrState": "OK", + "for": "10m", + "annotations": { + "__dashboardUid__": "UFsgpJtVz", + "__panelId__": "2", + "summary": "Best BridgeHubKusama header at BridgeHubPolkadot (00000001) doesn't match the same header at BridgeHubKusama" + }, + "labels": { + "matrix_room": "FqmgUhjOliBGoncGwm" + }, + "isPaused": false + }, + { + "uid": "fdizx4hrhg2yod", + "title": "BridgeHubPolkadot headers mismatch", + "condition": "B", + "data": [ + { + "refId": "A", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "PC96415006F908B67", + "model": { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_is_source_and_source_at_target_using_different_forks{domain=\"parity-chains\"}", + "interval": "", + "intervalMs": 30000, + "legendFormat": "Best BridgeHubKusama header at BridgeHubPolkadot doesn't match the same header of BridgeHubKusama", + "maxDataPoints": 43200, + "range": true, + "refId": "A" + } + }, + { + "refId": "B", + "relativeTimeRange": { + "from": 0, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + 
"evaluator": { + "params": [ + 0, + 0 + ], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "A" + ] + }, + "reducer": { + "params": [], + "type": "max" + }, + "type": "query" + } + ], + "datasource": { + "name": "Expression", + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "", + "hide": false, + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "B", + "type": "classic_conditions" + } + } + ], + "dasboardUid": "UFsgpJtVz", + "panelId": 3, + "noDataState": "OK", + "execErrState": "OK", + "for": "10m", + "annotations": { + "__dashboardUid__": "UFsgpJtVz", + "__panelId__": "3", + "summary": "Best BridgeHubPolkadot header at BridgeHubKusama (00000001) doesn't match the same header at BridgeHubPolkadot" + }, + "labels": { + "matrix_room": "FqmgUhjOliBGoncGwm" + }, + "isPaused": false + }, + { + "uid": "cdizxaawyvldsb", + "title": "Relay balances at KusamaBridgeHub", + "condition": "B", + "data": [ + { + "refId": "A", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "PC96415006F908B67", + "model": { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "last_over_time(at_BridgeHubKusama_relay_BridgeHubPolkadotMessages_balance{domain=\"parity-chains\"}[1h])", + "instant": false, + "interval": "", + "intervalMs": 30000, + "legendFormat": "Messages Relay Balance", + "maxDataPoints": 43200, + "range": true, + "refId": "A" + } + }, + { + "refId": "B", + "relativeTimeRange": { + "from": 0, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 2, + 0 + ], + "type": "lt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "A" + ] + }, + "reducer": { + "params": [], + "type": "last" + }, + "type": "query" + } + ], + "datasource": { + "name": "Expression", + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "", + "intervalMs": 1000, + "maxDataPoints": 
43200, + "refId": "B", + "type": "classic_conditions" + } + } + ], + "dasboardUid": "UFsgpJtVz", + "panelId": 5, + "noDataState": "NoData", + "execErrState": "Error", + "for": "10m", + "annotations": { + "__dashboardUid__": "UFsgpJtVz", + "__panelId__": "5", + "summary": "With-PolkadotBridgeHub messages relay balance at KusamaBridgeHub (00000001) is too low" + }, + "labels": { + "matrix_room": "FqmgUhjOliBGoncGwm" + }, + "isPaused": false + }, + { + "uid": "fdizxtuxuza4gd", + "title": "Relay balances at PolkadotBridgeHub", + "condition": "B", + "data": [ + { + "refId": "A", + "relativeTimeRange": { + "from": 21600, + "to": 0 + }, + "datasourceUid": "PC96415006F908B67", + "model": { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "last_over_time(at_BridgeHubPolkadot_relay_BridgeHubKusamaMessages_balance{domain=\"parity-chains\"}[1h])", + "instant": false, + "interval": "", + "intervalMs": 30000, + "legendFormat": "Messages Relay Balance", + "maxDataPoints": 43200, + "range": true, + "refId": "A" + } + }, + { + "refId": "B", + "relativeTimeRange": { + "from": 0, + "to": 0 + }, + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 10, + 0 + ], + "type": "lt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "A" + ] + }, + "reducer": { + "params": [], + "type": "last" + }, + "type": "query" + } + ], + "datasource": { + "name": "Expression", + "type": "__expr__", + "uid": "__expr__" + }, + "expression": "", + "hide": false, + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "B", + "type": "classic_conditions" + } + } + ], + "dasboardUid": "UFsgpJtVz", + "panelId": 6, + "noDataState": "OK", + "execErrState": "OK", + "for": "10m", + "annotations": { + "__dashboardUid__": "UFsgpJtVz", + "__panelId__": "6", + "summary": "With-KusamaBridgeHub messages relay balance at PolkadotBridgeHub (00000001) is too low" + }, + "labels": { + 
"matrix_room": "FqmgUhjOliBGoncGwm" + }, + "isPaused": false + } + ] + } + ] +} diff --git a/deployments/bridges/kusama-polkadot/dashboard/grafana/kusama-polkadot-maintenance-dashboard.json b/deployments/bridges/kusama-polkadot/dashboard/grafana/kusama-polkadot-maintenance-dashboard.json new file mode 100644 index 000000000..2be77fa36 --- /dev/null +++ b/deployments/bridges/kusama-polkadot/dashboard/grafana/kusama-polkadot-maintenance-dashboard.json @@ -0,0 +1,1026 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "target": { + "limit": 100, + "matchAny": false, + "tags": [], + "type": "dashboard" + }, + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": 4107, + "links": [], + "liveNow": false, + "panels": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 5, + "w": 5, + "x": 0, + "y": 0 + }, + "id": 8, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "name", + "wideLayout": true + }, + "pluginVersion": "10.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "exemplar": false, + "expr": "substrate_relay_build_info{domain=\"parity-chains\"}", + "instant": true, + "legendFormat": "{{commit}}", + "range": false, + "refId": "A" + 
} + ], + "title": "Relay build commit", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 5, + "w": 4, + "x": 5, + "y": 0 + }, + "id": 9, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "text": {}, + "textMode": "name", + "wideLayout": true + }, + "pluginVersion": "10.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "exemplar": false, + "expr": "substrate_relay_build_info{domain=\"parity-chains\"}", + "instant": true, + "legendFormat": "{{version}}", + "range": false, + "refId": "A" + } + ], + "title": "Relay build version", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [ + { + "options": { + "0": { + "color": "red", + "index": 1, + "text": "No" + }, + "1": { + "color": "green", + "index": 0, + "text": "Yes" + } + }, + "type": "value" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 5, + "w": 4, + "x": 9, + "y": 0 + }, + "id": 15, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + 
"textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "10.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "exemplar": false, + "expr": "up{domain=\"parity-chains\",container=\"bridges-common-relay\"}", + "instant": false, + "legendFormat": "Is relay running", + "range": true, + "refId": "A" + } + ], + "title": "Is relay running?", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 5, + "w": 5, + "x": 13, + "y": 0 + }, + "id": 16, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "9.3.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "exemplar": false, + "expr": "up{domain=\"parity-chains\",container=\"bridges-common-relay\"}", + "instant": false, + "legendFormat": "Is relay running", + "range": true, + "refId": "A" + } + ], + 
"title": "Is relay running? (for alert)", + "type": "timeseries" + }, + { + "datasource": { + "type": "loki", + "uid": "P7028671862427D8D" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 9, + "w": 18, + "x": 0, + "y": 5 + }, + "id": 11, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "P7028671862427D8D" + }, + "editorMode": "code", + "expr": "count_over_time({container=\"bridges-common-relay\"} |~ `(?i)(warn|error|fail)` [1m])", + "legendFormat": "Errors per minute", + "queryType": "range", + "refId": "A" + } + ], + "title": "Relay errors/warnings per minute", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + 
"fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 9, + "x": 0, + "y": 14 + }, + "id": 12, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "Kusama_to_BridgeHubPolkadot_Sync_is_source_and_source_at_target_using_different_forks{domain=\"parity-chains\"}", + "legendFormat": "Best BridgeHubKusama header at BridgeHubPolkadot doesn't match the same header of BridgeHubKusama", + "range": true, + "refId": "A" + } + ], + "title": "Kusama headers mismatch", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + 
"group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 9, + "x": 9, + "y": 14 + }, + "id": 13, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "Polkadot_to_BridgeHubKusama_Sync_is_source_and_source_at_target_using_different_forks{domain=\"parity-chains\"}", + "legendFormat": "Best BridgeHubKusama header at BridgeHubPolkadot doesn't match the same header of BridgeHubKusama", + "range": true, + "refId": "A" + } + ], + "title": "Polkadot headers mismatch", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 9, + "x": 0, + 
"y": 21 + }, + "id": 2, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_is_source_and_source_at_target_using_different_forks{domain=\"parity-chains\"}", + "legendFormat": "Best BridgeHubKusama header at BridgeHubPolkadot doesn't match the same header of BridgeHubKusama", + "range": true, + "refId": "A" + } + ], + "title": "BridgeHubKusama headers mismatch", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 9, + "x": 9, + "y": 21 + }, + "id": 3, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": 
"PC96415006F908B67" + }, + "editorMode": "code", + "expr": "BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_is_source_and_source_at_target_using_different_forks{domain=\"parity-chains\"}", + "legendFormat": "Best BridgeHubKusama header at BridgeHubPolkadot doesn't match the same header of BridgeHubKusama", + "range": true, + "refId": "A" + } + ], + "title": "BridgeHubPolkadot headers mismatch", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 9, + "x": 0, + "y": 28 + }, + "id": 5, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "at_BridgeHubKusama_relay_BridgeHubPolkadotMessages_balance{domain=\"parity-chains\"}", + "legendFormat": "Messages Relay Balance", + "range": true, + "refId": "A" + } + ], + "title": "Relay balances at KusamaBridgeHub", + "type": "timeseries" + }, 
+ { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 9, + "x": 9, + "y": 28 + }, + "id": 6, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "at_BridgeHubPolkadot_relay_BridgeHubKusamaMessages_balance{domain=\"parity-chains\"}", + "legendFormat": "Messages Relay Balance", + "range": true, + "refId": "A" + } + ], + "title": "Relay balances at PolkadotBridgeHub", + "type": "timeseries" + } + ], + "refresh": "5s", + "schemaVersion": 39, + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "BridgeHubKusama <> BridgeHubPolkadot maintenance (00000001)", + "uid": "UFsgpJtVz", + "version": 6, + "weekStart": "" +} diff --git a/deployments/bridges/kusama-polkadot/dashboard/grafana/relay-kusama-to-polkadot-messages-dashboard.json 
b/deployments/bridges/kusama-polkadot/dashboard/grafana/relay-kusama-to-polkadot-messages-dashboard.json new file mode 100644 index 000000000..d9660a2a1 --- /dev/null +++ b/deployments/bridges/kusama-polkadot/dashboard/grafana/relay-kusama-to-polkadot-messages-dashboard.json @@ -0,0 +1,982 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "target": { + "limit": 100, + "matchAny": false, + "tags": [], + "type": "dashboard" + }, + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": 4105, + "links": [], + "liveNow": false, + "panels": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 0, + "y": 0 + }, + "id": 6, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + 
{ + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "Kusama_to_BridgeHubPolkadot_Sync_best_source_block_number{domain=\"parity-chains\"}", + "legendFormat": "At Kusama", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "Kusama_to_BridgeHubPolkadot_Sync_best_source_at_target_block_number{domain=\"parity-chains\"}", + "hide": false, + "legendFormat": "At BridgeHubPolkadot", + "range": true, + "refId": "B" + } + ], + "title": "Best finalized Kusama headers", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 6, + "y": 0 + }, + "id": 7, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": 
"Polkadot_to_BridgeHubKusama_Sync_best_source_block_number{domain=\"parity-chains\"}", + "legendFormat": "At Polkadot", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "Polkadot_to_BridgeHubKusama_Sync_best_source_at_target_block_number{domain=\"parity-chains\"}", + "hide": false, + "legendFormat": "At BridgeHubKusama", + "range": true, + "refId": "B" + } + ], + "title": "Best finalized Polkadot headers", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 12, + "y": 0 + }, + "id": 2, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "exemplar": true, + "expr": 
"BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_best_source_block_number{domain=\"parity-chains\"}", + "interval": "", + "legendFormat": "At KusamaBridgeHub", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_best_source_at_target_block_number{domain=\"parity-chains\"}", + "hide": false, + "legendFormat": "At PolkadotBridgeHub", + "range": true, + "refId": "B" + } + ], + "title": "Best finalized KusamaBridgeHub headers", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 18, + "y": 0 + }, + "id": 4, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "exemplar": true, + "expr": 
"BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_best_target_block_number{domain=\"parity-chains\"}", + "interval": "", + "legendFormat": "At PolkadotBridgeHub", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_best_target_at_source_block_number{domain=\"parity-chains\"}", + "hide": false, + "legendFormat": "At KusamaBridgeHub", + "range": true, + "refId": "B" + } + ], + "title": "Best finalized PolkadotBridgeHub headers", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 8 + }, + "id": 9, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": 
"label_replace(label_replace(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\", type=~\"source_latest_generated|target_latest_received\"}, \"type\", \"Latest message sent from BridgeHubKusama\", \"type\", \"source_latest_generated\"), \"type\", \"Latest BridgeHubKusama message received by BridgeHubPolkadot\", \"type\", \"target_latest_received\")", + "legendFormat": "{{type}}", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "increase(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\", type=~\"source_latest_generated\"}[24h])", + "hide": true, + "legendFormat": "Messages generated in last 24h", + "range": true, + "refId": "B" + } + ], + "title": "Delivery race (00000001)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 8 + }, + "id": 10, + "options": { + "legend": { + "calcs": [], + "displayMode": 
"list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "label_replace(label_replace(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=~\"source_latest_confirmed|target_latest_received\"}, \"type\", \"Latest delivery confirmation from BridgeHubPolkadot to BridgeHubKusama\", \"type\", \"source_latest_confirmed\"), \"type\", \"Latest BridgeHubKusama message received by BridgeHubPolkadot\", \"type\", \"target_latest_received\")", + "legendFormat": "{{type}}", + "range": true, + "refId": "A" + } + ], + "title": "Confirmations race (00000001)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 0, + "y": 16 + }, + "id": 12, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": 
"single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "scalar(max_over_time(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_generated\"}[2m]) OR on() vector(0)) - scalar(max_over_time(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}[2m]) OR on() vector(0))", + "legendFormat": "Undelivered messages", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "((vector(0) and ((BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_generated\"} > on () BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}))) or vector(1)) + on () increase(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}[10m]) * on () ((vector(1) and ((BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_generated\"} > on () BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}))) or vector(0))", + "hide": true, + "legendFormat": "1 if all messages are delivered. 
Otherwise - number of delivered messages in last 10m", + "range": true, + "refId": "B" + } + ], + "title": "Delivery race lags (00000001)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 8, + "y": 16 + }, + "id": 14, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "scalar(max_over_time(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}[2m]) OR on() vector(0)) - scalar(max_over_time(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_confirmed\"}[2m]) OR on() vector(0))", + "legendFormat": "Unconfirmed messages", + "range": true, + "refId": "A" + } + ], + "title": "Confirmations race lags (00000001)", + 
"type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 16, + "y": 16 + }, + "id": 15, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "scalar(max_over_time(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_confirmed\"}[2m]) OR on() vector(0)) - scalar(max_over_time(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_confirmed\"}[2m]) OR on() vector(0))", + "legendFormat": "Unconfirmed rewards", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": 
"(scalar(max_over_time(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_confirmed\"}[2m]) OR on() vector(0)) - scalar(max_over_time(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_confirmed\"}[2m]) OR on() vector(0))) * (max_over_time(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}[2m]) OR on() vector(0) > bool min_over_time(BridgeHubKusama_to_BridgeHubPolkadot_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}[2m]) OR on() vector(0))", + "hide": true, + "legendFormat": "__auto", + "range": true, + "refId": "B" + } + ], + "title": "Reward lags (00000001)", + "type": "timeseries" + } + ], + "refresh": "5s", + "schemaVersion": 39, + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "BridgeHubKusama to BridgeHubPolkadot (00000001)", + "uid": "tkpc6_bnk", + "version": 2, + "weekStart": "" +} diff --git a/deployments/bridges/kusama-polkadot/dashboard/grafana/relay-polkadot-to-kusama-messages-dashboard.json b/deployments/bridges/kusama-polkadot/dashboard/grafana/relay-polkadot-to-kusama-messages-dashboard.json new file mode 100644 index 000000000..4fbe9cc09 --- /dev/null +++ b/deployments/bridges/kusama-polkadot/dashboard/grafana/relay-polkadot-to-kusama-messages-dashboard.json @@ -0,0 +1,970 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "target": { + "limit": 100, + "matchAny": false, + "tags": [], + "type": "dashboard" + }, + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + 
"graphTooltip": 0, + "id": 4106, + "links": [], + "liveNow": false, + "panels": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 0, + "y": 0 + }, + "id": 2, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "Polkadot_to_BridgeHubKusama_Sync_best_source_block_number{domain=\"parity-chains\"}", + "legendFormat": "At Polkadot", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "Polkadot_to_BridgeHubKusama_Sync_best_source_at_target_block_number{domain=\"parity-chains\"}", + "hide": false, + "legendFormat": "At BridgeHubKusama", + "range": true, + "refId": "B" + } + ], + "title": "Best finalized Polkadot headers", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", 
+ "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 6, + "y": 0 + }, + "id": 4, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "Kusama_to_BridgeHubPolkadot_Sync_best_source_block_number{domain=\"parity-chains\"}", + "legendFormat": "At Kusama", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "Kusama_to_BridgeHubPolkadot_Sync_best_source_at_target_block_number{domain=\"parity-chains\"}", + "hide": false, + "legendFormat": "At PolkadotBridgeHub", + "range": true, + "refId": "B" + } + ], + "title": "Best finalized Kusama headers", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + 
"axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 12, + "y": 0 + }, + "id": 6, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "exemplar": true, + "expr": "BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_best_source_block_number{domain=\"parity-chains\"}", + "interval": "", + "legendFormat": "At PolkadotBridgeHub", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_best_source_at_target_block_number{domain=\"parity-chains\"}", + "hide": false, + "legendFormat": "At KusamaBridgeHub", + "range": true, + "refId": "B" + } + ], + "title": "Best finalized PolkadotBridgeHub headers", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": 
false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 6, + "x": 18, + "y": 0 + }, + "id": 8, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "exemplar": true, + "expr": "BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_best_target_block_number{domain=\"parity-chains\"}", + "interval": "", + "legendFormat": "At KusamaBridgeHub", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_best_target_at_source_block_number{domain=\"parity-chains\"}", + "hide": false, + "legendFormat": "At PolkadotBridgeHub", + "range": true, + "refId": "B" + } + ], + "title": "Best finalized KusamaBridgeHub headers", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + 
"axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 8 + }, + "id": 10, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "label_replace(label_replace(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=~\"source_latest_generated|target_latest_received\"}, \"type\", \"Latest message sent from BridgeHubPolkadot\", \"type\", \"source_latest_generated\"), \"type\", \"Latest BridgeHubPolkadot message received by BridgeHubKusama\", \"type\", \"target_latest_received\")", + "legendFormat": "{{type}}", + "range": true, + "refId": "A" + } + ], + "title": "Delivery race (00000001)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + 
"drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 8 + }, + "id": 12, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "label_replace(label_replace(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=~\"source_latest_confirmed|target_latest_received\"}, \"type\", \"Latest delivery confirmation from BridgeHubKusama to BridgeHubPolkadot\", \"type\", \"source_latest_confirmed\"), \"type\", \"Latest BridgeHubPolkadot message received by BridgeHubKusama\", \"type\", \"target_latest_received\")", + "legendFormat": "{{type}}", + "range": true, + "refId": "A" + } + ], + "title": "Confirmations race (00000001)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + 
"legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 0, + "y": 16 + }, + "id": 14, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "scalar(max_over_time(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_generated\"}[2m]) OR on() vector(0)) - scalar(max_over_time(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}[2m]) OR on() vector(0))", + "legendFormat": "Undelivered messages", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "((vector(0) and ((BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_generated\"} > on () BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}))) or vector(1)) + on () increase(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}[10m]) * on () ((vector(1) and 
((BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_generated\"} > on () BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}))) or vector(0))", + "hide": true, + "legendFormat": "1 if all messages are delivered. Otherwise - number of delivered messages in last 10m", + "range": true, + "refId": "B" + } + ], + "title": "Delivery race lags (00000001)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 8, + "y": 16 + }, + "id": 16, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": 
"scalar(max_over_time(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}[2m]) OR on() vector(0)) - scalar(max_over_time(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_confirmed\"}[2m]) OR on() vector(0))", + "legendFormat": "Unconfirmed messages", + "range": true, + "refId": "A" + } + ], + "title": "Confirmations race lags (00000001)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 8, + "x": 16, + "y": 16 + }, + "id": 18, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": 
"scalar(max_over_time(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_confirmed\"}[2m]) OR on() vector(0)) - scalar(max_over_time(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_confirmed\"}[2m]) OR on() vector(0))", + "legendFormat": "Unconfirmed rewards", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "PC96415006F908B67" + }, + "editorMode": "code", + "expr": "(scalar(max_over_time(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"source_latest_confirmed\"}[2m]) OR on() vector(0)) - scalar(max_over_time(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_confirmed\"}[2m]) OR on() vector(0))) * (max_over_time(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}[2m]) OR on() vector(0) > bool min_over_time(BridgeHubPolkadot_to_BridgeHubKusama_MessageLane_00000001_lane_state_nonces{domain=\"parity-chains\",type=\"target_latest_received\"}[2m]) OR on() vector(0))", + "hide": true, + "legendFormat": "__auto", + "range": true, + "refId": "B" + } + ], + "title": "Reward lags (00000001)", + "type": "timeseries" + } + ], + "refresh": "5s", + "schemaVersion": 39, + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "BridgeHubPolkadot to BridgeHubKusama (00000001)", + "uid": "zqjpkXxnk", + "version": 2, + "weekStart": "" +} -- GitLab From 7f41e098c64291dcaf1aa2ff397b2330c648bc5d Mon Sep 17 00:00:00 2001 From: Svyatoslav Nikolsky Date: Fri, 19 Apr 2024 17:40:52 +0300 Subject: [PATCH 36/39] cargo update + fix litep2p manually (#2964) --- Cargo.lock | 650 
+++++++++++++++++++++++++++-------------------------- 1 file changed, 328 insertions(+), 322 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8c7d1a76e..e748ab3b7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -106,9 +106,9 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.16" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" +checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "android-tzdata" @@ -206,9 +206,9 @@ dependencies = [ "include_dir", "itertools 0.10.5", "proc-macro-error", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -391,7 +391,7 @@ checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" dependencies = [ "num-bigint", "num-traits", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", ] @@ -469,7 +469,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae3281bc6d0fd7e549af32b52511e1302185bd688fd3359fa36423346ff682ea" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", ] @@ -553,7 +553,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "726535892e8eae7e70657b4c8ea93d26b8553afb1ce617caee529ef96d7dee6c" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", "synstructure", @@ -565,7 +565,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2777730b2039ac0f95f093556e61b6d26cebed5393ca6f152717777cec3a42ed" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", ] @@ -593,9 +593,9 @@ dependencies = [ [[package]] name = "async-channel" -version = "2.2.0" +version = "2.2.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28243a43d821d11341ab73c80bed182dc015c514b951616cf79bd4af39af0c3" +checksum = "136d4d23bcc79e27423727b36823d86233aad06dfea531837b038394d11e9928" dependencies = [ "concurrent-queue", "event-listener 5.3.0", @@ -606,11 +606,10 @@ dependencies = [ [[package]] name = "async-executor" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f98c37cf288e302c16ef6c8472aad1e034c6c84ce5ea7b8101c98eb4a802fee" +checksum = "b10202063978b3351199d68f8b22c4e47e4b1b822f8d43fd862d5ea8c006b29a" dependencies = [ - "async-lock 3.3.0", "async-task", "concurrent-queue", "fastrand 2.0.2", @@ -635,7 +634,7 @@ version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" dependencies = [ - "async-channel 2.2.0", + "async-channel 2.2.1", "async-executor", "async-io 2.3.2", "async-lock 3.3.0", @@ -716,11 +715,11 @@ dependencies = [ [[package]] name = "async-process" -version = "2.2.0" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d999d925640d51b662b7b4e404224dd81de70f4aa4a199383c2c5e5b86885fa3" +checksum = "cad07b3443bfa10dcddf86a452ec48949e8e7fedf7392d82de3969fda99e90ed" dependencies = [ - "async-channel 2.2.0", + "async-channel 2.2.1", "async-io 2.3.2", "async-lock 3.3.0", "async-signal", @@ -791,9 +790,9 @@ version = "0.1.80" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -1093,7 +1092,7 @@ version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a37913e8dc4ddcc604f0c6d3bf2887c995153af3611de9e23c352b44c1b9118" dependencies = [ - "async-channel 2.2.0", + 
"async-channel 2.2.1", "async-lock 3.3.0", "async-task", "fastrand 2.0.2", @@ -1118,7 +1117,7 @@ dependencies = [ [[package]] name = "bp-bridge-hub-cumulus" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-messages", "bp-polkadot-core", @@ -1133,7 +1132,7 @@ dependencies = [ [[package]] name = "bp-bridge-hub-kusama" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-bridge-hub-cumulus", "bp-messages", @@ -1147,7 +1146,7 @@ dependencies = [ [[package]] name = "bp-bridge-hub-polkadot" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-bridge-hub-cumulus", "bp-messages", @@ -1161,7 +1160,7 @@ dependencies = [ [[package]] name = "bp-bridge-hub-rococo" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-bridge-hub-cumulus", "bp-messages", @@ -1175,7 +1174,7 @@ dependencies = [ [[package]] name = "bp-bridge-hub-westend" version = "0.3.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-bridge-hub-cumulus", "bp-messages", @@ 
-1189,7 +1188,7 @@ dependencies = [ [[package]] name = "bp-header-chain" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-runtime", "finality-grandpa", @@ -1206,7 +1205,7 @@ dependencies = [ [[package]] name = "bp-kusama" version = "0.5.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-header-chain", "bp-polkadot-core", @@ -1219,7 +1218,7 @@ dependencies = [ [[package]] name = "bp-messages" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-header-chain", "bp-runtime", @@ -1234,7 +1233,7 @@ dependencies = [ [[package]] name = "bp-parachains" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-header-chain", "bp-polkadot-core", @@ -1251,7 +1250,7 @@ dependencies = [ [[package]] name = "bp-polkadot" version = "0.5.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-header-chain", "bp-polkadot-core", @@ -1264,7 +1263,7 @@ dependencies = [ [[package]] name = "bp-polkadot-bulletin" version = "0.4.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-header-chain", "bp-messages", @@ -1282,7 +1281,7 @@ dependencies = [ [[package]] name = "bp-polkadot-core" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-messages", "bp-runtime", @@ -1300,7 +1299,7 @@ dependencies = [ [[package]] name = "bp-relayers" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-messages", "bp-runtime", @@ -1314,7 +1313,7 @@ dependencies = [ [[package]] name = "bp-rococo" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-header-chain", "bp-polkadot-core", @@ -1327,7 +1326,7 @@ dependencies = [ [[package]] name = "bp-runtime" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "frame-support", "frame-system", @@ -1350,7 +1349,7 @@ dependencies = [ [[package]] name = "bp-test-utils" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-header-chain", "bp-parachains", @@ -1370,7 +1369,7 @@ dependencies = [ [[package]] name = "bp-westend" version = "0.3.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-header-chain", "bp-polkadot-core", @@ -1383,7 +1382,7 @@ dependencies = [ [[package]] name = "bp-xcm-bridge-hub" version = "0.2.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", ] @@ -1391,7 +1390,7 @@ dependencies = [ [[package]] name = "bp-xcm-bridge-hub-router" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "parity-scale-codec", "scale-info", @@ -1402,7 +1401,7 @@ dependencies = [ [[package]] name = "bridge-runtime-common" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-header-chain", "bp-messages", @@ -1503,9 +1502,9 @@ checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" [[package]] name = "cc" -version = "1.0.92" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2678b2e3449475e95b0aa6f9b506a28e61b3dc8996592b983695e8ebb58a8b41" +checksum = "17f6e324229dc011159fcc089755d1e2e216a90d43a7dea6853ca740b84f35e7" dependencies = [ "jobserver", "libc", @@ -1513,9 +1512,9 @@ dependencies = [ [[package]] name = "cfg-expr" -version = "0.15.7" +version = "0.15.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa50868b64a9a6fda9d593ce778849ea8715cd2a3d2cc17ffdb4a2f2f2f1961d" +checksum = "d067ad48b8650848b989a59a86c6c36a995d02d2bf778d45c3c5d57bc2718f02" dependencies = [ "smallvec", ] @@ -1562,16 +1561,16 @@ dependencies = [ [[package]] name = "chrono" -version = "0.4.37" +version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a0d04d43504c61aa6c7531f1871dd0d418d91130162063b789da00fd7057a5e" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "wasm-bindgen", - "windows-targets 0.52.4", + "windows-targets 0.52.5", ] [[package]] @@ -1643,9 +1642,9 @@ checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" [[package]] name = "combine" -version = "4.6.6" +version = "4.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" dependencies = [ "bytes", "memchr", @@ -2056,9 +2055,9 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -2089,7 +2088,7 @@ checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "strsim 0.10.0", 
"syn 1.0.109", @@ -2103,10 +2102,10 @@ checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "strsim 0.10.0", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -2128,7 +2127,7 @@ checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" dependencies = [ "darling_core 0.20.8", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -2196,7 +2195,7 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", ] @@ -2207,7 +2206,7 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e79116f119dd1dba1abf1f3405f03b9b0e79a27a3883864bfebded8a3dc768cd" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", ] @@ -2218,9 +2217,9 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d65d7ce8132b7c0e54497a4d9a55a1c2a0912a0d786cf894472ba818fba45762" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -2230,7 +2229,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ "convert_case", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "rustc_version", "syn 1.0.109", @@ -2299,9 +2298,9 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -2338,10 +2337,10 @@ dependencies = 
[ "common-path", "derive-syn-parse 0.2.0", "once_cell", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "regex", - "syn 2.0.58", + "syn 2.0.60", "termcolor", "toml 0.8.12", "walkdir", @@ -2381,7 +2380,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "558e40ea573c374cf53507fd240b7ee2f5477df7cfebdb97323ec61c719399c5" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", ] @@ -2486,9 +2485,9 @@ dependencies = [ [[package]] name = "either" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" +checksum = "a47c1c47d2f5964e29c61246e81db715514cd532db6b5116a25ea3c03d6780a2" [[package]] name = "elliptic-curve" @@ -2512,9 +2511,9 @@ dependencies = [ [[package]] name = "encoding_rs" -version = "0.8.33" +version = "0.8.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" +checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" dependencies = [ "cfg-if", ] @@ -2526,7 +2525,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9720bba047d567ffc8a3cba48bf19126600e249ab7f128e9233e6376976a116" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", ] @@ -2538,9 +2537,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -2594,7 +2593,7 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "equivocation-detector" version = "0.1.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "async-std", "async-trait", @@ -2701,9 +2700,9 @@ dependencies = [ "blake2 0.10.6", "fs-err", "prettier-please", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -2785,7 +2784,7 @@ dependencies = [ [[package]] name = "finality-relay" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "async-std", "async-trait", @@ -2874,7 +2873,7 @@ checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" [[package]] name = "frame-benchmarking" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "frame-support", "frame-support-procedural", @@ -2922,7 +2921,7 @@ dependencies = [ [[package]] name = "frame-support" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "aquamarine", "array-bytes 6.2.2", @@ -2963,7 +2962,7 @@ dependencies = [ [[package]] name = "frame-support-procedural" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ 
"Inflector", "cfg-expr", @@ -2973,38 +2972,38 @@ dependencies = [ "itertools 0.10.5", "macro_magic", "proc-macro-warning", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] name = "frame-support-procedural-tools" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "frame-support-procedural-tools-derive", "proc-macro-crate 3.1.0", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] name = "frame-support-procedural-tools-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] name = "frame-system" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "cfg-if", "docify", @@ -3119,9 +3118,9 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -3663,7 +3662,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", ] @@ -3683,7 +3682,7 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b139284b5cf57ecfa712bcc66950bb635b31aff41c188e8a4cfc758eca374a3f" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", ] @@ -3840,9 +3839,9 @@ checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jobserver" -version = "0.1.28" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab46a6e9526ddef3ae7f787c06f0f2600639ba80ea3eade3d8e670a2230f51d6" +checksum = "685a7d121ee3f65ae4fddd72b25a04bb36b6af81bc0828f7d5434c0fe60fa3a2" dependencies = [ "libc", ] @@ -3958,9 +3957,9 @@ checksum = "2c326f9e95aeff7d707b2ffde72c22a52acc975ba1c48587776c02b90c4747a6" dependencies = [ "heck 0.4.1", "proc-macro-crate 3.1.0", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -4080,9 +4079,9 @@ checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "libnghttp2-sys" -version = "0.1.9+1.58.0" +version = "0.1.10+1.61.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b57e858af2798e167e709b9d969325b6d8e9d50232fcbc494d7d54f976854a64" +checksum = "959c25552127d2e1fa72f0e52548ec04fc386e827ba71a7bd01db46a447dc135" dependencies = [ "cc", "libc", @@ -4732,7 +4731,7 @@ dependencies = [ "macro_magic_core", "macro_magic_macros", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -4744,9 +4743,9 @@ dependencies = [ "const-random", "derive-syn-parse 0.1.5", "macro_magic_core_macros", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -4755,9 +4754,9 @@ version = "0.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "9ea73aa640dc01d62a590d48c0c3521ed739d53b27f919b25c3551e233481654" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -4768,7 +4767,7 @@ checksum = "ef9d79ae96aaba821963320eb2b6e34d17df1e5a83d8a1985c29cc5be59577b3" dependencies = [ "macro_magic_core", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -4859,7 +4858,7 @@ dependencies = [ [[package]] name = "messages-relay" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "async-std", "async-trait", @@ -4954,7 +4953,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ce75669015c4f47b289fd4d4f56e894e4c96003ffdf3ac51313126f94c6cbb" dependencies = [ "cfg-if", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", ] @@ -5031,7 +5030,7 @@ checksum = "1d6d4752e6230d8ef7adf7bd5d8c4b1f6561c1014c5ba9a37445ccefe18aa1db" dependencies = [ "proc-macro-crate 1.1.3", "proc-macro-error", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", "synstructure", @@ -5079,7 +5078,7 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91761aed67d03ad966ef783ae962ef9bbaca728d2dd7ceb7939ec110fffad998" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", ] @@ -5381,9 +5380,9 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -5423,7 +5422,7 @@ checksum = 
"b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "pallet-authorship" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "frame-support", "frame-system", @@ -5437,7 +5436,7 @@ dependencies = [ [[package]] name = "pallet-balances" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "docify", "frame-benchmarking", @@ -5453,7 +5452,7 @@ dependencies = [ [[package]] name = "pallet-bridge-grandpa" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-header-chain", "bp-runtime", @@ -5474,7 +5473,7 @@ dependencies = [ [[package]] name = "pallet-bridge-messages" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-messages", "bp-runtime", @@ -5492,7 +5491,7 @@ dependencies = [ [[package]] name = "pallet-bridge-parachains" version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-header-chain", "bp-parachains", @@ -5513,7 +5512,7 @@ dependencies = [ [[package]] name = "pallet-bridge-relayers" 
version = "0.7.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bp-messages", "bp-relayers", @@ -5533,7 +5532,7 @@ dependencies = [ [[package]] name = "pallet-grandpa" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "frame-benchmarking", "frame-support", @@ -5556,7 +5555,7 @@ dependencies = [ [[package]] name = "pallet-session" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "frame-support", "frame-system", @@ -5578,7 +5577,7 @@ dependencies = [ [[package]] name = "pallet-timestamp" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "docify", "frame-benchmarking", @@ -5598,7 +5597,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "frame-support", "frame-system", @@ -5614,7 +5613,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc-runtime-api" version = "28.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "pallet-transaction-payment", "parity-scale-codec", @@ -5626,7 +5625,7 @@ dependencies = [ [[package]] name = "pallet-utility" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "frame-benchmarking", "frame-support", @@ -5642,7 +5641,7 @@ dependencies = [ [[package]] name = "parachains-relay" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "async-std", "async-trait", @@ -5688,7 +5687,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be30eaf4b0a9fba5336683b38de57bb86d179a35862ba6bfcf57625d006bde5b" dependencies = [ "proc-macro-crate 2.0.0", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", ] @@ -5723,7 +5722,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f557c32c6d268a07c921471619c0295f5efad3a0e76d4f97a05c091a51d110b2" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "syn 1.0.109", "synstructure", ] @@ -5861,9 +5860,9 @@ version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -5920,7 +5919,7 @@ checksum = 
"db23d408679286588f4d4644f965003d056e3dd5abcaaa938116871d7ce2fee7" [[package]] name = "polkadot-core-primitives" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "parity-scale-codec", "scale-info", @@ -5932,7 +5931,7 @@ dependencies = [ [[package]] name = "polkadot-parachain-primitives" version = "6.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bounded-collections", "derive_more", @@ -5949,7 +5948,7 @@ dependencies = [ [[package]] name = "polkadot-primitives" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bitvec", "hex-literal", @@ -6020,9 +6019,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c4fdfc49717fb9a196e74a5d28e0bc764eb394a2c803eb11133a31ac996c60c" dependencies = [ "polkavm-common", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -6032,7 +6031,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ba81f7b5faac81e528eb6158a6f3c9e0bb1008e0ffa19653bc8dea925ecb429" dependencies = [ "polkavm-derive-impl", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -6143,8 +6142,8 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22020dfcf177fcc7bf5deaf7440af371400c67c0de14c399938d8ed4fb4645d3" dependencies = [ - "proc-macro2 1.0.79", - "syn 2.0.58", + "proc-macro2 1.0.81", + 
"syn 2.0.60", ] [[package]] @@ -6153,7 +6152,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f28f53e8b192565862cf99343194579a022eb9c7dd3a8d03134734803c7b3125" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "syn 1.0.109", ] @@ -6206,7 +6205,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", "version_check", @@ -6218,7 +6217,7 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "version_check", ] @@ -6229,9 +6228,9 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "834da187cfe638ae8abb0203f0b33e5ccdb02a28e7199f2f47b3e2754f50edca" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -6245,9 +6244,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.79" +version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" +checksum = "3d1597b0c024618f09a9c3b8655b7e430397a36d23fdafec26d6965e9eec3eba" dependencies = [ "unicode-ident", ] @@ -6284,9 +6283,9 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -6329,7 +6328,7 @@ checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" dependencies = [ "anyhow", 
"itertools 0.10.5", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", ] @@ -6455,7 +6454,7 @@ version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", ] [[package]] @@ -6647,9 +6646,9 @@ version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fddb4f8d99b0a2ebafc65a87a69a7b9875e4b1ae1f00db265d300ef7f28bccc" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -6873,7 +6872,7 @@ dependencies = [ [[package]] name = "relay-substrate-client" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "async-std", "async-trait", @@ -6915,7 +6914,7 @@ dependencies = [ [[package]] name = "relay-utils" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "ansi_term", "anyhow", @@ -7295,7 +7294,7 @@ dependencies = [ [[package]] name = "sc-allocator" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "log", "sp-core", @@ -7306,7 +7305,7 @@ dependencies = [ [[package]] name = "sc-chain-spec" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "array-bytes 6.2.2", "docify", @@ -7333,18 +7332,18 @@ dependencies = [ [[package]] name = "sc-chain-spec-derive" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "proc-macro-crate 3.1.0", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] name = "sc-client-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "fnv", "futures", @@ -7371,7 +7370,7 @@ dependencies = [ [[package]] name = "sc-consensus" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "async-trait", "futures", @@ -7396,7 +7395,7 @@ dependencies = [ [[package]] name = "sc-executor" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "parity-scale-codec", "parking_lot 0.12.1", @@ -7419,7 +7418,7 @@ dependencies = [ [[package]] name = "sc-executor-common" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "polkavm", "sc-allocator", @@ -7432,7 +7431,7 @@ dependencies = [ [[package]] name = "sc-executor-polkavm" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "log", "polkavm", @@ -7443,7 +7442,7 @@ dependencies = [ [[package]] name = "sc-executor-wasmtime" version = "0.29.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "anyhow", "cfg-if", @@ -7461,7 +7460,7 @@ dependencies = [ [[package]] name = "sc-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "array-bytes 4.2.0", "arrayvec 0.7.4", @@ -7490,7 +7489,7 @@ dependencies = [ [[package]] name = "sc-network" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "array-bytes 6.2.2", "async-channel 1.9.0", @@ -7541,7 +7540,7 @@ dependencies = [ [[package]] name = "sc-network-common" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "async-trait", "bitflags 
1.3.2", @@ -7559,7 +7558,7 @@ dependencies = [ [[package]] name = "sc-network-types" version = "0.10.0-dev" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bs58 0.4.0", "libp2p-identity", @@ -7573,7 +7572,7 @@ dependencies = [ [[package]] name = "sc-rpc-api" version = "0.33.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "jsonrpsee", "parity-scale-codec", @@ -7593,7 +7592,7 @@ dependencies = [ [[package]] name = "sc-telemetry" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "chrono", "futures", @@ -7613,7 +7612,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool-api" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "async-trait", "futures", @@ -7629,7 +7628,7 @@ dependencies = [ [[package]] name = "sc-utils" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "async-channel 1.9.0", "futures", @@ -7675,7 +7674,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"5398fdb3c7bea3cb419bac4983aadacae93fe1a7b5f693f4ebd98c3821aad7a5" dependencies = [ "darling 0.14.4", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", ] @@ -7703,7 +7702,7 @@ checksum = "7a304e1af7cdfbe7a24e08b012721456cc8cecdedadc14b3d10513eada63233c" dependencies = [ "darling 0.14.4", "proc-macro-crate 1.1.3", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", ] @@ -7729,7 +7728,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "18cf6c6447f813ef19eb450e985bcce6705f9ce7660db221b59093d15c79c4b7" dependencies = [ "proc-macro-crate 1.1.3", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", ] @@ -7750,10 +7749,10 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d470fa75e71b12b3244a4113adc4bc49891f3daba2054703cacd06256066397e" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "scale-info", - "syn 2.0.58", + "syn 2.0.60", "thiserror", ] @@ -7921,9 +7920,9 @@ checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" [[package]] name = "serde" -version = "1.0.197" +version = "1.0.198" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" +checksum = "9846a40c979031340571da2545a4e5b7c4163bdae79b301d5f86d03979451fcc" dependencies = [ "serde_derive", ] @@ -7939,20 +7938,20 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.197" +version = "1.0.198" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" +checksum = "e88edab869b01783ba905e7d0153f9fc1a6505a96e4ad3018011eedb838566d9" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] name = "serde_json" -version = "1.0.115" +version = "1.0.116" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12dc5c46daa8e9fdf4f5e71b6cf9a53f2487da0e86e55808e2d35539666497dd" +checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813" dependencies = [ "indexmap 2.2.6", "itoa", @@ -8186,7 +8185,7 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e635339259e51ef85ac7aa29a1cd991b957047507288697a690e80ab97d07cad" dependencies = [ - "async-channel 2.2.0", + "async-channel 2.2.1", "async-executor", "async-fs", "async-io 2.3.2", @@ -8258,7 +8257,7 @@ version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5496f2d116b7019a526b1039ec2247dd172b8670633b1a64a614c9ea12c9d8c7" dependencies = [ - "async-channel 2.2.0", + "async-channel 2.2.1", "async-lock 3.3.0", "base64 0.21.7", "blake2-rfc", @@ -8345,7 +8344,7 @@ dependencies = [ [[package]] name = "sp-api" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "hash-db", "log", @@ -8367,21 +8366,21 @@ dependencies = [ [[package]] name = "sp-api-proc-macro" version = "15.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "Inflector", "blake2 0.10.6", "expander", "proc-macro-crate 3.1.0", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] name = "sp-application-crypto" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "parity-scale-codec", "scale-info", @@ -8394,7 +8393,7 @@ dependencies = [ [[package]] name = "sp-arithmetic" version = "23.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "docify", "integer-sqrt", @@ -8427,7 +8426,7 @@ dependencies = [ [[package]] name = "sp-authority-discovery" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "parity-scale-codec", "scale-info", @@ -8439,7 +8438,7 @@ dependencies = [ [[package]] name = "sp-blockchain" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "futures", "log", @@ -8457,7 +8456,7 @@ dependencies = [ [[package]] name = "sp-consensus" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "async-trait", "futures", @@ -8472,7 +8471,7 @@ dependencies = [ [[package]] name = "sp-consensus-grandpa" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "finality-grandpa", "log", @@ 
-8489,7 +8488,7 @@ dependencies = [ [[package]] name = "sp-consensus-slots" version = "0.32.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "parity-scale-codec", "scale-info", @@ -8500,7 +8499,7 @@ dependencies = [ [[package]] name = "sp-core" version = "28.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "array-bytes 6.2.2", "bandersnatch_vrfs", @@ -8547,7 +8546,7 @@ dependencies = [ [[package]] name = "sp-crypto-ec-utils" version = "0.10.0" -source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "ark-bls12-377", "ark-bls12-377-ext", @@ -8581,7 +8580,7 @@ dependencies = [ [[package]] name = "sp-crypto-hashing" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "blake2b_simd", "byteorder", @@ -8594,17 +8593,17 @@ dependencies = [ [[package]] name = "sp-crypto-hashing-proc-macro" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "quote 1.0.36", "sp-crypto-hashing 0.1.0 (git+https://github.com/paritytech/polkadot-sdk?branch=master)", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] name = "sp-database" 
version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "kvdb", "parking_lot 0.12.1", @@ -8613,27 +8612,27 @@ dependencies = [ [[package]] name = "sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] name = "sp-debug-derive" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] name = "sp-externalities" version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "environmental", "parity-scale-codec", @@ -8643,7 +8642,7 @@ dependencies = [ [[package]] name = "sp-externalities" version = "0.25.0" -source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "environmental", "parity-scale-codec", @@ -8653,7 +8652,7 @@ dependencies = [ [[package]] name = "sp-genesis-builder" version = "0.8.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "parity-scale-codec", "scale-info", @@ -8665,7 +8664,7 @@ dependencies = [ [[package]] name = "sp-inherents" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "async-trait", "impl-trait-for-tuples", @@ -8678,7 +8677,7 @@ dependencies = [ [[package]] name = "sp-io" version = "30.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bytes", "ed25519-dalek 2.1.1", @@ -8704,7 +8703,7 @@ dependencies = [ [[package]] name = "sp-keyring" version = "31.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "sp-core", "sp-runtime", @@ -8714,7 +8713,7 @@ dependencies = [ [[package]] name = "sp-keystore" version = "0.34.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "parity-scale-codec", "parking_lot 0.12.1", @@ -8725,7 +8724,7 @@ dependencies = [ [[package]] name = "sp-maybe-compressed-blob" version = "11.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "thiserror", "zstd 0.12.4", @@ -8734,7 +8733,7 @@ dependencies = [ [[package]] name = "sp-metadata-ir" version = "0.6.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "frame-metadata 16.0.0", "parity-scale-codec", @@ -8744,7 +8743,7 @@ dependencies = [ [[package]] name = "sp-mixnet" version = "0.4.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "parity-scale-codec", "scale-info", @@ -8755,7 +8754,7 @@ dependencies = [ [[package]] name = "sp-panic-handler" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "backtrace", "lazy_static", @@ -8765,7 +8764,7 @@ dependencies = [ [[package]] name = "sp-rpc" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "rustc-hash", "serde", @@ -8775,7 +8774,7 @@ dependencies = [ [[package]] name = "sp-runtime" version = "31.0.1" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "docify", "either", @@ -8799,7 +8798,7 
@@ dependencies = [ [[package]] name = "sp-runtime-interface" version = "24.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bytes", "impl-trait-for-tuples", @@ -8818,7 +8817,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface" version = "24.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bytes", "impl-trait-for-tuples", @@ -8837,33 +8836,33 @@ dependencies = [ [[package]] name = "sp-runtime-interface-proc-macro" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "Inflector", "expander", "proc-macro-crate 3.1.0", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] name = "sp-runtime-interface-proc-macro" version = "17.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "Inflector", "expander", "proc-macro-crate 3.1.0", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] name = "sp-session" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "parity-scale-codec", "scale-info", @@ -8877,7 
+8876,7 @@ dependencies = [ [[package]] name = "sp-staking" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", @@ -8890,7 +8889,7 @@ dependencies = [ [[package]] name = "sp-state-machine" version = "0.35.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "hash-db", "log", @@ -8910,7 +8909,7 @@ dependencies = [ [[package]] name = "sp-statement-store" version = "10.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "aes-gcm", "curve25519-dalek 4.1.2", @@ -8934,17 +8933,17 @@ dependencies = [ [[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" [[package]] name = "sp-std" version = "14.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" [[package]] name = "sp-storage" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "impl-serde", 
"parity-scale-codec", @@ -8956,7 +8955,7 @@ dependencies = [ [[package]] name = "sp-storage" version = "19.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "impl-serde", "parity-scale-codec", @@ -8968,7 +8967,7 @@ dependencies = [ [[package]] name = "sp-timestamp" version = "26.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "async-trait", "parity-scale-codec", @@ -8980,7 +8979,7 @@ dependencies = [ [[package]] name = "sp-tracing" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "parity-scale-codec", "tracing", @@ -8991,7 +8990,7 @@ dependencies = [ [[package]] name = "sp-tracing" version = "16.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "parity-scale-codec", "tracing", @@ -9002,7 +9001,7 @@ dependencies = [ [[package]] name = "sp-trie" version = "29.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "ahash 0.8.11", "hash-db", @@ -9025,7 +9024,7 @@ dependencies = [ [[package]] name = "sp-version" version = "29.0.0" -source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "impl-serde", "parity-scale-codec", @@ -9042,18 +9041,18 @@ dependencies = [ [[package]] name = "sp-version-proc-macro" version = "13.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "parity-scale-codec", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] name = "sp-wasm-interface" version = "20.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "anyhow", "impl-trait-for-tuples", @@ -9065,7 +9064,7 @@ dependencies = [ [[package]] name = "sp-wasm-interface" version = "20.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "impl-trait-for-tuples", "log", @@ -9075,7 +9074,7 @@ dependencies = [ [[package]] name = "sp-weights" version = "27.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "bounded-collections", "parity-scale-codec", @@ -9116,7 +9115,7 @@ checksum = "4743ce898933fbff7bbf414f497c459a782d496269644b3d650a398ae6a487ba" dependencies = [ "Inflector", "num-format", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", 
"quote 1.0.36", "serde", "serde_json", @@ -9132,7 +9131,7 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "staging-xcm" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "array-bytes 6.2.2", "bounded-collections", @@ -9150,7 +9149,7 @@ dependencies = [ [[package]] name = "staging-xcm-builder" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "frame-support", "frame-system", @@ -9172,7 +9171,7 @@ dependencies = [ [[package]] name = "staging-xcm-executor" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "environmental", "frame-benchmarking", @@ -9247,7 +9246,7 @@ checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" dependencies = [ "heck 0.3.3", "proc-macro-error", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", ] @@ -9268,16 +9267,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6cf59daf282c0a494ba14fd21610a0325f9f90ec9d1231dea26bcb1d696c946" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "rustversion", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] name = "substrate-bip39" version = "0.4.7" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = 
"git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "hmac 0.12.1", "pbkdf2", @@ -9289,7 +9288,7 @@ dependencies = [ [[package]] name = "substrate-prometheus-endpoint" version = "0.17.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "hyper", "log", @@ -9355,7 +9354,7 @@ dependencies = [ [[package]] name = "substrate-relay-helper" version = "0.1.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "anyhow", "async-std", @@ -9454,12 +9453,12 @@ dependencies = [ "hex", "jsonrpsee", "parity-scale-codec", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "scale-info", "scale-typegen", "subxt-metadata", - "syn 2.0.58", + "syn 2.0.60", "thiserror", "tokio", ] @@ -9493,7 +9492,7 @@ dependencies = [ "quote 1.0.36", "scale-typegen", "subxt-codegen", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -9527,18 +9526,18 @@ version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.58" +version = "2.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44cfb93f38070beee36b3fef7d4f5a16f27751d94b187b666a5cc5e9b0d30687" +checksum = "909518bc7b1c9b779f1bbf07f2929d35af9f0f37e47c6e9ef7f9dddc1e1821f3" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "unicode-ident", ] @@ -9549,7 +9548,7 @@ version = "0.12.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", "syn 1.0.109", "unicode-xid 0.2.4", @@ -9557,9 +9556,9 @@ dependencies = [ [[package]] name = "sysinfo" -version = "0.30.9" +version = "0.30.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9a84fe4cfc513b41cb2596b624e561ec9e7e1c4b46328e496ed56a53514ef2a" +checksum = "87341a165d73787554941cd5ef55ad728011566fe714e987d1b976c15dbc3a83" dependencies = [ "cfg-if", "core-foundation-sys", @@ -9654,9 +9653,9 @@ version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -9671,9 +9670,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.34" +version = "0.3.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ "deranged", "itoa", @@ -9694,9 +9693,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.17" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" dependencies = [ "num-conv", "time-core", @@ -9750,9 +9749,9 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", 
"quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -9836,7 +9835,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.9", + "toml_edit 0.22.11", ] [[package]] @@ -9872,15 +9871,15 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.9" +version = "0.22.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e40bb779c5187258fd7aad0eb68cb8706a0a81fa712fbea808ab43c4b8374c4" +checksum = "fb686a972ccef8537b39eead3968b0e8616cb5040dbb9bba93007c8e07c9215f" dependencies = [ "indexmap 2.2.6", "serde", "serde_spanned", "toml_datetime", - "winnow 0.6.5", + "winnow 0.6.6", ] [[package]] @@ -9928,9 +9927,9 @@ version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -10376,9 +10375,9 @@ dependencies = [ "bumpalo", "log", "once_cell", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", "wasm-bindgen-shared", ] @@ -10410,9 +10409,9 @@ version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -10735,9 +10734,9 @@ dependencies = [ [[package]] name = "wide" -version = "0.7.15" +version = "0.7.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89beec544f246e679fc25490e3f8e08003bc4bf612068f325120dad4cea02c1c" +checksum = "81a1851a719f11d1d2fea40e15c72f6c00de8c142d7ac47c1441cc7e4d0d5bc6" dependencies = [ "bytemuck", "safe_arch", @@ -10797,7 +10796,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" dependencies = [ "windows-core 0.52.0", - "windows-targets 0.52.4", + "windows-targets 0.52.5", ] [[package]] @@ -10815,7 +10814,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.52.4", + "windows-targets 0.52.5", ] [[package]] @@ -10857,7 +10856,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.4", + "windows-targets 0.52.5", ] [[package]] @@ -10892,17 +10891,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" dependencies = [ - "windows_aarch64_gnullvm 0.52.4", - "windows_aarch64_msvc 0.52.4", - "windows_i686_gnu 0.52.4", - "windows_i686_msvc 0.52.4", - "windows_x86_64_gnu 0.52.4", - "windows_x86_64_gnullvm 0.52.4", - "windows_x86_64_msvc 0.52.4", + "windows_aarch64_gnullvm 0.52.5", + "windows_aarch64_msvc 0.52.5", + "windows_i686_gnu 0.52.5", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.5", + "windows_x86_64_gnu 0.52.5", + "windows_x86_64_gnullvm 0.52.5", + "windows_x86_64_msvc 0.52.5", ] [[package]] @@ -10919,9 +10919,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" +checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" [[package]] 
name = "windows_aarch64_msvc" @@ -10937,9 +10937,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" +checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" [[package]] name = "windows_i686_gnu" @@ -10955,9 +10955,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" +checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" [[package]] name = "windows_i686_msvc" @@ -10973,9 +10979,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" +checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" [[package]] name = "windows_x86_64_gnu" @@ -10991,9 +10997,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" +checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" [[package]] name = 
"windows_x86_64_gnullvm" @@ -11009,9 +11015,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" +checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" [[package]] name = "windows_x86_64_msvc" @@ -11027,9 +11033,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.4" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" +checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" [[package]] name = "winnow" @@ -11042,9 +11048,9 @@ dependencies = [ [[package]] name = "winnow" -version = "0.6.5" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dffa400e67ed5a4dd237983829e66475f0a4a26938c4b04c21baede6262215b8" +checksum = "f0c976aaaa0e1f90dbb21e9587cdaf1d9679a1cde8875c0d6bd83ab96a208352" dependencies = [ "memchr", ] @@ -11129,12 +11135,12 @@ dependencies = [ [[package]] name = "xcm-procedural" version = "7.0.0" -source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#92e142555d45f97aa88d241665d9952d12f4ae40" +source = "git+https://github.com/paritytech/polkadot-sdk?branch=master#4eabe5e0dddc4cd31ad9dab5645350360d4d36a5" dependencies = [ "Inflector", - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -11181,9 +11187,9 @@ version = "0.7.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", 
- "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -11201,9 +11207,9 @@ version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ - "proc-macro2 1.0.79", + "proc-macro2 1.0.81", "quote 1.0.36", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] -- GitLab From 5c8d4df563df57a60b1ecaa5c34f3b1ffb849283 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 20 Apr 2024 10:05:29 +0300 Subject: [PATCH 37/39] Bump rustls from 0.21.8 to 0.21.11 in /tools/runtime-codegen (#2965) Bumps [rustls](https://github.com/rustls/rustls) from 0.21.8 to 0.21.11. - [Release notes](https://github.com/rustls/rustls/releases) - [Changelog](https://github.com/rustls/rustls/blob/main/CHANGELOG.md) - [Commits](https://github.com/rustls/rustls/compare/v/0.21.8...v/0.21.11) --- updated-dependencies: - dependency-name: rustls dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- tools/runtime-codegen/Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/runtime-codegen/Cargo.lock b/tools/runtime-codegen/Cargo.lock index 0a92d9c9b..ded1a4cb4 100644 --- a/tools/runtime-codegen/Cargo.lock +++ b/tools/runtime-codegen/Cargo.lock @@ -2926,9 +2926,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.21.8" +version = "0.21.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "446e14c5cda4f3f30fe71863c34ec70f5ac79d6087097ad0bb433e1be5edf04c" +checksum = "7fecbfb7b1444f477b345853b1fce097a2c6fb637b2bfb87e6bc5db0f043fae4" dependencies = [ "log", "ring 0.17.5", -- GitLab From ed7ae4df49424dda52601b1b61169f6bd85a7e49 Mon Sep 17 00:00:00 2001 From: Svyatoslav Nikolsky Date: Mon, 22 Apr 2024 11:16:01 +0300 Subject: [PATCH 38/39] relayer 1.4.0: support PBH version 1_002_000 (#2967) --- Cargo.lock | 2 +- .../src/codegen_runtime.rs | 2555 ++++++++++++++--- .../client-bridge-hub-polkadot/src/lib.rs | 2 +- scripts/regenerate_runtimes.sh | 3 +- substrate-relay/Cargo.toml | 2 +- 5 files changed, 2086 insertions(+), 478 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e748ab3b7..5ddf1c956 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9299,7 +9299,7 @@ dependencies = [ [[package]] name = "substrate-relay" -version = "1.3.0" +version = "1.4.0" dependencies = [ "anyhow", "async-std", diff --git a/relay-clients/client-bridge-hub-polkadot/src/codegen_runtime.rs b/relay-clients/client-bridge-hub-polkadot/src/codegen_runtime.rs index 1ce9d0588..26dd02291 100644 --- a/relay-clients/client-bridge-hub-polkadot/src/codegen_runtime.rs +++ b/relay-clients/client-bridge-hub-polkadot/src/codegen_runtime.rs @@ -17,7 +17,7 @@ //! Autogenerated runtime API //! THIS FILE WAS AUTOGENERATED USING parity-bridges-common::runtime-codegen //! 
EXECUTED COMMAND: target/debug/runtime-codegen --from-node-url -//! wss://polkadot-bridge-hub-rpc.polkadot.io +//! wss://polkadot-bridge-hub-rpc.polkadot.io/ #[allow(dead_code, unused_imports, non_camel_case_types)] #[allow(clippy::all)] @@ -31,6 +31,11 @@ pub mod api { use super::runtime_types; pub mod bounded_collections { use super::runtime_types; + pub mod bounded_btree_set { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct BoundedBTreeSet<_0>(pub ::std::vec::Vec<_0>); + } pub mod bounded_vec { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] @@ -244,6 +249,23 @@ pub mod api { #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct StrippableError; } + pub mod bridge_hub_common { + use super::runtime_types; + pub mod message_queue { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum AggregateMessageOrigin { + #[codec(index = 0)] + Here, + #[codec(index = 1)] + Parent, + #[codec(index = 2)] + Sibling(runtime_types::polkadot_parachain_primitives::primitives::Id), + #[codec(index = 3)] + Snowbridge(runtime_types::snowbridge_core::ChannelId), + } + } + } pub mod bridge_hub_polkadot_runtime { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] @@ -273,6 +295,8 @@ pub mod api { ParachainSystem(runtime_types::cumulus_pallet_parachain_system::pallet::Call), #[codec(index = 2)] Timestamp(runtime_types::pallet_timestamp::pallet::Call), + #[codec(index = 3)] + ParachainInfo(runtime_types::staging_parachain_info::pallet::Call), #[codec(index = 10)] Balances(runtime_types::pallet_balances::pallet::Call), #[codec(index = 21)] @@ -283,6 +307,8 @@ pub mod api { XcmpQueue(runtime_types::cumulus_pallet_xcmp_queue::pallet::Call), #[codec(index = 31)] PolkadotXcm(runtime_types::pallet_xcm::pallet::Call), + #[codec(index 
= 32)] + CumulusXcm(runtime_types::cumulus_pallet_xcm::pallet::Call), #[codec(index = 33)] DmpQueue(runtime_types::cumulus_pallet_dmp_queue::pallet::Call), #[codec(index = 40)] @@ -297,6 +323,20 @@ pub mod api { BridgeKusamaParachains(runtime_types::pallet_bridge_parachains::pallet::Call), #[codec(index = 53)] BridgeKusamaMessages(runtime_types::pallet_bridge_messages::pallet::Call), + #[codec(index = 80)] + EthereumInboundQueue(runtime_types::snowbridge_pallet_inbound_queue::pallet::Call), + #[codec(index = 81)] + EthereumOutboundQueue( + runtime_types::snowbridge_pallet_outbound_queue::pallet::Call, + ), + #[codec(index = 82)] + EthereumBeaconClient( + runtime_types::snowbridge_pallet_ethereum_client::pallet::Call, + ), + #[codec(index = 83)] + EthereumSystem(runtime_types::snowbridge_pallet_system::pallet::Call), + #[codec(index = 175)] + MessageQueue(runtime_types::pallet_message_queue::pallet::Call), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum RuntimeError { @@ -314,10 +354,6 @@ pub mod api { XcmpQueue(runtime_types::cumulus_pallet_xcmp_queue::pallet::Error), #[codec(index = 31)] PolkadotXcm(runtime_types::pallet_xcm::pallet::Error), - #[codec(index = 32)] - CumulusXcm(runtime_types::cumulus_pallet_xcm::pallet::Error), - #[codec(index = 33)] - DmpQueue(runtime_types::cumulus_pallet_dmp_queue::pallet::Error), #[codec(index = 40)] Utility(runtime_types::pallet_utility::pallet::Error), #[codec(index = 41)] @@ -330,6 +366,20 @@ pub mod api { BridgeKusamaParachains(runtime_types::pallet_bridge_parachains::pallet::Error), #[codec(index = 53)] BridgeKusamaMessages(runtime_types::pallet_bridge_messages::pallet::Error), + #[codec(index = 80)] + EthereumInboundQueue(runtime_types::snowbridge_pallet_inbound_queue::pallet::Error), + #[codec(index = 81)] + EthereumOutboundQueue( + runtime_types::snowbridge_pallet_outbound_queue::pallet::Error, + ), + #[codec(index = 82)] + EthereumBeaconClient( + 
runtime_types::snowbridge_pallet_ethereum_client::pallet::Error, + ), + #[codec(index = 83)] + EthereumSystem(runtime_types::snowbridge_pallet_system::pallet::Error), + #[codec(index = 175)] + MessageQueue(runtime_types::pallet_message_queue::pallet::Error), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum RuntimeEvent { @@ -365,6 +415,20 @@ pub mod api { BridgeKusamaParachains(runtime_types::pallet_bridge_parachains::pallet::Event), #[codec(index = 53)] BridgeKusamaMessages(runtime_types::pallet_bridge_messages::pallet::Event), + #[codec(index = 80)] + EthereumInboundQueue(runtime_types::snowbridge_pallet_inbound_queue::pallet::Event), + #[codec(index = 81)] + EthereumOutboundQueue( + runtime_types::snowbridge_pallet_outbound_queue::pallet::Event, + ), + #[codec(index = 82)] + EthereumBeaconClient( + runtime_types::snowbridge_pallet_ethereum_client::pallet::Event, + ), + #[codec(index = 83)] + EthereumSystem(runtime_types::snowbridge_pallet_system::pallet::Event), + #[codec(index = 175)] + MessageQueue(runtime_types::pallet_message_queue::pallet::Event), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum RuntimeHoldReason {} @@ -392,7 +456,7 @@ pub mod api { #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct RefundBridgedParachainMessages; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct RefundTransactionExtensionAdapter<_0>(pub _0); + pub struct RefundSignedExtensionAdapter<_0>(pub _0); } } pub mod cumulus_pallet_dmp_queue { @@ -400,65 +464,56 @@ pub mod api { pub mod pallet { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum Call { - #[codec(index = 0)] - service_overweight { - index: ::core::primitive::u64, - weight_limit: ::sp_weights::Weight, - }, - } + pub enum Call {} #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum 
Error { + pub enum Event { #[codec(index = 0)] - Unknown, + StartedExport, #[codec(index = 1)] - OverLimit, + Exported { page: ::core::primitive::u32 }, + #[codec(index = 2)] + ExportFailed { page: ::core::primitive::u32 }, + #[codec(index = 3)] + CompletedExport, + #[codec(index = 4)] + StartedOverweightExport, + #[codec(index = 5)] + ExportedOverweight { index: ::core::primitive::u64 }, + #[codec(index = 6)] + ExportOverweightFailed { index: ::core::primitive::u64 }, + #[codec(index = 7)] + CompletedOverweightExport, + #[codec(index = 8)] + StartedCleanup, + #[codec(index = 9)] + CleanedSome { keys_removed: ::core::primitive::u32 }, + #[codec(index = 10)] + Completed { error: ::core::primitive::bool }, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum Event { + pub enum MigrationState { #[codec(index = 0)] - InvalidFormat { message_hash: [::core::primitive::u8; 32usize] }, + NotStarted, #[codec(index = 1)] - UnsupportedVersion { message_hash: [::core::primitive::u8; 32usize] }, + StartedExport { next_begin_used: ::core::primitive::u32 }, #[codec(index = 2)] - ExecutedDownward { - message_hash: [::core::primitive::u8; 32usize], - message_id: [::core::primitive::u8; 32usize], - outcome: runtime_types::xcm::v3::traits::Outcome, - }, + CompletedExport, #[codec(index = 3)] - WeightExhausted { - message_hash: [::core::primitive::u8; 32usize], - message_id: [::core::primitive::u8; 32usize], - remaining_weight: ::sp_weights::Weight, - required_weight: ::sp_weights::Weight, - }, + StartedOverweightExport { next_overweight_index: ::core::primitive::u64 }, #[codec(index = 4)] - OverweightEnqueued { - message_hash: [::core::primitive::u8; 32usize], - message_id: [::core::primitive::u8; 32usize], - overweight_index: ::core::primitive::u64, - required_weight: ::sp_weights::Weight, - }, + CompletedOverweightExport, #[codec(index = 5)] - OverweightServiced { - overweight_index: ::core::primitive::u64, - weight_used: ::sp_weights::Weight, + 
StartedCleanup { + cursor: ::core::option::Option< + runtime_types::bounded_collections::bounded_vec::BoundedVec< + ::core::primitive::u8, + >, + >, }, #[codec(index = 6)] - MaxMessagesExhausted { message_hash: [::core::primitive::u8; 32usize] }, + Completed, } } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct ConfigData { - pub max_individual: ::sp_weights::Weight, - } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct PageIndexData { - pub begin_used: ::core::primitive::u32, - pub end_used: ::core::primitive::u32, - pub overweight_count: ::core::primitive::u64, - } } pub mod cumulus_pallet_parachain_system { use super::runtime_types; @@ -495,15 +550,13 @@ pub mod api { #[codec(index = 2)] ValidationFunctionDiscarded, #[codec(index = 3)] - UpgradeAuthorized { code_hash: ::subxt::utils::H256 }, - #[codec(index = 4)] DownwardMessagesReceived { count: ::core::primitive::u32 }, - #[codec(index = 5)] + #[codec(index = 4)] DownwardMessagesProcessed { weight_used: ::sp_weights::Weight, dmq_head: ::subxt::utils::H256, }, - #[codec(index = 6)] + #[codec(index = 5)] UpwardMessageSent { message_hash: ::core::option::Option<[::core::primitive::u8; 32usize]>, }, @@ -533,18 +586,13 @@ pub mod api { #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct UsedBandwidth { pub ump_msg_count : :: core :: primitive :: u32 , pub ump_total_bytes : :: core :: primitive :: u32 , pub hrmp_outgoing : :: subxt :: utils :: KeyedVec < runtime_types :: polkadot_parachain_primitives :: primitives :: Id , runtime_types :: cumulus_pallet_parachain_system :: unincluded_segment :: HrmpChannelUpdate > , } } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct CodeUpgradeAuthorization { - pub code_hash: ::subxt::utils::H256, - pub check_version: ::core::primitive::bool, - } } pub mod cumulus_pallet_xcm { use super::runtime_types; pub mod pallet { use 
super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum Error {} + pub enum Call {} #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Event { #[codec(index = 0)] @@ -554,7 +602,7 @@ pub mod api { #[codec(index = 2)] ExecutedDownward( [::core::primitive::u8; 32usize], - runtime_types::xcm::v3::traits::Outcome, + runtime_types::staging_xcm::v4::traits::Outcome, ), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] @@ -572,11 +620,6 @@ pub mod api { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Call { - #[codec(index = 0)] - service_overweight { - index: ::core::primitive::u64, - weight_limit: ::sp_weights::Weight, - }, #[codec(index = 1)] suspend_xcm_execution, #[codec(index = 2)] @@ -587,75 +630,23 @@ pub mod api { update_drop_threshold { new: ::core::primitive::u32 }, #[codec(index = 5)] update_resume_threshold { new: ::core::primitive::u32 }, - #[codec(index = 6)] - update_threshold_weight { new: ::sp_weights::Weight }, - #[codec(index = 7)] - update_weight_restrict_decay { new: ::sp_weights::Weight }, - #[codec(index = 8)] - update_xcmp_max_individual_weight { new: ::sp_weights::Weight }, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Error { #[codec(index = 0)] - FailedToSend, + BadQueueConfig, #[codec(index = 1)] - BadXcmOrigin, + AlreadySuspended, #[codec(index = 2)] - BadXcm, - #[codec(index = 3)] - BadOverweightIndex, - #[codec(index = 4)] - WeightOverLimit, + AlreadyResumed, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Event { #[codec(index = 0)] - Success { - message_hash: [::core::primitive::u8; 32usize], - message_id: [::core::primitive::u8; 32usize], - weight: ::sp_weights::Weight, - }, - #[codec(index = 1)] - Fail { - message_hash: [::core::primitive::u8; 32usize], - message_id: 
[::core::primitive::u8; 32usize], - error: runtime_types::xcm::v3::traits::Error, - weight: ::sp_weights::Weight, - }, - #[codec(index = 2)] - BadVersion { message_hash: [::core::primitive::u8; 32usize] }, - #[codec(index = 3)] - BadFormat { message_hash: [::core::primitive::u8; 32usize] }, - #[codec(index = 4)] XcmpMessageSent { message_hash: [::core::primitive::u8; 32usize] }, - #[codec(index = 5)] - OverweightEnqueued { - sender: runtime_types::polkadot_parachain_primitives::primitives::Id, - sent_at: ::core::primitive::u32, - index: ::core::primitive::u64, - required: ::sp_weights::Weight, - }, - #[codec(index = 6)] - OverweightServiced { index: ::core::primitive::u64, used: ::sp_weights::Weight }, } } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct InboundChannelDetails { - pub sender: runtime_types::polkadot_parachain_primitives::primitives::Id, - pub state: runtime_types::cumulus_pallet_xcmp_queue::InboundState, - pub message_metadata: ::std::vec::Vec<( - ::core::primitive::u32, - runtime_types::polkadot_parachain_primitives::primitives::XcmpMessageFormat, - )>, - } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum InboundState { - #[codec(index = 0)] - Ok, - #[codec(index = 1)] - Suspended, - } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct OutboundChannelDetails { pub recipient: runtime_types::polkadot_parachain_primitives::primitives::Id, pub state: runtime_types::cumulus_pallet_xcmp_queue::OutboundState, @@ -675,9 +666,6 @@ pub mod api { pub suspend_threshold: ::core::primitive::u32, pub drop_threshold: ::core::primitive::u32, pub resume_threshold: ::core::primitive::u32, - pub threshold_weight: ::sp_weights::Weight, - pub weight_restrict_decay: ::sp_weights::Weight, - pub xcmp_max_individual_weight: ::sp_weights::Weight, } } pub mod cumulus_primitives_core { @@ -791,6 +779,22 @@ pub mod api { } pub mod traits { use super::runtime_types; 
+ pub mod messages { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum ProcessMessageError { + #[codec(index = 0)] + BadFormat, + #[codec(index = 1)] + Corrupt, + #[codec(index = 2)] + Unsupported, + #[codec(index = 3)] + Overweight(::sp_weights::Weight), + #[codec(index = 4)] + Yield, + } + } pub mod tokens { use super::runtime_types; pub mod misc { @@ -900,6 +904,12 @@ pub mod api { }, #[codec(index = 7)] remark_with_event { remark: ::std::vec::Vec<::core::primitive::u8> }, + #[codec(index = 9)] + authorize_upgrade { code_hash: ::subxt::utils::H256 }, + #[codec(index = 10)] + authorize_upgrade_without_checks { code_hash: ::subxt::utils::H256 }, + #[codec(index = 11)] + apply_authorized_upgrade { code: ::std::vec::Vec<::core::primitive::u8> }, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Error { @@ -915,6 +925,10 @@ pub mod api { NonZeroRefCount, #[codec(index = 5)] CallFiltered, + #[codec(index = 6)] + NothingAuthorized, + #[codec(index = 7)] + Unauthorized, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Event { @@ -935,6 +949,11 @@ pub mod api { KilledAccount { account: ::sp_core::crypto::AccountId32 }, #[codec(index = 5)] Remarked { sender: ::sp_core::crypto::AccountId32, hash: ::subxt::utils::H256 }, + #[codec(index = 6)] + UpgradeAuthorized { + code_hash: ::subxt::utils::H256, + check_version: ::core::primitive::bool, + }, } } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] @@ -946,6 +965,11 @@ pub mod api { pub data: _1, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct CodeUpgradeAuthorization { + pub code_hash: ::subxt::utils::H256, + pub check_version: ::core::primitive::bool, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct EventRecord<_0, _1> { pub phase: runtime_types::frame_system::Phase, pub event: 
_0, @@ -1010,6 +1034,12 @@ pub mod api { #[codec(compact)] new_free: ::core::primitive::u128, }, + #[codec(index = 9)] + force_adjust_total_issuance { + direction: runtime_types::pallet_balances::types::AdjustmentDirection, + #[codec(compact)] + delta: ::core::primitive::u128, + }, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Error { @@ -1033,6 +1063,10 @@ pub mod api { TooManyHolds, #[codec(index = 9)] TooManyFreezes, + #[codec(index = 10)] + IssuanceDeactivated, + #[codec(index = 11)] + DeltaZero, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Event { @@ -1115,6 +1149,11 @@ pub mod api { Frozen { who: ::sp_core::crypto::AccountId32, amount: ::core::primitive::u128 }, #[codec(index = 20)] Thawed { who: ::sp_core::crypto::AccountId32, amount: ::core::primitive::u128 }, + #[codec(index = 21)] + TotalIssuanceForced { + old: ::core::primitive::u128, + new: ::core::primitive::u128, + }, } } pub mod types { @@ -1127,6 +1166,13 @@ pub mod api { pub flags: runtime_types::pallet_balances::types::ExtraFlags, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum AdjustmentDirection { + #[codec(index = 0)] + Increase, + #[codec(index = 1)] + Decrease, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct BalanceLock<_0> { pub id: [::core::primitive::u8; 8usize], pub amount: _0, @@ -1271,7 +1317,7 @@ pub mod api { # [codec (index = 0)] set_owner { new_owner : :: core :: option :: Option < :: sp_core :: crypto :: AccountId32 > , } , # [codec (index = 1)] set_operating_mode { operating_mode : runtime_types :: bp_messages :: MessagesOperatingMode , } , # [codec (index = 2)] receive_messages_proof { relayer_id_at_bridged_chain : :: sp_core :: crypto :: AccountId32 , proof : :: bridge_runtime_common :: messages :: target :: FromBridgedChainMessagesProof < :: subxt :: utils :: H256 > , messages_count : :: core :: primitive :: u32 , 
dispatch_weight : :: sp_weights :: Weight , } , # [codec (index = 3)] receive_messages_delivery_proof { proof : :: bridge_runtime_common :: messages :: source :: FromBridgedChainMessagesDeliveryProof < :: subxt :: utils :: H256 > , relayers_state : :: bp_messages :: UnrewardedRelayersState , } , } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Error { - # [codec (index = 0)] NotOperatingNormally , # [codec (index = 1)] InactiveOutboundLane , # [codec (index = 2)] MessageDispatchInactive , # [codec (index = 3)] MessageRejectedByChainVerifier (runtime_types :: bp_messages :: VerificationError ,) , # [codec (index = 4)] MessageRejectedByLaneVerifier (runtime_types :: bp_messages :: VerificationError ,) , # [codec (index = 5)] MessageRejectedByPallet (runtime_types :: bp_messages :: VerificationError ,) , # [codec (index = 6)] FailedToWithdrawMessageFee , # [codec (index = 7)] TooManyMessagesInTheProof , # [codec (index = 8)] InvalidMessagesProof , # [codec (index = 9)] InvalidMessagesDeliveryProof , # [codec (index = 10)] InvalidUnrewardedRelayersState , # [codec (index = 11)] InsufficientDispatchWeight , # [codec (index = 12)] MessageIsNotYetSent , # [codec (index = 13)] ReceivalConfirmation (runtime_types :: pallet_bridge_messages :: outbound_lane :: ReceivalConfirmationError ,) , # [codec (index = 14)] BridgeModule (runtime_types :: bp_runtime :: OwnedBridgeModuleError ,) , } + # [codec (index = 0)] NotOperatingNormally , # [codec (index = 1)] InactiveOutboundLane , # [codec (index = 2)] MessageDispatchInactive , # [codec (index = 3)] MessageRejectedByChainVerifier (runtime_types :: bp_messages :: VerificationError ,) , # [codec (index = 4)] MessageRejectedByPallet (runtime_types :: bp_messages :: VerificationError ,) , # [codec (index = 5)] FailedToWithdrawMessageFee , # [codec (index = 6)] TooManyMessagesInTheProof , # [codec (index = 7)] InvalidMessagesProof , # [codec (index = 8)] InvalidMessagesDeliveryProof , # [codec 
(index = 9)] InvalidUnrewardedRelayersState , # [codec (index = 10)] InsufficientDispatchWeight , # [codec (index = 11)] MessageIsNotYetSent , # [codec (index = 12)] ReceivalConfirmation (runtime_types :: pallet_bridge_messages :: outbound_lane :: ReceivalConfirmationError ,) , # [codec (index = 13)] BridgeModule (runtime_types :: bp_runtime :: OwnedBridgeModuleError ,) , } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Event { # [codec (index = 0)] MessageAccepted { lane_id : runtime_types :: bp_messages :: LaneId , nonce : :: core :: primitive :: u64 , } , # [codec (index = 1)] MessagesReceived (:: std :: vec :: Vec < runtime_types :: bp_messages :: ReceivedMessages < runtime_types :: bridge_runtime_common :: messages_xcm_extension :: XcmBlobMessageDispatchResult > > ,) , # [codec (index = 2)] MessagesDelivered { lane_id : runtime_types :: bp_messages :: LaneId , messages : runtime_types :: bp_messages :: DeliveredMessages , } , } @@ -1378,12 +1424,18 @@ pub mod api { #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Event { #[codec(index = 0)] - RewardPaid { + RewardRegistered { relayer: ::sp_core::crypto::AccountId32, rewards_account_params: runtime_types::bp_relayers::RewardsAccountParams, reward: ::core::primitive::u128, }, #[codec(index = 1)] + RewardPaid { + relayer: ::sp_core::crypto::AccountId32, + rewards_account_params: runtime_types::bp_relayers::RewardsAccountParams, + reward: ::core::primitive::u128, + }, + #[codec(index = 2)] RegistrationUpdated { relayer: ::sp_core::crypto::AccountId32, registration: runtime_types::bp_relayers::registration::Registration< @@ -1391,9 +1443,9 @@ pub mod api { ::core::primitive::u128, >, }, - #[codec(index = 2)] - Deregistered { relayer: ::sp_core::crypto::AccountId32 }, #[codec(index = 3)] + Deregistered { relayer: ::sp_core::crypto::AccountId32 }, + #[codec(index = 4)] SlashedAndDeregistered { relayer: ::sp_core::crypto::AccountId32, 
registration: runtime_types::bp_relayers::registration::Registration< @@ -1424,6 +1476,13 @@ pub mod api { add_invulnerable { who: ::sp_core::crypto::AccountId32 }, #[codec(index = 6)] remove_invulnerable { who: ::sp_core::crypto::AccountId32 }, + #[codec(index = 7)] + update_bond { new_deposit: ::core::primitive::u128 }, + #[codec(index = 8)] + take_candidate_slot { + deposit: ::core::primitive::u128, + target: ::sp_core::crypto::AccountId32, + }, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct CandidateInfo<_0, _1> { @@ -1450,6 +1509,22 @@ pub mod api { NoAssociatedValidatorId, #[codec(index = 8)] ValidatorNotRegistered, + #[codec(index = 9)] + InsertToCandidateListFailed, + #[codec(index = 10)] + RemoveFromCandidateListFailed, + #[codec(index = 11)] + DepositTooLow, + #[codec(index = 12)] + UpdateCandidateListFailed, + #[codec(index = 13)] + InsufficientBond, + #[codec(index = 14)] + TargetIsNotCandidate, + #[codec(index = 15)] + IdenticalDeposit, + #[codec(index = 16)] + InvalidUnreserve, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Event { @@ -1471,12 +1546,125 @@ pub mod api { deposit: ::core::primitive::u128, }, #[codec(index = 6)] - CandidateRemoved { account_id: ::sp_core::crypto::AccountId32 }, + CandidateBondUpdated { + account_id: ::sp_core::crypto::AccountId32, + deposit: ::core::primitive::u128, + }, #[codec(index = 7)] + CandidateRemoved { account_id: ::sp_core::crypto::AccountId32 }, + #[codec(index = 8)] + CandidateReplaced { + old: ::sp_core::crypto::AccountId32, + new: ::sp_core::crypto::AccountId32, + deposit: ::core::primitive::u128, + }, + #[codec(index = 9)] InvalidInvulnerableSkipped { account_id: ::sp_core::crypto::AccountId32 }, } } } + pub mod pallet_message_queue { + use super::runtime_types; + pub mod pallet { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Call { + #[codec(index = 0)] + 
reap_page { + message_origin: + runtime_types::bridge_hub_common::message_queue::AggregateMessageOrigin, + page_index: ::core::primitive::u32, + }, + #[codec(index = 1)] + execute_overweight { + message_origin: + runtime_types::bridge_hub_common::message_queue::AggregateMessageOrigin, + page: ::core::primitive::u32, + index: ::core::primitive::u32, + weight_limit: ::sp_weights::Weight, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Error { + #[codec(index = 0)] + NotReapable, + #[codec(index = 1)] + NoPage, + #[codec(index = 2)] + NoMessage, + #[codec(index = 3)] + AlreadyProcessed, + #[codec(index = 4)] + Queued, + #[codec(index = 5)] + InsufficientWeight, + #[codec(index = 6)] + TemporarilyUnprocessable, + #[codec(index = 7)] + QueuePaused, + #[codec(index = 8)] + RecursiveDisallowed, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Event { + #[codec(index = 0)] + ProcessingFailed { + id: ::subxt::utils::H256, + origin: + runtime_types::bridge_hub_common::message_queue::AggregateMessageOrigin, + error: runtime_types::frame_support::traits::messages::ProcessMessageError, + }, + #[codec(index = 1)] + Processed { + id: ::subxt::utils::H256, + origin: + runtime_types::bridge_hub_common::message_queue::AggregateMessageOrigin, + weight_used: ::sp_weights::Weight, + success: ::core::primitive::bool, + }, + #[codec(index = 2)] + OverweightEnqueued { + id: [::core::primitive::u8; 32usize], + origin: + runtime_types::bridge_hub_common::message_queue::AggregateMessageOrigin, + page_index: ::core::primitive::u32, + message_index: ::core::primitive::u32, + }, + #[codec(index = 3)] + PageReaped { + origin: + runtime_types::bridge_hub_common::message_queue::AggregateMessageOrigin, + index: ::core::primitive::u32, + }, + } + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct BookState<_0> { + pub begin: ::core::primitive::u32, + pub end: 
::core::primitive::u32, + pub count: ::core::primitive::u32, + pub ready_neighbours: + ::core::option::Option>, + pub message_count: ::core::primitive::u64, + pub size: ::core::primitive::u64, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Neighbours<_0> { + pub prev: _0, + pub next: _0, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Page<_0> { + pub remaining: _0, + pub remaining_size: _0, + pub first_index: _0, + pub first: _0, + pub last: _0, + pub heap: runtime_types::bounded_collections::bounded_vec::BoundedVec< + ::core::primitive::u8, + >, + } + } pub mod pallet_multisig { use super::runtime_types; pub mod pallet { @@ -1779,21 +1967,21 @@ pub mod api { pub enum Call { #[codec(index = 0)] send { - dest: ::std::boxed::Box, + dest: ::std::boxed::Box, message: ::std::boxed::Box, }, #[codec(index = 1)] teleport_assets { - dest: ::std::boxed::Box, - beneficiary: ::std::boxed::Box, - assets: ::std::boxed::Box, + dest: ::std::boxed::Box, + beneficiary: ::std::boxed::Box, + assets: ::std::boxed::Box, fee_asset_item: ::core::primitive::u32, }, #[codec(index = 2)] reserve_transfer_assets { - dest: ::std::boxed::Box, - beneficiary: ::std::boxed::Box, - assets: ::std::boxed::Box, + dest: ::std::boxed::Box, + beneficiary: ::std::boxed::Box, + assets: ::std::boxed::Box, fee_asset_item: ::core::primitive::u32, }, #[codec(index = 3)] @@ -1803,9 +1991,8 @@ pub mod api { }, #[codec(index = 4)] force_xcm_version { - location: ::std::boxed::Box< - runtime_types::staging_xcm::v3::multilocation::MultiLocation, - >, + location: + ::std::boxed::Box, version: ::core::primitive::u32, }, #[codec(index = 5)] @@ -1814,30 +2001,43 @@ pub mod api { }, #[codec(index = 6)] force_subscribe_version_notify { - location: ::std::boxed::Box, + location: ::std::boxed::Box, }, #[codec(index = 7)] force_unsubscribe_version_notify { - location: ::std::boxed::Box, + location: ::std::boxed::Box, }, 
#[codec(index = 8)] limited_reserve_transfer_assets { - dest: ::std::boxed::Box, - beneficiary: ::std::boxed::Box, - assets: ::std::boxed::Box, + dest: ::std::boxed::Box, + beneficiary: ::std::boxed::Box, + assets: ::std::boxed::Box, fee_asset_item: ::core::primitive::u32, weight_limit: runtime_types::xcm::v3::WeightLimit, }, #[codec(index = 9)] limited_teleport_assets { - dest: ::std::boxed::Box, - beneficiary: ::std::boxed::Box, - assets: ::std::boxed::Box, + dest: ::std::boxed::Box, + beneficiary: ::std::boxed::Box, + assets: ::std::boxed::Box, fee_asset_item: ::core::primitive::u32, weight_limit: runtime_types::xcm::v3::WeightLimit, }, #[codec(index = 10)] force_suspension { suspended: ::core::primitive::bool }, + #[codec(index = 11)] + transfer_assets { + dest: ::std::boxed::Box, + beneficiary: ::std::boxed::Box, + assets: ::std::boxed::Box, + fee_asset_item: ::core::primitive::u32, + weight_limit: runtime_types::xcm::v3::WeightLimit, + }, + #[codec(index = 12)] + claim_assets { + assets: ::std::boxed::Box, + beneficiary: ::std::boxed::Box, + }, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Error { @@ -1868,7 +2068,7 @@ pub mod api { #[codec(index = 12)] AlreadySubscribed, #[codec(index = 13)] - InvalidAsset, + CannotCheckOutTeleport, #[codec(index = 14)] LowBalance, #[codec(index = 15)] @@ -1881,27 +2081,37 @@ pub mod api { LockNotFound, #[codec(index = 19)] InUse, + #[codec(index = 20)] + InvalidAssetNotConcrete, + #[codec(index = 21)] + InvalidAssetUnknownReserve, + #[codec(index = 22)] + InvalidAssetUnsupportedReserve, + #[codec(index = 23)] + TooManyReserves, + #[codec(index = 24)] + LocalExecutionIncomplete, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Event { #[codec(index = 0)] - Attempted { outcome: runtime_types::xcm::v3::traits::Outcome }, + Attempted { outcome: runtime_types::staging_xcm::v4::traits::Outcome }, #[codec(index = 1)] Sent { - origin: 
runtime_types::staging_xcm::v3::multilocation::MultiLocation, - destination: runtime_types::staging_xcm::v3::multilocation::MultiLocation, - message: runtime_types::xcm::v3::Xcm, + origin: runtime_types::staging_xcm::v4::location::Location, + destination: runtime_types::staging_xcm::v4::location::Location, + message: runtime_types::staging_xcm::v4::Xcm, message_id: [::core::primitive::u8; 32usize], }, #[codec(index = 2)] UnexpectedResponse { - origin: runtime_types::staging_xcm::v3::multilocation::MultiLocation, + origin: runtime_types::staging_xcm::v4::location::Location, query_id: ::core::primitive::u64, }, #[codec(index = 3)] ResponseReady { query_id: ::core::primitive::u64, - response: runtime_types::xcm::v3::Response, + response: runtime_types::staging_xcm::v4::Response, }, #[codec(index = 4)] Notified { @@ -1931,15 +2141,15 @@ pub mod api { }, #[codec(index = 8)] InvalidResponder { - origin: runtime_types::staging_xcm::v3::multilocation::MultiLocation, + origin: runtime_types::staging_xcm::v4::location::Location, query_id: ::core::primitive::u64, expected_location: ::core::option::Option< - runtime_types::staging_xcm::v3::multilocation::MultiLocation, + runtime_types::staging_xcm::v4::location::Location, >, }, #[codec(index = 9)] InvalidResponderVersion { - origin: runtime_types::staging_xcm::v3::multilocation::MultiLocation, + origin: runtime_types::staging_xcm::v4::location::Location, query_id: ::core::primitive::u64, }, #[codec(index = 10)] @@ -1947,98 +2157,99 @@ pub mod api { #[codec(index = 11)] AssetsTrapped { hash: ::subxt::utils::H256, - origin: runtime_types::staging_xcm::v3::multilocation::MultiLocation, - assets: runtime_types::xcm::VersionedMultiAssets, + origin: runtime_types::staging_xcm::v4::location::Location, + assets: runtime_types::xcm::VersionedAssets, }, #[codec(index = 12)] VersionChangeNotified { - destination: runtime_types::staging_xcm::v3::multilocation::MultiLocation, + destination: 
runtime_types::staging_xcm::v4::location::Location, result: ::core::primitive::u32, - cost: runtime_types::xcm::v3::multiasset::MultiAssets, + cost: runtime_types::staging_xcm::v4::asset::Assets, message_id: [::core::primitive::u8; 32usize], }, #[codec(index = 13)] SupportedVersionChanged { - location: runtime_types::staging_xcm::v3::multilocation::MultiLocation, + location: runtime_types::staging_xcm::v4::location::Location, version: ::core::primitive::u32, }, #[codec(index = 14)] NotifyTargetSendFail { - location: runtime_types::staging_xcm::v3::multilocation::MultiLocation, + location: runtime_types::staging_xcm::v4::location::Location, query_id: ::core::primitive::u64, error: runtime_types::xcm::v3::traits::Error, }, #[codec(index = 15)] NotifyTargetMigrationFail { - location: runtime_types::xcm::VersionedMultiLocation, + location: runtime_types::xcm::VersionedLocation, query_id: ::core::primitive::u64, }, #[codec(index = 16)] InvalidQuerierVersion { - origin: runtime_types::staging_xcm::v3::multilocation::MultiLocation, + origin: runtime_types::staging_xcm::v4::location::Location, query_id: ::core::primitive::u64, }, #[codec(index = 17)] InvalidQuerier { - origin: runtime_types::staging_xcm::v3::multilocation::MultiLocation, + origin: runtime_types::staging_xcm::v4::location::Location, query_id: ::core::primitive::u64, - expected_querier: - runtime_types::staging_xcm::v3::multilocation::MultiLocation, + expected_querier: runtime_types::staging_xcm::v4::location::Location, maybe_actual_querier: ::core::option::Option< - runtime_types::staging_xcm::v3::multilocation::MultiLocation, + runtime_types::staging_xcm::v4::location::Location, >, }, #[codec(index = 18)] VersionNotifyStarted { - destination: runtime_types::staging_xcm::v3::multilocation::MultiLocation, - cost: runtime_types::xcm::v3::multiasset::MultiAssets, + destination: runtime_types::staging_xcm::v4::location::Location, + cost: runtime_types::staging_xcm::v4::asset::Assets, message_id: 
[::core::primitive::u8; 32usize], }, #[codec(index = 19)] VersionNotifyRequested { - destination: runtime_types::staging_xcm::v3::multilocation::MultiLocation, - cost: runtime_types::xcm::v3::multiasset::MultiAssets, + destination: runtime_types::staging_xcm::v4::location::Location, + cost: runtime_types::staging_xcm::v4::asset::Assets, message_id: [::core::primitive::u8; 32usize], }, #[codec(index = 20)] VersionNotifyUnrequested { - destination: runtime_types::staging_xcm::v3::multilocation::MultiLocation, - cost: runtime_types::xcm::v3::multiasset::MultiAssets, + destination: runtime_types::staging_xcm::v4::location::Location, + cost: runtime_types::staging_xcm::v4::asset::Assets, message_id: [::core::primitive::u8; 32usize], }, #[codec(index = 21)] FeesPaid { - paying: runtime_types::staging_xcm::v3::multilocation::MultiLocation, - fees: runtime_types::xcm::v3::multiasset::MultiAssets, + paying: runtime_types::staging_xcm::v4::location::Location, + fees: runtime_types::staging_xcm::v4::asset::Assets, }, #[codec(index = 22)] AssetsClaimed { hash: ::subxt::utils::H256, - origin: runtime_types::staging_xcm::v3::multilocation::MultiLocation, - assets: runtime_types::xcm::VersionedMultiAssets, + origin: runtime_types::staging_xcm::v4::location::Location, + assets: runtime_types::xcm::VersionedAssets, }, + #[codec(index = 23)] + VersionMigrationFinished { version: ::core::primitive::u32 }, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Origin { #[codec(index = 0)] - Xcm(runtime_types::staging_xcm::v3::multilocation::MultiLocation), + Xcm(runtime_types::staging_xcm::v4::location::Location), #[codec(index = 1)] - Response(runtime_types::staging_xcm::v3::multilocation::MultiLocation), + Response(runtime_types::staging_xcm::v4::location::Location), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum QueryStatus<_0> { #[codec(index = 0)] Pending { - responder: 
runtime_types::xcm::VersionedMultiLocation, + responder: runtime_types::xcm::VersionedLocation, maybe_match_querier: - ::core::option::Option, + ::core::option::Option, maybe_notify: ::core::option::Option<(::core::primitive::u8, ::core::primitive::u8)>, timeout: _0, }, #[codec(index = 1)] VersionNotifier { - origin: runtime_types::xcm::VersionedMultiLocation, + origin: runtime_types::xcm::VersionedLocation, is_active: ::core::primitive::bool, }, #[codec(index = 2)] @@ -2047,8 +2258,8 @@ pub mod api { #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct RemoteLockedFungibleRecord<_0> { pub amount: ::core::primitive::u128, - pub owner: runtime_types::xcm::VersionedMultiLocation, - pub locker: runtime_types::xcm::VersionedMultiLocation, + pub owner: runtime_types::xcm::VersionedLocation, + pub locker: runtime_types::xcm::VersionedLocation, pub consumers: runtime_types::bounded_collections::bounded_vec::BoundedVec<( _0, ::core::primitive::u128, @@ -2104,15 +2315,6 @@ pub mod api { pub struct Id(pub ::core::primitive::u32); #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub struct ValidationCode(pub ::std::vec::Vec<::core::primitive::u8>); - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum XcmpMessageFormat { - #[codec(index = 0)] - ConcatenatedVersionedXcm, - #[codec(index = 1)] - ConcatenatedEncodedBlob, - #[codec(index = 2)] - Signals, - } } } pub mod polkadot_primitives { @@ -2172,339 +2374,1739 @@ pub mod api { } } } - pub mod sp_arithmetic { + pub mod primitive_types { use super::runtime_types; - pub mod fixed_point { - use super::runtime_types; - #[derive( - :: codec :: Decode, - :: codec :: Encode, - :: subxt :: ext :: codec :: CompactAs, - Clone, - Debug, - PartialEq, - )] - pub struct FixedU128(pub ::core::primitive::u128); - } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum ArithmeticError { - #[codec(index = 0)] - 
Underflow, - #[codec(index = 1)] - Overflow, - #[codec(index = 2)] - DivisionByZero, - } + pub struct U256(pub [::core::primitive::u64; 4usize]); } - pub mod sp_consensus_aura { + pub mod snowbridge_amcl { use super::runtime_types; - pub mod sr25519 { + pub mod bls381 { use super::runtime_types; - pub mod app_sr25519 { + pub mod big { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Public(pub runtime_types::sp_core::sr25519::Public); + pub struct Big { + pub w: [::core::primitive::i32; 14usize], + } + } + pub mod ecp { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct ECP { + pub x: runtime_types::snowbridge_amcl::bls381::fp::FP, + pub y: runtime_types::snowbridge_amcl::bls381::fp::FP, + pub z: runtime_types::snowbridge_amcl::bls381::fp::FP, + } + } + pub mod fp { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct FP { + pub x: runtime_types::snowbridge_amcl::bls381::big::Big, + pub xes: ::core::primitive::i32, + } } } } - pub mod sp_consensus_grandpa { + pub mod snowbridge_beacon_primitives { use super::runtime_types; - pub mod app { + pub mod bls { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Public(pub runtime_types::sp_core::ed25519::Public); + pub enum BlsError { + #[codec(index = 0)] + InvalidSignature, + #[codec(index = 1)] + InvalidPublicKey, + #[codec(index = 2)] + InvalidAggregatePublicKeys, + #[codec(index = 3)] + SignatureVerificationFailed, + } + } + pub mod types { + use super::runtime_types; + pub mod deneb { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct ExecutionPayloadHeader { + pub parent_hash: ::subxt::utils::H256, + pub fee_recipient: ::subxt::utils::H160, + pub state_root: ::subxt::utils::H256, + pub 
receipts_root: ::subxt::utils::H256, + pub logs_bloom: ::std::vec::Vec<::core::primitive::u8>, + pub prev_randao: ::subxt::utils::H256, + pub block_number: ::core::primitive::u64, + pub gas_limit: ::core::primitive::u64, + pub gas_used: ::core::primitive::u64, + pub timestamp: ::core::primitive::u64, + pub extra_data: ::std::vec::Vec<::core::primitive::u8>, + pub base_fee_per_gas: runtime_types::primitive_types::U256, + pub block_hash: ::subxt::utils::H256, + pub transactions_root: ::subxt::utils::H256, + pub withdrawals_root: ::subxt::utils::H256, + pub blob_gas_used: ::core::primitive::u64, + pub excess_blob_gas: ::core::primitive::u64, + } + } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Signature(pub runtime_types::sp_core::ed25519::Signature); + pub struct BeaconHeader { + pub slot: ::core::primitive::u64, + pub proposer_index: ::core::primitive::u64, + pub parent_root: ::subxt::utils::H256, + pub state_root: ::subxt::utils::H256, + pub body_root: ::subxt::utils::H256, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct CompactBeaconState { + #[codec(compact)] + pub slot: ::core::primitive::u64, + pub block_roots_root: ::subxt::utils::H256, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct CompactExecutionHeader { + pub parent_hash: ::subxt::utils::H256, + #[codec(compact)] + pub block_number: ::core::primitive::u64, + pub state_root: ::subxt::utils::H256, + pub receipts_root: ::subxt::utils::H256, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct ExecutionHeaderState { + pub beacon_block_root: ::subxt::utils::H256, + pub beacon_slot: ::core::primitive::u64, + pub block_hash: ::subxt::utils::H256, + pub block_number: ::core::primitive::u64, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct ExecutionPayloadHeader { + pub parent_hash: 
::subxt::utils::H256, + pub fee_recipient: ::subxt::utils::H160, + pub state_root: ::subxt::utils::H256, + pub receipts_root: ::subxt::utils::H256, + pub logs_bloom: ::std::vec::Vec<::core::primitive::u8>, + pub prev_randao: ::subxt::utils::H256, + pub block_number: ::core::primitive::u64, + pub gas_limit: ::core::primitive::u64, + pub gas_used: ::core::primitive::u64, + pub timestamp: ::core::primitive::u64, + pub extra_data: ::std::vec::Vec<::core::primitive::u8>, + pub base_fee_per_gas: runtime_types::primitive_types::U256, + pub block_hash: ::subxt::utils::H256, + pub transactions_root: ::subxt::utils::H256, + pub withdrawals_root: ::subxt::utils::H256, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Fork { + pub version: [::core::primitive::u8; 4usize], + pub epoch: ::core::primitive::u64, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct ForkVersions { + pub genesis: runtime_types::snowbridge_beacon_primitives::types::Fork, + pub altair: runtime_types::snowbridge_beacon_primitives::types::Fork, + pub bellatrix: runtime_types::snowbridge_beacon_primitives::types::Fork, + pub capella: runtime_types::snowbridge_beacon_primitives::types::Fork, + pub deneb: runtime_types::snowbridge_beacon_primitives::types::Fork, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct PublicKey(pub [::core::primitive::u8; 48usize]); + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Signature(pub [::core::primitive::u8; 96usize]); + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct SyncAggregate { + pub sync_committee_bits: [::core::primitive::u8; 64usize], + pub sync_committee_signature: + runtime_types::snowbridge_beacon_primitives::types::Signature, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct SyncCommittee { + pub pubkeys: + 
[runtime_types::snowbridge_beacon_primitives::types::PublicKey; 512usize], + pub aggregate_pubkey: + runtime_types::snowbridge_beacon_primitives::types::PublicKey, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct SyncCommitteePrepared { + pub root: ::subxt::utils::H256, + pub pubkeys: ::std::boxed::Box< + [runtime_types::snowbridge_milagro_bls::keys::PublicKey; 512usize], + >, + pub aggregate_pubkey: runtime_types::snowbridge_milagro_bls::keys::PublicKey, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum VersionedExecutionPayloadHeader { + # [codec (index = 0)] Capella (runtime_types :: snowbridge_beacon_primitives :: types :: ExecutionPayloadHeader ,) , # [codec (index = 1)] Deneb (runtime_types :: snowbridge_beacon_primitives :: types :: deneb :: ExecutionPayloadHeader ,) , } + } + pub mod updates { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct AncestryProof { + pub header_branch: ::std::vec::Vec<::subxt::utils::H256>, + pub finalized_block_root: ::subxt::utils::H256, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct CheckpointUpdate { + pub header: runtime_types::snowbridge_beacon_primitives::types::BeaconHeader, + pub current_sync_committee: + runtime_types::snowbridge_beacon_primitives::types::SyncCommittee, + pub current_sync_committee_branch: ::std::vec::Vec<::subxt::utils::H256>, + pub validators_root: ::subxt::utils::H256, + pub block_roots_root: ::subxt::utils::H256, + pub block_roots_branch: ::std::vec::Vec<::subxt::utils::H256>, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct ExecutionHeaderUpdate { pub header : runtime_types :: snowbridge_beacon_primitives :: types :: BeaconHeader , pub ancestry_proof : :: core :: option :: Option < runtime_types :: snowbridge_beacon_primitives :: updates :: AncestryProof > 
, pub execution_header : runtime_types :: snowbridge_beacon_primitives :: types :: VersionedExecutionPayloadHeader , pub execution_branch : :: std :: vec :: Vec < :: subxt :: utils :: H256 > , } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct NextSyncCommitteeUpdate { + pub next_sync_committee: + runtime_types::snowbridge_beacon_primitives::types::SyncCommittee, + pub next_sync_committee_branch: ::std::vec::Vec<::subxt::utils::H256>, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Update { pub attested_header : runtime_types :: snowbridge_beacon_primitives :: types :: BeaconHeader , pub sync_aggregate : runtime_types :: snowbridge_beacon_primitives :: types :: SyncAggregate , pub signature_slot : :: core :: primitive :: u64 , pub next_sync_committee_update : :: core :: option :: Option < runtime_types :: snowbridge_beacon_primitives :: updates :: NextSyncCommitteeUpdate > , pub finalized_header : runtime_types :: snowbridge_beacon_primitives :: types :: BeaconHeader , pub finality_branch : :: std :: vec :: Vec < :: subxt :: utils :: H256 > , pub block_roots_root : :: subxt :: utils :: H256 , pub block_roots_branch : :: std :: vec :: Vec < :: subxt :: utils :: H256 > , } } } - pub mod sp_consensus_slots { - use super::runtime_types; - #[derive( - :: codec :: Decode, - :: codec :: Encode, - :: subxt :: ext :: codec :: CompactAs, - Clone, - Debug, - PartialEq, - )] - pub struct Slot(pub ::core::primitive::u64); - #[derive( - :: codec :: Decode, - :: codec :: Encode, - :: subxt :: ext :: codec :: CompactAs, - Clone, - Debug, - PartialEq, - )] - pub struct SlotDuration(pub ::core::primitive::u64); - } - pub mod sp_core { + pub mod snowbridge_core { use super::runtime_types; - pub mod crypto { + pub mod inbound { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct KeyTypeId(pub [::core::primitive::u8; 4usize]); + pub struct 
Log { + pub address: ::subxt::utils::H160, + pub topics: ::std::vec::Vec<::subxt::utils::H256>, + pub data: ::std::vec::Vec<::core::primitive::u8>, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Message { + pub event_log: runtime_types::snowbridge_core::inbound::Log, + pub proof: runtime_types::snowbridge_core::inbound::Proof, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Proof { + pub block_hash: ::subxt::utils::H256, + pub tx_index: ::core::primitive::u32, + pub data: ( + ::std::vec::Vec<::std::vec::Vec<::core::primitive::u8>>, + ::std::vec::Vec<::std::vec::Vec<::core::primitive::u8>>, + ), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum VerificationError { + #[codec(index = 0)] + HeaderNotFound, + #[codec(index = 1)] + LogNotFound, + #[codec(index = 2)] + InvalidLog, + #[codec(index = 3)] + InvalidProof, + } } - pub mod ecdsa { + pub mod operating_mode { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Signature(pub [::core::primitive::u8; 65usize]); + pub enum BasicOperatingMode { + #[codec(index = 0)] + Normal, + #[codec(index = 1)] + Halted, + } } - pub mod ed25519 { + pub mod outbound { use super::runtime_types; + pub mod v1 { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum AgentExecuteCommand { + #[codec(index = 0)] + TransferToken { + token: ::subxt::utils::H160, + recipient: ::subxt::utils::H160, + amount: ::core::primitive::u128, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Command { + #[codec(index = 0)] + AgentExecute { + agent_id: ::subxt::utils::H256, + command: + runtime_types::snowbridge_core::outbound::v1::AgentExecuteCommand, + }, + #[codec(index = 1)] + Upgrade { + impl_address: ::subxt::utils::H160, + impl_code_hash: 
::subxt::utils::H256, + initializer: ::core::option::Option< + runtime_types::snowbridge_core::outbound::v1::Initializer, + >, + }, + #[codec(index = 2)] + CreateAgent { agent_id: ::subxt::utils::H256 }, + #[codec(index = 3)] + CreateChannel { + channel_id: runtime_types::snowbridge_core::ChannelId, + agent_id: ::subxt::utils::H256, + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 4)] + UpdateChannel { + channel_id: runtime_types::snowbridge_core::ChannelId, + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 5)] + SetOperatingMode { + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 6)] + TransferNativeFromAgent { + agent_id: ::subxt::utils::H256, + recipient: ::subxt::utils::H160, + amount: ::core::primitive::u128, + }, + #[codec(index = 7)] + SetTokenTransferFees { + create_asset_xcm: ::core::primitive::u128, + transfer_asset_xcm: ::core::primitive::u128, + register_token: runtime_types::primitive_types::U256, + }, + #[codec(index = 8)] + SetPricingParameters { + exchange_rate: runtime_types::snowbridge_core::pricing::UD60x18, + delivery_cost: ::core::primitive::u128, + multiplier: runtime_types::snowbridge_core::pricing::UD60x18, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Initializer { + pub params: ::std::vec::Vec<::core::primitive::u8>, + pub maximum_required_gas: ::core::primitive::u64, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum OperatingMode { + #[codec(index = 0)] + Normal, + #[codec(index = 1)] + RejectingOutboundMessages, + } + } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Public(pub [::core::primitive::u8; 32usize]); + pub struct Fee<_0> { + pub local: _0, + pub remote: _0, + } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Signature(pub 
[::core::primitive::u8; 64usize]); + pub enum SendError { + #[codec(index = 0)] + MessageTooLarge, + #[codec(index = 1)] + Halted, + #[codec(index = 2)] + InvalidChannel, + } } - pub mod sr25519 { + pub mod pricing { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Public(pub [::core::primitive::u8; 32usize]); + pub struct PricingParameters<_0> { + pub exchange_rate: runtime_types::sp_arithmetic::fixed_point::FixedU128, + pub rewards: runtime_types::snowbridge_core::pricing::Rewards<_0>, + pub fee_per_gas: runtime_types::primitive_types::U256, + pub multiplier: runtime_types::sp_arithmetic::fixed_point::FixedU128, + } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Signature(pub [::core::primitive::u8; 64usize]); + pub struct Rewards<_0> { + pub local: _0, + pub remote: runtime_types::primitive_types::U256, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct UD60x18(pub runtime_types::primitive_types::U256); } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct OpaqueMetadata(pub ::std::vec::Vec<::core::primitive::u8>); + pub struct Channel { + pub agent_id: ::subxt::utils::H256, + pub para_id: runtime_types::polkadot_parachain_primitives::primitives::Id, + } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum Void {} + pub struct ChannelId(pub [::core::primitive::u8; 32usize]); } - pub mod sp_inherents { + pub mod snowbridge_milagro_bls { use super::runtime_types; - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct CheckInherentsResult { - pub okay: ::core::primitive::bool, - pub fatal_error: ::core::primitive::bool, - pub errors: runtime_types::sp_inherents::InherentData, + pub mod keys { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct 
PublicKey { + pub point: runtime_types::snowbridge_amcl::bls381::ecp::ECP, + } } + } + pub mod snowbridge_outbound_queue_merkle_tree { + use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct InherentData { - pub data: ::subxt::utils::KeyedVec< - [::core::primitive::u8; 8usize], - ::std::vec::Vec<::core::primitive::u8>, - >, + pub struct MerkleProof { + pub root: ::subxt::utils::H256, + pub proof: ::std::vec::Vec<::subxt::utils::H256>, + pub number_of_leaves: ::core::primitive::u64, + pub leaf_index: ::core::primitive::u64, + pub leaf: ::subxt::utils::H256, } } - pub mod sp_runtime { + pub mod snowbridge_pallet_ethereum_client { use super::runtime_types; - pub mod generic { - use super::runtime_types; - pub mod block { - use super::runtime_types; - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct Block<_0, _1> { - pub header: _0, - pub extrinsics: ::std::vec::Vec<_1>, - } - } - pub mod digest { - use super::runtime_types; - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum DigestItem { - #[codec(index = 6)] - PreRuntime( - [::core::primitive::u8; 4usize], - ::std::vec::Vec<::core::primitive::u8>, - ), - #[codec(index = 4)] - Consensus( - [::core::primitive::u8; 4usize], - ::std::vec::Vec<::core::primitive::u8>, - ), - #[codec(index = 5)] - Seal( - [::core::primitive::u8; 4usize], - ::std::vec::Vec<::core::primitive::u8>, - ), - #[codec(index = 0)] - Other(::std::vec::Vec<::core::primitive::u8>), - #[codec(index = 8)] - RuntimeEnvironmentUpdated, - } - } - } - pub mod transaction_validity { + pub mod pallet { use super::runtime_types; #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum InvalidTransaction { + pub enum Call { + # [codec (index = 0)] force_checkpoint { update : :: std :: boxed :: Box < runtime_types :: snowbridge_beacon_primitives :: updates :: CheckpointUpdate > , } , # [codec (index = 
1)] submit { update : :: std :: boxed :: Box < runtime_types :: snowbridge_beacon_primitives :: updates :: Update > , } , # [codec (index = 2)] submit_execution_header { update : :: std :: boxed :: Box < runtime_types :: snowbridge_beacon_primitives :: updates :: ExecutionHeaderUpdate > , } , # [codec (index = 3)] set_operating_mode { mode : runtime_types :: snowbridge_core :: operating_mode :: BasicOperatingMode , } , } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Error { #[codec(index = 0)] - Call, + SkippedSyncCommitteePeriod, #[codec(index = 1)] - Payment, + IrrelevantUpdate, #[codec(index = 2)] - Future, + NotBootstrapped, #[codec(index = 3)] - Stale, + SyncCommitteeParticipantsNotSupermajority, #[codec(index = 4)] - BadProof, + InvalidHeaderMerkleProof, #[codec(index = 5)] - AncientBirthBlock, + InvalidSyncCommitteeMerkleProof, #[codec(index = 6)] - ExhaustsResources, + InvalidExecutionHeaderProof, #[codec(index = 7)] - Custom(::core::primitive::u8), + InvalidAncestryMerkleProof, #[codec(index = 8)] - BadMandatory, + InvalidBlockRootsRootMerkleProof, #[codec(index = 9)] - MandatoryValidation, + InvalidFinalizedHeaderGap, #[codec(index = 10)] - BadSigner, + HeaderNotFinalized, + #[codec(index = 11)] + BlockBodyHashTreeRootFailed, + #[codec(index = 12)] + HeaderHashTreeRootFailed, + #[codec(index = 13)] + SyncCommitteeHashTreeRootFailed, + #[codec(index = 14)] + SigningRootHashTreeRootFailed, + #[codec(index = 15)] + ForkDataHashTreeRootFailed, + #[codec(index = 16)] + ExpectedFinalizedHeaderNotStored, + #[codec(index = 17)] + BLSPreparePublicKeysFailed, + #[codec(index = 18)] + BLSVerificationFailed( + runtime_types::snowbridge_beacon_primitives::bls::BlsError, + ), + #[codec(index = 19)] + InvalidUpdateSlot, + #[codec(index = 20)] + InvalidSyncCommitteeUpdate, + #[codec(index = 21)] + ExecutionHeaderTooFarBehind, + #[codec(index = 22)] + ExecutionHeaderSkippedBlock, + #[codec(index = 23)] + Halted, + } + #[derive(:: 
codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Event { + #[codec(index = 0)] + BeaconHeaderImported { + block_hash: ::subxt::utils::H256, + slot: ::core::primitive::u64, + }, + #[codec(index = 1)] + ExecutionHeaderImported { + block_hash: ::subxt::utils::H256, + block_number: ::core::primitive::u64, + }, + #[codec(index = 2)] + SyncCommitteeUpdated { period: ::core::primitive::u64 }, + #[codec(index = 3)] + OperatingModeChanged { + mode: runtime_types::snowbridge_core::operating_mode::BasicOperatingMode, + }, + } + } + } + pub mod snowbridge_pallet_inbound_queue { + use super::runtime_types; + pub mod pallet { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Call { + #[codec(index = 0)] + submit { message: runtime_types::snowbridge_core::inbound::Message }, + #[codec(index = 1)] + set_operating_mode { + mode: runtime_types::snowbridge_core::operating_mode::BasicOperatingMode, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Error { + #[codec(index = 0)] + InvalidGateway, + #[codec(index = 1)] + InvalidEnvelope, + #[codec(index = 2)] + InvalidNonce, + #[codec(index = 3)] + InvalidPayload, + #[codec(index = 4)] + InvalidChannel, + #[codec(index = 5)] + MaxNonceReached, + #[codec(index = 6)] + InvalidAccountConversion, + #[codec(index = 7)] + Halted, + #[codec(index = 8)] + Verification(runtime_types::snowbridge_core::inbound::VerificationError), + #[codec(index = 9)] + Send(runtime_types::snowbridge_pallet_inbound_queue::pallet::SendError), + #[codec(index = 10)] + ConvertMessage( + runtime_types::snowbridge_router_primitives::inbound::ConvertMessageError, + ), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Event { + #[codec(index = 0)] + MessageReceived { + channel_id: runtime_types::snowbridge_core::ChannelId, + nonce: ::core::primitive::u64, + message_id: [::core::primitive::u8; 
32usize], + fee_burned: ::core::primitive::u128, + }, + #[codec(index = 1)] + OperatingModeChanged { + mode: runtime_types::snowbridge_core::operating_mode::BasicOperatingMode, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum SendError { + #[codec(index = 0)] + NotApplicable, + #[codec(index = 1)] + NotRoutable, + #[codec(index = 2)] + Transport, + #[codec(index = 3)] + DestinationUnsupported, + #[codec(index = 4)] + ExceedsMaxMessageSize, + #[codec(index = 5)] + MissingArgument, + #[codec(index = 6)] + Fees, + } + } + } + pub mod snowbridge_pallet_outbound_queue { + use super::runtime_types; + pub mod pallet { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Call { + #[codec(index = 0)] + set_operating_mode { + mode: runtime_types::snowbridge_core::operating_mode::BasicOperatingMode, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Error { + #[codec(index = 0)] + MessageTooLarge, + #[codec(index = 1)] + Halted, + #[codec(index = 2)] + InvalidChannel, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Event { + #[codec(index = 0)] + MessageQueued { id: ::subxt::utils::H256 }, + #[codec(index = 1)] + MessageAccepted { id: ::subxt::utils::H256, nonce: ::core::primitive::u64 }, + #[codec(index = 2)] + MessagesCommitted { root: ::subxt::utils::H256, count: ::core::primitive::u64 }, + #[codec(index = 3)] + OperatingModeChanged { + mode: runtime_types::snowbridge_core::operating_mode::BasicOperatingMode, + }, + } + } + pub mod types { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct CommittedMessage { + pub channel_id: runtime_types::snowbridge_core::ChannelId, + #[codec(compact)] + pub nonce: ::core::primitive::u64, + pub command: ::core::primitive::u8, + pub params: 
::std::vec::Vec<::core::primitive::u8>, + #[codec(compact)] + pub max_dispatch_gas: ::core::primitive::u64, + #[codec(compact)] + pub max_fee_per_gas: ::core::primitive::u128, + #[codec(compact)] + pub reward: ::core::primitive::u128, + pub id: ::subxt::utils::H256, + } + } + } + pub mod snowbridge_pallet_system { + use super::runtime_types; + pub mod pallet { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Call { + #[codec(index = 0)] + upgrade { + impl_address: ::subxt::utils::H160, + impl_code_hash: ::subxt::utils::H256, + initializer: ::core::option::Option< + runtime_types::snowbridge_core::outbound::v1::Initializer, + >, + }, + #[codec(index = 1)] + set_operating_mode { + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 2)] + set_pricing_parameters { + params: runtime_types::snowbridge_core::pricing::PricingParameters< + ::core::primitive::u128, + >, + }, + #[codec(index = 3)] + create_agent, + #[codec(index = 4)] + create_channel { + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 5)] + update_channel { + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 6)] + force_update_channel { + channel_id: runtime_types::snowbridge_core::ChannelId, + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 7)] + transfer_native_from_agent { + recipient: ::subxt::utils::H160, + amount: ::core::primitive::u128, + }, + #[codec(index = 8)] + force_transfer_native_from_agent { + location: ::std::boxed::Box, + recipient: ::subxt::utils::H160, + amount: ::core::primitive::u128, + }, + #[codec(index = 9)] + set_token_transfer_fees { + create_asset_xcm: ::core::primitive::u128, + transfer_asset_xcm: ::core::primitive::u128, + register_token: runtime_types::primitive_types::U256, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, 
PartialEq)] + pub enum Error { + #[codec(index = 0)] + LocationConversionFailed, + #[codec(index = 1)] + AgentAlreadyCreated, + #[codec(index = 2)] + NoAgent, + #[codec(index = 3)] + ChannelAlreadyCreated, + #[codec(index = 4)] + NoChannel, + #[codec(index = 5)] + UnsupportedLocationVersion, + #[codec(index = 6)] + InvalidLocation, + #[codec(index = 7)] + Send(runtime_types::snowbridge_core::outbound::SendError), + #[codec(index = 8)] + InvalidTokenTransferFees, + #[codec(index = 9)] + InvalidPricingParameters, + #[codec(index = 10)] + InvalidUpgradeParameters, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Event { + #[codec(index = 0)] + Upgrade { + impl_address: ::subxt::utils::H160, + impl_code_hash: ::subxt::utils::H256, + initializer_params_hash: ::core::option::Option<::subxt::utils::H256>, + }, + #[codec(index = 1)] + CreateAgent { + location: + ::std::boxed::Box, + agent_id: ::subxt::utils::H256, + }, + #[codec(index = 2)] + CreateChannel { + channel_id: runtime_types::snowbridge_core::ChannelId, + agent_id: ::subxt::utils::H256, + }, + #[codec(index = 3)] + UpdateChannel { + channel_id: runtime_types::snowbridge_core::ChannelId, + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 4)] + SetOperatingMode { + mode: runtime_types::snowbridge_core::outbound::v1::OperatingMode, + }, + #[codec(index = 5)] + TransferNativeFromAgent { + agent_id: ::subxt::utils::H256, + recipient: ::subxt::utils::H160, + amount: ::core::primitive::u128, + }, + #[codec(index = 6)] + SetTokenTransferFees { + create_asset_xcm: ::core::primitive::u128, + transfer_asset_xcm: ::core::primitive::u128, + register_token: runtime_types::primitive_types::U256, + }, + #[codec(index = 7)] + PricingParametersChanged { + params: runtime_types::snowbridge_core::pricing::PricingParameters< + ::core::primitive::u128, + >, + }, + } + } + } + pub mod snowbridge_router_primitives { + use super::runtime_types; + pub mod 
inbound { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum ConvertMessageError { + #[codec(index = 0)] + UnsupportedVersion, + } + } + } + pub mod sp_arithmetic { + use super::runtime_types; + pub mod fixed_point { + use super::runtime_types; + #[derive( + :: codec :: Decode, + :: codec :: Encode, + :: subxt :: ext :: codec :: CompactAs, + Clone, + Debug, + PartialEq, + )] + pub struct FixedU128(pub ::core::primitive::u128); + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum ArithmeticError { + #[codec(index = 0)] + Underflow, + #[codec(index = 1)] + Overflow, + #[codec(index = 2)] + DivisionByZero, + } + } + pub mod sp_consensus_aura { + use super::runtime_types; + pub mod sr25519 { + use super::runtime_types; + pub mod app_sr25519 { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Public(pub runtime_types::sp_core::sr25519::Public); + } + } + } + pub mod sp_consensus_grandpa { + use super::runtime_types; + pub mod app { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Public(pub runtime_types::sp_core::ed25519::Public); + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Signature(pub runtime_types::sp_core::ed25519::Signature); + } + } + pub mod sp_consensus_slots { + use super::runtime_types; + #[derive( + :: codec :: Decode, + :: codec :: Encode, + :: subxt :: ext :: codec :: CompactAs, + Clone, + Debug, + PartialEq, + )] + pub struct Slot(pub ::core::primitive::u64); + #[derive( + :: codec :: Decode, + :: codec :: Encode, + :: subxt :: ext :: codec :: CompactAs, + Clone, + Debug, + PartialEq, + )] + pub struct SlotDuration(pub ::core::primitive::u64); + } + pub mod sp_core { + use super::runtime_types; + pub mod crypto { + use super::runtime_types; + #[derive(:: codec :: Decode, 
:: codec :: Encode, Clone, Debug, PartialEq)] + pub struct KeyTypeId(pub [::core::primitive::u8; 4usize]); + } + pub mod ecdsa { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Signature(pub [::core::primitive::u8; 65usize]); + } + pub mod ed25519 { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Public(pub [::core::primitive::u8; 32usize]); + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Signature(pub [::core::primitive::u8; 64usize]); + } + pub mod sr25519 { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Public(pub [::core::primitive::u8; 32usize]); + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Signature(pub [::core::primitive::u8; 64usize]); + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct OpaqueMetadata(pub ::std::vec::Vec<::core::primitive::u8>); + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Void {} + } + pub mod sp_inherents { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct CheckInherentsResult { + pub okay: ::core::primitive::bool, + pub fatal_error: ::core::primitive::bool, + pub errors: runtime_types::sp_inherents::InherentData, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct InherentData { + pub data: ::subxt::utils::KeyedVec< + [::core::primitive::u8; 8usize], + ::std::vec::Vec<::core::primitive::u8>, + >, + } + } + pub mod sp_runtime { + use super::runtime_types; + pub mod generic { + use super::runtime_types; + pub mod block { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Block<_0, _1> { + 
pub header: _0, + pub extrinsics: ::std::vec::Vec<_1>, + } + } + pub mod digest { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum DigestItem { + #[codec(index = 6)] + PreRuntime( + [::core::primitive::u8; 4usize], + ::std::vec::Vec<::core::primitive::u8>, + ), + #[codec(index = 4)] + Consensus( + [::core::primitive::u8; 4usize], + ::std::vec::Vec<::core::primitive::u8>, + ), + #[codec(index = 5)] + Seal( + [::core::primitive::u8; 4usize], + ::std::vec::Vec<::core::primitive::u8>, + ), + #[codec(index = 0)] + Other(::std::vec::Vec<::core::primitive::u8>), + #[codec(index = 8)] + RuntimeEnvironmentUpdated, + } + } + } + pub mod transaction_validity { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum InvalidTransaction { + #[codec(index = 0)] + Call, + #[codec(index = 1)] + Payment, + #[codec(index = 2)] + Future, + #[codec(index = 3)] + Stale, + #[codec(index = 4)] + BadProof, + #[codec(index = 5)] + AncientBirthBlock, + #[codec(index = 6)] + ExhaustsResources, + #[codec(index = 7)] + Custom(::core::primitive::u8), + #[codec(index = 8)] + BadMandatory, + #[codec(index = 9)] + MandatoryValidation, + #[codec(index = 10)] + BadSigner, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum TransactionSource { + #[codec(index = 0)] + InBlock, + #[codec(index = 1)] + Local, + #[codec(index = 2)] + External, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum TransactionValidityError { + #[codec(index = 0)] + Invalid(runtime_types::sp_runtime::transaction_validity::InvalidTransaction), + #[codec(index = 1)] + Unknown(runtime_types::sp_runtime::transaction_validity::UnknownTransaction), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum UnknownTransaction { + #[codec(index = 0)] + CannotLookup, + #[codec(index = 1)] + 
NoUnsignedValidator, + #[codec(index = 2)] + Custom(::core::primitive::u8), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct ValidTransaction { + pub priority: ::core::primitive::u64, + pub requires: ::std::vec::Vec<::std::vec::Vec<::core::primitive::u8>>, + pub provides: ::std::vec::Vec<::std::vec::Vec<::core::primitive::u8>>, + pub longevity: ::core::primitive::u64, + pub propagate: ::core::primitive::bool, + } + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum DispatchError { + #[codec(index = 0)] + Other, + #[codec(index = 1)] + CannotLookup, + #[codec(index = 2)] + BadOrigin, + #[codec(index = 3)] + Module(runtime_types::sp_runtime::ModuleError), + #[codec(index = 4)] + ConsumerRemaining, + #[codec(index = 5)] + NoProviders, + #[codec(index = 6)] + TooManyConsumers, + #[codec(index = 7)] + Token(runtime_types::sp_runtime::TokenError), + #[codec(index = 8)] + Arithmetic(runtime_types::sp_arithmetic::ArithmeticError), + #[codec(index = 9)] + Transactional(runtime_types::sp_runtime::TransactionalError), + #[codec(index = 10)] + Exhausted, + #[codec(index = 11)] + Corruption, + #[codec(index = 12)] + Unavailable, + #[codec(index = 13)] + RootNotAllowed, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct ModuleError { + pub index: ::core::primitive::u8, + pub error: [::core::primitive::u8; 4usize], + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum MultiSignature { + #[codec(index = 0)] + Ed25519(runtime_types::sp_core::ed25519::Signature), + #[codec(index = 1)] + Sr25519(runtime_types::sp_core::sr25519::Signature), + #[codec(index = 2)] + Ecdsa(runtime_types::sp_core::ecdsa::Signature), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum TokenError { + #[codec(index = 0)] + FundsUnavailable, + #[codec(index = 1)] + OnlyProvider, + #[codec(index = 2)] + 
BelowMinimum, + #[codec(index = 3)] + CannotCreate, + #[codec(index = 4)] + UnknownAsset, + #[codec(index = 5)] + Frozen, + #[codec(index = 6)] + Unsupported, + #[codec(index = 7)] + CannotCreateHold, + #[codec(index = 8)] + NotExpendable, + #[codec(index = 9)] + Blocked, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum TransactionalError { + #[codec(index = 0)] + LimitReached, + #[codec(index = 1)] + NoLayer, + } + } + pub mod sp_trie { + use super::runtime_types; + pub mod storage_proof { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct StorageProof { + pub trie_nodes: ::std::vec::Vec<::std::vec::Vec<::core::primitive::u8>>, + } + } + } + pub mod sp_version { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct RuntimeVersion { + pub spec_name: ::std::string::String, + pub impl_name: ::std::string::String, + pub authoring_version: ::core::primitive::u32, + pub spec_version: ::core::primitive::u32, + pub impl_version: ::core::primitive::u32, + pub apis: + ::std::vec::Vec<([::core::primitive::u8; 8usize], ::core::primitive::u32)>, + pub transaction_version: ::core::primitive::u32, + pub state_version: ::core::primitive::u8, + } + } + pub mod sp_weights { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct RuntimeDbWeight { + pub read: ::core::primitive::u64, + pub write: ::core::primitive::u64, + } + } + pub mod staging_parachain_info { + use super::runtime_types; + pub mod pallet { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Call {} + } + } + pub mod staging_xcm { + use super::runtime_types; + pub mod v3 { + use super::runtime_types; + pub mod multilocation { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, 
PartialEq)] + pub struct MultiLocation { + pub parents: ::core::primitive::u8, + pub interior: runtime_types::xcm::v3::junctions::Junctions, + } + } + } + pub mod v4 { + use super::runtime_types; + pub mod asset { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Asset { + pub id: runtime_types::staging_xcm::v4::asset::AssetId, + pub fun: runtime_types::staging_xcm::v4::asset::Fungibility, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum AssetFilter { + #[codec(index = 0)] + Definite(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 1)] + Wild(runtime_types::staging_xcm::v4::asset::WildAsset), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct AssetId(pub runtime_types::staging_xcm::v4::location::Location); + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum AssetInstance { + #[codec(index = 0)] + Undefined, + #[codec(index = 1)] + Index(#[codec(compact)] ::core::primitive::u128), + #[codec(index = 2)] + Array4([::core::primitive::u8; 4usize]), + #[codec(index = 3)] + Array8([::core::primitive::u8; 8usize]), + #[codec(index = 4)] + Array16([::core::primitive::u8; 16usize]), + #[codec(index = 5)] + Array32([::core::primitive::u8; 32usize]), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Assets( + pub ::std::vec::Vec, + ); + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Fungibility { + #[codec(index = 0)] + Fungible(#[codec(compact)] ::core::primitive::u128), + #[codec(index = 1)] + NonFungible(runtime_types::staging_xcm::v4::asset::AssetInstance), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum WildAsset { + #[codec(index = 0)] + All, + #[codec(index = 1)] + AllOf { + id: runtime_types::staging_xcm::v4::asset::AssetId, + fun: 
runtime_types::staging_xcm::v4::asset::WildFungibility, + }, + #[codec(index = 2)] + AllCounted(#[codec(compact)] ::core::primitive::u32), + #[codec(index = 3)] + AllOfCounted { + id: runtime_types::staging_xcm::v4::asset::AssetId, + fun: runtime_types::staging_xcm::v4::asset::WildFungibility, + #[codec(compact)] + count: ::core::primitive::u32, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum WildFungibility { + #[codec(index = 0)] + Fungible, + #[codec(index = 1)] + NonFungible, + } + } + pub mod junction { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Junction { + #[codec(index = 0)] + Parachain(#[codec(compact)] ::core::primitive::u32), + #[codec(index = 1)] + AccountId32 { + network: ::core::option::Option< + runtime_types::staging_xcm::v4::junction::NetworkId, + >, + id: [::core::primitive::u8; 32usize], + }, + #[codec(index = 2)] + AccountIndex64 { + network: ::core::option::Option< + runtime_types::staging_xcm::v4::junction::NetworkId, + >, + #[codec(compact)] + index: ::core::primitive::u64, + }, + #[codec(index = 3)] + AccountKey20 { + network: ::core::option::Option< + runtime_types::staging_xcm::v4::junction::NetworkId, + >, + key: [::core::primitive::u8; 20usize], + }, + #[codec(index = 4)] + PalletInstance(::core::primitive::u8), + #[codec(index = 5)] + GeneralIndex(#[codec(compact)] ::core::primitive::u128), + #[codec(index = 6)] + GeneralKey { + length: ::core::primitive::u8, + data: [::core::primitive::u8; 32usize], + }, + #[codec(index = 7)] + OnlyChild, + #[codec(index = 8)] + Plurality { + id: runtime_types::xcm::v3::junction::BodyId, + part: runtime_types::xcm::v3::junction::BodyPart, + }, + #[codec(index = 9)] + GlobalConsensus(runtime_types::staging_xcm::v4::junction::NetworkId), + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum NetworkId { + #[codec(index = 0)] + 
ByGenesis([::core::primitive::u8; 32usize]), + #[codec(index = 1)] + ByFork { + block_number: ::core::primitive::u64, + block_hash: [::core::primitive::u8; 32usize], + }, + #[codec(index = 2)] + Polkadot, + #[codec(index = 3)] + Kusama, + #[codec(index = 4)] + Westend, + #[codec(index = 5)] + Rococo, + #[codec(index = 6)] + Wococo, + #[codec(index = 7)] + Ethereum { + #[codec(compact)] + chain_id: ::core::primitive::u64, + }, + #[codec(index = 8)] + BitcoinCore, + #[codec(index = 9)] + BitcoinCash, + #[codec(index = 10)] + PolkadotBulletin, + } + } + pub mod junctions { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Junctions { + #[codec(index = 0)] + Here, + #[codec(index = 1)] + X1([runtime_types::staging_xcm::v4::junction::Junction; 1usize]), + #[codec(index = 2)] + X2([runtime_types::staging_xcm::v4::junction::Junction; 2usize]), + #[codec(index = 3)] + X3([runtime_types::staging_xcm::v4::junction::Junction; 3usize]), + #[codec(index = 4)] + X4([runtime_types::staging_xcm::v4::junction::Junction; 4usize]), + #[codec(index = 5)] + X5([runtime_types::staging_xcm::v4::junction::Junction; 5usize]), + #[codec(index = 6)] + X6([runtime_types::staging_xcm::v4::junction::Junction; 6usize]), + #[codec(index = 7)] + X7([runtime_types::staging_xcm::v4::junction::Junction; 7usize]), + #[codec(index = 8)] + X8([runtime_types::staging_xcm::v4::junction::Junction; 8usize]), + } + } + pub mod location { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub struct Location { + pub parents: ::core::primitive::u8, + pub interior: runtime_types::staging_xcm::v4::junctions::Junctions, + } + } + pub mod traits { + use super::runtime_types; + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Outcome { + #[codec(index = 0)] + Complete { used: ::sp_weights::Weight }, + #[codec(index = 1)] + Incomplete { + used: 
::sp_weights::Weight, + error: runtime_types::xcm::v3::traits::Error, + }, + #[codec(index = 2)] + Error { error: runtime_types::xcm::v3::traits::Error }, + } + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Instruction { + #[codec(index = 0)] + WithdrawAsset(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 1)] + ReserveAssetDeposited(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 2)] + ReceiveTeleportedAsset(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 3)] + QueryResponse { + #[codec(compact)] + query_id: ::core::primitive::u64, + response: runtime_types::staging_xcm::v4::Response, + max_weight: ::sp_weights::Weight, + querier: ::core::option::Option< + runtime_types::staging_xcm::v4::location::Location, + >, + }, + #[codec(index = 4)] + TransferAsset { + assets: runtime_types::staging_xcm::v4::asset::Assets, + beneficiary: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 5)] + TransferReserveAsset { + assets: runtime_types::staging_xcm::v4::asset::Assets, + dest: runtime_types::staging_xcm::v4::location::Location, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 6)] + Transact { + origin_kind: runtime_types::xcm::v2::OriginKind, + require_weight_at_most: ::sp_weights::Weight, + call: runtime_types::xcm::double_encoded::DoubleEncoded, + }, + #[codec(index = 7)] + HrmpNewChannelOpenRequest { + #[codec(compact)] + sender: ::core::primitive::u32, + #[codec(compact)] + max_message_size: ::core::primitive::u32, + #[codec(compact)] + max_capacity: ::core::primitive::u32, + }, + #[codec(index = 8)] + HrmpChannelAccepted { + #[codec(compact)] + recipient: ::core::primitive::u32, + }, + #[codec(index = 9)] + HrmpChannelClosing { + #[codec(compact)] + initiator: ::core::primitive::u32, + #[codec(compact)] + sender: ::core::primitive::u32, + #[codec(compact)] + recipient: ::core::primitive::u32, + }, + #[codec(index = 10)] + 
ClearOrigin, + #[codec(index = 11)] + DescendOrigin(runtime_types::staging_xcm::v4::junctions::Junctions), + #[codec(index = 12)] + ReportError(runtime_types::staging_xcm::v4::QueryResponseInfo), + #[codec(index = 13)] + DepositAsset { + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + beneficiary: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 14)] + DepositReserveAsset { + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + dest: runtime_types::staging_xcm::v4::location::Location, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 15)] + ExchangeAsset { + give: runtime_types::staging_xcm::v4::asset::AssetFilter, + want: runtime_types::staging_xcm::v4::asset::Assets, + maximal: ::core::primitive::bool, + }, + #[codec(index = 16)] + InitiateReserveWithdraw { + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + reserve: runtime_types::staging_xcm::v4::location::Location, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 17)] + InitiateTeleport { + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + dest: runtime_types::staging_xcm::v4::location::Location, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 18)] + ReportHolding { + response_info: runtime_types::staging_xcm::v4::QueryResponseInfo, + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + }, + #[codec(index = 19)] + BuyExecution { + fees: runtime_types::staging_xcm::v4::asset::Asset, + weight_limit: runtime_types::xcm::v3::WeightLimit, + }, + #[codec(index = 20)] + RefundSurplus, + #[codec(index = 21)] + SetErrorHandler(runtime_types::staging_xcm::v4::Xcm), + #[codec(index = 22)] + SetAppendix(runtime_types::staging_xcm::v4::Xcm), + #[codec(index = 23)] + ClearError, + #[codec(index = 24)] + ClaimAsset { + assets: runtime_types::staging_xcm::v4::asset::Assets, + ticket: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 25)] + Trap(#[codec(compact)] 
::core::primitive::u64), + #[codec(index = 26)] + SubscribeVersion { + #[codec(compact)] + query_id: ::core::primitive::u64, + max_response_weight: ::sp_weights::Weight, + }, + #[codec(index = 27)] + UnsubscribeVersion, + #[codec(index = 28)] + BurnAsset(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 29)] + ExpectAsset(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 30)] + ExpectOrigin( + ::core::option::Option, + ), + #[codec(index = 31)] + ExpectError( + ::core::option::Option<( + ::core::primitive::u32, + runtime_types::xcm::v3::traits::Error, + )>, + ), + #[codec(index = 32)] + ExpectTransactStatus(runtime_types::xcm::v3::MaybeErrorCode), + #[codec(index = 33)] + QueryPallet { + module_name: ::std::vec::Vec<::core::primitive::u8>, + response_info: runtime_types::staging_xcm::v4::QueryResponseInfo, + }, + #[codec(index = 34)] + ExpectPallet { + #[codec(compact)] + index: ::core::primitive::u32, + name: ::std::vec::Vec<::core::primitive::u8>, + module_name: ::std::vec::Vec<::core::primitive::u8>, + #[codec(compact)] + crate_major: ::core::primitive::u32, + #[codec(compact)] + min_crate_minor: ::core::primitive::u32, + }, + #[codec(index = 35)] + ReportTransactStatus(runtime_types::staging_xcm::v4::QueryResponseInfo), + #[codec(index = 36)] + ClearTransactStatus, + #[codec(index = 37)] + UniversalOrigin(runtime_types::staging_xcm::v4::junction::Junction), + #[codec(index = 38)] + ExportMessage { + network: runtime_types::staging_xcm::v4::junction::NetworkId, + destination: runtime_types::staging_xcm::v4::junctions::Junctions, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 39)] + LockAsset { + asset: runtime_types::staging_xcm::v4::asset::Asset, + unlocker: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 40)] + UnlockAsset { + asset: runtime_types::staging_xcm::v4::asset::Asset, + target: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 41)] + 
NoteUnlockable { + asset: runtime_types::staging_xcm::v4::asset::Asset, + owner: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 42)] + RequestUnlock { + asset: runtime_types::staging_xcm::v4::asset::Asset, + locker: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 43)] + SetFeesMode { jit_withdraw: ::core::primitive::bool }, + #[codec(index = 44)] + SetTopic([::core::primitive::u8; 32usize]), + #[codec(index = 45)] + ClearTopic, + #[codec(index = 46)] + AliasOrigin(runtime_types::staging_xcm::v4::location::Location), + #[codec(index = 47)] + UnpaidExecution { + weight_limit: runtime_types::xcm::v3::WeightLimit, + check_origin: ::core::option::Option< + runtime_types::staging_xcm::v4::location::Location, + >, + }, + } + #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] + pub enum Instruction2 { + #[codec(index = 0)] + WithdrawAsset(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 1)] + ReserveAssetDeposited(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 2)] + ReceiveTeleportedAsset(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 3)] + QueryResponse { + #[codec(compact)] + query_id: ::core::primitive::u64, + response: runtime_types::staging_xcm::v4::Response, + max_weight: ::sp_weights::Weight, + querier: ::core::option::Option< + runtime_types::staging_xcm::v4::location::Location, + >, + }, + #[codec(index = 4)] + TransferAsset { + assets: runtime_types::staging_xcm::v4::asset::Assets, + beneficiary: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 5)] + TransferReserveAsset { + assets: runtime_types::staging_xcm::v4::asset::Assets, + dest: runtime_types::staging_xcm::v4::location::Location, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 6)] + Transact { + origin_kind: runtime_types::xcm::v2::OriginKind, + require_weight_at_most: ::sp_weights::Weight, + call: 
runtime_types::xcm::double_encoded::DoubleEncoded2, + }, + #[codec(index = 7)] + HrmpNewChannelOpenRequest { + #[codec(compact)] + sender: ::core::primitive::u32, + #[codec(compact)] + max_message_size: ::core::primitive::u32, + #[codec(compact)] + max_capacity: ::core::primitive::u32, + }, + #[codec(index = 8)] + HrmpChannelAccepted { + #[codec(compact)] + recipient: ::core::primitive::u32, + }, + #[codec(index = 9)] + HrmpChannelClosing { + #[codec(compact)] + initiator: ::core::primitive::u32, + #[codec(compact)] + sender: ::core::primitive::u32, + #[codec(compact)] + recipient: ::core::primitive::u32, + }, + #[codec(index = 10)] + ClearOrigin, + #[codec(index = 11)] + DescendOrigin(runtime_types::staging_xcm::v4::junctions::Junctions), + #[codec(index = 12)] + ReportError(runtime_types::staging_xcm::v4::QueryResponseInfo), + #[codec(index = 13)] + DepositAsset { + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + beneficiary: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 14)] + DepositReserveAsset { + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + dest: runtime_types::staging_xcm::v4::location::Location, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 15)] + ExchangeAsset { + give: runtime_types::staging_xcm::v4::asset::AssetFilter, + want: runtime_types::staging_xcm::v4::asset::Assets, + maximal: ::core::primitive::bool, + }, + #[codec(index = 16)] + InitiateReserveWithdraw { + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + reserve: runtime_types::staging_xcm::v4::location::Location, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 17)] + InitiateTeleport { + assets: runtime_types::staging_xcm::v4::asset::AssetFilter, + dest: runtime_types::staging_xcm::v4::location::Location, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 18)] + ReportHolding { + response_info: runtime_types::staging_xcm::v4::QueryResponseInfo, + assets: 
runtime_types::staging_xcm::v4::asset::AssetFilter, + }, + #[codec(index = 19)] + BuyExecution { + fees: runtime_types::staging_xcm::v4::asset::Asset, + weight_limit: runtime_types::xcm::v3::WeightLimit, + }, + #[codec(index = 20)] + RefundSurplus, + #[codec(index = 21)] + SetErrorHandler(runtime_types::staging_xcm::v4::Xcm2), + #[codec(index = 22)] + SetAppendix(runtime_types::staging_xcm::v4::Xcm2), + #[codec(index = 23)] + ClearError, + #[codec(index = 24)] + ClaimAsset { + assets: runtime_types::staging_xcm::v4::asset::Assets, + ticket: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 25)] + Trap(#[codec(compact)] ::core::primitive::u64), + #[codec(index = 26)] + SubscribeVersion { + #[codec(compact)] + query_id: ::core::primitive::u64, + max_response_weight: ::sp_weights::Weight, + }, + #[codec(index = 27)] + UnsubscribeVersion, + #[codec(index = 28)] + BurnAsset(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 29)] + ExpectAsset(runtime_types::staging_xcm::v4::asset::Assets), + #[codec(index = 30)] + ExpectOrigin( + ::core::option::Option, + ), + #[codec(index = 31)] + ExpectError( + ::core::option::Option<( + ::core::primitive::u32, + runtime_types::xcm::v3::traits::Error, + )>, + ), + #[codec(index = 32)] + ExpectTransactStatus(runtime_types::xcm::v3::MaybeErrorCode), + #[codec(index = 33)] + QueryPallet { + module_name: ::std::vec::Vec<::core::primitive::u8>, + response_info: runtime_types::staging_xcm::v4::QueryResponseInfo, + }, + #[codec(index = 34)] + ExpectPallet { + #[codec(compact)] + index: ::core::primitive::u32, + name: ::std::vec::Vec<::core::primitive::u8>, + module_name: ::std::vec::Vec<::core::primitive::u8>, + #[codec(compact)] + crate_major: ::core::primitive::u32, + #[codec(compact)] + min_crate_minor: ::core::primitive::u32, + }, + #[codec(index = 35)] + ReportTransactStatus(runtime_types::staging_xcm::v4::QueryResponseInfo), + #[codec(index = 36)] + ClearTransactStatus, + #[codec(index = 37)] + 
UniversalOrigin(runtime_types::staging_xcm::v4::junction::Junction), + #[codec(index = 38)] + ExportMessage { + network: runtime_types::staging_xcm::v4::junction::NetworkId, + destination: runtime_types::staging_xcm::v4::junctions::Junctions, + xcm: runtime_types::staging_xcm::v4::Xcm, + }, + #[codec(index = 39)] + LockAsset { + asset: runtime_types::staging_xcm::v4::asset::Asset, + unlocker: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 40)] + UnlockAsset { + asset: runtime_types::staging_xcm::v4::asset::Asset, + target: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 41)] + NoteUnlockable { + asset: runtime_types::staging_xcm::v4::asset::Asset, + owner: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 42)] + RequestUnlock { + asset: runtime_types::staging_xcm::v4::asset::Asset, + locker: runtime_types::staging_xcm::v4::location::Location, + }, + #[codec(index = 43)] + SetFeesMode { jit_withdraw: ::core::primitive::bool }, + #[codec(index = 44)] + SetTopic([::core::primitive::u8; 32usize]), + #[codec(index = 45)] + ClearTopic, + #[codec(index = 46)] + AliasOrigin(runtime_types::staging_xcm::v4::location::Location), + #[codec(index = 47)] + UnpaidExecution { + weight_limit: runtime_types::xcm::v3::WeightLimit, + check_origin: ::core::option::Option< + runtime_types::staging_xcm::v4::location::Location, + >, + }, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum TransactionSource { - #[codec(index = 0)] - InBlock, - #[codec(index = 1)] - Local, - #[codec(index = 2)] - External, + pub struct PalletInfo { + #[codec(compact)] + pub index: ::core::primitive::u32, + pub name: runtime_types::bounded_collections::bounded_vec::BoundedVec< + ::core::primitive::u8, + >, + pub module_name: runtime_types::bounded_collections::bounded_vec::BoundedVec< + ::core::primitive::u8, + >, + #[codec(compact)] + pub major: ::core::primitive::u32, + #[codec(compact)] + 
pub minor: ::core::primitive::u32, + #[codec(compact)] + pub patch: ::core::primitive::u32, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum TransactionValidityError { - #[codec(index = 0)] - Invalid(runtime_types::sp_runtime::transaction_validity::InvalidTransaction), - #[codec(index = 1)] - Unknown(runtime_types::sp_runtime::transaction_validity::UnknownTransaction), + pub struct QueryResponseInfo { + pub destination: runtime_types::staging_xcm::v4::location::Location, + #[codec(compact)] + pub query_id: ::core::primitive::u64, + pub max_weight: ::sp_weights::Weight, } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum UnknownTransaction { + pub enum Response { #[codec(index = 0)] - CannotLookup, + Null, #[codec(index = 1)] - NoUnsignedValidator, + Assets(runtime_types::staging_xcm::v4::asset::Assets), #[codec(index = 2)] - Custom(::core::primitive::u8), + ExecutionResult( + ::core::option::Option<( + ::core::primitive::u32, + runtime_types::xcm::v3::traits::Error, + )>, + ), + #[codec(index = 3)] + Version(::core::primitive::u32), + #[codec(index = 4)] + PalletsInfo( + runtime_types::bounded_collections::bounded_vec::BoundedVec< + runtime_types::staging_xcm::v4::PalletInfo, + >, + ), + #[codec(index = 5)] + DispatchResult(runtime_types::xcm::v3::MaybeErrorCode), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct ValidTransaction { - pub priority: ::core::primitive::u64, - pub requires: ::std::vec::Vec<::std::vec::Vec<::core::primitive::u8>>, - pub provides: ::std::vec::Vec<::std::vec::Vec<::core::primitive::u8>>, - pub longevity: ::core::primitive::u64, - pub propagate: ::core::primitive::bool, - } - } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum DispatchError { - #[codec(index = 0)] - Other, - #[codec(index = 1)] - CannotLookup, - #[codec(index = 2)] - BadOrigin, - #[codec(index = 3)] - 
Module(runtime_types::sp_runtime::ModuleError), - #[codec(index = 4)] - ConsumerRemaining, - #[codec(index = 5)] - NoProviders, - #[codec(index = 6)] - TooManyConsumers, - #[codec(index = 7)] - Token(runtime_types::sp_runtime::TokenError), - #[codec(index = 8)] - Arithmetic(runtime_types::sp_arithmetic::ArithmeticError), - #[codec(index = 9)] - Transactional(runtime_types::sp_runtime::TransactionalError), - #[codec(index = 10)] - Exhausted, - #[codec(index = 11)] - Corruption, - #[codec(index = 12)] - Unavailable, - #[codec(index = 13)] - RootNotAllowed, - } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct ModuleError { - pub index: ::core::primitive::u8, - pub error: [::core::primitive::u8; 4usize], - } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum MultiSignature { - #[codec(index = 0)] - Ed25519(runtime_types::sp_core::ed25519::Signature), - #[codec(index = 1)] - Sr25519(runtime_types::sp_core::sr25519::Signature), - #[codec(index = 2)] - Ecdsa(runtime_types::sp_core::ecdsa::Signature), - } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum TokenError { - #[codec(index = 0)] - FundsUnavailable, - #[codec(index = 1)] - OnlyProvider, - #[codec(index = 2)] - BelowMinimum, - #[codec(index = 3)] - CannotCreate, - #[codec(index = 4)] - UnknownAsset, - #[codec(index = 5)] - Frozen, - #[codec(index = 6)] - Unsupported, - #[codec(index = 7)] - CannotCreateHold, - #[codec(index = 8)] - NotExpendable, - #[codec(index = 9)] - Blocked, - } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum TransactionalError { - #[codec(index = 0)] - LimitReached, - #[codec(index = 1)] - NoLayer, - } - } - pub mod sp_trie { - use super::runtime_types; - pub mod storage_proof { - use super::runtime_types; + pub struct Xcm(pub ::std::vec::Vec); #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct 
StorageProof { - pub trie_nodes: ::std::vec::Vec<::std::vec::Vec<::core::primitive::u8>>, - } - } - } - pub mod sp_version { - use super::runtime_types; - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct RuntimeVersion { - pub spec_name: ::std::string::String, - pub impl_name: ::std::string::String, - pub authoring_version: ::core::primitive::u32, - pub spec_version: ::core::primitive::u32, - pub impl_version: ::core::primitive::u32, - pub apis: - ::std::vec::Vec<([::core::primitive::u8; 8usize], ::core::primitive::u32)>, - pub transaction_version: ::core::primitive::u32, - pub state_version: ::core::primitive::u8, - } - } - pub mod sp_weights { - use super::runtime_types; - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct RuntimeDbWeight { - pub read: ::core::primitive::u64, - pub write: ::core::primitive::u64, - } - } - pub mod staging_xcm { - use super::runtime_types; - pub mod v3 { - use super::runtime_types; - pub mod multilocation { - use super::runtime_types; - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub struct MultiLocation { - pub parents: ::core::primitive::u8, - pub interior: runtime_types::xcm::v3::junctions::Junctions, - } - } + pub struct Xcm2(pub ::std::vec::Vec); } } pub mod xcm { @@ -3279,6 +4881,8 @@ pub mod api { BitcoinCore, #[codec(index = 9)] BitcoinCash, + #[codec(index = 10)] + PolkadotBulletin, } } pub mod junctions { @@ -3506,15 +5110,6 @@ pub mod api { #[codec(index = 39)] ExceedsStackLimit, } - #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum Outcome { - #[codec(index = 0)] - Complete(::sp_weights::Weight), - #[codec(index = 1)] - Incomplete(::sp_weights::Weight, runtime_types::xcm::v3::traits::Error), - #[codec(index = 2)] - Error(runtime_types::xcm::v3::traits::Error), - } } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum Instruction { @@ -4015,20 
+5610,26 @@ pub mod api { pub enum VersionedAssetId { #[codec(index = 3)] V3(runtime_types::xcm::v3::multiasset::AssetId), + #[codec(index = 4)] + V4(runtime_types::staging_xcm::v4::asset::AssetId), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum VersionedMultiAssets { + pub enum VersionedAssets { #[codec(index = 1)] V2(runtime_types::xcm::v2::multiasset::MultiAssets), #[codec(index = 3)] V3(runtime_types::xcm::v3::multiasset::MultiAssets), + #[codec(index = 4)] + V4(runtime_types::staging_xcm::v4::asset::Assets), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] - pub enum VersionedMultiLocation { + pub enum VersionedLocation { #[codec(index = 1)] V2(runtime_types::xcm::v2::multilocation::MultiLocation), #[codec(index = 3)] V3(runtime_types::staging_xcm::v3::multilocation::MultiLocation), + #[codec(index = 4)] + V4(runtime_types::staging_xcm::v4::location::Location), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum VersionedResponse { @@ -4036,6 +5637,8 @@ pub mod api { V2(runtime_types::xcm::v2::Response), #[codec(index = 3)] V3(runtime_types::xcm::v3::Response), + #[codec(index = 4)] + V4(runtime_types::staging_xcm::v4::Response), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum VersionedXcm { @@ -4043,6 +5646,8 @@ pub mod api { V2(runtime_types::xcm::v2::Xcm), #[codec(index = 3)] V3(runtime_types::xcm::v3::Xcm), + #[codec(index = 4)] + V4(runtime_types::staging_xcm::v4::Xcm), } #[derive(:: codec :: Decode, :: codec :: Encode, Clone, Debug, PartialEq)] pub enum VersionedXcm2 { @@ -4050,6 +5655,8 @@ pub mod api { V2(runtime_types::xcm::v2::Xcm2), #[codec(index = 3)] V3(runtime_types::xcm::v3::Xcm2), + #[codec(index = 4)] + V4(runtime_types::staging_xcm::v4::Xcm2), } } } diff --git a/relay-clients/client-bridge-hub-polkadot/src/lib.rs b/relay-clients/client-bridge-hub-polkadot/src/lib.rs index 88b69065f..ba55573bb 100644 --- 
a/relay-clients/client-bridge-hub-polkadot/src/lib.rs +++ b/relay-clients/client-bridge-hub-polkadot/src/lib.rs @@ -127,5 +127,5 @@ impl ChainWithMessages for BridgeHubPolkadot { impl ChainWithRuntimeVersion for BridgeHubPolkadot { const RUNTIME_VERSION: Option = - Some(SimpleRuntimeVersion { spec_version: 1_001_000, transaction_version: 3 }); + Some(SimpleRuntimeVersion { spec_version: 1_002_000, transaction_version: 3 }); } diff --git a/scripts/regenerate_runtimes.sh b/scripts/regenerate_runtimes.sh index 6100c79d7..0a97e2b6c 100755 --- a/scripts/regenerate_runtimes.sh +++ b/scripts/regenerate_runtimes.sh @@ -13,8 +13,9 @@ cargo run --bin runtime-codegen -- --from-node-url "wss://rpc.polkadot.io:443" > # TODO: there is a bug, probably needs to update subxt, generates: `::sp_runtime::generic::Header<::core::primitive::u32>` withtout second `Hash` parameter. # cargo run --bin runtime-codegen -- --from-wasm-file ../../../polkadot-sdk/target/release/wbuild/bridge-hub-rococo-runtime/bridge_hub_rococo_runtime.compact.compressed.wasm > ../../relays/client-bridge-hub-rococo/src/codegen_runtime.rs # cargo run --bin runtime-codegen -- --from-wasm-file ../../../polkadot-sdk/target/release/wbuild/bridge-hub-westend-runtime/bridge_hub_westend_runtime.compact.compressed.wasm > ../../relays/client-bridge-hub-westend/src/codegen_runtime.rs -# OR +# OR for production runtimes: # cargo run --bin runtime-codegen -- --from-node-url wss://kusama-bridge-hub-rpc.polkadot.io/ > ../../relay-clients/client-bridge-hub-kusama/src/codegen_runtime.rs +# cargo run --bin runtime-codegen -- --from-node-url wss://polkadot-bridge-hub-rpc.polkadot.io/ > ../../relay-clients/client-bridge-hub-polkadot/src/codegen_runtime.rs cd - cargo fmt --all diff --git a/substrate-relay/Cargo.toml b/substrate-relay/Cargo.toml index 3be1033aa..022296d92 100644 --- a/substrate-relay/Cargo.toml +++ b/substrate-relay/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "substrate-relay" -version = "1.3.0" +version = "1.4.0" 
authors.workspace = true edition.workspace = true license = "GPL-3.0-or-later WITH Classpath-exception-2.0" -- GitLab From f14e95c403d4772656b7e1604d51272c0bfe0cee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Apr 2024 01:44:40 +0000 Subject: [PATCH 39/39] Bump thiserror from 1.0.58 to 1.0.59 Bumps [thiserror](https://github.com/dtolnay/thiserror) from 1.0.58 to 1.0.59. - [Release notes](https://github.com/dtolnay/thiserror/releases) - [Commits](https://github.com/dtolnay/thiserror/compare/1.0.58...1.0.59) --- updated-dependencies: - dependency-name: thiserror dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- Cargo.lock | 8 ++++---- Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5ddf1c956..f82543d8d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9640,18 +9640,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.58" +version = "1.0.59" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297" +checksum = "f0126ad08bff79f29fc3ae6a55cc72352056dfff61e3ff8bb7129476d44b23aa" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.58" +version = "1.0.59" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" +checksum = "d1cd413b5d558b4c5bf3680e324a6fa5014e7b7c067a51e69dbdf47eb7148b66" dependencies = [ "proc-macro2 1.0.81", "quote 1.0.36", diff --git a/Cargo.toml b/Cargo.toml index 06758ff73..2fe9952f9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -55,4 +55,4 @@ log = { version = "0.4.20", default-features = false } quote = { version = "1.0.36" } serde = { version = "1.0.197", default-features = false } serde_json = { version = "1.0.115", default-features = 
false } -thiserror = { version = "1.0.58" } +thiserror = { version = "1.0.59" } -- GitLab