Unverified Commit 96af6ead authored by asynchronous rob, committed by GitHub

Refactor primitives (#1383)

* create a v1 primitives module

* Improve guide on availability types

* punctuate

* new parachains runtime uses new primitives

* tests of new runtime now use new primitives

* add ErasureChunk to guide

* export erasure chunk from v1 primitives

* subsystem crate uses v1 primitives

* node-primitives uses new v1 primitives

* port overseer to new primitives

* new-proposer uses v1 primitives (no ParachainHost anymore)

* fix no-std compilation for primitives

* service-new uses v1 primitives

* network-bridge uses new primitives

* statement distribution uses v1 primitives

* PoV distribution uses v1 primitives; add PoV::hash fn

* move parachain to v0

* remove inclusion_inherent module and place into v1

* remove everything from primitives crate root

* remove some unused old types from v0 primitives

* point everything else at primitives::v0

* squanch some warns up

* add RuntimeDebug import to no-std as well

* port over statement-table and validation

* fix final errors in validation and node-primitives

* add dummy Ord impl to committed candidate receipt

* guide: update CandidateValidationMessage

* add primitive for ValidationOutputs

* expand CandidateValidationMessage further

* bikeshed

* add some impls to omitted-validation-data and available-data

* expand CandidateValidationMessage

* make erasure-coding generic over v1/v0

* update usages of erasure-coding

* implement commitments.hash()

* use Arc<PoV> for CandidateValidation

* improve new erasure-coding method names

* fix up candidate backing

* update docs a bit

* fix most tests and add short-circuiting to make_pov_available

* fix remainder of candidate backing tests

* squanching warns

* squanch it up

* some fallout

* overseer fallout

* free from polkadot-test-service hell
parent 8845df22
Pipeline #100069 passed with stages in 25 minutes and 41 seconds
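
Most of the hunks below are the same mechanical migration: types that used to hang off `polkadot_primitives::parachain` are now reached flat through an explicit `v0` module, while the new parachains runtime and node code imports from `v1`. A rough before/after sketch, using only paths that appear in the hunks below:

	// Old layout: parachain types lived in a `parachain` submodule.
	//     use polkadot_primitives::{Block, Hash, parachain::{ValidatorId, PoVBlock}};
	//
	// New layout: the legacy types are re-exported flat under `v0`, and the new
	// code paths import from `v1` instead.
	use polkadot_primitives::v0::{Block, Hash, PoVBlock, ValidatorId};
	use polkadot_primitives::v1::INCLUSION_INHERENT_IDENTIFIER;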
@@ -25,12 +25,10 @@
 use futures::prelude::*;
 use futures::channel::{mpsc, oneshot};
 use keystore::KeyStorePtr;
-use polkadot_primitives::{
+use polkadot_primitives::v0::{
 	Hash, Block,
-	parachain::{
-		PoVBlock, AbridgedCandidateReceipt, ErasureChunk,
-		ParachainHost, AvailableData, OmittedValidationData,
-	},
+	PoVBlock, AbridgedCandidateReceipt, ErasureChunk,
+	ParachainHost, AvailableData, OmittedValidationData,
 };
 use sp_runtime::traits::HashFor;
 use sp_blockchain::Result as ClientResult;
...
@@ -19,11 +19,8 @@
 use kvdb_rocksdb::{Database, DatabaseConfig};
 use kvdb::{KeyValueDB, DBTransaction};
 use codec::{Encode, Decode};
 use polkadot_erasure_coding as erasure;
-use polkadot_primitives::{
-	Hash,
-	parachain::{
-		ErasureChunk, AvailableData, AbridgedCandidateReceipt,
-	},
+use polkadot_primitives::v0::{
+	Hash, ErasureChunk, AvailableData, AbridgedCandidateReceipt,
 };
 use parking_lot::Mutex;
@@ -273,7 +270,7 @@ impl Store {
 		// If there are no block data in the store at this point,
 		// check that they can be reconstructed now and add them to store if they can.
 		if self.execution_data(&candidate_hash).is_none() {
-			if let Ok(available_data) = erasure::reconstruct(
+			if let Ok(available_data) = erasure::reconstruct_v0(
 				n_validators as usize,
 				v.iter().map(|chunk| (chunk.chunk.as_ref(), chunk.index as usize)),
 			)
@@ -390,7 +387,7 @@ impl Store {
 mod tests {
 	use super::*;
 	use polkadot_erasure_coding::{self as erasure};
-	use polkadot_primitives::parachain::{
+	use polkadot_primitives::v0::{
 		Id as ParaId, BlockData, AvailableData, PoVBlock, OmittedValidationData,
 	};
@@ -489,7 +486,7 @@ mod tests {
 		let available_data = available_data(&[42; 8]);
 		let n_validators = 5;
-		let erasure_chunks = erasure::obtain_chunks(
+		let erasure_chunks = erasure::obtain_chunks_v0(
 			n_validators,
 			&available_data,
 		).unwrap();
...
@@ -33,8 +33,8 @@ use consensus_common::{
 	import_queue::CacheKeyId,
 };
 use sp_core::traits::SpawnNamed;
-use polkadot_primitives::{Block, BlockId, Hash};
-use polkadot_primitives::parachain::{
+use polkadot_primitives::v0::{
+	Block, BlockId, Hash,
 	ParachainHost, ValidatorId, AbridgedCandidateReceipt, AvailableData,
 	ValidatorPair, ErasureChunk,
 };
...
@@ -55,12 +55,11 @@ use log::warn;
 use sc_client_api::{StateBackend, BlockchainEvents};
 use sp_blockchain::HeaderBackend;
 use sp_core::Pair;
-use polkadot_primitives::{
+use polkadot_primitives::v0::{
 	BlockId, Hash, Block, DownwardMessage,
-	parachain::{
-		self, BlockData, DutyRoster, HeadData, Id as ParaId,
-		PoVBlock, ValidatorId, CollatorPair, LocalValidationData, GlobalValidationSchedule,
-	}
+	BlockData, DutyRoster, HeadData, Id as ParaId,
+	PoVBlock, ValidatorId, CollatorPair, LocalValidationData, GlobalValidationSchedule,
+	Collation, CollationInfo, collator_signature_payload,
 };
 use polkadot_cli::{
 	ProvideRuntimeApi, ParachainHost, IdentifyVariant,
@@ -69,7 +68,7 @@ use polkadot_cli::{
 pub use polkadot_cli::service::Configuration;
 pub use polkadot_cli::Cli;
 pub use polkadot_validation::SignedStatement;
-pub use polkadot_primitives::parachain::CollatorId;
+pub use polkadot_primitives::v0::CollatorId;
 pub use sc_network::PeerId;
 pub use service::RuntimeApiCollection;
 pub use sc_cli::SubstrateCli;
@@ -164,7 +163,7 @@ pub async fn collate<P>(
 	downward_messages: Vec<DownwardMessage>,
 	mut para_context: P,
 	key: Arc<CollatorPair>,
-) -> Option<parachain::Collation>
+) -> Option<Collation>
 	where
 		P: ParachainContext,
 		P::ProduceCandidate: Send,
@@ -181,13 +180,13 @@ pub async fn collate<P>(
 	};
 	let pov_block_hash = pov_block.hash();
-	let signature = key.sign(&parachain::collator_signature_payload(
+	let signature = key.sign(&collator_signature_payload(
 		&relay_parent,
 		&local_id,
 		&pov_block_hash,
 	));
-	let info = parachain::CollationInfo {
+	let info = CollationInfo {
 		parachain_index: local_id,
 		relay_parent,
 		collator: key.public(),
@@ -196,7 +195,7 @@ pub async fn collate<P>(
 		pov_block_hash,
 	};
-	let collation = parachain::Collation {
+	let collation = Collation {
 		info,
 		pov: pov_block,
 	};
@@ -456,7 +455,7 @@
 #[cfg(not(feature = "service-rewr"))]
 fn compute_targets(para_id: ParaId, session_keys: &[ValidatorId], roster: DutyRoster) -> HashSet<ValidatorId> {
-	use polkadot_primitives::parachain::Chain;
+	use polkadot_primitives::v0::Chain;
 	roster.validator_duty.iter().enumerate()
 		.filter(|&(_, c)| c == &Chain::Parachain(para_id))
...
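
For context on the collator hunks above, here is a minimal sketch of signing and checking the collation payload with the flattened `v0` exports. It assumes `collator_signature_payload` takes the three references exactly as shown in the hunk and that `CollatorPair` follows the usual `sp_core::Pair` interface; the inputs are placeholders, not real chain data.

	use polkadot_primitives::v0::{collator_signature_payload, CollatorPair, Hash, Id as ParaId};
	use sp_core::Pair;

	fn main() {
		// Placeholder inputs; a real collator gets these from the relay chain and its PoV.
		let (key, _) = CollatorPair::generate();
		let relay_parent = Hash::default();
		let local_id = ParaId::from(100u32);
		let pov_block_hash = Hash::default();

		// The same payload `collate` signs in the hunk above.
		let payload = collator_signature_payload(&relay_parent, &local_id, &pov_block_hash);
		let signature = key.sign(&payload[..]);

		// Anyone holding the collator's public key can check the signature.
		assert!(CollatorPair::verify(&signature, &payload[..], &key.public()));
	}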
@@ -94,3 +94,8 @@ pub enum DownwardMessage<AccountId = crate::AccountId> {
 	/// XCMP message for the Parachain.
 	XCMPMessage(sp_std::vec::Vec<u8>),
 }
+
+/// V1 primitives.
+pub mod v1 {
+	pub use super::*;
+}
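
The `pub use super::*;` above makes `v1` a pure alias for the existing definitions rather than a copy, so callers can migrate to the versioned path without any type conversions. A tiny illustration (module paths here are hypothetical; only the aliasing behaviour is the point):

	// Hypothetical paths: `DownwardMessage` under the crate root and under `v1`
	// are literally the same item because of the `pub use super::*;` re-export.
	use some_primitives_crate::DownwardMessage as Root;
	use some_primitives_crate::v1::DownwardMessage as ViaV1;

	fn no_conversion_needed(msg: Root) -> ViaV1 {
		msg // identical types, so this is a no-op
	}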
@@ -26,8 +26,8 @@
 use codec::{Encode, Decode};
 use reed_solomon::galois_16::{self, ReedSolomon};
-use primitives::{Hash as H256, BlakeTwo256, HashT};
-use primitives::parachain::AvailableData;
+use primitives::v0::{self, Hash as H256, BlakeTwo256, HashT};
+use primitives::v1;
 use sp_core::Blake2Hasher;
 use trie::{EMPTY_PREFIX, MemoryDB, Trie, TrieMut, trie_types::{TrieDBMut, TrieDB}};
@@ -124,14 +124,32 @@ fn code_params(n_validators: usize) -> Result<CodeParams, Error> {
 	})
 }
 
+/// Obtain erasure-coded chunks for v0 `AvailableData`, one for each validator.
+///
+/// Works only up to 65536 validators, and `n_validators` must be non-zero.
+pub fn obtain_chunks_v0(n_validators: usize, data: &v0::AvailableData)
+	-> Result<Vec<Vec<u8>>, Error>
+{
+	obtain_chunks(n_validators, data)
+}
+
+/// Obtain erasure-coded chunks for v1 `AvailableData`, one for each validator.
+///
+/// Works only up to 65536 validators, and `n_validators` must be non-zero.
+pub fn obtain_chunks_v1(n_validators: usize, data: &v1::AvailableData)
+	-> Result<Vec<Vec<u8>>, Error>
+{
+	obtain_chunks(n_validators, data)
+}
+
 /// Obtain erasure-coded chunks, one for each validator.
 ///
 /// Works only up to 65536 validators, and `n_validators` must be non-zero.
-pub fn obtain_chunks(n_validators: usize, available_data: &AvailableData)
+fn obtain_chunks<T: Encode>(n_validators: usize, data: &T)
 	-> Result<Vec<Vec<u8>>, Error>
 {
 	let params = code_params(n_validators)?;
-	let encoded = available_data.encode();
+	let encoded = data.encode();
 
 	if encoded.is_empty() {
 		return Err(Error::BadPayload);
@@ -145,15 +163,42 @@ pub fn obtain_chunks(n_validators: usize, available_data: &AvailableData)
 	Ok(shards.into_iter().map(|w| w.into_inner()).collect())
 }
 
-/// Reconstruct the block data from a set of chunks.
+/// Reconstruct the v0 available data from a set of chunks.
+///
+/// Provide an iterator containing chunk data and the corresponding index.
+/// The indices of the present chunks must be indicated. If too few chunks
+/// are provided, recovery is not possible.
+///
+/// Works only up to 65536 validators, and `n_validators` must be non-zero.
+pub fn reconstruct_v0<'a, I: 'a>(n_validators: usize, chunks: I)
+	-> Result<v0::AvailableData, Error>
+	where I: IntoIterator<Item=(&'a [u8], usize)>
+{
+	reconstruct(n_validators, chunks)
+}
+
+/// Reconstruct the v1 available data from a set of chunks.
+///
+/// Provide an iterator containing chunk data and the corresponding index.
+/// The indices of the present chunks must be indicated. If too few chunks
+/// are provided, recovery is not possible.
+///
+/// Works only up to 65536 validators, and `n_validators` must be non-zero.
+pub fn reconstruct_v1<'a, I: 'a>(n_validators: usize, chunks: I)
	-> Result<v1::AvailableData, Error>
+	where I: IntoIterator<Item=(&'a [u8], usize)>
+{
+	reconstruct(n_validators, chunks)
+}
+
+/// Reconstruct decodable data from a set of chunks.
 ///
 /// Provide an iterator containing chunk data and the corresponding index.
 /// The indices of the present chunks must be indicated. If too few chunks
 /// are provided, recovery is not possible.
 ///
 /// Works only up to 65536 validators, and `n_validators` must be non-zero.
-pub fn reconstruct<'a, I: 'a>(n_validators: usize, chunks: I)
-	-> Result<AvailableData, Error>
+fn reconstruct<'a, I: 'a, T: Decode>(n_validators: usize, chunks: I) -> Result<T, Error>
 	where I: IntoIterator<Item=(&'a [u8], usize)>
 {
 	let params = code_params(n_validators)?;
@@ -343,7 +388,7 @@ impl<'a, I: Iterator<Item=&'a [u8]>> codec::Input for ShardInput<'a, I> {
 #[cfg(test)]
 mod tests {
 	use super::*;
-	use primitives::parachain::{BlockData, PoVBlock};
+	use primitives::v0::{AvailableData, BlockData, PoVBlock};
 
 	#[test]
 	fn field_order_is_right_size() {
@@ -420,7 +465,7 @@ mod tests {
 		assert_eq!(chunks.len(), 10);
 		// any 4 chunks should work.
-		let reconstructed = reconstruct(
+		let reconstructed: AvailableData = reconstruct(
 			10,
 			[
 				(&*chunks[1], 1),
...
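
A short sketch of how the new version-suffixed entry points are meant to be used, mirroring the availability-store usage earlier in this diff. The `polkadot_erasure_coding` alias and the `v0::AvailableData` type come from the hunks above; how the available data itself is assembled is left to the caller here.

	use polkadot_erasure_coding as erasure;
	use polkadot_primitives::v0::AvailableData;

	fn round_trip(available_data: &AvailableData, n_validators: usize) -> AvailableData {
		// One erasure-coded chunk per validator; the payload is SCALE-encoded internally.
		let chunks = erasure::obtain_chunks_v0(n_validators, available_data)
			.expect("non-empty payload and 0 < n_validators <= 65536");

		// Reconstruction needs only a sufficient subset of (chunk, index) pairs;
		// here every chunk is fed back in for simplicity.
		erasure::reconstruct_v0(
			n_validators,
			chunks.iter().enumerate().map(|(i, chunk)| (&chunk[..], i)),
		).expect("enough chunks were supplied")
	}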
@@ -17,8 +17,7 @@
 //! Bridge between the network and consensus service for getting collations to it.
 
 use codec::{Encode, Decode};
-use polkadot_primitives::Hash;
-use polkadot_primitives::parachain::{CollatorId, Id as ParaId, Collation};
+use polkadot_primitives::v0::{Hash, CollatorId, Id as ParaId, Collation};
 use sc_network::PeerId;
 use futures::channel::oneshot;
@@ -236,7 +235,7 @@ impl CollatorPool {
 mod tests {
 	use super::*;
 	use sp_core::crypto::UncheckedInto;
-	use polkadot_primitives::parachain::{CollationInfo, BlockData, PoVBlock};
+	use polkadot_primitives::v0::{CollationInfo, BlockData, PoVBlock};
 	use futures::executor::block_on;
 
 	fn make_pov(block_data: Vec<u8>) -> PoVBlock {
...
@@ -33,7 +33,7 @@
 use sc_network_gossip::{ValidationResult as GossipValidationResult};
 use sc_network::ReputationChange;
 use polkadot_validation::GenericStatement;
-use polkadot_primitives::Hash;
+use polkadot_primitives::v0::Hash;
 use std::collections::HashMap;
...
@@ -58,8 +58,8 @@ use sc_network_gossip::{
 	ValidatorContext, MessageIntent,
 };
 use polkadot_validation::{SignedStatement};
-use polkadot_primitives::{Block, Hash};
-use polkadot_primitives::parachain::{
+use polkadot_primitives::v0::{
+	Block, Hash,
 	ParachainHost, ValidatorId, ErasureChunk as PrimitiveChunk, SigningContext, PoVBlock,
 };
 use polkadot_erasure_coding::{self as erasure};
@@ -755,7 +755,7 @@ mod tests {
 	use sc_network_gossip::Validator as ValidatorT;
 	use std::sync::mpsc;
 	use parking_lot::Mutex;
-	use polkadot_primitives::parachain::{AbridgedCandidateReceipt, BlockData};
+	use polkadot_primitives::v0::{AbridgedCandidateReceipt, BlockData};
 	use sp_core::sr25519::Signature as Sr25519Signature;
 	use polkadot_validation::GenericStatement;
...
@@ -19,7 +19,7 @@
 //! Collations are attempted to be repropagated when a new validator connects,
 //! a validator changes his session key, or when they are generated.
 
-use polkadot_primitives::{Hash, parachain::{ValidatorId}};
+use polkadot_primitives::v0::{Hash, ValidatorId};
 use crate::legacy::collator_pool::Role;
 use std::collections::{HashMap, HashSet};
 use std::time::Duration;
@@ -144,7 +144,7 @@ impl<C: Clone> LocalCollations<C> {
 mod tests {
 	use super::*;
 	use sp_core::crypto::UncheckedInto;
-	use polkadot_primitives::parachain::ValidatorId;
+	use polkadot_primitives::v0::ValidatorId;
 
 	#[test]
 	fn add_validator_with_ready_collation() {
...
@@ -25,7 +25,7 @@ pub mod gossip;
 use codec::Decode;
 use futures::prelude::*;
-use polkadot_primitives::Hash;
+use polkadot_primitives::v0::Hash;
 use sc_network::PeerId;
 use sc_network_gossip::TopicNotification;
 use log::debug;
...
@@ -21,7 +21,7 @@
 #![recursion_limit="256"]
 
-use polkadot_primitives::{Block, Hash, BlakeTwo256, HashT};
+use polkadot_primitives::v0::{Block, Hash, BlakeTwo256, HashT};
 
 pub mod legacy;
 pub mod protocol;
...
@@ -30,12 +30,10 @@ use futures::task::{Context, Poll};
 use futures::stream::{FuturesUnordered, StreamFuture};
 use log::{debug, trace};
-use polkadot_primitives::{
+use polkadot_primitives::v0::{
 	Hash, Block,
-	parachain::{
-		PoVBlock, ValidatorId, ValidatorIndex, Collation, AbridgedCandidateReceipt,
-		ErasureChunk, ParachainHost, Id as ParaId, CollatorId,
-	},
+	PoVBlock, ValidatorId, ValidatorIndex, Collation, AbridgedCandidateReceipt,
+	ErasureChunk, ParachainHost, Id as ParaId, CollatorId,
 };
 use polkadot_validation::{
 	SharedTable, TableRouter, Network as ParachainNetwork, Validated, GenericStatement, Collators,
...
@@ -17,8 +17,8 @@ use super::*;
 use crate::legacy::gossip::GossipPoVBlock;
 use parking_lot::Mutex;
 
-use polkadot_primitives::Block;
-use polkadot_primitives::parachain::{
+use polkadot_primitives::v0::{
+	Block,
 	Id as ParaId, Chain, DutyRoster, ParachainHost, ValidatorId,
 	Retriable, CollatorId, AbridgedCandidateReceipt,
 	GlobalValidationSchedule, LocalValidationData, ErasureChunk, SigningContext,
@@ -198,7 +198,7 @@ sp_api::mock_impl_runtime_apis! {
 			parent_hash: Default::default(),
 		}
 	}
 
-	fn downward_messages(_: ParaId) -> Vec<polkadot_primitives::DownwardMessage> {
+	fn downward_messages(_: ParaId) -> Vec<polkadot_primitives::v0::DownwardMessage> {
 		Vec::new()
 	}
 }
...
This diff is collapsed.
@@ -2,9 +2,7 @@ use futures::prelude::*;
 use futures::select;
 use polkadot_node_subsystem::{messages::{AllMessages, ProvisionerInherentData, ProvisionerMessage}, SubsystemError};
 use polkadot_overseer::OverseerHandler;
-use polkadot_primitives::{
-	inclusion_inherent,
-	parachain::ParachainHost,
+use polkadot_primitives::v1::{
 	Block, Hash, Header,
 };
 use sc_block_builder::{BlockBuilderApi, BlockBuilderProvider};
@@ -53,7 +51,7 @@
 		+ Send
 		+ Sync,
 	Client::Api:
-		ParachainHost<Block> + BlockBuilderApi<Block> + ApiExt<Block, Error = sp_blockchain::Error>,
+		BlockBuilderApi<Block> + ApiExt<Block, Error = sp_blockchain::Error>,
 	Backend:
 		'static + sc_client_api::Backend<Block, State = sp_api::StateBackendFor<Client, Block>>,
 	// Rust bug: https://github.com/rust-lang/rust/issues/24159
@@ -104,7 +102,7 @@
 		+ Send
 		+ Sync,
 	Client::Api:
-		ParachainHost<Block> + BlockBuilderApi<Block> + ApiExt<Block, Error = sp_blockchain::Error>,
+		BlockBuilderApi<Block> + ApiExt<Block, Error = sp_blockchain::Error>,
 	Backend:
 		'static + sc_client_api::Backend<Block, State = sp_api::StateBackendFor<Client, Block>>,
 	// Rust bug: https://github.com/rust-lang/rust/issues/24159
@@ -155,7 +153,7 @@
 		+ Send
 		+ Sync,
 	Client::Api:
-		ParachainHost<Block> + BlockBuilderApi<Block> + ApiExt<Block, Error = sp_blockchain::Error>,
+		BlockBuilderApi<Block> + ApiExt<Block, Error = sp_blockchain::Error>,
 	Backend:
 		'static + sc_client_api::Backend<Block, State = sp_api::StateBackendFor<Client, Block>>,
 	// Rust bug: https://github.com/rust-lang/rust/issues/24159
@@ -186,7 +184,7 @@
 	};
 
 	inherent_data.put_data(
-		inclusion_inherent::INHERENT_IDENTIFIER,
+		polkadot_primitives::v1::INCLUSION_INHERENT_IDENTIFIER,
 		&provisioner_data,
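
To round off the proposer change, here is a sketch of how the provisioner output ends up in the inherent data under the renamed identifier. `InherentData::put_data` and `INCLUSION_INHERENT_IDENTIFIER` are what the hunk above uses; the exact contents of `ProvisionerInherentData` are assumed to be SCALE-encodable, as in the surrounding code.

	use polkadot_node_subsystem::messages::ProvisionerInherentData;
	use polkadot_primitives::v1::INCLUSION_INHERENT_IDENTIFIER;
	use sp_inherents::InherentData;

	fn attach_inclusion_inherent(
		provisioner_data: &ProvisionerInherentData,
		inherent_data: &mut InherentData,
	) -> Result<(), sp_inherents::Error> {
		// Stores the provisioner's output under the identifier the runtime's
		// inclusion inherent looks up when building the block.
		inherent_data.put_data(INCLUSION_INHERENT_IDENTIFIER, provisioner_data)
	}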