Commit 741f4834 authored by Bastian Köcher, committed by asynchronous rob

Update to latest Substrate master (#443)

parent 1a2bf251
@@ -306,7 +306,7 @@ impl<P, E> Worker for CollationNode<P, E> where
};
let is_known = move |block_hash: &Hash| {
use client::BlockStatus;
use consensus_common::BlockStatus;
use polkadot_network::gossip::Known;
match known_oracle.block_status(&BlockId::hash(*block_hash)) {
......
@@ -21,14 +21,10 @@
use rstd::prelude::*;
use codec::{Encode, Decode};
use srml_support::{decl_storage, decl_module, ensure};
use srml_support::{decl_storage, decl_module, ensure, dispatch::Result, traits::Get};
use primitives::{Hash, parachain::{AttestedCandidate, CandidateReceipt, Id as ParaId}};
use sr_staking_primitives::SessionIndex;
use {system, session};
use srml_support::{
StorageValue, StorageMap, StorageDoubleMap, dispatch::Result, traits::Get,
};
use inherents::{ProvideInherent, InherentData, RuntimeString, MakeFatalError, InherentIdentifier};
use system::ensure_none;
......
@@ -18,7 +18,7 @@
use rstd::prelude::*;
use sr_io::{keccak_256, secp256k1_ecdsa_recover};
use srml_support::{StorageValue, StorageMap, decl_event, decl_storage, decl_module};
use srml_support::{decl_event, decl_storage, decl_module};
use srml_support::traits::{Currency, Get};
use system::ensure_none;
use codec::{Encode, Decode};
......
@@ -67,7 +67,7 @@
//! funds ultimately end up in module's fund sub-account.
use srml_support::{
StorageValue, StorageMap, decl_module, decl_storage, decl_event, storage::child, ensure,
decl_module, decl_storage, decl_event, storage::child, ensure,
traits::{Currency, Get, OnUnbalanced, WithdrawReason, ExistenceRequirement}
};
use system::ensure_signed;
@@ -187,7 +187,7 @@ decl_event! {
decl_module! {
pub struct Module<T: Trait> for enum Call where origin: T::Origin {
fn deposit_event() = default;
/// Create a new crowdfunding campaign for a parachain slot deposit for the current auction.
#[weight = SimpleDispatchInfo::FixedNormal(100_000)]
fn create(origin,
@@ -233,7 +233,7 @@ decl_module! {
Self::deposit_event(RawEvent::Created(index));
}
/// Contribute to a crowd sale. This will transfer some balance over to fund a parachain
/// slot. It will be withdrawable in two instances: the parachain becomes retired; or the
/// slot is
@@ -312,7 +312,7 @@ decl_module! {
Self::deposit_event(RawEvent::DeployDataFixed(index));
}
/// Complete onboarding process for a winning parachain fund. This can be called once by
/// any origin once a fund wins a slot and the fund has set its deploy data (using
/// `fix_deploy_data`).
@@ -337,7 +337,7 @@ decl_module! {
Self::deposit_event(RawEvent::Onboarded(index, para_id));
}
/// Note that a successful fund has lost its parachain slot, and place it into retirement.
fn begin_retirement(origin, #[compact] index: FundIndex) {
let _ = ensure_signed(origin)?;
@@ -388,7 +388,7 @@ decl_module! {
Self::deposit_event(RawEvent::Withdrew(who, index, balance));
}
/// Remove a fund after either: it was unsuccessful and it timed out; or it was successful
/// but it has been retired from its parachain slot. This places any deposits that were not
/// withdrawn into the treasury.
@@ -422,7 +422,7 @@ decl_module! {
Self::deposit_event(RawEvent::Dissolved(index));
}
fn on_finalize(n: T::BlockNumber) {
if let Some(n) = <slots::Module<T>>::is_ending(n) {
let auction_index = <slots::Module<T>>::auction_counter();
@@ -1041,7 +1041,7 @@ mod tests {
assert_eq!(Balances::free_balance(3), 2990);
});
}
#[test]
fn withdraw_handles_basic_errors() {
with_externalities(&mut new_test_ext(), || {
@@ -1079,7 +1079,7 @@ mod tests {
// Skip all the way to the end
run_to_block(50);
// Check current funds (contributions + deposit)
assert_eq!(Balances::free_balance(Crowdfund::fund_account_id(0)), 601);
......
@@ -41,12 +41,11 @@ use client::{
use sr_primitives::{
ApplyResult, generic, Permill, Perbill, impl_opaque_keys, create_runtime_str, key_types,
transaction_validity::{TransactionValidity, InvalidTransaction, TransactionValidityError},
weights::{Weight, DispatchInfo},
traits::{BlakeTwo256, Block as BlockT, DigestFor, StaticLookup, SignedExtension},
curve::PiecewiseLinear,
weights::{Weight, DispatchInfo}, curve::PiecewiseLinear,
traits::{BlakeTwo256, Block as BlockT, StaticLookup, SignedExtension},
};
use version::RuntimeVersion;
use grandpa::{AuthorityId as GrandpaId, fg_primitives::{self, ScheduledChange}};
use grandpa::{AuthorityId as GrandpaId, fg_primitives};
use babe_primitives::AuthorityId as BabeId;
use elections::VoteIndex;
#[cfg(any(feature = "std", test))]
@@ -662,45 +661,25 @@ impl_runtime_apis! {
}
impl fg_primitives::GrandpaApi<Block> for Runtime {
fn grandpa_pending_change(digest: &DigestFor<Block>)
-> Option<ScheduledChange<BlockNumber>>
{
Grandpa::pending_change(digest)
}
fn grandpa_forced_change(digest: &DigestFor<Block>)
-> Option<(BlockNumber, ScheduledChange<BlockNumber>)>
{
Grandpa::forced_change(digest)
}
fn grandpa_authorities() -> Vec<(GrandpaId, u64)> {
Grandpa::grandpa_authorities()
}
}
impl babe_primitives::BabeApi<Block> for Runtime {
fn startup_data() -> babe_primitives::BabeConfiguration {
fn configuration() -> babe_primitives::BabeConfiguration {
// The choice of `c` parameter (where `1 - c` represents the
// probability of a slot being empty), is done in accordance to the
// slot duration and expected target block time, for safely
// resisting network delays of maximum two seconds.
// <https://research.web3.foundation/en/latest/polkadot/BABE/Babe/#6-practical-results>
babe_primitives::BabeConfiguration {
median_required_blocks: 1000,
slot_duration: Babe::slot_duration(),
epoch_length: EpochDuration::get(),
c: PRIMARY_PROBABILITY,
}
}
fn epoch() -> babe_primitives::Epoch {
babe_primitives::Epoch {
start_slot: Babe::epoch_start_slot(),
authorities: Babe::authorities(),
epoch_index: Babe::epoch_index(),
genesis_authorities: Babe::authorities(),
randomness: Babe::randomness(),
duration: EpochDuration::get(),
secondary_slots: Babe::secondary_slots().0,
secondary_slots: true,
}
}
}
......
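An aside to make the `c` comment in the BABE configuration hunk above concrete: the following is a minimal sketch of the standard BABE leadership relations described on the linked Web3 Foundation page, where s_i is validator i's relative stake and c is the constant supplied as `PRIMARY_PROBABILITY` (the runtime's actual value is defined elsewhere in the file and not shown here):

    T_i = 1 - (1 - c)^{s_i}                                   % per-validator primary-slot threshold
    P(\text{slot has no primary leader}) = \prod_i (1 - T_i) = (1 - c)^{\sum_i s_i} = 1 - c
    E[\text{slots per primary block}] = 1 / c, \quad \text{so expected primary block time} \approx \text{slot\_duration} / c

In other words, a smaller c leaves more empty slots, while a larger c raises the chance of multiple leaders and forks when propagation is delayed, which is the trade-off the comment's "resisting network delays of maximum two seconds" refers to.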
@@ -31,8 +31,7 @@ use primitives::{Hash, Balance, parachain::{
}};
use {system, session};
use srml_support::{
StorageValue, StorageMap, Parameter, dispatch::Result,
traits::{Currency, Get, WithdrawReason, ExistenceRequirement}
Parameter, dispatch::Result, traits::{Currency, Get, WithdrawReason, ExistenceRequirement},
};
use inherents::{ProvideInherent, InherentData, RuntimeString, MakeFatalError, InherentIdentifier};
......
@@ -23,8 +23,8 @@ use sr_primitives::traits::{CheckedSub, StaticLookup, Zero, One, CheckedConversi
use sr_primitives::weights::SimpleDispatchInfo;
use codec::{Encode, Decode};
use srml_support::{
decl_module, decl_storage, decl_event, StorageValue, StorageMap, ensure,
traits::{Currency, ReservableCurrency, WithdrawReason, ExistenceRequirement, Get}
decl_module, decl_storage, decl_event, StorageMap, ensure,
traits::{Currency, ReservableCurrency, WithdrawReason, ExistenceRequirement, Get},
};
use primitives::parachain::AccountIdConversion;
use crate::parachains::ParachainRegistrar;
......
@@ -27,7 +27,7 @@ use polkadot_runtime::GenesisConfig;
use polkadot_network::{gossip::{self as network_gossip, Known}, validation::ValidationNetwork};
use service::{error::{Error as ServiceError}, Configuration, ServiceBuilder};
use transaction_pool::txpool::{Pool as TransactionPool};
use babe::{import_queue, start_babe, Config};
use babe::{import_queue, start_babe};
use grandpa::{self, FinalityProofProvider as GrandpaFinalityProofProvider};
use inherents::InherentDataProviders;
use log::info;
@@ -79,8 +79,6 @@ macro_rules! new_full_start {
($config:expr) => {{
let mut import_setup = None;
let inherent_data_providers = inherents::InherentDataProviders::new();
let mut tasks_to_spawn = None;
let builder = service::ServiceBuilder::new_full::<
Block, RuntimeApi, polkadot_executor::Executor
>($config)?
@@ -90,33 +88,37 @@ macro_rules! new_full_start {
.with_transaction_pool(|config, client|
Ok(transaction_pool::txpool::Pool::new(config, transaction_pool::ChainApi::new(client)))
)?
.with_import_queue(|_config, client, mut select_chain, transaction_pool| {
.with_import_queue(|_config, client, mut select_chain, _| {
let select_chain = select_chain.take()
.ok_or_else(|| service::Error::SelectChainRequired)?;
let (block_import, link_half) =
let (grandpa_block_import, grandpa_link) =
grandpa::block_import::<_, _, _, RuntimeApi, _, _>(
client.clone(), client.clone(), select_chain
client.clone(), &*client, select_chain
)?;
let justification_import = block_import.clone();
let (import_queue, babe_link, babe_block_import, pruning_task) = babe::import_queue(
babe::Config::get_or_compute(&*client)?,
block_import,
Some(Box::new(justification_import)),
None,
client.clone(),
client,
inherent_data_providers.clone(),
Some(transaction_pool)
let justification_import = grandpa_block_import.clone();
let (block_import, babe_link) = babe::block_import(
babe::Config::get_or_compute(&*client)?,
grandpa_block_import,
client.clone(),
client.clone(),
)?;
import_setup = Some((babe_block_import.clone(), link_half, babe_link));
tasks_to_spawn = Some(vec![Box::new(pruning_task)]);
let import_queue = babe::import_queue(
babe_link.clone(),
block_import.clone(),
Some(Box::new(justification_import)),
None,
client.clone(),
client,
inherent_data_providers.clone(),
)?;
import_setup = Some((block_import, grandpa_link, babe_link));
Ok(import_queue)
})?;
(builder, import_setup, inherent_data_providers, tasks_to_spawn)
(builder, import_setup, inherent_data_providers)
}}
}
@@ -144,7 +146,7 @@ pub fn new_full(config: Configuration<CustomConfiguration, GenesisConfig>)
let disable_grandpa = config.disable_grandpa;
let name = config.name.clone();
let (builder, mut import_setup, inherent_data_providers, mut tasks_to_spawn) = new_full_start!(config);
let (builder, mut import_setup, inherent_data_providers) = new_full_start!(config);
let service = builder
.with_network_protocol(|config| Ok(PolkadotProtocol::new(config.custom.collating_for.clone())))?
@@ -156,17 +158,6 @@ pub fn new_full(config: Configuration<CustomConfiguration, GenesisConfig>)
let (block_import, link_half, babe_link) = import_setup.take()
.expect("Link Half and Block Import are present for Full Services or setup failed before. qed");
// spawn any futures that were created in the previous setup steps
if let Some(tasks) = tasks_to_spawn.take() {
for task in tasks {
service.spawn_task(
task.select(service.on_exit())
.map(|_| ())
.map_err(|_| ())
);
}
}
if is_collator {
info!(
"The node cannot start as an authority because it is also configured to run as a collator."
@@ -185,7 +176,7 @@ pub fn new_full(config: Configuration<CustomConfiguration, GenesisConfig>)
let gossip_validator_select_chain = select_chain.clone();
let is_known = move |block_hash: &Hash| {
use client::BlockStatus;
use consensus_common::BlockStatus;
match known_oracle.block_status(&BlockId::hash(*block_hash)) {
Err(_) | Ok(BlockStatus::Unknown) | Ok(BlockStatus::Queued) => None,
@@ -253,7 +244,6 @@ pub fn new_full(config: Configuration<CustomConfiguration, GenesisConfig>)
let select_chain = service.select_chain().ok_or(ServiceError::SelectChainRequired)?;
let babe_config = babe::BabeParams {
config: Config::get_or_compute(&*client)?,
keystore: service.keystore(),
client,
select_chain,
@@ -262,7 +252,7 @@ pub fn new_full(config: Configuration<CustomConfiguration, GenesisConfig>)
sync_oracle: service.network(),
inherent_data_providers: inherent_data_providers.clone(),
force_authoring: force_authoring,
time_source: babe_link,
babe_link,
};
let babe = start_babe(babe_config)?;
@@ -332,28 +322,35 @@ pub fn new_light(config: Configuration<CustomConfiguration, GenesisConfig>)
.with_transaction_pool(|config, client|
Ok(TransactionPool::new(config, transaction_pool::ChainApi::new(client)))
)?
.with_import_queue_and_fprb(|_config, client, backend, fetcher, _select_chain, transaction_pool| {
.with_import_queue_and_fprb(|_config, client, backend, fetcher, _select_chain, _| {
let fetch_checker = fetcher
.map(|fetcher| fetcher.checker().clone())
.ok_or_else(|| "Trying to start light import queue without active fetch checker")?;
let block_import = grandpa::light_block_import::<_, _, _, RuntimeApi, _>(
let grandpa_block_import = grandpa::light_block_import::<_, _, _, RuntimeApi, _>(
client.clone(), backend, Arc::new(fetch_checker), client.clone()
)?;
let finality_proof_import = block_import.clone();
let finality_proof_import = grandpa_block_import.clone();
let finality_proof_request_builder =
finality_proof_import.create_finality_proof_request_builder();
let (babe_block_import, babe_link) = babe::block_import(
babe::Config::get_or_compute(&*client)?,
grandpa_block_import,
client.clone(),
client.clone(),
)?;
// FIXME: pruning task isn't started since light client doesn't do `AuthoritySetup`.
let (import_queue, ..) = import_queue(
Config::get_or_compute(&*client)?,
block_import,
let import_queue = import_queue(
babe_link,
babe_block_import,
None,
Some(Box::new(finality_proof_import)),
client.clone(),
client,
inherent_data_providers.clone(),
Some(transaction_pool)
)?;
Ok((import_queue, finality_proof_request_builder))
......