Unverified Commit 8c881e45 authored by asynchronous rob, committed by GitHub
Browse files

Implement validation data refactor (#1585)



* update primitives

* correct parent_head field

* make hrmp field pub

* refactor validation data: runtime

* refactor validation data: messages

* add arguments to full_validation_data runtime API

* port runtime API

* mostly port over candidate validation

* remove some parameters from ValidationParams

* guide: update candidate validation

* update candidate outputs

* update ValidationOutputs in primitives

* port over candidate validation

* add a new test for no-transient behavior

* update util runtime API wrappers

* candidate backing

* fix missing imports

* change some fields of validation data around

* runtime API impl

* update candidate validation

* fix backing tests

* grumbles from review

* fix av-store tests

* fix some more crates

* fix provisioner tests

* fix availability distribution tests

* port collation-generation to new validation data

* fix overseer tests

* Update roadmap/implementers-guide/src/node/utility/candidate-validation.md
Co-authored-by: Peter Goodspeed-Niklaus <coriolinus@users.noreply.github.com>
Co-authored-by: Peter Goodspeed-Niklaus <coriolinus@users.noreply.github.com>
parent 31f7ef9b
Pipeline #104160 passed with stages
in 24 minutes and 30 seconds
......@@ -8694,19 +8694,6 @@ dependencies = [
"test-parachain-adder",
]
[[package]]
name = "test-parachain-code-upgrader"
version = "0.7.22"
dependencies = [
"dlmalloc",
"parity-scale-codec",
"polkadot-parachain",
"sp-io",
"sp-std",
"substrate-wasm-builder-runner 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"tiny-keccak 1.5.0",
]
[[package]]
name = "test-parachain-halt"
version = "0.8.22"
......@@ -8722,7 +8709,6 @@ dependencies = [
"polkadot-parachain",
"sp-core",
"test-parachain-adder",
"test-parachain-code-upgrader",
"test-parachain-halt",
"tiny-keccak 1.5.0",
]
......
......@@ -70,7 +70,6 @@ members = [
"parachain/test-parachains",
"parachain/test-parachains/adder",
"parachain/test-parachains/adder/collator",
"parachain/test-parachains/code-upgrader",
]
[badges]
......
......@@ -34,13 +34,13 @@ use polkadot_node_subsystem::{
metrics::{self, prometheus},
};
use polkadot_node_subsystem_util::{
self as util, request_availability_cores_ctx, request_global_validation_data_ctx,
request_local_validation_data_ctx, request_validators_ctx,
self as util, request_availability_cores_ctx, request_full_validation_data_ctx,
request_validators_ctx,
};
use polkadot_primitives::v1::{
collator_signature_payload, validation_data_hash, AvailableData, CandidateCommitments,
CandidateDescriptor, CandidateReceipt, CoreState, GlobalValidationData, Hash,
LocalValidationData, OccupiedCoreAssumption, PoV,
collator_signature_payload, AvailableData, CandidateCommitments,
CandidateDescriptor, CandidateReceipt, CoreState, Hash, OccupiedCoreAssumption,
PersistedValidationData, PoV,
};
use sp_core::crypto::Pair;
use std::sync::Arc;
......@@ -198,13 +198,11 @@ async fn handle_new_activations<Context: SubsystemContext>(
for relay_parent in activated.iter().copied() {
// double-future magic happens here: the first layer of requests takes a mutable borrow of the context, and
// returns a receiver. The second layer of requests actually polls those receivers to completion.
let (global_validation_data, availability_cores, validators) = join!(
request_global_validation_data_ctx(relay_parent, ctx).await?,
let (availability_cores, validators) = join!(
request_availability_cores_ctx(relay_parent, ctx).await?,
request_validators_ctx(relay_parent, ctx).await?,
);
let global_validation_data = global_validation_data??;
let availability_cores = availability_cores??;
let n_validators = validators??.len();
......@@ -224,9 +222,10 @@ async fn handle_new_activations<Context: SubsystemContext>(
continue;
}
// we get local validation data synchronously for each core instead of within the subtask loop,
// because we have only a single mutable handle to the context, so the work can't really be distributed
let local_validation_data = match request_local_validation_data_ctx(
// we get validation data synchronously for each core instead of
// within the subtask loop, because we have only a single mutable handle to the
// context, so the work can't really be distributed
let validation_data = match request_full_validation_data_ctx(
relay_parent,
scheduled_core.para_id,
assumption,
......@@ -235,30 +234,32 @@ async fn handle_new_activations<Context: SubsystemContext>(
.await?
.await??
{
Some(local_validation_data) => local_validation_data,
Some(v) => v,
None => continue,
};
let task_global_validation_data = global_validation_data.clone();
let task_config = config.clone();
let mut task_sender = sender.clone();
let metrics = metrics.clone();
ctx.spawn("collation generation collation builder", Box::pin(async move {
let validation_data_hash =
validation_data_hash(&task_global_validation_data, &local_validation_data);
let persisted_validation_data_hash = validation_data.persisted.hash();
let collation = (task_config.collator)(&task_global_validation_data, &local_validation_data).await;
let collation = (task_config.collator)(&validation_data).await;
let pov_hash = collation.proof_of_validity.hash();
let signature_payload = collator_signature_payload(
&relay_parent,
&scheduled_core.para_id,
&validation_data_hash,
&persisted_validation_data_hash,
&pov_hash,
);
let erasure_root = match erasure_root(n_validators, local_validation_data, task_global_validation_data, collation.proof_of_validity.clone()) {
let erasure_root = match erasure_root(
n_validators,
validation_data.persisted,
collation.proof_of_validity.clone(),
) {
Ok(erasure_root) => erasure_root,
Err(err) => {
log::error!(target: "collation_generation", "failed to calculate erasure root for para_id {}: {:?}", scheduled_core.para_id, err);
......@@ -281,7 +282,7 @@ async fn handle_new_activations<Context: SubsystemContext>(
para_id: scheduled_core.para_id,
relay_parent,
collator: task_config.key.public(),
validation_data_hash,
persisted_validation_data_hash,
pov_hash,
},
};
......@@ -302,17 +303,11 @@ async fn handle_new_activations<Context: SubsystemContext>(
fn erasure_root(
n_validators: usize,
local_validation_data: LocalValidationData,
global_validation_data: GlobalValidationData,
persisted_validation: PersistedValidationData,
pov: PoV,
) -> Result<Hash> {
let omitted_validation = polkadot_primitives::v1::OmittedValidationData {
global_validation: global_validation_data,
local_validation: local_validation_data,
};
let available_data = AvailableData {
omitted_validation,
validation_data: persisted_validation,
pov,
};
......@@ -369,8 +364,8 @@ mod tests {
subsystem_test_harness, TestSubsystemContextHandle,
};
use polkadot_primitives::v1::{
BlockData, BlockNumber, CollatorPair, GlobalValidationData, Id as ParaId,
LocalValidationData, PoV, ScheduledCore,
BlockData, BlockNumber, CollatorPair, Id as ParaId,
PersistedValidationData, PoV, ScheduledCore, ValidationData,
};
use std::pin::Pin;
......@@ -402,7 +397,7 @@ mod tests {
fn test_config<Id: Into<ParaId>>(para_id: Id) -> Arc<CollationGenerationConfig> {
Arc::new(CollationGenerationConfig {
key: CollatorPair::generate().0,
collator: Box::new(|_gvd: &GlobalValidationData, _lvd: &LocalValidationData| {
collator: Box::new(|_vd: &ValidationData| {
Box::new(TestCollator)
}),
para_id: para_id.into(),
......@@ -417,7 +412,7 @@ mod tests {
}
#[test]
fn requests_validation_and_availability_per_relay_parent() {
fn requests_availability_per_relay_parent() {
let activated_hashes: Vec<Hash> = vec![
[1; 32].into(),
[4; 32].into(),
......@@ -425,19 +420,13 @@ mod tests {
[16; 32].into(),
];
let requested_validation_data = Arc::new(Mutex::new(Vec::new()));
let requested_availability_cores = Arc::new(Mutex::new(Vec::new()));
let overseer_requested_validation_data = requested_validation_data.clone();
let overseer_requested_availability_cores = requested_availability_cores.clone();
let overseer = |mut handle: TestSubsystemContextHandle<CollationGenerationMessage>| async move {
loop {
match handle.try_recv().await {
None => break,
Some(AllMessages::RuntimeApi(RuntimeApiMessage::Request(hash, RuntimeApiRequest::GlobalValidationData(tx)))) => {
overseer_requested_validation_data.lock().await.push(hash);
tx.send(Ok(Default::default())).unwrap();
}
Some(AllMessages::RuntimeApi(RuntimeApiMessage::Request(hash, RuntimeApiRequest::AvailabilityCores(tx)))) => {
overseer_requested_availability_cores.lock().await.push(hash);
tx.send(Ok(vec![])).unwrap();
......@@ -455,7 +444,7 @@ mod tests {
let subsystem_activated_hashes = activated_hashes.clone();
subsystem_test_harness(overseer, |mut ctx| async move {
handle_new_activations(
test_config(123),
test_config(123u32),
&subsystem_activated_hashes,
&mut ctx,
Metrics(None),
......@@ -465,21 +454,16 @@ mod tests {
.unwrap();
});
let mut requested_validation_data = Arc::try_unwrap(requested_validation_data)
.expect("overseer should have shut down by now")
.into_inner();
requested_validation_data.sort();
let mut requested_availability_cores = Arc::try_unwrap(requested_availability_cores)
.expect("overseer should have shut down by now")
.into_inner();
requested_availability_cores.sort();
assert_eq!(requested_validation_data, activated_hashes);
assert_eq!(requested_availability_cores, activated_hashes);
}
#[test]
fn requests_local_validation_for_scheduled_matches() {
fn requests_validation_data_for_scheduled_matches() {
let activated_hashes: Vec<Hash> = vec![
Hash::repeat_byte(1),
Hash::repeat_byte(4),
......@@ -487,19 +471,13 @@ mod tests {
Hash::repeat_byte(16),
];
let requested_local_validation_data = Arc::new(Mutex::new(Vec::new()));
let requested_full_validation_data = Arc::new(Mutex::new(Vec::new()));
let overseer_requested_local_validation_data = requested_local_validation_data.clone();
let overseer_requested_full_validation_data = requested_full_validation_data.clone();
let overseer = |mut handle: TestSubsystemContextHandle<CollationGenerationMessage>| async move {
loop {
match handle.try_recv().await {
None => break,
Some(AllMessages::RuntimeApi(RuntimeApiMessage::Request(
_hash,
RuntimeApiRequest::GlobalValidationData(tx),
))) => {
tx.send(Ok(Default::default())).unwrap();
}
Some(AllMessages::RuntimeApi(RuntimeApiMessage::Request(
hash,
RuntimeApiRequest::AvailabilityCores(tx),
......@@ -518,13 +496,13 @@ mod tests {
}
Some(AllMessages::RuntimeApi(RuntimeApiMessage::Request(
hash,
RuntimeApiRequest::LocalValidationData(
RuntimeApiRequest::FullValidationData(
_para_id,
_occupied_core_assumption,
tx,
),
))) => {
overseer_requested_local_validation_data
overseer_requested_full_validation_data
.lock()
.await
.push(hash);
......@@ -551,7 +529,7 @@ mod tests {
.unwrap();
});
let requested_local_validation_data = Arc::try_unwrap(requested_local_validation_data)
let requested_full_validation_data = Arc::try_unwrap(requested_full_validation_data)
.expect("overseer should have shut down by now")
.into_inner();
......@@ -559,7 +537,7 @@ mod tests {
// each activated hash generates two scheduled cores: one with its value * 4, one with its value * 5
// given that the test configuration has a para_id of 16, there's only one way to get that value: with the 4
// hash.
assert_eq!(requested_local_validation_data, vec![[4; 32].into()]);
assert_eq!(requested_full_validation_data, vec![[4; 32].into()]);
}
#[test]
......@@ -575,12 +553,6 @@ mod tests {
loop {
match handle.try_recv().await {
None => break,
Some(AllMessages::RuntimeApi(RuntimeApiMessage::Request(
_hash,
RuntimeApiRequest::GlobalValidationData(tx),
))) => {
tx.send(Ok(Default::default())).unwrap();
}
Some(AllMessages::RuntimeApi(RuntimeApiMessage::Request(
hash,
RuntimeApiRequest::AvailabilityCores(tx),
......@@ -599,7 +571,7 @@ mod tests {
}
Some(AllMessages::RuntimeApi(RuntimeApiMessage::Request(
_hash,
RuntimeApiRequest::LocalValidationData(
RuntimeApiRequest::FullValidationData(
_para_id,
_occupied_core_assumption,
tx,
......@@ -647,8 +619,8 @@ mod tests {
// we don't care too much about the commitments_hash right now, but let's ensure that we've calculated the
// correct descriptor
let expect_pov_hash = test_collation().proof_of_validity.hash();
let expect_validation_data_hash =
validation_data_hash::<BlockNumber>(&Default::default(), &Default::default());
let expect_validation_data_hash
= PersistedValidationData::<BlockNumber>::default().hash();
let expect_relay_parent = Hash::repeat_byte(4);
let expect_payload = collator_signature_payload(
&expect_relay_parent,
......@@ -661,7 +633,7 @@ mod tests {
para_id: config.para_id,
relay_parent: expect_relay_parent,
collator: config.key.public(),
validation_data_hash: expect_validation_data_hash,
persisted_validation_data_hash: expect_validation_data_hash,
pov_hash: expect_pov_hash,
};
......@@ -680,7 +652,7 @@ mod tests {
&collator_signature_payload(
&descriptor.relay_parent,
&descriptor.para_id,
&descriptor.validation_data_hash,
&descriptor.persisted_validation_data_hash,
&descriptor.pov_hash,
)
.as_ref(),
......
......@@ -356,8 +356,7 @@ mod tests {
};
use std::cell::RefCell;
use polkadot_primitives::v1::{
AvailableData, BlockData, HeadData, GlobalValidationData, LocalValidationData, PoV,
OmittedValidationData,
AvailableData, BlockData, HeadData, PersistedValidationData, PoV,
};
use polkadot_node_subsystem_test_helpers as test_helpers;
......@@ -370,29 +369,19 @@ mod tests {
}
struct TestState {
global_validation_schedule: GlobalValidationData,
local_validation_data: LocalValidationData,
persisted_validation_data: PersistedValidationData,
}
impl Default for TestState {
fn default() -> Self {
let local_validation_data = LocalValidationData {
let persisted_validation_data = PersistedValidationData {
parent_head: HeadData(vec![7, 8, 9]),
balance: Default::default(),
code_upgrade_allowed: None,
validation_code_hash: Default::default(),
};
let global_validation_schedule = GlobalValidationData {
max_code_size: 1000,
max_head_data_size: 1000,
block_number: Default::default(),
hrmp_mqc_heads: Vec::new(),
};
Self {
local_validation_data,
global_validation_schedule,
persisted_validation_data,
}
}
}
......@@ -470,17 +459,9 @@ mod tests {
block_data: BlockData(vec![4, 5, 6]),
};
let global_validation = test_state.global_validation_schedule;
let local_validation = test_state.local_validation_data;
let omitted_validation = OmittedValidationData {
global_validation,
local_validation,
};
let available_data = AvailableData {
pov,
omitted_validation,
validation_data: test_state.persisted_validation_data,
};
......@@ -531,17 +512,9 @@ mod tests {
block_data: BlockData(vec![4, 5, 6]),
};
let global_validation = test_state.global_validation_schedule;
let local_validation = test_state.local_validation_data;
let omitted_validation = OmittedValidationData {
global_validation,
local_validation,
};
let available_data = AvailableData {
pov,
omitted_validation,
validation_data: test_state.persisted_validation_data,
};
let no_metrics = Metrics(None);
......
......@@ -30,7 +30,7 @@ use futures::{
use keystore::KeyStorePtr;
use polkadot_primitives::v1::{
CommittedCandidateReceipt, BackedCandidate, Id as ParaId, ValidatorId,
ValidatorIndex, SigningContext, PoV, OmittedValidationData,
ValidatorIndex, SigningContext, PoV,
CandidateDescriptor, AvailableData, ValidatorSignature, Hash, CandidateReceipt,
CandidateCommitments, CoreState, CoreIndex,
};
......@@ -623,14 +623,9 @@ impl CandidateBackingJob {
outputs: ValidationOutputs,
with_commitments: impl FnOnce(CandidateCommitments) -> Result<T, E>,
) -> Result<Result<T, E>, Error> {
let omitted_validation = OmittedValidationData {
global_validation: outputs.global_validation_data,
local_validation: outputs.local_validation_data,
};
let available_data = AvailableData {
pov,
omitted_validation,
validation_data: outputs.validation_data,
};
let chunks = erasure_coding::obtain_chunks_v1(
......@@ -835,7 +830,7 @@ mod tests {
use futures::{executor, future, Future};
use polkadot_primitives::v1::{
ScheduledCore, BlockData, CandidateCommitments, CollatorId,
LocalValidationData, GlobalValidationData, HeadData,
PersistedValidationData, ValidationData, TransientValidationData, HeadData,
ValidatorPair, ValidityAttestation, GroupRotationInfo,
};
use polkadot_subsystem::{
......@@ -855,8 +850,7 @@ mod tests {
keystore: KeyStorePtr,
validators: Vec<Sr25519Keyring>,
validator_public: Vec<ValidatorId>,
global_validation_data: GlobalValidationData,
local_validation_data: LocalValidationData,
validation_data: ValidationData,
validator_groups: (Vec<Vec<ValidatorIndex>>, GroupRotationInfo),
availability_cores: Vec<CoreState>,
head_data: HashMap<ParaId, HeadData>,
......@@ -920,17 +914,18 @@ mod tests {
parent_hash: relay_parent,
};
let local_validation_data = LocalValidationData {
parent_head: HeadData(vec![7, 8, 9]),
balance: Default::default(),
code_upgrade_allowed: None,
validation_code_hash: Default::default(),
};
let global_validation_data = GlobalValidationData {
max_code_size: 1000,
max_head_data_size: 1000,
block_number: Default::default(),
let validation_data = ValidationData {
persisted: PersistedValidationData {
parent_head: HeadData(vec![7, 8, 9]),
block_number: Default::default(),
hrmp_mqc_heads: Vec::new(),
},
transient: TransientValidationData {
max_code_size: 1000,
max_head_data_size: 1000,
balance: Default::default(),
code_upgrade_allowed: None,
},
};
Self {
......@@ -941,8 +936,7 @@ mod tests {
validator_groups: (validator_groups, group_rotation_info),
availability_cores,
head_data,
local_validation_data,
global_validation_data,
validation_data,
signing_context,
relay_parent,
}
......@@ -971,13 +965,8 @@ mod tests {
}
fn make_erasure_root(test: &TestState, pov: PoV) -> Hash {
let omitted_validation = OmittedValidationData {
global_validation: test.global_validation_data.clone(),
local_validation: test.local_validation_data.clone(),
};
let available_data = AvailableData {
omitted_validation,
validation_data: test.validation_data.persisted.clone(),
pov,
};
......@@ -1109,8 +1098,7 @@ mod tests {
) if pov == pov && &c == candidate.descriptor() => {
tx.send(Ok(
ValidationResult::Valid(ValidationOutputs {
global_validation_data: test_state.global_validation_data,
local_validation_data: test_state.local_validation_data,
validation_data: test_state.validation_data.persisted,
head_data: expected_head_data.clone(),
upward_messages: Vec::new(),
fees: Default::default(),
......@@ -1221,8 +1209,7 @@ mod tests {
) if pov == pov && &c == candidate_a.descriptor() => {
tx.send(Ok(
ValidationResult::Valid(ValidationOutputs {
global_validation_data: test_state.global_validation_data,
local_validation_data: test_state.local_validation_data,
validation_data: test_state.validation_data.persisted,
head_data: expected_head_data.clone(),
upward_messages: Vec::new(),
fees: Default::default(),
......@@ -1351,8 +1338,7 @@ mod tests {
) if pov == pov && &c == candidate_a.descriptor() => {
tx.send(Ok(
ValidationResult::Valid(ValidationOutputs {
global_validation_data: test_state.global_validation_data,
local_validation_data: test_state.local_validation_data,
validation_data: test_state.validation_data.persisted,
head_data: expected_head_data.clone(),
upward_messages: Vec::new(),
fees: Default::default(),
......@@ -1508,8 +1494,7 @@ mod tests {
) if pov == pov && &c == candidate_b.descriptor() => {
tx.send(Ok(
ValidationResult::Valid(ValidationOutputs {
global_validation_data: test_state.global_validation_data,
local_validation_data: test_state.local_validation_data,
validation_data: test_state.validation_data.persisted,
head_data: expected_head_data.clone(),
upward_messages: Vec::new(),
fees: Default::default(),
......
This diff is collapsed.
......@@ -35,11 +35,10 @@ use polkadot_node_subsystem::{
use polkadot_node_subsystem_util::{
self as util,
delegated_subsystem,
request_availability_cores, request_global_validation_data,
request_local_validation_data, JobTrait, ToJobTrait,
request_availability_cores, request_persisted_validation_data, JobTrait, ToJobTrait,
};
use polkadot_primitives::v1::{
validation_data_hash, BackedCandidate, BlockNumber, CoreState, Hash, OccupiedCoreAssumption,
BackedCandidate, BlockNumber, CoreState, Hash, OccupiedCoreAssumption,
SignedAvailabilityBitfield,
};
use std::{collections::HashMap, convert::TryFrom, pin::Pin};
......@@ -355,10 +354,6 @@ async fn select_candidates(
) -> Result<Vec<BackedCandidate>, Error> {
let block_number = get_block_number_under_construction(relay_parent, sender).await?;
let global_validation_data = request_global_validation_data(relay_parent, sender)
.await?
.await??;
let mut selected_candidates =
Vec::with_capacity(candidates.len().min(availability_cores.len()));
......@@ -387,7 +382,7 @@ async fn select_candidates(
_ => continue,
};
let local_validation_data = match request_local_validation_data(
let validation_data = match request_persisted_validation_data(
relay_parent,
scheduled_core.para_id,
assumption,
......@@ -396,18 +391,17 @@ async fn select_candidates(
.await?
.await??
{
Some(local_validation_data) => local_validation_data,
Some(v) => v,
None => continue,
};
let computed_validation_data_hash =
validation_data_hash(&global_validation_data, &local_validation_data);
let computed_validation_data_hash = validation_data.hash();
// we arbitrarily pick the first of the backed candidates which match the appropriate selection criteria
if let Some(candidate) = candidates.iter().find(|backed_candidate| {
let descriptor = &backed_candidate.candidate.descriptor;
descriptor.para_id == scheduled_core.para_id
&& descriptor.validation_data_hash == computed_validation_data_hash
&& descriptor.persisted_validation_data_hash == computed_validation_data_hash
}) {
selected_candidates.push(candidate.clone());
}
......@@ -657,10 +651,10 @@ mod tests {
use super::super::*;
use super::{build_occupied_core, default_bitvec, occupied_core, scheduled_core};
use polkadot_node_subsystem::messages::RuntimeApiRequest::{
AvailabilityCores, GlobalValidationData, LocalValidationData,
AvailabilityCores, PersistedValidationData as PersistedValidationDataReq,
};