diff --git a/Cargo.lock b/Cargo.lock
index eee6ffdc4d74..855f0565d0ff 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -8031,14 +8031,14 @@ dependencies = [
[[package]]
name = "pretty_assertions"
-version = "1.2.1"
+version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c89f989ac94207d048d92db058e4f6ec7342b0971fc58d1271ca148b799b3563"
+checksum = "a25e9bcb20aa780fd0bb16b72403a9064d6b3f22f026946029acb941a50af755"
dependencies = [
- "ansi_term",
"ctor",
"diff",
"output_vt100",
+ "yansi",
]
[[package]]
@@ -13822,6 +13822,12 @@ dependencies = [
"static_assertions",
]
+[[package]]
+name = "yansi"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
+
[[package]]
name = "yasna"
version = "0.5.1"
diff --git a/node/core/prospective-parachains/src/tests.rs b/node/core/prospective-parachains/src/tests.rs
index ef03789ce101..71e0367e8d62 100644
--- a/node/core/prospective-parachains/src/tests.rs
+++ b/node/core/prospective-parachains/src/tests.rs
@@ -15,7 +15,6 @@
// along with Polkadot. If not, see .
use super::*;
-use ::polkadot_primitives_test_helpers::{dummy_candidate_receipt_bad_sig, dummy_hash};
use assert_matches::assert_matches;
use polkadot_node_subsystem::{
errors::RuntimeApiError,
@@ -27,12 +26,11 @@ use polkadot_node_subsystem::{
use polkadot_node_subsystem_test_helpers as test_helpers;
use polkadot_node_subsystem_types::{jaeger, ActivatedLeaf, LeafStatus};
use polkadot_primitives::{
- v2::{
- CandidateCommitments, HeadData, Header, PersistedValidationData, ScheduledCore,
- ValidationCodeHash,
- },
vstaging::{AsyncBackingParameters, Constraints, InboundHrmpLimitations},
+ CommittedCandidateReceipt, HeadData, Header, PersistedValidationData, ScheduledCore,
+ ValidationCodeHash,
};
+use polkadot_primitives_test_helpers::make_candidate;
use std::sync::Arc;
const ALLOWED_ANCESTRY_LEN: u32 = 3;
@@ -70,42 +68,6 @@ fn dummy_constraints(
}
}
-fn dummy_pvd(parent_head: HeadData, relay_parent_number: u32) -> PersistedValidationData {
- PersistedValidationData {
- parent_head,
- relay_parent_number,
- max_pov_size: MAX_POV_SIZE,
- relay_parent_storage_root: dummy_hash(),
- }
-}
-
-fn make_candidate(
- leaf: &TestLeaf,
- para_id: ParaId,
- parent_head: HeadData,
- head_data: HeadData,
- validation_code_hash: ValidationCodeHash,
-) -> (CommittedCandidateReceipt, PersistedValidationData) {
- let pvd = dummy_pvd(parent_head, leaf.number);
- let commitments = CandidateCommitments {
- head_data,
- horizontal_messages: Vec::new(),
- upward_messages: Vec::new(),
- new_validation_code: None,
- processed_downward_messages: 0,
- hrmp_watermark: leaf.number,
- };
-
- let mut candidate = dummy_candidate_receipt_bad_sig(leaf.hash, Some(Default::default()));
- candidate.commitments_hash = commitments.hash();
- candidate.descriptor.para_id = para_id;
- candidate.descriptor.persisted_validation_data_hash = pvd.hash();
- candidate.descriptor.validation_code_hash = validation_code_hash;
- let candidate = CommittedCandidateReceipt { descriptor: candidate.descriptor, commitments };
-
- (candidate, pvd)
-}
-
struct TestState {
availability_cores: Vec,
validation_code_hash: ValidationCodeHash,
@@ -539,7 +501,8 @@ fn send_candidates_and_check_if_found() {
// Candidate A1
let (candidate_a1, pvd_a1) = make_candidate(
- &leaf_a,
+ leaf_a.hash,
+ leaf_a.number,
1.into(),
HeadData(vec![1, 2, 3]),
HeadData(vec![1]),
@@ -550,7 +513,8 @@ fn send_candidates_and_check_if_found() {
// Candidate A2
let (candidate_a2, pvd_a2) = make_candidate(
- &leaf_a,
+ leaf_a.hash,
+ leaf_a.number,
2.into(),
HeadData(vec![2, 3, 4]),
HeadData(vec![2]),
@@ -561,7 +525,8 @@ fn send_candidates_and_check_if_found() {
// Candidate B
let (candidate_b, pvd_b) = make_candidate(
- &leaf_b,
+ leaf_b.hash,
+ leaf_b.number,
1.into(),
HeadData(vec![3, 4, 5]),
HeadData(vec![3]),
@@ -572,7 +537,8 @@ fn send_candidates_and_check_if_found() {
// Candidate C
let (candidate_c, pvd_c) = make_candidate(
- &leaf_c,
+ leaf_c.hash,
+ leaf_c.number,
2.into(),
HeadData(vec![6, 7, 8]),
HeadData(vec![4]),
@@ -649,7 +615,8 @@ fn check_candidate_parent_leaving_view() {
// Candidate A1
let (candidate_a1, pvd_a1) = make_candidate(
- &leaf_a,
+ leaf_a.hash,
+ leaf_a.number,
1.into(),
HeadData(vec![1, 2, 3]),
HeadData(vec![1]),
@@ -660,7 +627,8 @@ fn check_candidate_parent_leaving_view() {
// Candidate A2
let (candidate_a2, pvd_a2) = make_candidate(
- &leaf_a,
+ leaf_a.hash,
+ leaf_a.number,
2.into(),
HeadData(vec![2, 3, 4]),
HeadData(vec![2]),
@@ -671,7 +639,8 @@ fn check_candidate_parent_leaving_view() {
// Candidate B
let (candidate_b, pvd_b) = make_candidate(
- &leaf_b,
+ leaf_b.hash,
+ leaf_b.number,
1.into(),
HeadData(vec![3, 4, 5]),
HeadData(vec![3]),
@@ -682,7 +651,8 @@ fn check_candidate_parent_leaving_view() {
// Candidate C
let (candidate_c, pvd_c) = make_candidate(
- &leaf_c,
+ leaf_c.hash,
+ leaf_c.number,
2.into(),
HeadData(vec![6, 7, 8]),
HeadData(vec![4]),
@@ -771,7 +741,8 @@ fn check_candidate_on_multiple_forks() {
// Candidate on leaf A.
let (candidate_a, pvd_a) = make_candidate(
- &leaf_a,
+ leaf_a.hash,
+ leaf_a.number,
1.into(),
HeadData(vec![1, 2, 3]),
HeadData(vec![1]),
@@ -782,7 +753,8 @@ fn check_candidate_on_multiple_forks() {
// Candidate on leaf B.
let (candidate_b, pvd_b) = make_candidate(
- &leaf_b,
+ leaf_b.hash,
+ leaf_b.number,
1.into(),
HeadData(vec![3, 4, 5]),
HeadData(vec![1]),
@@ -793,7 +765,8 @@ fn check_candidate_on_multiple_forks() {
// Candidate on leaf C.
let (candidate_c, pvd_c) = make_candidate(
- &leaf_c,
+ leaf_c.hash,
+ leaf_c.number,
1.into(),
HeadData(vec![5, 6, 7]),
HeadData(vec![1]),
@@ -860,7 +833,8 @@ fn check_backable_query() {
// Candidate A
let (candidate_a, pvd_a) = make_candidate(
- &leaf_a,
+ leaf_a.hash,
+ leaf_a.number,
1.into(),
HeadData(vec![1, 2, 3]),
HeadData(vec![1]),
@@ -871,7 +845,8 @@ fn check_backable_query() {
// Candidate B
let (mut candidate_b, pvd_b) = make_candidate(
- &leaf_a,
+ leaf_a.hash,
+ leaf_a.number,
1.into(),
HeadData(vec![1]),
HeadData(vec![2]),
@@ -970,7 +945,8 @@ fn check_depth_query() {
// Candidate A.
let (candidate_a, pvd_a) = make_candidate(
- &leaf_a,
+ leaf_a.hash,
+ leaf_a.number,
1.into(),
HeadData(vec![1, 2, 3]),
HeadData(vec![1]),
@@ -981,7 +957,8 @@ fn check_depth_query() {
// Candidate B.
let (candidate_b, pvd_b) = make_candidate(
- &leaf_a,
+ leaf_a.hash,
+ leaf_a.number,
1.into(),
HeadData(vec![1]),
HeadData(vec![2]),
@@ -992,7 +969,8 @@ fn check_depth_query() {
// Candidate C.
let (candidate_c, pvd_c) = make_candidate(
- &leaf_a,
+ leaf_a.hash,
+ leaf_a.number,
1.into(),
HeadData(vec![2]),
HeadData(vec![3]),
@@ -1120,7 +1098,8 @@ fn check_pvd_query() {
// Candidate A.
let (candidate_a, pvd_a) = make_candidate(
- &leaf_a,
+ leaf_a.hash,
+ leaf_a.number,
1.into(),
HeadData(vec![1, 2, 3]),
HeadData(vec![1]),
@@ -1130,7 +1109,8 @@ fn check_pvd_query() {
// Candidate B.
let (candidate_b, pvd_b) = make_candidate(
- &leaf_a,
+ leaf_a.hash,
+ leaf_a.number,
1.into(),
HeadData(vec![1]),
HeadData(vec![2]),
@@ -1140,7 +1120,8 @@ fn check_pvd_query() {
// Candidate C.
let (candidate_c, pvd_c) = make_candidate(
- &leaf_a,
+ leaf_a.hash,
+ leaf_a.number,
1.into(),
HeadData(vec![2]),
HeadData(vec![3]),
diff --git a/node/network/statement-distribution/Cargo.toml b/node/network/statement-distribution/Cargo.toml
index 4c3aba82e189..b89c6363ec19 100644
--- a/node/network/statement-distribution/Cargo.toml
+++ b/node/network/statement-distribution/Cargo.toml
@@ -23,8 +23,8 @@ fatality = "0.0.6"
bitvec = "1"
[dev-dependencies]
-polkadot-node-subsystem-test-helpers = { path = "../../subsystem-test-helpers" }
assert_matches = "1.4.0"
+polkadot-node-subsystem-test-helpers = { path = "../../subsystem-test-helpers" }
sp-authority-discovery = { git = "https://github.com/paritytech/substrate", branch = "master" }
sp-keyring = { git = "https://github.com/paritytech/substrate", branch = "master" }
sp-core = { git = "https://github.com/paritytech/substrate", branch = "master" }
diff --git a/node/network/statement-distribution/src/vstaging/candidates.rs b/node/network/statement-distribution/src/vstaging/candidates.rs
index b356fd6a8ab0..11deeed42fef 100644
--- a/node/network/statement-distribution/src/vstaging/candidates.rs
+++ b/node/network/statement-distribution/src/vstaging/candidates.rs
@@ -20,7 +20,7 @@
//! Due to the request-oriented nature of this protocol, we often learn
//! about candidates just as a hash, alongside claimed properties that the
//! receipt would commit to. However, it is only later on that we can
-//! confirm those claimed properties. This store lets us keep track of the
+//! confirm those claimed properties. This store lets us keep track of
//! all candidates which are currently 'relevant' after spam-protection, and
//! gives us the ability to detect mis-advertisements after the fact
//! and punish them accordingly.
@@ -42,7 +42,7 @@ use std::{
/// This encapsulates the correct and incorrect advertisers
/// post-confirmation of a candidate.
-#[derive(Default)]
+#[derive(Debug, Default, PartialEq)]
pub struct PostConfirmationReckoning {
/// Peers which advertised correctly.
pub correct: HashSet,
@@ -51,6 +51,7 @@ pub struct PostConfirmationReckoning {
}
/// Outputs generated by initial confirmation of a candidate.
+#[derive(Debug, PartialEq)]
pub struct PostConfirmation {
/// The hypothetical candidate used to determine importability and membership
/// in the hypothetical frontier.
@@ -139,10 +140,12 @@ impl Candidates {
}
/// Note that a candidate has been confirmed. If the candidate has just been
- /// confirmed, then this returns `Some`. Otherwise, `None`.
+ /// confirmed (previous state was `Unconfirmed`), then this returns `Some`. Otherwise, `None`.
+ ///
+ /// If we are confirming for the first time, then remove any outdated claims, and generate a
+ /// reckoning of which peers advertised correctly and incorrectly.
///
- /// This does no sanity-checking of input data, and will overwrite
- /// already-confirmed canidates.
+ /// This does no sanity-checking of input data, and will overwrite already-confirmed candidates.
pub fn confirm_candidate(
&mut self,
candidate_hash: CandidateHash,
@@ -355,9 +358,10 @@ impl Candidates {
}
/// A bad advertisement was recognized.
-#[derive(Debug)]
+#[derive(Debug, PartialEq)]
pub struct BadAdvertisement;
+#[derive(Debug, PartialEq)]
enum CandidateState {
Unconfirmed(UnconfirmedCandidate),
Confirmed(ConfirmedCandidate),
@@ -390,7 +394,7 @@ impl CandidateClaims {
}
// properties of an unconfirmed but hypothetically importable candidate.
-#[derive(Hash, PartialEq, Eq)]
+#[derive(Debug, Hash, PartialEq, Eq)]
struct UnconfirmedImportable {
relay_parent: Hash,
parent_hash: Hash,
@@ -400,6 +404,7 @@ struct UnconfirmedImportable {
// An unconfirmed candidate may have have been advertised under
// multiple identifiers. We track here, on the basis of unique identifier,
// the peers which advertised each candidate in a specific way.
+#[derive(Debug, PartialEq)]
struct UnconfirmedCandidate {
claims: Vec<(PeerId, CandidateClaims)>,
// ref-counted
@@ -506,6 +511,7 @@ impl UnconfirmedCandidate {
}
/// A confirmed candidate.
+#[derive(Debug, PartialEq)]
pub struct ConfirmedCandidate {
receipt: Arc,
persisted_validation_data: PersistedValidationData,
@@ -567,12 +573,681 @@ impl ConfirmedCandidate {
#[cfg(test)]
mod tests {
use super::*;
+ use polkadot_primitives::HeadData;
+ use polkadot_primitives_test_helpers::make_candidate;
+
+ #[test]
+ fn inserting_unconfirmed_rejects_on_incompatible_claims() {
+ let relay_head_data_a = HeadData(vec![1, 2, 3]);
+ let relay_head_data_b = HeadData(vec![4, 5, 6]);
+ let relay_hash_a = relay_head_data_a.hash();
+ let relay_hash_b = relay_head_data_b.hash();
+
+ let para_id_a = 1.into();
+ let para_id_b = 2.into();
+
+ let (candidate_a, pvd_a) = make_candidate(
+ relay_hash_a,
+ 1,
+ para_id_a,
+ relay_head_data_a,
+ HeadData(vec![1]),
+ Hash::from_low_u64_be(1000).into(),
+ );
+
+ let candidate_hash_a = candidate_a.hash();
+
+ let peer = PeerId::random();
+
+ let group_index_a = 100.into();
+ let group_index_b = 200.into();
+
+ let mut candidates = Candidates::default();
+
+ // Confirm a candidate first.
+ candidates.confirm_candidate(candidate_hash_a, candidate_a, pvd_a, group_index_a);
+
+ // Relay parent does not match.
+ assert_eq!(
+ candidates.insert_unconfirmed(
+ peer,
+ candidate_hash_a,
+ relay_hash_b,
+ group_index_a,
+ Some((relay_hash_a, para_id_a)),
+ ),
+ Err(BadAdvertisement)
+ );
+
+ // Group index does not match.
+ assert_eq!(
+ candidates.insert_unconfirmed(
+ peer,
+ candidate_hash_a,
+ relay_hash_a,
+ group_index_b,
+ Some((relay_hash_a, para_id_a)),
+ ),
+ Err(BadAdvertisement)
+ );
+
+ // Parent head data does not match.
+ assert_eq!(
+ candidates.insert_unconfirmed(
+ peer,
+ candidate_hash_a,
+ relay_hash_a,
+ group_index_a,
+ Some((relay_hash_b, para_id_a)),
+ ),
+ Err(BadAdvertisement)
+ );
+
+ // Para ID does not match.
+ assert_eq!(
+ candidates.insert_unconfirmed(
+ peer,
+ candidate_hash_a,
+ relay_hash_a,
+ group_index_a,
+ Some((relay_hash_a, para_id_b)),
+ ),
+ Err(BadAdvertisement)
+ );
+
+ // Everything matches.
+ assert_eq!(
+ candidates.insert_unconfirmed(
+ peer,
+ candidate_hash_a,
+ relay_hash_a,
+ group_index_a,
+ Some((relay_hash_a, para_id_a)),
+ ),
+ Ok(())
+ );
+ }
+
+ // Tests that:
+ //
+ // - When the advertisement matches, confirming does not change the parent hash index.
+ // - When it doesn't match, confirming updates the index. Specifically, confirming should prune
+ // unconfirmed claims.
+ #[test]
+ fn confirming_maintains_parent_hash_index() {
+ let relay_head_data = HeadData(vec![1, 2, 3]);
+ let relay_hash = relay_head_data.hash();
+
+ let candidate_head_data_a = HeadData(vec![1]);
+ let candidate_head_data_b = HeadData(vec![2]);
+ let candidate_head_data_c = HeadData(vec![3]);
+ let candidate_head_data_d = HeadData(vec![4]);
+ let candidate_head_data_hash_a = candidate_head_data_a.hash();
+ let candidate_head_data_hash_b = candidate_head_data_b.hash();
+ let candidate_head_data_hash_c = candidate_head_data_c.hash();
+ let candidate_head_data_hash_d = candidate_head_data_d.hash();
+
+ let (candidate_a, pvd_a) = make_candidate(
+ relay_hash,
+ 1,
+ 1.into(),
+ relay_head_data,
+ candidate_head_data_a.clone(),
+ Hash::from_low_u64_be(1000).into(),
+ );
+ let (candidate_b, pvd_b) = make_candidate(
+ relay_hash,
+ 1,
+ 1.into(),
+ candidate_head_data_a,
+ candidate_head_data_b.clone(),
+ Hash::from_low_u64_be(2000).into(),
+ );
+ let (candidate_c, pvd_c) = make_candidate(
+ relay_hash,
+ 1,
+ 1.into(),
+ candidate_head_data_b.clone(),
+ candidate_head_data_c.clone(),
+ Hash::from_low_u64_be(3000).into(),
+ );
+ let (candidate_d, pvd_d) = make_candidate(
+ relay_hash,
+ 1,
+ 1.into(),
+ candidate_head_data_c.clone(),
+ candidate_head_data_d,
+ Hash::from_low_u64_be(4000).into(),
+ );
+
+ let candidate_hash_a = candidate_a.hash();
+ let candidate_hash_b = candidate_b.hash();
+ let candidate_hash_c = candidate_c.hash();
+ let candidate_hash_d = candidate_d.hash();
+
+ let peer = PeerId::random();
+ let group_index = 100.into();
+
+ let mut candidates = Candidates::default();
+
+ // Insert some unconfirmed candidates.
+
+ // Advertise A without parent hash.
+ candidates.insert_unconfirmed(peer, candidate_hash_a, relay_hash, group_index, None);
+ assert_eq!(candidates.by_parent, HashMap::default());
+
+ // Advertise A with parent hash and ID.
+ candidates.insert_unconfirmed(
+ peer,
+ candidate_hash_a,
+ relay_hash,
+ group_index,
+ Some((relay_hash, 1.into())),
+ );
+ assert_eq!(
+ candidates.by_parent,
+ HashMap::from([((relay_hash, 1.into()), HashSet::from([candidate_hash_a]))])
+ );
+
+ // Advertise B with parent A.
+ candidates.insert_unconfirmed(
+ peer,
+ candidate_hash_b,
+ relay_hash,
+ group_index,
+ Some((candidate_head_data_hash_a, 1.into())),
+ );
+ assert_eq!(
+ candidates.by_parent,
+ HashMap::from([
+ ((relay_hash, 1.into()), HashSet::from([candidate_hash_a])),
+ ((candidate_head_data_hash_a, 1.into()), HashSet::from([candidate_hash_b]))
+ ])
+ );
+
+ // Advertise C with parent A.
+ candidates.insert_unconfirmed(
+ peer,
+ candidate_hash_c,
+ relay_hash,
+ group_index,
+ Some((candidate_head_data_hash_a, 1.into())),
+ );
+ assert_eq!(
+ candidates.by_parent,
+ HashMap::from([
+ ((relay_hash, 1.into()), HashSet::from([candidate_hash_a])),
+ (
+ (candidate_head_data_hash_a, 1.into()),
+ HashSet::from([candidate_hash_b, candidate_hash_c])
+ )
+ ])
+ );
+
+ // Advertise D with parent A.
+ candidates.insert_unconfirmed(
+ peer,
+ candidate_hash_d,
+ relay_hash,
+ group_index,
+ Some((candidate_head_data_hash_a, 1.into())),
+ );
+ assert_eq!(
+ candidates.by_parent,
+ HashMap::from([
+ ((relay_hash, 1.into()), HashSet::from([candidate_hash_a])),
+ (
+ (candidate_head_data_hash_a, 1.into()),
+ HashSet::from([candidate_hash_b, candidate_hash_c, candidate_hash_d])
+ )
+ ])
+ );
+
+ // Insert confirmed candidates and check parent hash index.
+
+ // Confirmation matches advertisement. Index should be unchanged.
+ candidates.confirm_candidate(candidate_hash_a, candidate_a, pvd_a, group_index);
+ assert_eq!(
+ candidates.by_parent,
+ HashMap::from([
+ ((relay_hash, 1.into()), HashSet::from([candidate_hash_a])),
+ (
+ (candidate_head_data_hash_a, 1.into()),
+ HashSet::from([candidate_hash_b, candidate_hash_c, candidate_hash_d])
+ )
+ ])
+ );
+ candidates.confirm_candidate(candidate_hash_b, candidate_b, pvd_b, group_index);
+ assert_eq!(
+ candidates.by_parent,
+ HashMap::from([
+ ((relay_hash, 1.into()), HashSet::from([candidate_hash_a])),
+ (
+ (candidate_head_data_hash_a, 1.into()),
+ HashSet::from([candidate_hash_b, candidate_hash_c, candidate_hash_d])
+ )
+ ])
+ );
+
+ // Confirmation does not match advertisement. Index should be updated.
+ candidates.confirm_candidate(candidate_hash_d, candidate_d, pvd_d, group_index);
+ assert_eq!(
+ candidates.by_parent,
+ HashMap::from([
+ ((relay_hash, 1.into()), HashSet::from([candidate_hash_a])),
+ (
+ (candidate_head_data_hash_a, 1.into()),
+ HashSet::from([candidate_hash_b, candidate_hash_c])
+ ),
+ ((candidate_head_data_hash_c, 1.into()), HashSet::from([candidate_hash_d]))
+ ])
+ );
+
+ // Make a new candidate for C with a different para ID.
+ let (new_candidate_c, new_pvd_c) = make_candidate(
+ relay_hash,
+ 1,
+ 2.into(),
+ candidate_head_data_b,
+ candidate_head_data_c.clone(),
+ Hash::from_low_u64_be(3000).into(),
+ );
+ candidates.confirm_candidate(candidate_hash_c, new_candidate_c, new_pvd_c, group_index);
+ assert_eq!(
+ candidates.by_parent,
+ HashMap::from([
+ ((relay_hash, 1.into()), HashSet::from([candidate_hash_a])),
+ ((candidate_head_data_hash_a, 1.into()), HashSet::from([candidate_hash_b])),
+ ((candidate_head_data_hash_b, 2.into()), HashSet::from([candidate_hash_c])),
+ ((candidate_head_data_hash_c, 1.into()), HashSet::from([candidate_hash_d]))
+ ])
+ );
+ }
+
+ #[test]
+ fn test_returned_post_confirmation() {
+ let relay_head_data = HeadData(vec![1, 2, 3]);
+ let relay_hash = relay_head_data.hash();
+
+ let candidate_head_data_a = HeadData(vec![1]);
+ let candidate_head_data_b = HeadData(vec![2]);
+ let candidate_head_data_c = HeadData(vec![3]);
+ let candidate_head_data_d = HeadData(vec![4]);
+ let candidate_head_data_hash_a = candidate_head_data_a.hash();
+ let candidate_head_data_hash_b = candidate_head_data_b.hash();
+ let candidate_head_data_hash_c = candidate_head_data_c.hash();
+ let candidate_head_data_hash_d = candidate_head_data_d.hash();
+
+ let (candidate_a, pvd_a) = make_candidate(
+ relay_hash,
+ 1,
+ 1.into(),
+ relay_head_data,
+ candidate_head_data_a.clone(),
+ Hash::from_low_u64_be(1000).into(),
+ );
+ let (candidate_b, pvd_b) = make_candidate(
+ relay_hash,
+ 1,
+ 1.into(),
+ candidate_head_data_a.clone(),
+ candidate_head_data_b.clone(),
+ Hash::from_low_u64_be(2000).into(),
+ );
+ let (candidate_c, pvd_c) = make_candidate(
+ relay_hash,
+ 1,
+ 1.into(),
+ candidate_head_data_a.clone(),
+ candidate_head_data_c.clone(),
+ Hash::from_low_u64_be(3000).into(),
+ );
+ let (candidate_d, pvd_d) = make_candidate(
+ relay_hash,
+ 1,
+ 1.into(),
+ candidate_head_data_b.clone(),
+ candidate_head_data_d,
+ Hash::from_low_u64_be(4000).into(),
+ );
+
+ let candidate_hash_a = candidate_a.hash();
+ let candidate_hash_b = candidate_b.hash();
+ let candidate_hash_c = candidate_c.hash();
+ let candidate_hash_d = candidate_d.hash();
+
+ let peer_a = PeerId::random();
+ let peer_b = PeerId::random();
+ let peer_c = PeerId::random();
+ let peer_d = PeerId::random();
+
+ let group_index = 100.into();
- // TODO [now]: test that inserting unconfirmed rejects if claims are
- // incomptable.
+ let mut candidates = Candidates::default();
- // TODO [now]: test that confirming correctly maintains the parent hash index
+ // Insert some unconfirmed candidates.
- // TODO [now]: test that pruning unconfirmed claims correctly maintains the parent hash
- // index
+ // Advertise A without parent hash.
+ candidates.insert_unconfirmed(peer_a, candidate_hash_a, relay_hash, group_index, None);
+
+ // Advertise A with parent hash and ID.
+ candidates.insert_unconfirmed(
+ peer_a,
+ candidate_hash_a,
+ relay_hash,
+ group_index,
+ Some((relay_hash, 1.into())),
+ );
+
+ // (Correctly) advertise B with parent A. Do it from a couple of peers.
+ candidates.insert_unconfirmed(
+ peer_a,
+ candidate_hash_b,
+ relay_hash,
+ group_index,
+ Some((candidate_head_data_hash_a, 1.into())),
+ );
+ candidates.insert_unconfirmed(
+ peer_b,
+ candidate_hash_b,
+ relay_hash,
+ group_index,
+ Some((candidate_head_data_hash_a, 1.into())),
+ );
+
+	// (Wrongly) advertise C with parent A. Do it from a couple of peers.
+ candidates.insert_unconfirmed(
+ peer_b,
+ candidate_hash_c,
+ relay_hash,
+ group_index,
+ Some((candidate_head_data_hash_a, 1.into())),
+ );
+ candidates.insert_unconfirmed(
+ peer_c,
+ candidate_hash_c,
+ relay_hash,
+ group_index,
+ Some((candidate_head_data_hash_a, 1.into())),
+ );
+
+ // Advertise D. Do it correctly from one peer (parent B) and wrongly from another (parent A).
+ candidates.insert_unconfirmed(
+ peer_c,
+ candidate_hash_d,
+ relay_hash,
+ group_index,
+ Some((candidate_head_data_hash_b, 1.into())),
+ );
+ candidates.insert_unconfirmed(
+ peer_d,
+ candidate_hash_d,
+ relay_hash,
+ group_index,
+ Some((candidate_head_data_hash_a, 1.into())),
+ );
+
+ assert_eq!(
+ candidates.by_parent,
+ HashMap::from([
+ ((relay_hash, 1.into()), HashSet::from([candidate_hash_a])),
+ (
+ (candidate_head_data_hash_a, 1.into()),
+ HashSet::from([candidate_hash_b, candidate_hash_c, candidate_hash_d])
+ ),
+ ((candidate_head_data_hash_b, 1.into()), HashSet::from([candidate_hash_d]))
+ ])
+ );
+
+ // Insert confirmed candidates and check parent hash index.
+
+ // Confirmation matches advertisement.
+ let post_confirmation = candidates.confirm_candidate(
+ candidate_hash_a,
+ candidate_a.clone(),
+ pvd_a.clone(),
+ group_index,
+ );
+ assert_eq!(
+ post_confirmation,
+ Some(PostConfirmation {
+ hypothetical: HypotheticalCandidate::Complete {
+ candidate_hash: candidate_hash_a,
+ receipt: Arc::new(candidate_a),
+ persisted_validation_data: pvd_a,
+ },
+ reckoning: PostConfirmationReckoning {
+ correct: HashSet::from([peer_a]),
+ incorrect: HashSet::from([]),
+ },
+ })
+ );
+
+ let post_confirmation = candidates.confirm_candidate(
+ candidate_hash_b,
+ candidate_b.clone(),
+ pvd_b.clone(),
+ group_index,
+ );
+ assert_eq!(
+ post_confirmation,
+ Some(PostConfirmation {
+ hypothetical: HypotheticalCandidate::Complete {
+ candidate_hash: candidate_hash_b,
+ receipt: Arc::new(candidate_b),
+ persisted_validation_data: pvd_b,
+ },
+ reckoning: PostConfirmationReckoning {
+ correct: HashSet::from([peer_a, peer_b]),
+ incorrect: HashSet::from([]),
+ },
+ })
+ );
+
+	// Confirm candidate with two wrong peers (different para ID).
+ let (new_candidate_c, new_pvd_c) = make_candidate(
+ relay_hash,
+ 1,
+ 2.into(),
+ candidate_head_data_b,
+ candidate_head_data_c.clone(),
+ Hash::from_low_u64_be(3000).into(),
+ );
+ let post_confirmation = candidates.confirm_candidate(
+ candidate_hash_c,
+ new_candidate_c.clone(),
+ new_pvd_c.clone(),
+ group_index,
+ );
+ assert_eq!(
+ post_confirmation,
+ Some(PostConfirmation {
+ hypothetical: HypotheticalCandidate::Complete {
+ candidate_hash: candidate_hash_c,
+ receipt: Arc::new(new_candidate_c),
+ persisted_validation_data: new_pvd_c,
+ },
+ reckoning: PostConfirmationReckoning {
+ correct: HashSet::from([]),
+ incorrect: HashSet::from([peer_b, peer_c]),
+ },
+ })
+ );
+
+ // Confirm candidate with one wrong peer (different parent head data).
+ let post_confirmation = candidates.confirm_candidate(
+ candidate_hash_d,
+ candidate_d.clone(),
+ pvd_d.clone(),
+ group_index,
+ );
+ assert_eq!(
+ post_confirmation,
+ Some(PostConfirmation {
+ hypothetical: HypotheticalCandidate::Complete {
+ candidate_hash: candidate_hash_d,
+ receipt: Arc::new(candidate_d),
+ persisted_validation_data: pvd_d,
+ },
+ reckoning: PostConfirmationReckoning {
+ correct: HashSet::from([peer_c]),
+ incorrect: HashSet::from([peer_d]),
+ },
+ })
+ );
+ }
+
+ #[test]
+ fn test_hypothetical_frontiers() {
+ let relay_head_data = HeadData(vec![1, 2, 3]);
+ let relay_hash = relay_head_data.hash();
+
+ let candidate_head_data_a = HeadData(vec![1]);
+ let candidate_head_data_b = HeadData(vec![2]);
+ let candidate_head_data_c = HeadData(vec![3]);
+ let candidate_head_data_d = HeadData(vec![4]);
+ let candidate_head_data_hash_a = candidate_head_data_a.hash();
+ let candidate_head_data_hash_b = candidate_head_data_b.hash();
+ let candidate_head_data_hash_c = candidate_head_data_c.hash();
+ let candidate_head_data_hash_d = candidate_head_data_d.hash();
+
+ let (candidate_a, pvd_a) = make_candidate(
+ relay_hash,
+ 1,
+ 1.into(),
+ relay_head_data,
+ candidate_head_data_a.clone(),
+ Hash::from_low_u64_be(1000).into(),
+ );
+ let (candidate_b, pvd_b) = make_candidate(
+ relay_hash,
+ 1,
+ 1.into(),
+ candidate_head_data_a.clone(),
+ candidate_head_data_b.clone(),
+ Hash::from_low_u64_be(2000).into(),
+ );
+ let (candidate_c, pvd_c) = make_candidate(
+ relay_hash,
+ 1,
+ 1.into(),
+ candidate_head_data_a.clone(),
+ candidate_head_data_c.clone(),
+ Hash::from_low_u64_be(3000).into(),
+ );
+ let (candidate_d, pvd_d) = make_candidate(
+ relay_hash,
+ 1,
+ 1.into(),
+ candidate_head_data_b.clone(),
+ candidate_head_data_d,
+ Hash::from_low_u64_be(4000).into(),
+ );
+
+ let candidate_hash_a = candidate_a.hash();
+ let candidate_hash_b = candidate_b.hash();
+ let candidate_hash_c = candidate_c.hash();
+ let candidate_hash_d = candidate_d.hash();
+
+ let peer = PeerId::random();
+ let group_index = 100.into();
+
+ let mut candidates = Candidates::default();
+
+ // Confirm A.
+ candidates.confirm_candidate(
+ candidate_hash_a,
+ candidate_a.clone(),
+ pvd_a.clone(),
+ group_index,
+ );
+
+ // Advertise B with parent A.
+ candidates.insert_unconfirmed(
+ peer,
+ candidate_hash_b,
+ relay_hash,
+ group_index,
+ Some((candidate_head_data_hash_a, 1.into())),
+ );
+
+ // Advertise C with parent A.
+ candidates.insert_unconfirmed(
+ peer,
+ candidate_hash_c,
+ relay_hash,
+ group_index,
+ Some((candidate_head_data_hash_a, 1.into())),
+ );
+
+ // Advertise D with parent B.
+ candidates.insert_unconfirmed(
+ peer,
+ candidate_hash_d,
+ relay_hash,
+ group_index,
+ Some((candidate_head_data_hash_b, 1.into())),
+ );
+
+ assert_eq!(
+ candidates.by_parent,
+ HashMap::from([
+ ((relay_hash, 1.into()), HashSet::from([candidate_hash_a])),
+ (
+ (candidate_head_data_hash_a, 1.into()),
+ HashSet::from([candidate_hash_b, candidate_hash_c])
+ ),
+ ((candidate_head_data_hash_b, 1.into()), HashSet::from([candidate_hash_d]))
+ ])
+ );
+
+ let hypothetical_a = HypotheticalCandidate::Complete {
+ candidate_hash: candidate_hash_a,
+ receipt: Arc::new(candidate_a),
+ persisted_validation_data: pvd_a,
+ };
+ let hypothetical_b = HypotheticalCandidate::Incomplete {
+ candidate_hash: candidate_hash_b,
+ candidate_para: 1.into(),
+ parent_head_data_hash: candidate_head_data_hash_a,
+ candidate_relay_parent: relay_hash,
+ };
+ let hypothetical_c = HypotheticalCandidate::Incomplete {
+ candidate_hash: candidate_hash_c,
+ candidate_para: 1.into(),
+ parent_head_data_hash: candidate_head_data_hash_a,
+ candidate_relay_parent: relay_hash,
+ };
+ let hypothetical_d = HypotheticalCandidate::Incomplete {
+ candidate_hash: candidate_hash_d,
+ candidate_para: 1.into(),
+ parent_head_data_hash: candidate_head_data_hash_b,
+ candidate_relay_parent: relay_hash,
+ };
+
+ let hypotheticals = candidates.frontier_hypotheticals(Some((relay_hash, 1.into())));
+ assert_eq!(hypotheticals.len(), 1);
+ assert!(hypotheticals.contains(&hypothetical_a));
+
+ let hypotheticals =
+ candidates.frontier_hypotheticals(Some((candidate_head_data_hash_a, 2.into())));
+ assert_eq!(hypotheticals.len(), 0);
+
+ let hypotheticals =
+ candidates.frontier_hypotheticals(Some((candidate_head_data_hash_a, 1.into())));
+ assert_eq!(hypotheticals.len(), 2);
+ assert!(hypotheticals.contains(&hypothetical_b));
+ assert!(hypotheticals.contains(&hypothetical_c));
+
+ let hypotheticals =
+ candidates.frontier_hypotheticals(Some((candidate_head_data_hash_d, 1.into())));
+ assert_eq!(hypotheticals.len(), 0);
+
+ let hypotheticals = candidates.frontier_hypotheticals(None);
+ assert_eq!(hypotheticals.len(), 4);
+ assert!(hypotheticals.contains(&hypothetical_a));
+ assert!(hypotheticals.contains(&hypothetical_b));
+ assert!(hypotheticals.contains(&hypothetical_c));
+ assert!(hypotheticals.contains(&hypothetical_d));
+ }
}
diff --git a/primitives/test-helpers/src/lib.rs b/primitives/test-helpers/src/lib.rs
index e734caeb35ba..c15f7c826122 100644
--- a/primitives/test-helpers/src/lib.rs
+++ b/primitives/test-helpers/src/lib.rs
@@ -23,14 +23,16 @@
//! contain randomness based data.
use polkadot_primitives::{
CandidateCommitments, CandidateDescriptor, CandidateReceipt, CollatorId, CollatorSignature,
- CommittedCandidateReceipt, Hash, HeadData, Id as ParaId, ValidationCode, ValidationCodeHash,
- ValidatorId,
+ CommittedCandidateReceipt, Hash, HeadData, Id as ParaId, PersistedValidationData,
+ ValidationCode, ValidationCodeHash, ValidatorId,
};
pub use rand;
use sp_application_crypto::sr25519;
use sp_keyring::Sr25519Keyring;
use sp_runtime::generic::Digest;
+const MAX_POV_SIZE: u32 = 1_000_000;
+
/// Creates a candidate receipt with filler data.
pub fn dummy_candidate_receipt>(relay_parent: H) -> CandidateReceipt {
CandidateReceipt:: {
@@ -146,6 +148,46 @@ pub fn dummy_collator_signature() -> CollatorSignature {
CollatorSignature::from(sr25519::Signature([0u8; 64]))
}
+/// Create a meaningless persisted validation data.
+pub fn dummy_pvd(parent_head: HeadData, relay_parent_number: u32) -> PersistedValidationData {
+ PersistedValidationData {
+ parent_head,
+ relay_parent_number,
+ max_pov_size: MAX_POV_SIZE,
+ relay_parent_storage_root: dummy_hash(),
+ }
+}
+
+/// Create a meaningless candidate, returning its receipt and PVD.
+pub fn make_candidate(
+ relay_parent_hash: Hash,
+ relay_parent_number: u32,
+ para_id: ParaId,
+ parent_head: HeadData,
+ head_data: HeadData,
+ validation_code_hash: ValidationCodeHash,
+) -> (CommittedCandidateReceipt, PersistedValidationData) {
+ let pvd = dummy_pvd(parent_head, relay_parent_number);
+ let commitments = CandidateCommitments {
+ head_data,
+ horizontal_messages: Vec::new(),
+ upward_messages: Vec::new(),
+ new_validation_code: None,
+ processed_downward_messages: 0,
+ hrmp_watermark: relay_parent_number,
+ };
+
+ let mut candidate =
+ dummy_candidate_receipt_bad_sig(relay_parent_hash, Some(Default::default()));
+ candidate.commitments_hash = commitments.hash();
+ candidate.descriptor.para_id = para_id;
+ candidate.descriptor.persisted_validation_data_hash = pvd.hash();
+ candidate.descriptor.validation_code_hash = validation_code_hash;
+ let candidate = CommittedCandidateReceipt { descriptor: candidate.descriptor, commitments };
+
+ (candidate, pvd)
+}
+
/// Create a new candidate descriptor, and apply a valid signature
/// using the provided `collator` key.
pub fn make_valid_candidate_descriptor>(