diff --git a/Cargo.lock b/Cargo.lock
index 03882df..fe8bf2f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -28,7 +28,7 @@ dependencies = [
 
 [[package]]
 name = "aleph-bft"
-version = "0.39.0"
+version = "0.39.1"
 dependencies = [
  "aleph-bft-mock",
  "aleph-bft-rmc",
diff --git a/consensus/Cargo.toml b/consensus/Cargo.toml
index 86b8f5f..300a89a 100644
--- a/consensus/Cargo.toml
+++ b/consensus/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "aleph-bft"
-version = "0.39.0"
+version = "0.39.1"
 edition = "2021"
 authors = ["Cardinal Cryptography"]
 categories = ["algorithms", "data-structures", "cryptography", "database"]
diff --git a/consensus/src/dag/mod.rs b/consensus/src/dag/mod.rs
index a38f306..b57135d 100644
--- a/consensus/src/dag/mod.rs
+++ b/consensus/src/dag/mod.rs
@@ -468,7 +468,7 @@ mod test {
             .take(5)
             .cloned()
             .collect();
-        let fork = random_unit_with_parents(forker_id, &fork_parents);
+        let fork = random_unit_with_parents(forker_id, &fork_parents, 3);
         let fork = Signed::sign(fork, &keychains[forker_id.0]);
         let unit = units
             .get(3)
@@ -552,7 +552,7 @@ mod test {
             .take(5)
             .cloned()
             .collect();
-        let fork = random_unit_with_parents(forker_id, &fork_parents);
+        let fork = random_unit_with_parents(forker_id, &fork_parents, 3);
         let fork = Signed::sign(fork, &keychains[forker_id.0]);
         let unit = units
             .get(3)
diff --git a/consensus/src/extension/election.rs b/consensus/src/extension/election.rs
index 348fd31..c2afc17 100644
--- a/consensus/src/extension/election.rs
+++ b/consensus/src/extension/election.rs
@@ -217,8 +217,8 @@ mod test {
             units::Units,
         },
         units::{
-            random_full_parent_reconstrusted_units_up_to, random_reconstructed_unit_with_parents,
-            TestingDagUnit, Unit,
+            minimal_reconstructed_dag_units_up_to, random_full_parent_reconstrusted_units_up_to,
+            random_reconstructed_unit_with_parents, TestingDagUnit, Unit,
         },
         NodeCount,
     };
@@ -344,6 +344,7 @@ mod test {
                     creator,
                     &parents,
                     &keychains[creator.0],
+                    round,
                 ));
             }
         }
@@ -356,4 +357,32 @@ mod test {
             }
         }
     }
+
+    #[test]
+    #[ignore]
+    // TODO(A0-4563) Uncomment after changes to parent voting code
+    fn given_minimal_dag_with_orphaned_node_when_electing_then_orphaned_node_is_not_head() {
+        use ElectionResult::*;
+        let mut units = Units::new();
+        let n_members = NodeCount(14);
+        let max_round = 4;
+        let session_id = 2137;
+        let keychains = Keychain::new_vec(n_members);
+
+        let (dag, inactive_node_first_unit) =
+            minimal_reconstructed_dag_units_up_to(max_round, n_members, session_id, &keychains);
+        for round in dag {
+            for unit in round {
+                units.add_unit(unit);
+            }
+        }
+        let election = RoundElection::for_round(0, &units).expect("we have enough rounds");
+        match election {
+            Pending(_) => panic!("should have elected"),
+            Elected(head) => {
+                // This should be the second unit in order, as the first was not popular.
+                assert_ne!(head, inactive_node_first_unit.hash());
+            }
+        }
+    }
 }
diff --git a/consensus/src/extension/extender.rs b/consensus/src/extension/extender.rs
index 6ea920d..bf55602 100644
--- a/consensus/src/extension/extender.rs
+++ b/consensus/src/extension/extender.rs
@@ -70,6 +70,7 @@ impl Extender {
 
 #[cfg(test)]
 mod test {
+    use crate::units::{minimal_reconstructed_dag_units_up_to, Unit};
     use crate::{
         extension::extender::Extender, units::random_full_parent_reconstrusted_units_up_to,
         NodeCount, Round,
@@ -97,4 +98,30 @@
             assert_eq!(batch.len(), n_members.0);
         }
     }
+
+    #[test]
+    #[ignore]
+    // TODO(A0-4563) Uncomment after changes to parent voting code
+    fn given_minimal_dag_with_orphaned_node_when_producing_batches_have_correct_length() {
+        let mut extender = Extender::new();
+        let n_members = NodeCount(4);
+        let threshold = n_members.consensus_threshold();
+        let max_round: Round = 4;
+        let session_id = 2137;
+        let keychains = Keychain::new_vec(n_members);
+        let mut batches = Vec::new();
+        let (dag, _) =
+            minimal_reconstructed_dag_units_up_to(max_round, n_members, session_id, &keychains);
+        for round in dag {
+            for unit in round {
+                batches.append(&mut extender.add_unit(unit));
+            }
+        }
+        assert_eq!(batches.len(), (max_round - 3).into());
+        assert_eq!(batches[0].len(), 1);
+        assert_eq!(batches[0][0].round(), 0);
+        for batch in batches.iter().skip(1) {
+            assert_eq!(batch.len(), threshold.0);
+        }
+    }
 }
diff --git a/consensus/src/units/mod.rs b/consensus/src/units/mod.rs
index aad76d9..285bfa1 100644
--- a/consensus/src/units/mod.rs
+++ b/consensus/src/units/mod.rs
@@ -15,11 +15,12 @@ mod validator;
 
 pub(crate) use store::*;
 #[cfg(test)]
 pub use testing::{
-    create_preunits, creator_set, full_unit_to_unchecked_signed_unit, preunit_to_full_unit,
-    preunit_to_signed_unit, preunit_to_unchecked_signed_unit,
-    random_full_parent_reconstrusted_units_up_to, random_full_parent_units_up_to,
-    random_reconstructed_unit_with_parents, random_unit_with_parents, DagUnit as TestingDagUnit,
-    FullUnit as TestingFullUnit, SignedUnit as TestingSignedUnit, WrappedSignedUnit,
+    create_preunits, creator_set, full_unit_to_unchecked_signed_unit,
+    minimal_reconstructed_dag_units_up_to, preunit_to_full_unit, preunit_to_signed_unit,
+    preunit_to_unchecked_signed_unit, random_full_parent_reconstrusted_units_up_to,
+    random_full_parent_units_up_to, random_reconstructed_unit_with_parents,
+    random_unit_with_parents, DagUnit as TestingDagUnit, FullUnit as TestingFullUnit,
+    SignedUnit as TestingSignedUnit, WrappedSignedUnit,
 };
 pub use validator::{ValidationError, Validator};
diff --git a/consensus/src/units/testing.rs b/consensus/src/units/testing.rs
index dfaa4a8..14ad007 100644
--- a/consensus/src/units/testing.rs
+++ b/consensus/src/units/testing.rs
@@ -1,3 +1,4 @@
+use crate::units::TestingDagUnit;
 use crate::{
     creation::Creator as GenericCreator,
     dag::ReconstructedUnit,
@@ -9,6 +10,7 @@ use crate::{
     NodeCount, NodeIndex, NodeMap, Round, SessionId, Signed,
 };
 use aleph_bft_mock::{Data, Hash64, Hasher64, Keychain, Signature};
+use rand::prelude::IteratorRandom;
 
 type ControlHash = GenericControlHash<Hasher64>;
 type Creator = GenericCreator<Hasher64>;
@@ -149,10 +151,10 @@ fn parent_map<U: Unit<Hasher = Hasher64>>(parents: &Vec<U>) -> NodeMap<Hash64> {
 pub fn random_unit_with_parents<U: Unit<Hasher = Hasher64>>(
     creator: NodeIndex,
     parents: &Vec<U>,
+    round: Round,
 ) -> FullUnit {
     let representative_parent = parents.last().expect("there are parents");
     let session_id = representative_parent.session_id();
-    let round = representative_parent.round() + 1;
     let parent_map = parent_map(parents);
     let control_hash = ControlHash::new(&parent_map);
     preunit_to_full_unit(PreUnit::new(creator, round, control_hash), session_id)
@@ -162,9 +164,10 @@ pub fn random_reconstructed_unit_with_parents<U: Unit<Hasher = Hasher64>>(
     creator: NodeIndex,
     parents: &Vec<U>,
     keychain: &Keychain,
+    round: Round,
 ) -> DagUnit {
     ReconstructedUnit::with_parents(
-        full_unit_to_signed_unit(random_unit_with_parents(creator, parents), keychain),
+        full_unit_to_signed_unit(random_unit_with_parents(creator, parents, round), keychain),
         parent_map(parents),
     )
     .expect("correct parents")
@@ -176,11 +179,11 @@ pub fn random_full_parent_units_up_to(
     round: Round,
     n_members: NodeCount,
     session_id: SessionId,
 ) -> Vec<Vec<FullUnit>> {
     let mut result = vec![random_initial_units(n_members, session_id)];
-    for _ in 0..round {
+    for r in 1..=round {
         let units = n_members
             .into_iterator()
             .map(|node_id| {
-                random_unit_with_parents(node_id, result.last().expect("previous round present"))
+                random_unit_with_parents(node_id, result.last().expect("previous round present"), r)
             })
             .collect();
         result.push(units);
@@ -188,6 +191,8 @@ pub fn random_full_parent_units_up_to(
     result
 }
 
+/// Constructs a DAG so that in each round (except round 0) every unit has all N parents, where N is the
+/// number of nodes in the DAG.
 pub fn random_full_parent_reconstrusted_units_up_to(
     round: Round,
     n_members: NodeCount,
@@ -197,7 +202,7 @@ pub fn random_full_parent_reconstrusted_units_up_to(
     let mut result = vec![random_initial_reconstructed_units(
         n_members, session_id, keychains,
     )];
-    for _ in 0..round {
+    for r in 1..=round {
         let units = n_members
             .into_iterator()
             .map(|node_id| {
@@ -205,6 +210,7 @@ pub fn random_full_parent_reconstrusted_units_up_to(
                     node_id,
                     result.last().expect("previous round present"),
                     &keychains[node_id.0],
+                    r,
                 )
             })
             .collect();
@@ -212,3 +218,54 @@ pub fn random_full_parent_reconstrusted_units_up_to(
     }
     result
 }
+
+/// Constructs a DAG so that in each round (except round 0) every unit has at least 2N/3 + 1 parents, where
+/// N is the number of nodes in the DAG. At least one node from the N/3 group has some non-direct parents.
+pub fn minimal_reconstructed_dag_units_up_to(
+    round: Round,
+    n_members: NodeCount,
+    session_id: SessionId,
+    keychains: &[Keychain],
+) -> (Vec<Vec<DagUnit>>, DagUnit) {
+    let mut rng = rand::thread_rng();
+    let threshold = n_members.consensus_threshold().0;
+
+    let mut dag = vec![random_initial_reconstructed_units(
+        n_members, session_id, keychains,
+    )];
+    let inactive_node_first_and_last_seen_unit = dag
+        .last()
+        .expect("previous round present")
+        .last()
+        .expect("there is at least one node")
+        .clone();
+    let inactive_node = inactive_node_first_and_last_seen_unit.creator();
+    for r in 1..=round {
+        let mut parents: Vec<DagUnit> = dag
+            .last()
+            .expect("previous round present")
+            .clone()
+            .into_iter()
+            .filter(|unit| unit.creator() != inactive_node)
+            .choose_multiple(&mut rng, threshold)
+            .into_iter()
+            .collect();
+        if r == round {
+            let ancestor_unit = dag
+                .first()
+                .expect("first round present")
+                .get(inactive_node.0)
+                .expect("inactive node unit present");
+            parents.push(ancestor_unit.clone());
+        }
+        let units = n_members
+            .into_iterator()
+            .filter(|node_id| node_id != &inactive_node)
+            .map(|node_id| {
+                random_reconstructed_unit_with_parents(node_id, &parents, &keychains[node_id.0], r)
+            })
+            .collect();
+        dag.push(units);
+    }
+    (dag, inactive_node_first_and_last_seen_unit)
+}
diff --git a/consensus/src/units/validator.rs b/consensus/src/units/validator.rs
index 53c9ceb..362e3d4 100644
--- a/consensus/src/units/validator.rs
+++ b/consensus/src/units/validator.rs
@@ -254,7 +254,7 @@ mod tests {
             .take(4)
             .cloned()
             .collect();
-        let unit = random_unit_with_parents(creator_id, &parents);
+        let unit = random_unit_with_parents(creator_id, &parents, 1);
         let preunit = unit.as_pre_unit().clone();
         let keychain = Keychain::new(n_members, creator_id);
         let unchecked_unit = full_unit_to_unchecked_signed_unit(unit, &keychain);