Fix DAS branch CI (#5793)
* Fix invalid syntax.

* Update cli doc. Ignore get_custody_columns test temporarily.

* Fix failing test and add verify inclusion test.

* Undo accidentally removed code.
jimmygchen authored May 15, 2024
1 parent b64dd9d commit 80892e6
Showing 10 changed files with 31 additions and 31 deletions.
24 changes: 6 additions & 18 deletions beacon_node/beacon_chain/src/test_utils.rs
@@ -2578,32 +2578,20 @@ pub fn generate_rand_block_and_blobs<E: EthSpec>(
     (block, blob_sidecars)
 }
 
-#[allow(clippy::type_complexity)]
 pub fn generate_rand_block_and_data_columns<E: EthSpec>(
     fork_name: ForkName,
     num_blobs: NumBlobs,
     rng: &mut impl Rng,
 ) -> (
     SignedBeaconBlock<E, FullPayload<E>>,
-    Vec<DataColumnSidecar<E>>,
+    Vec<Arc<DataColumnSidecar<E>>>,
 ) {
     let (block, blobs) = generate_rand_block_and_blobs(fork_name, num_blobs, rng);
-    let blob = blobs.first().expect("should have at least 1 blob");
 
-    let data_columns = (0..E::number_of_columns())
-        .map(|index| DataColumnSidecar {
-            index: index as u64,
-            column: <_>::default(),
-            kzg_commitments: block
-                .message()
-                .body()
-                .blob_kzg_commitments()
-                .unwrap()
-                .clone(),
-            kzg_proofs: (vec![]).into(),
-            signed_block_header: blob.signed_block_header.clone(),
-            kzg_commitments_inclusion_proof: <_>::default(),
-        })
-        .collect::<Vec<_>>();
+    let blob: BlobsList<E> = blobs.into_iter().map(|b| b.blob).collect::<Vec<_>>().into();
+    let data_columns = DataColumnSidecar::build_sidecars(&blob, &block, &KZG)
+        .unwrap()
+        .into();
 
     (block, data_columns)
 }
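
The refactor above means the helper now produces Arc-wrapped sidecars via DataColumnSidecar::build_sidecars instead of hand-rolled default values. A rough sketch of how a caller might consume it, assuming the usual test-harness context (`E` as the EthSpec type, `ForkName`, `NumBlobs`, and `rand`'s `StdRng` in scope); the assertions are illustrative, and `canonical_root()` is assumed from the wider codebase rather than this diff:

    #[test]
    fn generates_arc_wrapped_columns() {
        // Assumes harness imports: E (EthSpec), ForkName, NumBlobs,
        // rand::{rngs::StdRng, SeedableRng}, plus the helper refactored above.
        let mut rng = StdRng::seed_from_u64(0xDA5);
        let (block, data_columns) =
            generate_rand_block_and_data_columns::<E>(ForkName::Deneb, NumBlobs::Number(1), &mut rng);

        // Columns come back already Arc-wrapped, so callers can clone and
        // forward them to sync messages without an extra Arc::new.
        for column in &data_columns {
            assert_eq!(column.block_root(), block.canonical_root());
        }
    }
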
4 changes: 2 additions & 2 deletions beacon_node/beacon_chain/tests/block_verification.rs
@@ -1362,7 +1362,7 @@ async fn add_base_block_to_altair_chain() {
             )
             .await,
         ChainSegmentResult::Failed {
-            ..
+            imported_blocks: _,
             error: BlockError::InconsistentFork(InconsistentFork {
                 fork_at_slot: ForkName::Altair,
                 object_fork: ForkName::Base,
@@ -1497,7 +1497,7 @@ async fn add_altair_block_to_base_chain() {
             )
             .await,
         ChainSegmentResult::Failed {
-            ..
+            imported_blocks: _,
             error: BlockError::InconsistentFork(InconsistentFork {
                 fork_at_slot: ForkName::Base,
                 object_fork: ForkName::Altair,
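
These two hunks are the "Fix invalid syntax" item from the commit message: in a struct pattern, a `..` rest pattern must come last, so `Failed { .., error: ... }` does not compile. Spelling the field out as `imported_blocks: _` fixes the syntax and also makes the test fail to compile if the variant ever changes shape. A plain-Rust sketch of the pattern style, using stand-in types rather than Lighthouse's:

    struct Failed {
        imported_blocks: usize,
        error: String,
    }

    fn describe(failure: &Failed) -> &str {
        match failure {
            // Explicitly ignoring the field keeps the pattern exhaustive over
            // named fields; a `..` would also compile, but only if it comes
            // after `error`, and it silently tolerates new fields.
            Failed { imported_blocks: _, error } => error,
        }
    }

    fn main() {
        let failure = Failed { imported_blocks: 0, error: "inconsistent fork".into() };
        println!("{}", describe(&failure));
    }
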
14 changes: 8 additions & 6 deletions beacon_node/network/src/sync/block_lookups/tests.rs
@@ -241,7 +241,9 @@ impl TestRig {
         generate_rand_block_and_blobs::<E>(fork_name, num_blobs, rng)
     }
 
-    fn rand_block_and_data_columns(&mut self) -> (SignedBeaconBlock<E>, Vec<DataColumnSidecar<E>>) {
+    fn rand_block_and_data_columns(
+        &mut self,
+    ) -> (SignedBeaconBlock<E>, Vec<Arc<DataColumnSidecar<E>>>) {
         let num_blobs = NumBlobs::Number(1);
         generate_rand_block_and_data_columns::<E>(self.fork_name, num_blobs, &mut self.rng)
     }
@@ -640,7 +642,7 @@ impl TestRig {
     fn complete_valid_sampling_column_requests(
         &mut self,
         sampling_ids: SamplingIds,
-        data_columns: Vec<DataColumnSidecar<E>>,
+        data_columns: Vec<Arc<DataColumnSidecar<E>>>,
     ) {
         for (id, column_index) in sampling_ids {
             self.log(&format!("return valid data column for {column_index}"));
@@ -654,7 +656,7 @@ impl TestRig {
     fn complete_valid_sampling_column_request(
         &mut self,
         id: DataColumnsByRootRequestId,
-        data_column: DataColumnSidecar<E>,
+        data_column: Arc<DataColumnSidecar<E>>,
     ) {
         let block_root = data_column.block_root();
         let column_index = data_column.index;
@@ -677,7 +679,7 @@ impl TestRig {
     fn complete_valid_custody_request(
         &mut self,
         sampling_ids: SamplingIds,
-        data_columns: Vec<DataColumnSidecar<E>>,
+        data_columns: Vec<Arc<DataColumnSidecar<E>>>,
         missing_components: bool,
     ) {
         let lookup_id = if let DataColumnsByRootRequester::Custody(id) =
@@ -720,14 +722,14 @@ impl TestRig {
     fn complete_data_columns_by_root_request(
         &mut self,
         id: DataColumnsByRootRequestId,
-        data_column: DataColumnSidecar<E>,
+        data_column: Arc<DataColumnSidecar<E>>,
     ) {
         let peer_id = PeerId::random();
         // Send chunk
         self.send_sync_message(SyncMessage::RpcDataColumn {
             request_id: SyncRequestId::DataColumnsByRoot(id),
             peer_id,
-            data_column: Some(Arc::new(data_column)),
+            data_column: Some(data_column),
             seen_timestamp: timestamp_now(),
         });
         // Send stream termination
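
The common thread in this file is that data columns now flow as `Arc<DataColumnSidecar<E>>` from the generator all the way into `SyncMessage::RpcDataColumn`, so the helpers no longer re-wrap them with `Arc::new`. Cloning an `Arc` bumps a reference count instead of copying the payload, which is what makes handing the same column to several requests cheap; a small stand-alone illustration (the `Sidecar` type here is a stand-in, not Lighthouse's):

    use std::sync::Arc;

    struct Sidecar {
        index: u64,
        column: Vec<u8>, // stands in for the (large) column payload
    }

    fn main() {
        let sidecar = Arc::new(Sidecar { index: 3, column: vec![0u8; 1024] });

        // Each clone copies the pointer and increments the refcount; the
        // 1 KiB payload itself is never duplicated.
        let for_sampling = Arc::clone(&sidecar);
        let for_custody = sidecar.clone();

        assert_eq!(Arc::strong_count(&sidecar), 3);
        assert_eq!(for_sampling.index, for_custody.index);
    }
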
2 changes: 1 addition & 1 deletion beacon_node/network/src/sync/block_sidecar_coupling.rs
@@ -299,7 +299,7 @@ mod tests {
         for block in &blocks {
             for column in &block.1 {
                 if expects_custody_columns.contains(&column.index) {
-                    info.add_data_column(Some(column.clone().into()));
+                    info.add_data_column(Some(column.clone()));
                 }
             }
         }
2 changes: 2 additions & 0 deletions beacon_node/src/cli.rs
@@ -47,9 +47,11 @@ pub fn cli_app<'a, 'b>() -> App<'a, 'b> {
                 .takes_value(false),
         )
         .arg(
+            // TODO(das): remove this before release
             Arg::with_name("malicious-withhold-count")
                 .long("malicious-withhold-count")
                 .help("TESTING ONLY do not use this")
+                .hidden(true)
                 .takes_value(true),
         )
         .arg(
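
The new `.hidden(true)` is what removes `--malicious-withhold-count` from the generated help text (hence the deletion in `book/src/help_bn.md` further down) while leaving the flag parseable. A self-contained clap 2.x sketch of that behaviour, with a made-up flag name:

    // clap 2.x sketch; `demo-withhold-count` is a hypothetical flag.
    use clap::{App, Arg};

    fn main() {
        let matches = App::new("demo")
            .arg(
                Arg::with_name("demo-withhold-count")
                    .long("demo-withhold-count")
                    .help("TESTING ONLY do not use this")
                    // Hidden args are still parsed, but omitted from --help.
                    .hidden(true)
                    .takes_value(true),
            )
            .get_matches_from(vec!["demo", "--demo-withhold-count", "2"]);

        assert_eq!(matches.value_of("demo-withhold-count"), Some("2"));
    }
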
6 changes: 6 additions & 0 deletions beacon_node/src/config.rs
@@ -443,6 +443,12 @@ pub fn get_config<E: EthSpec>(
         client_config.store.epochs_per_blob_prune = epochs_per_blob_prune;
     }
 
+    if let Some(blob_prune_margin_epochs) =
+        clap_utils::parse_optional(cli_args, "blob-prune-margin-epochs")?
+    {
+        client_config.store.blob_prune_margin_epochs = blob_prune_margin_epochs;
+    }
+
     if let Some(malicious_withhold_count) =
         clap_utils::parse_optional(cli_args, "malicious-withhold-count")?
     {
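
This restored block is the "Undo accidentally removed code" item: the `blob-prune-margin-epochs` value is read back off the CLI and only overrides the stored default when the operator actually set it. A std-only sketch of the same shape, where `parse_optional` is a hypothetical stand-in for `clap_utils::parse_optional`, not its real implementation:

    // Hypothetical stand-in: absent flag -> Ok(None), present but
    // unparsable -> Err, otherwise Ok(Some(value)).
    fn parse_optional(args: &[(&str, &str)], name: &str) -> Result<Option<u64>, String> {
        args.iter()
            .find(|(k, _)| *k == name)
            .map(|(_, v)| v.parse::<u64>().map_err(|e| format!("--{name}: {e}")))
            .transpose()
    }

    fn main() -> Result<(), String> {
        let cli_args = [("blob-prune-margin-epochs", "2")];
        let mut blob_prune_margin_epochs = 0u64; // stored default

        // Only override the default when the flag was supplied.
        if let Some(margin) = parse_optional(&cli_args, "blob-prune-margin-epochs")? {
            blob_prune_margin_epochs = margin;
        }
        assert_eq!(blob_prune_margin_epochs, 2);
        Ok(())
    }
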
5 changes: 1 addition & 4 deletions book/src/help_bn.md
@@ -259,7 +259,7 @@ OPTIONS:
         --graffiti <GRAFFITI>
             Specify your custom graffiti to be included in blocks. Defaults to the current version and commit, truncated
-            to fit in 32 bytes.
+            to fit in 32 bytes.
         --historic-state-cache-size <SIZE>
             Specifies how many states from the freezer database should cache in memory [default: 1]
@@ -324,9 +324,6 @@ OPTIONS:
         --logfile-max-size <SIZE>
             The maximum size (in MB) each log file can grow to before rotating. If set to 0, background file logging is
             disabled. [default: 200]
-        --malicious-withhold-count <malicious-withhold-count>
-            TESTING ONLY do not use this
         --max-skip-slots <NUM_SLOTS>
             Refuse to skip more than this many slots when processing an attestation. This prevents nodes on minority
             forks from wasting our time and disk space, but could also cause unnecessary consensus failures, so is
1 change: 1 addition & 0 deletions consensus/types/src/data_column_sidecar.rs
@@ -461,6 +461,7 @@ mod test {
             col_sidecar.kzg_commitments_inclusion_proof,
             block_kzg_commitments_inclusion_proof
         );
+        assert!(col_sidecar.verify_inclusion_proof());
     }
 }
 
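
This assertion is the "verify inclusion test" from the commit message: every generated column sidecar should carry a Merkle proof tying its KZG commitments to the block body behind its signed block header. A hedged sketch of the same check, written against only the calls visible in this diff (`blobs` as a BlobsList, `block`, and `KZG` are assumed to come from the surrounding test setup):

    // Sketch only: setup mirrors test_utils.rs above.
    let data_columns = DataColumnSidecar::build_sidecars(&blobs, &block, &KZG).unwrap();
    for col_sidecar in data_columns.iter() {
        // The kzg_commitments_inclusion_proof must verify against the body
        // of the block committed to by signed_block_header.
        assert!(col_sidecar.verify_inclusion_proof());
    }
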
2 changes: 2 additions & 0 deletions testing/ef_tests/check_all_files_accessed.py
@@ -22,6 +22,8 @@
 excluded_paths = [
     # TODO(das): remove once electra tests are on unstable
     "tests/.*/electra/",
+    # TODO(das): ignore until new spec test release with column subnet count = 64.
+    "tests/.*/.*/.*/get_custody_columns/",
     # Eth1Block and PowBlock
     #
     # Intentionally omitted, as per https://github.com/sigp/lighthouse/issues/1835
2 changes: 2 additions & 0 deletions testing/ef_tests/tests/tests.rs
@@ -775,7 +775,9 @@ fn rewards() {
     }
 }
 
+// TODO(das): ignore until new spec test release with column subnet count = 64.
 #[test]
+#[ignore]
 fn get_custody_columns() {
     GetCustodyColumnsHandler::<MainnetEthSpec>::default()
         .run_for_feature(ForkName::Deneb, FeatureName::Eip7594);
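
Marking the test `#[ignore]` rather than deleting it keeps it compiling and easy to re-enable once a spec-test release with the 64-column subnet count ships. It is skipped by default but, as with any ignored libtest test, can still be exercised on demand (any feature flags the ef_tests harness normally needs aside), roughly:

    cargo test get_custody_columns -- --ignored
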
