Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add DAS KZG in data column construction #5210

Merged
merged 10 commits into from
Mar 12, 2024
488 changes: 245 additions & 243 deletions .github/workflows/test-suite.yml

Large diffs are not rendered by default.

3 changes: 2 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

57 changes: 51 additions & 6 deletions beacon_node/beacon_chain/src/block_verification.rs
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,9 @@ use crate::block_verification_types::{
AsBlock, BlockContentsError, BlockImportData, GossipVerifiedBlockContents, RpcBlock,
};
use crate::data_availability_checker::{AvailabilityCheckError, MaybeAvailableBlock};
use crate::data_column_verification::GossipDataColumnError;
use crate::data_column_verification::{
GossipDataColumnError, GossipVerifiedDataColumn, GossipVerifiedDataColumnList,
};
use crate::eth1_finalization_cache::Eth1FinalizationData;
use crate::execution_payload::{
is_optimistic_candidate_block, validate_execution_payload_for_gossip, validate_merge_block,
Expand Down Expand Up @@ -99,10 +101,12 @@ use std::time::Duration;
use store::{Error as DBError, HotStateSummary, KeyValueStore, StoreOp};
use task_executor::JoinHandle;
use tree_hash::TreeHash;
use types::data_column_sidecar::DataColumnSidecarError;
use types::{
BeaconBlockRef, BeaconState, BeaconStateError, ChainSpec, CloneConfig, Epoch, EthSpec,
ExecutionBlockHash, Hash256, InconsistentFork, PublicKey, PublicKeyBytes, RelativeEpoch,
SignedBeaconBlock, SignedBeaconBlockHeader, Slot,
BeaconBlockRef, BeaconState, BeaconStateError, BlobSidecarList, ChainSpec, CloneConfig,
DataColumnSidecar, DataColumnSubnetId, Epoch, EthSpec, ExecutionBlockHash, Hash256,
InconsistentFork, PublicKey, PublicKeyBytes, RelativeEpoch, SignedBeaconBlock,
SignedBeaconBlockHeader, Slot,
};
use types::{BlobSidecar, ExecPayload};

Expand Down Expand Up @@ -727,7 +731,7 @@ impl<T: BeaconChainTypes> IntoGossipVerifiedBlockContents<T> for PublishBlockReq
let _timer =
metrics::start_timer(&metrics::BLOB_SIDECAR_INCLUSION_PROOF_COMPUTATION);
let blob = BlobSidecar::new(i, blob, &block, *kzg_proof)
.map_err(BlockContentsError::SidecarError)?;
.map_err(BlockContentsError::BlobSidecarError)?;
drop(_timer);
let gossip_verified_blob =
GossipVerifiedBlob::new(Arc::new(blob), i as u64, chain)?;
Expand All @@ -737,9 +741,50 @@ impl<T: BeaconChainTypes> IntoGossipVerifiedBlockContents<T> for PublishBlockReq
Ok::<_, BlockContentsError<T::EthSpec>>(gossip_verified_blobs)
})
.transpose()?;

let gossip_verified_data_columns = gossip_verified_blobs
.as_ref()
.map(|blobs| {
// NOTE: we expect KZG to be initialized if the blobs are present
let kzg = chain
.kzg
.as_ref()
.ok_or(BlockContentsError::DataColumnError(
GossipDataColumnError::<T::EthSpec>::KzgNotInitialized,
))?;

let blob_sidecar_list: Vec<_> =
blobs.iter().map(|blob| blob.clone_blob()).collect();
let blob_sidecar_list = BlobSidecarList::new(blob_sidecar_list)
.map_err(DataColumnSidecarError::SszError)?;
let sidecars = DataColumnSidecar::build_sidecars(&blob_sidecar_list, &block, kzg)?;
let mut gossip_verified_data_columns = vec![];
for sidecar in sidecars {
let subnet = DataColumnSubnetId::try_from_column_index::<T::EthSpec>(
sidecar.index as usize,
)
.map_err(|_| {
BlockContentsError::<T::EthSpec>::DataColumnSidecarError(
DataColumnSidecarError::DataColumnIndexOutOfBounds,
)
})?;
let column = GossipVerifiedDataColumn::new(sidecar, subnet.into(), chain)?;
gossip_verified_data_columns.push(column);
}
let gossip_verified_data_columns =
GossipVerifiedDataColumnList::new(gossip_verified_data_columns)
.map_err(DataColumnSidecarError::SszError)?;
Ok::<_, BlockContentsError<T::EthSpec>>(gossip_verified_data_columns)
})
.transpose()?;

let gossip_verified_block = GossipVerifiedBlock::new(block, chain)?;

Ok((gossip_verified_block, gossip_verified_blobs))
Ok((
gossip_verified_block,
gossip_verified_blobs,
gossip_verified_data_columns,
))
}

fn inner_block(&self) -> &SignedBeaconBlock<T::EthSpec> {
Expand Down
38 changes: 31 additions & 7 deletions beacon_node/beacon_chain/src/block_verification_types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,15 @@ use crate::blob_verification::{GossipBlobError, GossipVerifiedBlobList};
use crate::block_verification::BlockError;
use crate::data_availability_checker::AvailabilityCheckError;
pub use crate::data_availability_checker::{AvailableBlock, MaybeAvailableBlock};
use crate::data_column_verification::{GossipDataColumnError, GossipVerifiedDataColumnList};
use crate::eth1_finalization_cache::Eth1FinalizationData;
use crate::{get_block_root, GossipVerifiedBlock, PayloadVerificationOutcome};
use derivative::Derivative;
use ssz_types::VariableList;
use state_processing::ConsensusContext;
use std::sync::Arc;
use types::blob_sidecar::{BlobIdentifier, BlobSidecarError, FixedBlobSidecarList};
use types::data_column_sidecar::DataColumnSidecarList;
use types::blob_sidecar::{self, BlobIdentifier, FixedBlobSidecarList};
use types::data_column_sidecar::{self, DataColumnSidecarList};
use types::{
BeaconBlockRef, BeaconState, BlindedPayload, BlobSidecarList, Epoch, EthSpec, Hash256,
SignedBeaconBlock, SignedBeaconBlockHeader, Slot,
Expand Down Expand Up @@ -320,14 +321,19 @@ pub struct BlockImportData<E: EthSpec> {
pub consensus_context: ConsensusContext<E>,
}

pub type GossipVerifiedBlockContents<T> =
(GossipVerifiedBlock<T>, Option<GossipVerifiedBlobList<T>>);
/// The fully gossip-verified contents of a published block: the block itself,
/// plus the gossip-verified blob sidecars (present post-Deneb when the block
/// carries blobs) and the gossip-verified data column sidecars (present when
/// DAS columns were constructed from those blobs).
pub type GossipVerifiedBlockContents<T> = (
GossipVerifiedBlock<T>,
Option<GossipVerifiedBlobList<T>>,
Option<GossipVerifiedDataColumnList<T>>,
);

/// Errors that can occur while gossip-verifying the contents of a published
/// block (the block itself, its blob sidecars, or its data column sidecars).
#[derive(Debug)]
pub enum BlockContentsError<T: EthSpec> {
/// Gossip verification of the beacon block failed.
BlockError(BlockError<T>),
/// Gossip verification of a blob sidecar failed.
BlobError(GossipBlobError<T>),
/// Constructing a `BlobSidecar` (e.g. its inclusion proof) failed.
BlobSidecarError(blob_sidecar::BlobSidecarError),
/// Gossip verification of a data column sidecar failed.
DataColumnError(GossipDataColumnError<T>),
/// Constructing a `DataColumnSidecar` from blobs failed.
DataColumnSidecarError(data_column_sidecar::DataColumnSidecarError),
}

impl<T: EthSpec> From<BlockError<T>> for BlockContentsError<T> {
Expand All @@ -342,6 +348,18 @@ impl<T: EthSpec> From<GossipBlobError<T>> for BlockContentsError<T> {
}
}

impl<T: EthSpec> From<GossipDataColumnError<T>> for BlockContentsError<T> {
fn from(value: GossipDataColumnError<T>) -> Self {
Self::DataColumnError(value)
}
}

impl<T: EthSpec> From<data_column_sidecar::DataColumnSidecarError> for BlockContentsError<T> {
fn from(value: data_column_sidecar::DataColumnSidecarError) -> Self {
Self::DataColumnSidecarError(value)
}
}

impl<T: EthSpec> std::fmt::Display for BlockContentsError<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Expand All @@ -351,8 +369,14 @@ impl<T: EthSpec> std::fmt::Display for BlockContentsError<T> {
BlockContentsError::BlobError(err) => {
write!(f, "BlobError({})", err)
}
BlockContentsError::SidecarError(err) => {
write!(f, "SidecarError({:?})", err)
BlockContentsError::BlobSidecarError(err) => {
write!(f, "BlobSidecarError({:?})", err)
}
BlockContentsError::DataColumnError(err) => {
write!(f, "DataColumnError({:?})", err)
}
BlockContentsError::DataColumnSidecarError(err) => {
write!(f, "DataColumnSidecarError({:?})", err)
}
}
}
Expand Down
15 changes: 14 additions & 1 deletion beacon_node/beacon_chain/src/data_column_verification.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,10 @@ use kzg::{Error as KzgError, Kzg};
use ssz_derive::{Decode, Encode};
use std::sync::Arc;
use types::data_column_sidecar::{ColumnIndex, DataColumnIdentifier};
use types::{BeaconStateError, DataColumnSidecar, EthSpec, Hash256, SignedBeaconBlockHeader, Slot};
use types::{
BeaconStateError, DataColumnSidecar, EthSpec, Hash256, SignedBeaconBlockHeader, Slot,
VariableList,
};

/// An error occurred while validating a gossip data column.
#[derive(Debug)]
Expand Down Expand Up @@ -77,6 +80,11 @@ impl<T: EthSpec> From<BeaconStateError> for GossipDataColumnError<T> {
}
}

/// A length-bounded list of gossip-verified data column sidecars, capped at
/// the spec's per-block column count (`EthSpec::DataColumnCount`).
pub type GossipVerifiedDataColumnList<T> = VariableList<
GossipVerifiedDataColumn<T>,
<<T as BeaconChainTypes>::EthSpec as EthSpec>::DataColumnCount,
>;

/// A wrapper around a `DataColumnSidecar` that indicates it has been approved for re-gossiping on
/// the p2p network.
#[derive(Debug)]
Expand Down Expand Up @@ -113,6 +121,11 @@ impl<T: BeaconChainTypes> GossipVerifiedDataColumn<T> {
self.data_column.as_data_column()
}

/// Returns an owned handle to the underlying `DataColumnSidecar`.
///
/// This is cheap as we're calling clone on an Arc (reference-count bump,
/// no deep copy of the column data).
pub fn clone_data_column(&self) -> Arc<DataColumnSidecar<T::EthSpec>> {
self.data_column.clone_data_column()
}

/// Returns the root of the beacon block this verified data column belongs to.
pub fn block_root(&self) -> Hash256 {
self.block_root
}
Expand Down
52 changes: 31 additions & 21 deletions beacon_node/http_api/src/publish_blocks.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,11 @@ use std::sync::Arc;
use std::time::Duration;
use tokio::sync::mpsc::UnboundedSender;
use tree_hash::TreeHash;
use types::data_column_sidecar::DataColumnSidecarList;
use types::{
AbstractExecPayload, BeaconBlockRef, BlobSidecarList, DataColumnSidecar, DataColumnSubnetId,
EthSpec, ExecPayload, ExecutionBlockHash, ForkName, FullPayload, FullPayloadMerge, Hash256,
SignedBeaconBlock, SignedBlindedBeaconBlock, VariableList,
AbstractExecPayload, BeaconBlockRef, BlobSidecarList, DataColumnSubnetId, EthSpec, ExecPayload,
ExecutionBlockHash, ForkName, FullPayload, FullPayloadMerge, Hash256, SignedBeaconBlock,
SignedBlindedBeaconBlock, VariableList,
};
use warp::http::StatusCode;
use warp::{reply::Response, Rejection, Reply};
Expand Down Expand Up @@ -67,6 +68,7 @@ pub async fn publish_block<T: BeaconChainTypes, B: IntoGossipVerifiedBlockConten
/* actually publish a block */
let publish_block = move |block: Arc<SignedBeaconBlock<T::EthSpec>>,
blobs_opt: Option<BlobSidecarList<T::EthSpec>>,
data_cols_opt: Option<DataColumnSidecarList<T::EthSpec>>,
sender,
log,
seen_timestamp| {
Expand All @@ -88,23 +90,6 @@ pub async fn publish_block<T: BeaconChainTypes, B: IntoGossipVerifiedBlockConten
SignedBeaconBlock::Deneb(_) => {
let mut pubsub_messages = vec![PubsubMessage::BeaconBlock(block.clone())];
if let Some(blob_sidecars) = blobs_opt {
// Build and publish column sidecars
let col_sidecars = DataColumnSidecar::random_from_blob_sidecars(&blob_sidecars)
.map_err(|e| {
BeaconChainError::UnableToBuildColumnSidecar(format!("{e:?}"))
})?;

for (col_index, col_sidecar) in col_sidecars.into_iter().enumerate() {
let subnet_id =
DataColumnSubnetId::try_from_column_index::<T::EthSpec>(col_index)
.map_err(|e| {
BeaconChainError::UnableToBuildColumnSidecar(format!("{e:?}"))
})?;
pubsub_messages.push(PubsubMessage::DataColumnSidecar(Box::new((
subnet_id,
Arc::new(col_sidecar),
))));
}
// Publish blob sidecars
for (blob_index, blob) in blob_sidecars.into_iter().enumerate() {
pubsub_messages.push(PubsubMessage::BlobSidecar(Box::new((
Expand All @@ -113,6 +98,19 @@ pub async fn publish_block<T: BeaconChainTypes, B: IntoGossipVerifiedBlockConten
))));
}
}
if let Some(data_col_sidecars) = data_cols_opt {
for data_col in data_col_sidecars {
let subnet = DataColumnSubnetId::try_from_column_index::<T::EthSpec>(
data_col.index as usize,
)
.map_err(|e| {
BeaconChainError::UnableToBuildColumnSidecar(format!("{e:?}"))
})?;
pubsub_messages.push(PubsubMessage::DataColumnSidecar(Box::new((
subnet, data_col,
))));
}
}
crate::publish_pubsub_messages(&sender, pubsub_messages)
.map_err(|_| BlockError::BeaconChainError(BeaconChainError::UnableToPublish))?;
}
Expand All @@ -128,7 +126,7 @@ pub async fn publish_block<T: BeaconChainTypes, B: IntoGossipVerifiedBlockConten
let log_clone = log.clone();

/* if we can form a `GossipVerifiedBlock`, we've passed our basic gossip checks */
let (gossip_verified_block, gossip_verified_blobs) =
let (gossip_verified_block, gossip_verified_blobs, gossip_verified_data_columns) =
match block_contents.into_gossip_verified_block(&chain) {
Ok(b) => b,
Err(BlockContentsError::BlockError(BlockError::BlockIsAlreadyKnown))
Expand Down Expand Up @@ -167,13 +165,23 @@ pub async fn publish_block<T: BeaconChainTypes, B: IntoGossipVerifiedBlockConten
.collect::<Vec<_>>();
VariableList::from(blobs)
});
let data_cols_opt = gossip_verified_data_columns
.as_ref()
.map(|gossip_verified_data_columns| {
let data_columns = gossip_verified_data_columns
.into_iter()
.map(|col| col.clone_data_column())
.collect::<Vec<_>>();
VariableList::from(data_columns)
});

let block_root = block_root.unwrap_or(gossip_verified_block.block_root);

if let BroadcastValidation::Gossip = validation_level {
publish_block(
block.clone(),
blobs_opt.clone(),
data_cols_opt.clone(),
sender_clone.clone(),
log.clone(),
seen_timestamp,
Expand All @@ -188,6 +196,7 @@ pub async fn publish_block<T: BeaconChainTypes, B: IntoGossipVerifiedBlockConten
BroadcastValidation::Consensus => publish_block(
block_clone,
blobs_opt,
data_cols_opt,
sender_clone,
log_clone,
seen_timestamp,
Expand All @@ -203,6 +212,7 @@ pub async fn publish_block<T: BeaconChainTypes, B: IntoGossipVerifiedBlockConten
publish_block(
block_clone,
blobs_opt,
data_cols_opt,
sender_clone,
log_clone,
seen_timestamp,
Expand Down
1 change: 1 addition & 0 deletions consensus/types/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ strum = { workspace = true }
[dev-dependencies]
criterion = { workspace = true }
beacon_chain = { workspace = true }
eth2_network_config = { workspace = true }
state_processing = { workspace = true }
tokio = { workspace = true }
paste = { workspace = true }
Expand Down
33 changes: 33 additions & 0 deletions consensus/types/src/beacon_block_body.rs
Original file line number Diff line number Diff line change
Expand Up @@ -177,6 +177,39 @@ impl<'a, T: EthSpec, Payload: AbstractExecPayload<T>> BeaconBlockBodyRef<'a, T,
}
}

/// Produces the proof of inclusion for `self.blob_kzg_commitments`.
///
/// Hashes every top-level field of the Deneb block body into a leaf list,
/// builds a Merkle tree over those leaves, and returns the branch that proves
/// the `blob_kzg_commitments` leaf's membership in the body root.
///
/// # Errors
///
/// Returns `Error::IncorrectStateVariant` for pre-Deneb bodies, which have no
/// `blob_kzg_commitments` field, and `Error::MerkleTreeError` if proof
/// generation fails.
pub fn kzg_commitments_merkle_proof(
&self,
) -> Result<FixedVector<Hash256, T::KzgCommitmentsInclusionProofDepth>, Error> {
match self {
Self::Base(_) | Self::Altair(_) | Self::Merge(_) | Self::Capella(_) => {
Err(Error::IncorrectStateVariant)
}
Self::Deneb(body) => {
// NOTE(review): the leaf order below must match the SSZ field order of
// the Deneb `BeaconBlockBody`, and `BLOB_KZG_COMMITMENTS_INDEX` must be
// the position of `blob_kzg_commitments` in that same order.
let leaves = [
body.randao_reveal.tree_hash_root(),
body.eth1_data.tree_hash_root(),
body.graffiti.tree_hash_root(),
body.proposer_slashings.tree_hash_root(),
body.attester_slashings.tree_hash_root(),
body.attestations.tree_hash_root(),
body.deposits.tree_hash_root(),
body.voluntary_exits.tree_hash_root(),
body.sync_aggregate.tree_hash_root(),
body.execution_payload.tree_hash_root(),
body.bls_to_execution_changes.tree_hash_root(),
body.blob_kzg_commitments.tree_hash_root(),
];
// Tree depth is log2 of the leaf count rounded up to a power of two
// (12 leaves -> 16 slots -> depth 4).
let beacon_block_body_depth = leaves.len().next_power_of_two().ilog2() as usize;
let tree = MerkleTree::create(&leaves, beacon_block_body_depth);
let (_, proof) = tree
.generate_proof(BLOB_KZG_COMMITMENTS_INDEX, beacon_block_body_depth)
.map_err(Error::MerkleTreeError)?;
Ok(proof.into())
}
}
}

/// Return `true` if this block body has a non-zero number of blobs.
pub fn has_blobs(self) -> bool {
self.blob_kzg_commitments()
Expand Down
Loading
Loading