Implements PoV export and local validation #4640

Merged (9 commits) on Jul 19, 2024
22 changes: 20 additions & 2 deletions Cargo.lock


1 change: 1 addition & 0 deletions Cargo.toml
@@ -61,6 +61,7 @@ members = [
"bridges/snowbridge/primitives/router",
"bridges/snowbridge/runtime/runtime-common",
"bridges/snowbridge/runtime/test-common",
"cumulus/bin/pov-validator",
"cumulus/client/cli",
"cumulus/client/collator",
"cumulus/client/consensus/aura",
26 changes: 26 additions & 0 deletions cumulus/bin/pov-validator/Cargo.toml
@@ -0,0 +1,26 @@
[package]
name = "cumulus-pov-validator"
version = "0.1.0"
authors.workspace = true
edition.workspace = true
repository.workspace = true
license.workspace = true
homepage.workspace = true
description = "A tool for validating PoVs locally"

[dependencies]
codec.workspace = true
clap = { workspace = true, features = ["derive"] }
sc-executor.workspace = true
sp-io.workspace = true
sp-core.workspace = true
sp-maybe-compressed-blob.workspace = true
polkadot-node-primitives.workspace = true
polkadot-parachain-primitives.workspace = true
polkadot-primitives.workspace = true
anyhow.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true

[lints]
workspace = true
154 changes: 154 additions & 0 deletions cumulus/bin/pov-validator/src/main.rs
@@ -0,0 +1,154 @@
// This file is part of Cumulus.

// Copyright (C) Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

use clap::Parser;
use codec::{Decode, Encode};
use polkadot_node_primitives::{BlockData, PoV, POV_BOMB_LIMIT, VALIDATION_CODE_BOMB_LIMIT};
use polkadot_parachain_primitives::primitives::ValidationParams;
use polkadot_primitives::{BlockNumber as RBlockNumber, Hash as RHash, HeadData};
use sc_executor::WasmExecutor;
use sp_core::traits::{CallContext, CodeExecutor, RuntimeCode, WrappedRuntimeCode};
use std::{fs, path::PathBuf, time::Instant};
use tracing::level_filters::LevelFilter;

/// Tool for validating a `PoV` locally.
#[derive(Parser)]
struct Cli {
/// The path to the validation code that should be used to validate the `PoV`.
///
/// The validation code can either be downloaded from the relay chain that the parachain is
/// connected to, or obtained by building the runtime manually to get the WASM binary.
#[arg(long)]
validation_code: PathBuf,

/// The path to the `PoV` to validate.
///
/// A `PoV` can be obtained by running `polkadot-parachains --collator --chain YOUR_CHAIN
/// --export-pov-to-path PATH_TO_EXPORT` and then choosing one of the exported `PoV`s.
#[arg(long)]
pov: PathBuf,
}
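
// Example invocation (a sketch; the file names are hypothetical, the binary name comes from
// the `cumulus-pov-validator` package above, and exported `PoV` files follow the
// `<block_hash>_<block_number>.pov` naming used by the collator's export):
//
//   cumulus-pov-validator \
//       --validation-code parachain_runtime.compact.compressed.wasm \
//       --pov <block_hash>_<block_number>.pov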

fn main() -> anyhow::Result<()> {
let _ = tracing_subscriber::fmt()
.with_env_filter(
tracing_subscriber::EnvFilter::from_default_env()
.add_directive(LevelFilter::INFO.into()),
)
.with_writer(std::io::stderr)
.try_init();

let cli = Cli::parse();

let validation_code = fs::read(&cli.validation_code).map_err(|error| {
tracing::error!(%error, path = %cli.validation_code.display(), "Failed to read validation code");
anyhow::anyhow!("Failed to read validation code")
})?;

let validation_code =
sp_maybe_compressed_blob::decompress(&validation_code, VALIDATION_CODE_BOMB_LIMIT)
.map_err(|error| {
tracing::error!(%error, "Failed to decompress validation code");
anyhow::anyhow!("Failed to decompress validation code")
})?;

let pov_file = fs::read(&cli.pov).map_err(|error| {
tracing::error!(%error, path = %cli.pov.display(), "Failed to read PoV");
anyhow::anyhow!("Failed to read PoV")
})?;

let executor = WasmExecutor::<sp_io::SubstrateHostFunctions>::builder()
.with_allow_missing_host_functions(true)
.build();

let runtime_code = RuntimeCode {
code_fetcher: &WrappedRuntimeCode(validation_code.into()),
heap_pages: None,
// The hash is used for caching the compiled wasm, which we want here. As we only ever use
// one wasm file, the actual hash value is not important.
hash: vec![1, 2, 3],
};

// We are calling `Core_version` to get the wasm file compiled. We don't care about the result.
let _ = executor
.call(
&mut sp_io::TestExternalities::default().ext(),
&runtime_code,
"Core_version",
&[],
CallContext::Offchain,
)
.0;

let pov_file_ptr = &mut &pov_file[..];
let pov = PoV::decode(pov_file_ptr).map_err(|error| {
tracing::error!(%error, "Failed to decode `PoV`");
anyhow::anyhow!("Failed to decode `PoV`")
})?;
let head_data = HeadData::decode(pov_file_ptr).map_err(|error| {
tracing::error!(%error, "Failed to decode `HeadData`");
anyhow::anyhow!("Failed to decode `HeadData`")
})?;
let relay_parent_storage_root = RHash::decode(pov_file_ptr).map_err(|error| {
tracing::error!(%error, "Failed to decode relay storage root");
anyhow::anyhow!("Failed to decode relay storage root")
})?;
let relay_parent_number = RBlockNumber::decode(pov_file_ptr).map_err(|error| {
tracing::error!(%error, "Failed to decode relay block number");
anyhow::anyhow!("Failed to decode relay block number")
})?;

let pov = sp_maybe_compressed_blob::decompress(&pov.block_data.0, POV_BOMB_LIMIT).map_err(
|error| {
tracing::error!(%error, "Failed to decompress `PoV`");
anyhow::anyhow!("Failed to decompress `PoV`")
},
)?;

let validation_params = ValidationParams {
relay_parent_number,
relay_parent_storage_root,
parent_head: head_data,
block_data: BlockData(pov.into()),
};

tracing::info!("Starting validation");

let start = Instant::now();

let res = executor
.call(
&mut sp_io::TestExternalities::default().ext(),
&runtime_code,
"validate_block",
&validation_params.encode(),
CallContext::Offchain,
)
.0;

let duration = start.elapsed();

match res {
Ok(_) => tracing::info!("Validation was successful"),
@sandreim (Contributor) commented on May 31, 2024:

I would also add an option to decode the commitments as ValidationResult to a file for debugging purposes.

Err(error) => tracing::error!(%error, "Validation failed"),
}

tracing::info!("Validation took {}ms", duration.as_millis());

Ok(())
}
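
Following up on the reviewer's suggestion above, a minimal sketch (not part of this PR) of how the success branch could decode the `validate_block` output as a `ValidationResult` instead of discarding it. The field names assume `polkadot_parachain_primitives::primitives::ValidationResult`; writing the decoded result to a file is left out.

use polkadot_parachain_primitives::primitives::ValidationResult;

// Replace the `match res { ... }` block above with something along these lines.
match res {
    Ok(output) => match ValidationResult::decode(&mut &output[..]) {
        // Log a few of the decoded commitments instead of only reporting success.
        Ok(result) => tracing::info!(
            head_data_len = result.head_data.0.len(),
            upward_messages = result.upward_messages.len(),
            horizontal_messages = result.horizontal_messages.len(),
            processed_downward_messages = result.processed_downward_messages,
            hrmp_watermark = result.hrmp_watermark,
            new_validation_code = result.new_validation_code.is_some(),
            "Validation was successful"
        ),
        Err(error) => tracing::error!(%error, "Failed to decode `ValidationResult`"),
    },
    Err(error) => tracing::error!(%error, "Validation failed"),
}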
117 changes: 111 additions & 6 deletions cumulus/client/consensus/aura/src/collators/lookahead.rs
@@ -39,10 +39,13 @@ use cumulus_primitives_aura::AuraUnincludedSegmentApi;
use cumulus_primitives_core::{CollectCollationInfo, PersistedValidationData};
use cumulus_relay_chain_interface::RelayChainInterface;

use polkadot_node_primitives::SubmitCollationParams;
use polkadot_node_primitives::{PoV, SubmitCollationParams};
use polkadot_node_subsystem::messages::CollationGenerationMessage;
use polkadot_overseer::Handle as OverseerHandle;
use polkadot_primitives::{CollatorPair, Id as ParaId, OccupiedCoreAssumption};
use polkadot_primitives::{
BlockNumber as RBlockNumber, CollatorPair, Hash as RHash, HeadData, Id as ParaId,
OccupiedCoreAssumption,
};

use futures::prelude::*;
use sc_client_api::{backend::AuxStore, BlockBackend, BlockOf};
@@ -54,10 +57,49 @@ use sp_consensus_aura::{AuraApi, Slot};
use sp_core::crypto::Pair;
use sp_inherents::CreateInherentDataProviders;
use sp_keystore::KeystorePtr;
use sp_runtime::traits::{Block as BlockT, Header as HeaderT, Member};
use std::{sync::Arc, time::Duration};
use sp_runtime::traits::{Block as BlockT, Header as HeaderT, Member, NumberFor};
use std::{
fs::{self, File},
path::PathBuf,
sync::Arc,
time::Duration,
};

use crate::collator::{self as collator_util};
use crate::{collator as collator_util, LOG_TARGET};

/// Export the given `pov` to the file system at `path`.
///
/// The file will be named `block_hash_block_number.pov`.
///
/// The `parent_header`, `relay_parent_storage_root` and `relay_parent_number` will also be
/// stored in the file alongside the `pov`. This enables stateless validation of the `pov`.
fn export_pov_to_path<Block: BlockT>(
path: PathBuf,
pov: PoV,
block_hash: Block::Hash,
block_number: NumberFor<Block>,
parent_header: Block::Header,
relay_parent_storage_root: RHash,
relay_parent_number: RBlockNumber,
) {
if let Err(error) = fs::create_dir_all(&path) {
tracing::error!(target: LOG_TARGET, %error, path = %path.display(), "Failed to create PoV export directory");
return
}

let mut file = match File::create(path.join(format!("{block_hash:?}_{block_number}.pov"))) {
Ok(f) => f,
Err(error) => {
tracing::error!(target: LOG_TARGET, %error, "Failed to export PoV.");
return
},
};

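// The encode order below must match the decode order in
// `cumulus/bin/pov-validator/src/main.rs`: `PoV`, parent `HeadData`,
// relay parent storage root, relay parent number.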
pov.encode_to(&mut file);
HeadData(parent_header.encode()).encode_to(&mut file);
relay_parent_storage_root.encode_to(&mut file);
relay_parent_number.encode_to(&mut file);
}

/// Parameters for [`run`].
pub struct Params<BI, CIDP, Client, Backend, RClient, CHP, Proposer, CS> {
@@ -97,7 +139,58 @@ pub struct Params<BI, CIDP, Client, Backend, RClient, CHP, Proposer, CS> {

/// Run async-backing-friendly Aura.
pub fn run<Block, P, BI, CIDP, Client, Backend, RClient, CHP, Proposer, CS>(
mut params: Params<BI, CIDP, Client, Backend, RClient, CHP, Proposer, CS>,
params: Params<BI, CIDP, Client, Backend, RClient, CHP, Proposer, CS>,
) -> impl Future<Output = ()> + Send + 'static
where
Block: BlockT,
Client: ProvideRuntimeApi<Block>
+ BlockOf
+ AuxStore
+ HeaderBackend<Block>
+ BlockBackend<Block>
+ Send
+ Sync
+ 'static,
Client::Api:
AuraApi<Block, P::Public> + CollectCollationInfo<Block> + AuraUnincludedSegmentApi<Block>,
Backend: sc_client_api::Backend<Block> + 'static,
RClient: RelayChainInterface + Clone + 'static,
CIDP: CreateInherentDataProviders<Block, ()> + 'static,
CIDP::InherentDataProviders: Send,
BI: BlockImport<Block> + ParachainBlockImportMarker + Send + Sync + 'static,
Proposer: ProposerInterface<Block> + Send + Sync + 'static,
CS: CollatorServiceInterface<Block> + Send + Sync + 'static,
CHP: consensus_common::ValidationCodeHashProvider<Block::Hash> + Send + 'static,
P: Pair,
P::Public: AppPublic + Member + Codec,
P::Signature: TryFrom<Vec<u8>> + Member + Codec,
{
run_with_export::<_, P, _, _, _, _, _, _, _, _>(ParamsWithExport { params, export_pov: None })
}

/// Parameters for [`run_with_export`].
pub struct ParamsWithExport<BI, CIDP, Client, Backend, RClient, CHP, Proposer, CS> {
/// The parameters.
pub params: Params<BI, CIDP, Client, Backend, RClient, CHP, Proposer, CS>,
/// When set, the collator will export every produced `PoV` to this folder.
pub export_pov: Option<PathBuf>,
}

/// Run async-backing-friendly Aura.
///
/// This is exactly the same as [`run`], but it supports the optional export of each produced `PoV`
/// to the file system.
pub fn run_with_export<Block, P, BI, CIDP, Client, Backend, RClient, CHP, Proposer, CS>(
ParamsWithExport { mut params, export_pov }: ParamsWithExport<
BI,
CIDP,
Client,
Backend,
RClient,
CHP,
Proposer,
CS,
>,
) -> impl Future<Output = ()> + Send + 'static
where
Block: BlockT,
@@ -339,6 +432,18 @@ where
// and provides sybil-resistance, as it should.
collator.collator_service().announce_block(new_block_hash, None);

if let Some(ref export_pov) = export_pov {
export_pov_to_path::<Block>(
export_pov.clone(),
collation.proof_of_validity.clone().into_compressed(),
new_block_hash,
*block_data.header().number(),
parent_header.clone(),
*relay_parent_header.state_root(),
*relay_parent_header.number(),
);
}

// Send a submit-collation message to the collation generation subsystem,
// which then distributes this to validators.
//