Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[WIP] v3 blocks and transactions (no redundant input features) #3385

Closed
wants to merge 19 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 2 additions & 5 deletions api/src/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -629,11 +629,8 @@ impl BlockPrintable {
include_proof: bool,
include_merkle_proof: bool,
) -> Result<BlockPrintable, chain::Error> {
let inputs = block
.inputs()
.iter()
.map(|x| x.commitment().to_hex())
.collect();
let inputs: Vec<_> = block.inputs().into();
let inputs = inputs.iter().map(|x| x.to_hex()).collect();
let outputs = block
.outputs()
.iter()
Expand Down
14 changes: 11 additions & 3 deletions chain/src/chain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -500,8 +500,15 @@ impl Chain {
/// Returns Ok(Some(pos)) if output is unspent.
/// Returns Ok(None) if output is spent.
/// Returns Err if something went wrong beyond not finding the output.
pub fn get_unspent(&self, output_ref: &OutputIdentifier) -> Result<Option<CommitPos>, Error> {
self.txhashset.read().get_unspent(output_ref)
/// Look up an output by its full identifier (features + commitment) in the
/// current utxo set. Some(pos) if unspent, None if spent, Err on failure.
pub fn get_unspent(&self, output_id: &OutputIdentifier) -> Result<Option<CommitPos>, Error> {
	let txhashset = self.txhashset.read();
	txhashset.get_unspent(output_id)
}

/// Look up an unspent output by commitment alone (no input features known).
/// Returns Ok(Some((output_id, pos))) if an unspent output exists for this
/// commitment — the returned OutputIdentifier carries the recovered features.
/// Returns Ok(None) if no unspent output matches the commitment.
/// Returns Err if something went wrong beyond not finding the output.
pub fn get_unspent_by_commitment(
	&self,
	commitment: Commitment,
) -> Result<Option<(OutputIdentifier, CommitPos)>, Error> {
	self.txhashset.read().get_unspent_by_commitment(commitment)
}

/// Retrieves an unspent output using its PMMR position
Expand Down Expand Up @@ -560,7 +567,8 @@ impl Chain {
let header_pmmr = self.header_pmmr.read();
let txhashset = self.txhashset.read();
txhashset::utxo_view(&header_pmmr, &txhashset, |utxo, batch| {
utxo.verify_coinbase_maturity(&tx.inputs(), height, batch)?;
let inputs: Vec<_> = tx.inputs().into();
utxo.verify_coinbase_maturity(&inputs, height, batch)?;
Ok(())
})
}
Expand Down
6 changes: 3 additions & 3 deletions chain/src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -77,9 +77,9 @@ pub enum ErrorKind {
/// Error from underlying secp lib
#[fail(display = "Secp Lib Error")]
Secp(secp::Error),
/// One of the inputs in the block has already been spent
#[fail(display = "Already Spent: {:?}", _0)]
AlreadySpent(Commitment),
/// One of the inputs in the block has already been spent or was never spendable to begin with.
#[fail(display = "Not Unspent: {:?}", _0)]
NotUnspent(Commitment),
/// An output with that commitment already exists (should be unique)
#[fail(display = "Duplicate Commitment: {:?}", _0)]
DuplicateCommitment(Commitment),
Expand Down
86 changes: 72 additions & 14 deletions chain/src/pipe.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,13 @@ use crate::core::consensus;
use crate::core::core::hash::Hashed;
use crate::core::core::verifier_cache::VerifierCache;
use crate::core::core::Committed;
use crate::core::core::{Block, BlockHeader, BlockSums};
use crate::core::core::{Block, BlockHeader, BlockSums, Inputs};
use crate::core::pow;
use crate::error::{Error, ErrorKind};
use crate::store;
use crate::txhashset;
use crate::types::{Options, Tip};
use crate::util::secp::pedersen::Commitment;
use crate::util::RwLock;
use grin_store;
use std::sync::Arc;
Expand Down Expand Up @@ -76,6 +77,40 @@ fn validate_pow_only(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result
Ok(())
}

/// Peers running protocol v3 may relay blocks containing CommitOnly inputs.
/// Convert these back to FeaturesAndCommit (v2) inputs before persisting to
/// the db, preserving backward compatibility with v2 peers and the API
/// (both expect blocks in v2 format). Storing v2 internally is cheaper than
/// re-converting during every block relay.
fn convert_block_v2(
	b: &Block,
	ext: &txhashset::ExtensionPair<'_>,
	batch: &store::Batch<'_>,
) -> Result<Block, Error> {
	debug!(
		"convert_block_v2: {} at {} ({})",
		b.header.hash(),
		b.header.height,
		b.inputs().version_str(),
	);
	// Already v2? Nothing to convert.
	let commit_only = match b.inputs() {
		Inputs::FeaturesAndCommit(_) => return Ok(b.clone()),
		Inputs::CommitOnly(inputs) => inputs,
	};
	let utxo_view = ext.extension.utxo_view(ext.header_extension);
	// Recover the full (features + commitment) identifier for each input by
	// looking its commitment up in the utxo view; fail on the first miss.
	let mut full_inputs = commit_only
		.into_iter()
		.map(|input| utxo_view.get_unspent(input.commitment(), batch))
		.collect::<Result<Vec<_>, _>>()?;
	// The converted inputs must be sorted correctly before replacing.
	full_inputs.sort_unstable();
	Ok(b.clone().replace_inputs(full_inputs.into()))
}

/// Runs the block processing pipeline, including validation and finding a
/// place for the new block in the chain.
/// Returns new head if chain head updated and the "fork point" rewound to when processing the new block.
Expand All @@ -84,12 +119,13 @@ pub fn process_block(
ctx: &mut BlockContext<'_>,
) -> Result<(Option<Tip>, BlockHeader), Error> {
debug!(
"pipe: process_block {} at {} [in/out/kern: {}/{}/{}]",
"pipe: process_block {} at {} [in/out/kern: {}/{}/{}] ({})",
b.hash(),
b.header.height,
b.inputs().len(),
b.outputs().len(),
b.kernels().len(),
b.inputs().version_str(),
);

// Read current chain head from db via the batch.
Expand Down Expand Up @@ -123,35 +159,42 @@ pub fn process_block(

// Validate the block itself, make sure it is internally consistent.
// Use the verifier_cache for verifying rangeproofs and kernel signatures.
validate_block(b, ctx)?;
validate_block(&b, ctx)?;

// Start a chain extension unit of work dependent on the success of the
// internal validation and saving operations
let header_pmmr = &mut ctx.header_pmmr;
let txhashset = &mut ctx.txhashset;
let batch = &mut ctx.batch;
let fork_point = txhashset::extending(header_pmmr, txhashset, batch, |ext, batch| {
let (fork_point, b) = txhashset::extending(header_pmmr, txhashset, batch, |ext, batch| {
let fork_point = rewind_and_apply_fork(&prev, ext, batch)?;

// Convert block to v2 for backward compatibility.
// Convert *before* we apply to avoid looking for outputs after we spend them...
let b = convert_block_v2(b, ext, batch)?;

// Verify all inputs and outputs are unique and that no input spends an output in this block.
verify_cut_through(&b)?;

// Check any coinbase being spent have matured sufficiently.
// This needs to be done within the context of a potentially
// rewound txhashset extension to reflect chain state prior
// to applying the new block.
verify_coinbase_maturity(b, ext, batch)?;
verify_coinbase_maturity(&b, ext, batch)?;

// Validate the block against the UTXO set.
validate_utxo(b, ext, batch)?;
validate_utxo(&b, ext, batch)?;

// Using block_sums (utxo_sum, kernel_sum) for the previous block from the db
// we can verify_kernel_sums across the full UTXO sum and full kernel sum
// accounting for inputs/outputs/kernels in this new block.
// We know there are no double-spends etc. if this verifies successfully.
verify_block_sums(b, batch)?;
verify_block_sums(&b, batch)?;

// Apply the block to the txhashset state.
// Validate the txhashset roots and sizes against the block header.
// Block is invalid if there are any discrepancies.
apply_block_to_txhashset(b, ext, batch)?;
apply_block_to_txhashset(&b, ext, batch)?;

// If applying this block does not increase the work on the chain then
// we know we have not yet updated the chain to produce a new chain head.
Expand All @@ -162,14 +205,16 @@ pub fn process_block(
ext.extension.force_rollback();
}

Ok(fork_point)
Ok((fork_point, b))
})?;

// Add the validated block to the db.
// Add the converted (and validated) block to the db.
// Note we do this in the outer batch, not the child batch from the extension
// as we only commit the child batch if the extension increases total work.
// We want to save the block to the db regardless.
add_block(b, &ctx.batch)?;
// Note: We want to save the "converted" block to the db (with input features) for reference later.
validate_block(&b, ctx)?;
add_block(&b, &ctx.batch)?;

// If we have no "tail" then set it now.
if ctx.batch.tail().is_err() {
Expand Down Expand Up @@ -398,13 +443,25 @@ fn validate_header(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(
}

// Validate the block's internal consistency (proofs, kernel signatures, sums)
// relative to the previous header's total kernel offset.
fn validate_block(block: &Block, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
	debug!(
		"validate_block: {} at {} ({})",
		block.header.hash(),
		block.header.height,
		block.inputs().version_str(),
	);

	let prev_header = ctx.batch.get_previous_header(&block.header)?;
	let cache = ctx.verifier_cache.clone();
	if let Err(e) = block.validate(&prev_header.total_kernel_offset, cache) {
		return Err(ErrorKind::InvalidBlockProof(e).into());
	}
	Ok(())
}

// Verify all inputs and outputs are unique and that no input spends an
// output created within this same block (delegates to the block itself).
fn verify_cut_through(block: &Block) -> Result<(), Error> {
	Ok(block.verify_cut_through()?)
}

/// Verify the block is not spending coinbase outputs before they have sufficiently matured.
fn verify_coinbase_maturity(
block: &Block,
Expand All @@ -413,9 +470,10 @@ fn verify_coinbase_maturity(
) -> Result<(), Error> {
let extension = &ext.extension;
let header_extension = &ext.header_extension;
let inputs: Vec<Commitment> = block.inputs().into();
extension
.utxo_view(header_extension)
.verify_coinbase_maturity(&block.inputs(), block.header.height, batch)
.verify_coinbase_maturity(&inputs, block.header.height, batch)
}

/// Verify kernel sums across the full utxo and kernel sets based on block_sums
Expand Down Expand Up @@ -462,8 +520,8 @@ fn apply_block_to_txhashset(

/// Officially adds the block to our chain (possibly on a losing fork).
/// Header must be added separately (assume this has been done previously).
fn add_block(b: &Block, batch: &store::Batch<'_>) -> Result<(), Error> {
batch.save_block(b)?;
fn add_block(block: &Block, batch: &store::Batch<'_>) -> Result<(), Error> {
	// Persist the full block body via this db batch; the header is expected
	// to have been saved separately (see the doc comment above).
	batch.save_block(block)?;
	Ok(())
}

Expand Down
Loading