diff --git a/l1-contracts/src/core/Leonidas.sol b/l1-contracts/src/core/Leonidas.sol index 75390445d0b..d243d6eed88 100644 --- a/l1-contracts/src/core/Leonidas.sol +++ b/l1-contracts/src/core/Leonidas.sol @@ -320,6 +320,17 @@ contract Leonidas is Ownable, ILeonidas { return _ts < GENESIS_TIME ? Slot.wrap(0) : SlotLib.fromTimestamp(_ts - GENESIS_TIME); } + /** + * @notice Computes the epoch at a specific slot + * + * @param _slotNumber - The slot number to compute the epoch for + * + * @return The computed epoch + */ + function getEpochAtSlot(Slot _slotNumber) public pure override(ILeonidas) returns (Epoch) { + return Epoch.wrap(_slotNumber.unwrap() / EPOCH_DURATION); + } + /** * @notice Adds a validator to the set WITHOUT setting up the epoch * @param _validator - The validator to add diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index 6a45af67aec..6b19b80e1d6 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -171,62 +171,6 @@ contract Rollup is Leonidas, IRollup, ITestRollup { vkTreeRoot = _vkTreeRoot; } - function claimEpochProofRight(DataStructures.SignedEpochProofQuote calldata _quote) - external - override(IRollup) - { - Slot currentSlot = getCurrentSlot(); - address currentProposer = getCurrentProposer(); - Epoch epochToProve = getEpochToProve(); - - if (currentProposer != address(0) && currentProposer != msg.sender) { - revert Errors.Leonidas__InvalidProposer(currentProposer, msg.sender); - } - - if (_quote.quote.epochToProve != epochToProve) { - revert Errors.Rollup__NotClaimingCorrectEpoch(epochToProve, _quote.quote.epochToProve); - } - - if (currentSlot.positionInEpoch() >= CLAIM_DURATION_IN_L2_SLOTS) { - revert Errors.Rollup__NotInClaimPhase( - currentSlot.positionInEpoch(), CLAIM_DURATION_IN_L2_SLOTS - ); - } - - // if the epoch to prove is not the one that has been claimed, - // then whatever is in the proofClaim is stale - if (proofClaim.epochToProve == epochToProve && proofClaim.proposerClaimant != address(0)) { - revert Errors.Rollup__ProofRightAlreadyClaimed(); - } - - if (_quote.quote.bondAmount < PROOF_COMMITMENT_MIN_BOND_AMOUNT_IN_TST) { - revert Errors.Rollup__InsufficientBondAmount( - PROOF_COMMITMENT_MIN_BOND_AMOUNT_IN_TST, _quote.quote.bondAmount - ); - } - - if (_quote.quote.validUntilSlot < currentSlot) { - revert Errors.Rollup__QuoteExpired(currentSlot, _quote.quote.validUntilSlot); - } - - // We don't currently unstake, - // but we will as part of https://github.com/AztecProtocol/aztec-packages/issues/8652. - // Blocked on submitting epoch proofs to this contract. 
- PROOF_COMMITMENT_ESCROW.stakeBond(_quote.quote.bondAmount, _quote.quote.prover); - - proofClaim = DataStructures.EpochProofClaim({ - epochToProve: epochToProve, - basisPointFee: _quote.quote.basisPointFee, - bondAmount: _quote.quote.bondAmount, - bondProvider: _quote.quote.prover, - proposerClaimant: msg.sender - }); - - emit ProofRightClaimed( - epochToProve, _quote.quote.prover, msg.sender, _quote.quote.bondAmount, currentSlot - ); - } - /** * @notice Publishes the body and propose the block * @dev `eth_log_handlers` rely on this function @@ -237,68 +181,17 @@ contract Rollup is Leonidas, IRollup, ITestRollup { * @param _signatures - Signatures from the validators * @param _body - The body of the L2 block */ - function propose( + function proposeAndClaim( bytes calldata _header, bytes32 _archive, bytes32 _blockHash, bytes32[] memory _txHashes, SignatureLib.Signature[] memory _signatures, - bytes calldata _body + bytes calldata _body, + DataStructures.SignedEpochProofQuote calldata _quote ) external override(IRollup) { - if (_canPrune()) { - _prune(); - } - bytes32 txsEffectsHash = TxsDecoder.decode(_body); - - // Decode and validate header - HeaderLib.Header memory header = HeaderLib.decode(_header); - - bytes32 digest = keccak256(abi.encode(_archive, _txHashes)); - setupEpoch(); - _validateHeader({ - _header: header, - _signatures: _signatures, - _digest: digest, - _currentTime: Timestamp.wrap(block.timestamp), - _txEffectsHash: txsEffectsHash, - _flags: DataStructures.ExecutionFlags({ignoreDA: false, ignoreSignatures: false}) - }); - - uint256 blockNumber = ++tips.pendingBlockNumber; - - blocks[blockNumber] = BlockLog({ - archive: _archive, - blockHash: _blockHash, - slotNumber: Slot.wrap(header.globalVariables.slotNumber) - }); - - // @note The block number here will always be >=1 as the genesis block is at 0 - bytes32 inHash = INBOX.consume(blockNumber); - if (header.contentCommitment.inHash != inHash) { - revert Errors.Rollup__InvalidInHash(inHash, header.contentCommitment.inHash); - } - - // TODO(#7218): Revert to fixed height tree for outbox, currently just providing min as interim - // Min size = smallest path of the rollup tree + 1 - (uint256 min,) = MerkleLib.computeMinMaxPathLength(header.contentCommitment.numTxs); - uint256 l2ToL1TreeMinHeight = min + 1; - OUTBOX.insert(blockNumber, header.contentCommitment.outHash, l2ToL1TreeMinHeight); - - emit L2BlockProposed(blockNumber, _archive); - - // Automatically flag the block as proven if we have cheated and set assumeProvenThroughBlockNumber. - if (blockNumber <= assumeProvenThroughBlockNumber) { - tips.provenBlockNumber = blockNumber; - - if (header.globalVariables.coinbase != address(0) && header.totalFees > 0) { - // @note This will currently fail if there are insufficient funds in the bridge - // which WILL happen for the old version after an upgrade where the bridge follow. - // Consider allowing a failure. See #7938. 
- FEE_JUICE_PORTAL.distributeFees(header.globalVariables.coinbase, header.totalFees); - } - - emit L2ProofVerified(blockNumber, "CHEAT"); - } + propose(_header, _archive, _blockHash, _txHashes, _signatures, _body); + claimEpochProofRight(_quote); } /** @@ -442,27 +335,6 @@ contract Rollup is Leonidas, IRollup, ITestRollup { emit L2ProofVerified(header.globalVariables.blockNumber, _proverId); } - function status(uint256 myHeaderBlockNumber) - external - view - override(IRollup) - returns ( - uint256 provenBlockNumber, - bytes32 provenArchive, - uint256 pendingBlockNumber, - bytes32 pendingArchive, - bytes32 archiveOfMyBlock - ) - { - return ( - tips.provenBlockNumber, - blocks[tips.provenBlockNumber].archive, - tips.pendingBlockNumber, - blocks[tips.pendingBlockNumber].archive, - archiveAt(myHeaderBlockNumber) - ); - } - /** * @notice Submit a proof for an epoch in the pending chain * @@ -517,6 +389,207 @@ contract Rollup is Leonidas, IRollup, ITestRollup { emit L2ProofVerified(endBlockNumber, _args[6]); } + function status(uint256 myHeaderBlockNumber) + external + view + override(IRollup) + returns ( + uint256 provenBlockNumber, + bytes32 provenArchive, + uint256 pendingBlockNumber, + bytes32 pendingArchive, + bytes32 archiveOfMyBlock, + Epoch provenEpochNumber + ) + { + return ( + tips.provenBlockNumber, + blocks[tips.provenBlockNumber].archive, + tips.pendingBlockNumber, + blocks[tips.pendingBlockNumber].archive, + archiveAt(myHeaderBlockNumber), + getEpochForBlock(tips.provenBlockNumber) + ); + } + + /** + * @notice Check if msg.sender can propose at a given time + * + * @param _ts - The timestamp to check + * @param _archive - The archive to check (should be the latest archive) + * + * @return uint256 - The slot at the given timestamp + * @return uint256 - The block number at the given timestamp + */ + function canProposeAtTime(Timestamp _ts, bytes32 _archive) + external + view + override(IRollup) + returns (Slot, uint256) + { + Slot slot = getSlotAt(_ts); + + Slot lastSlot = blocks[tips.pendingBlockNumber].slotNumber; + if (slot <= lastSlot) { + revert Errors.Rollup__SlotAlreadyInChain(lastSlot, slot); + } + + // Make sure that the proposer is up to date + bytes32 tipArchive = archive(); + if (tipArchive != _archive) { + revert Errors.Rollup__InvalidArchive(tipArchive, _archive); + } + + SignatureLib.Signature[] memory sigs = new SignatureLib.Signature[](0); + DataStructures.ExecutionFlags memory flags = + DataStructures.ExecutionFlags({ignoreDA: true, ignoreSignatures: true}); + _validateLeonidas(slot, sigs, _archive, flags); + + return (slot, tips.pendingBlockNumber + 1); + } + + /** + * @notice Validate a header for submission + * + * @dev This is a convenience function that can be used by the sequencer to validate a "partial" header + * without having to deal with viem or anvil for simulating timestamps in the future. 
+ * + * @param _header - The header to validate + * @param _signatures - The signatures to validate + * @param _digest - The digest to validate + * @param _currentTime - The current time + * @param _flags - The flags to validate + */ + function validateHeader( + bytes calldata _header, + SignatureLib.Signature[] memory _signatures, + bytes32 _digest, + Timestamp _currentTime, + bytes32 _txsEffectsHash, + DataStructures.ExecutionFlags memory _flags + ) external view override(IRollup) { + HeaderLib.Header memory header = HeaderLib.decode(_header); + _validateHeader(header, _signatures, _digest, _currentTime, _txsEffectsHash, _flags); + } + + function nextEpochToClaim() external view override(IRollup) returns (Epoch) { + Epoch epochClaimed = proofClaim.epochToProve; + if (proofClaim.proposerClaimant == address(0) && epochClaimed == Epoch.wrap(0)) { + return Epoch.wrap(0); + } + return Epoch.wrap(1) + epochClaimed; + } + + function computeTxsEffectsHash(bytes calldata _body) + external + pure + override(IRollup) + returns (bytes32) + { + return TxsDecoder.decode(_body); + } + + function claimEpochProofRight(DataStructures.SignedEpochProofQuote calldata _quote) + public + override(IRollup) + { + validateEpochProofRightClaim(_quote); + + Slot currentSlot = getCurrentSlot(); + Epoch epochToProve = getEpochToProve(); + + // We don't currently unstake, + // but we will as part of https://github.com/AztecProtocol/aztec-packages/issues/8652. + // Blocked on submitting epoch proofs to this contract. + PROOF_COMMITMENT_ESCROW.stakeBond(_quote.quote.bondAmount, _quote.quote.prover); + + proofClaim = DataStructures.EpochProofClaim({ + epochToProve: epochToProve, + basisPointFee: _quote.quote.basisPointFee, + bondAmount: _quote.quote.bondAmount, + bondProvider: _quote.quote.prover, + proposerClaimant: msg.sender + }); + + emit ProofRightClaimed( + epochToProve, _quote.quote.prover, msg.sender, _quote.quote.bondAmount, currentSlot + ); + } + + /** + * @notice Publishes the body and propose the block + * @dev `eth_log_handlers` rely on this function + * + * @param _header - The L2 block header + * @param _archive - A root of the archive tree after the L2 block is applied + * @param _blockHash - The poseidon2 hash of the header added to the archive tree in the rollup circuit + * @param _signatures - Signatures from the validators + * @param _body - The body of the L2 block + */ + function propose( + bytes calldata _header, + bytes32 _archive, + bytes32 _blockHash, + bytes32[] memory _txHashes, + SignatureLib.Signature[] memory _signatures, + bytes calldata _body + ) public override(IRollup) { + if (_canPrune()) { + _prune(); + } + bytes32 txsEffectsHash = TxsDecoder.decode(_body); + + // Decode and validate header + HeaderLib.Header memory header = HeaderLib.decode(_header); + + bytes32 digest = keccak256(abi.encode(_archive, _txHashes)); + setupEpoch(); + _validateHeader({ + _header: header, + _signatures: _signatures, + _digest: digest, + _currentTime: Timestamp.wrap(block.timestamp), + _txEffectsHash: txsEffectsHash, + _flags: DataStructures.ExecutionFlags({ignoreDA: false, ignoreSignatures: false}) + }); + + uint256 blockNumber = ++tips.pendingBlockNumber; + + blocks[blockNumber] = BlockLog({ + archive: _archive, + blockHash: _blockHash, + slotNumber: Slot.wrap(header.globalVariables.slotNumber) + }); + + // @note The block number here will always be >=1 as the genesis block is at 0 + bytes32 inHash = INBOX.consume(blockNumber); + if (header.contentCommitment.inHash != inHash) { + revert 
Errors.Rollup__InvalidInHash(inHash, header.contentCommitment.inHash); + } + + // TODO(#7218): Revert to fixed height tree for outbox, currently just providing min as interim + // Min size = smallest path of the rollup tree + 1 + (uint256 min,) = MerkleLib.computeMinMaxPathLength(header.contentCommitment.numTxs); + uint256 l2ToL1TreeMinHeight = min + 1; + OUTBOX.insert(blockNumber, header.contentCommitment.outHash, l2ToL1TreeMinHeight); + + emit L2BlockProposed(blockNumber, _archive); + + // Automatically flag the block as proven if we have cheated and set assumeProvenThroughBlockNumber. + if (blockNumber <= assumeProvenThroughBlockNumber) { + tips.provenBlockNumber = blockNumber; + + if (header.globalVariables.coinbase != address(0) && header.totalFees > 0) { + // @note This will currently fail if there are insufficient funds in the bridge + // which WILL happen for the old version after an upgrade where the bridge follow. + // Consider allowing a failure. See #7938. + FEE_JUICE_PORTAL.distributeFees(header.globalVariables.coinbase, header.totalFees); + } + + emit L2ProofVerified(blockNumber, "CHEAT"); + } + } + /** * @notice Returns the computed public inputs for the given epoch proof. * @@ -534,7 +607,7 @@ contract Rollup is Leonidas, IRollup, ITestRollup { bytes32[7] calldata _args, bytes32[64] calldata _fees, bytes calldata _aggregationObject - ) public view returns (bytes32[] memory) { + ) public view override(IRollup) returns (bytes32[] memory) { uint256 previousBlockNumber = tips.provenBlockNumber; uint256 endBlockNumber = previousBlockNumber + _epochSize; @@ -647,73 +720,44 @@ contract Rollup is Leonidas, IRollup, ITestRollup { return publicInputs; } - /** - * @notice Check if msg.sender can propose at a given time - * - * @param _ts - The timestamp to check - * @param _archive - The archive to check (should be the latest archive) - * - * @return uint256 - The slot at the given timestamp - * @return uint256 - The block number at the given timestamp - */ - function canProposeAtTime(Timestamp _ts, bytes32 _archive) - external + function validateEpochProofRightClaim(DataStructures.SignedEpochProofQuote calldata _quote) + public view override(IRollup) - returns (Slot, uint256) { - Slot slot = getSlotAt(_ts); + Slot currentSlot = getCurrentSlot(); + address currentProposer = getCurrentProposer(); + Epoch epochToProve = getEpochToProve(); - Slot lastSlot = blocks[tips.pendingBlockNumber].slotNumber; - if (slot <= lastSlot) { - revert Errors.Rollup__SlotAlreadyInChain(lastSlot, slot); + if (currentProposer != address(0) && currentProposer != msg.sender) { + revert Errors.Leonidas__InvalidProposer(currentProposer, msg.sender); } - // Make sure that the proposer is up to date - bytes32 tipArchive = archive(); - if (tipArchive != _archive) { - revert Errors.Rollup__InvalidArchive(tipArchive, _archive); + if (_quote.quote.epochToProve != epochToProve) { + revert Errors.Rollup__NotClaimingCorrectEpoch(epochToProve, _quote.quote.epochToProve); } - SignatureLib.Signature[] memory sigs = new SignatureLib.Signature[](0); - DataStructures.ExecutionFlags memory flags = - DataStructures.ExecutionFlags({ignoreDA: true, ignoreSignatures: true}); - _validateLeonidas(slot, sigs, _archive, flags); + if (currentSlot.positionInEpoch() >= CLAIM_DURATION_IN_L2_SLOTS) { + revert Errors.Rollup__NotInClaimPhase( + currentSlot.positionInEpoch(), CLAIM_DURATION_IN_L2_SLOTS + ); + } - return (slot, tips.pendingBlockNumber + 1); - } + // if the epoch to prove is not the one that has been claimed, + // then whatever 
is in the proofClaim is stale + if (proofClaim.epochToProve == epochToProve && proofClaim.proposerClaimant != address(0)) { + revert Errors.Rollup__ProofRightAlreadyClaimed(); + } - /** - * @notice Validate a header for submission - * - * @dev This is a convenience function that can be used by the sequencer to validate a "partial" header - * without having to deal with viem or anvil for simulating timestamps in the future. - * - * @param _header - The header to validate - * @param _signatures - The signatures to validate - * @param _digest - The digest to validate - * @param _currentTime - The current time - * @param _flags - The flags to validate - */ - function validateHeader( - bytes calldata _header, - SignatureLib.Signature[] memory _signatures, - bytes32 _digest, - Timestamp _currentTime, - bytes32 _txsEffectsHash, - DataStructures.ExecutionFlags memory _flags - ) external view override(IRollup) { - HeaderLib.Header memory header = HeaderLib.decode(_header); - _validateHeader(header, _signatures, _digest, _currentTime, _txsEffectsHash, _flags); - } + if (_quote.quote.bondAmount < PROOF_COMMITMENT_MIN_BOND_AMOUNT_IN_TST) { + revert Errors.Rollup__InsufficientBondAmount( + PROOF_COMMITMENT_MIN_BOND_AMOUNT_IN_TST, _quote.quote.bondAmount + ); + } - function computeTxsEffectsHash(bytes calldata _body) - external - pure - override(IRollup) - returns (bytes32) - { - return TxsDecoder.decode(_body); + if (_quote.quote.validUntilSlot < currentSlot) { + revert Errors.Rollup__QuoteExpired(currentSlot, _quote.quote.validUntilSlot); + } } /** @@ -733,6 +777,13 @@ contract Rollup is Leonidas, IRollup, ITestRollup { return tips.pendingBlockNumber; } + function getEpochForBlock(uint256 blockNumber) public view override(IRollup) returns (Epoch) { + if (blockNumber > tips.pendingBlockNumber) { + revert Errors.Rollup__InvalidBlockNumber(tips.pendingBlockNumber, blockNumber); + } + return getEpochAt(getTimestampForSlot(blocks[blockNumber].slotNumber)); + } + /** * @notice Get the epoch that should be proven * @@ -746,7 +797,7 @@ contract Rollup is Leonidas, IRollup, ITestRollup { if (tips.provenBlockNumber == tips.pendingBlockNumber) { revert Errors.Rollup__NoEpochToProve(); } else { - return getEpochAt(getTimestampForSlot(blocks[getProvenBlockNumber() + 1].slotNumber)); + return getEpochForBlock(getProvenBlockNumber() + 1); } } @@ -788,8 +839,7 @@ contract Rollup is Leonidas, IRollup, ITestRollup { } Slot currentSlot = getCurrentSlot(); - Epoch oldestPendingEpoch = - getEpochAt(getTimestampForSlot(blocks[tips.provenBlockNumber + 1].slotNumber)); + Epoch oldestPendingEpoch = getEpochForBlock(tips.provenBlockNumber + 1); Slot startSlotOfPendingEpoch = oldestPendingEpoch.toSlots(); // suppose epoch 1 is proven, epoch 2 is pending, epoch 3 is the current epoch. 
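For reference, a minimal client-side sketch (not part of this diff) of how the new Rollup read functions above might be consumed with viem, in the same style as the archiver change and the e2e coordination test further down. The RPC URL and rollup address are placeholders, and the sketch assumes the RollupAbi exported from @aztec/l1-artifacts reflects the updated interface:

import { createPublicClient, getAddress, getContract, http } from 'viem';
import { RollupAbi } from '@aztec/l1-artifacts';

const RPC_URL = 'http://localhost:8545'; // placeholder L1 RPC endpoint
const ROLLUP_ADDRESS = '0x0000000000000000000000000000000000000000'; // placeholder rollup address

export async function readRollupEpochState() {
  const client = createPublicClient({ transport: http(RPC_URL) });
  const rollup = getContract({ address: getAddress(ROLLUP_ADDRESS), abi: RollupAbi, client });

  // status(_myHeaderBlockNumber) now returns a sixth element: the proven epoch number.
  const [provenBlock, , pendingBlock, , , provenEpoch] = await rollup.read.status([0n]);

  // Current slot and its epoch, via getSlotAt and the new getEpochAtSlot helper.
  const ts = BigInt(Math.floor(Date.now() / 1000));
  const slot = await rollup.read.getSlotAt([ts]);
  const epoch = await rollup.read.getEpochAtSlot([slot]);

  // Epoch of the latest pending block, via the new getEpochForBlock view
  // (it reverts if the block number is ahead of the pending chain tip).
  const pendingEpoch = await rollup.read.getEpochForBlock([pendingBlock]);

  return { provenBlock, pendingBlock, provenEpoch, slot, epoch, pendingEpoch };
}

This mirrors how the archiver below destructures provenEpochNumber from status, and how the e2e coordination test derives the current epoch from getSlotAt followed by getEpochAtSlot.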
diff --git a/l1-contracts/src/core/interfaces/ILeonidas.sol b/l1-contracts/src/core/interfaces/ILeonidas.sol index 6a63f52a4da..ece101d7277 100644 --- a/l1-contracts/src/core/interfaces/ILeonidas.sol +++ b/l1-contracts/src/core/interfaces/ILeonidas.sol @@ -32,4 +32,5 @@ interface ILeonidas { function getEpochAt(Timestamp _ts) external view returns (Epoch); function getSlotAt(Timestamp _ts) external view returns (Slot); + function getEpochAtSlot(Slot _slotNumber) external view returns (Epoch); } diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index 62aad69681b..f9e34caf25c 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -42,6 +42,16 @@ interface IRollup { bytes calldata _body ) external; + function proposeAndClaim( + bytes calldata _header, + bytes32 _archive, + bytes32 _blockHash, + bytes32[] memory _txHashes, + SignatureLib.Signature[] memory _signatures, + bytes calldata _body, + DataStructures.SignedEpochProofQuote calldata _quote + ) external; + function submitBlockRootProof( bytes calldata _header, bytes32 _archive, @@ -86,7 +96,8 @@ interface IRollup { bytes32 provenArchive, uint256 pendingBlockNumber, bytes32 pendingArchive, - bytes32 archiveOfMyBlock + bytes32 archiveOfMyBlock, + Epoch provenEpochNumber ); // TODO(#7346): Integrate batch rollups @@ -106,5 +117,16 @@ interface IRollup { function getProvenBlockNumber() external view returns (uint256); function getPendingBlockNumber() external view returns (uint256); function getEpochToProve() external view returns (Epoch); + function nextEpochToClaim() external view returns (Epoch); + function getEpochForBlock(uint256 blockNumber) external view returns (Epoch); + function validateEpochProofRightClaim(DataStructures.SignedEpochProofQuote calldata _quote) + external + view; + function getEpochProofPublicInputs( + uint256 _epochSize, + bytes32[7] calldata _args, + bytes32[64] calldata _fees, + bytes calldata _aggregationObject + ) external view returns (bytes32[] memory); function computeTxsEffectsHash(bytes calldata _body) external pure returns (bytes32); } diff --git a/l1-contracts/src/core/libraries/SignatureLib.sol b/l1-contracts/src/core/libraries/SignatureLib.sol new file mode 100644 index 00000000000..af5cdf8eb30 --- /dev/null +++ b/l1-contracts/src/core/libraries/SignatureLib.sol @@ -0,0 +1,31 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. 
+pragma solidity ^0.8.13; + +import {Errors} from "@aztec/core/libraries/Errors.sol"; + +library SignatureLib { + struct Signature { + bool isEmpty; + uint8 v; + bytes32 r; + bytes32 s; + } + + /** + * @notice Verified a signature, throws if the signature is invalid or empty + * + * @param _signature - The signature to verify + * @param _signer - The expected signer of the signature + * @param _digest - The digest that was signed + */ + function verify(Signature memory _signature, address _signer, bytes32 _digest) internal pure { + if (_signature.isEmpty) { + revert Errors.SignatureLib__CannotVerifyEmpty(); + } + address recovered = ecrecover(_digest, _signature.v, _signature.r, _signature.s); + if (_signer != recovered) { + revert Errors.SignatureLib__InvalidSignature(_signer, recovered); + } + } +} diff --git a/l1-contracts/test/Rollup.t.sol b/l1-contracts/test/Rollup.t.sol index cc26edb8257..bd581a883b2 100644 --- a/l1-contracts/test/Rollup.t.sol +++ b/l1-contracts/test/Rollup.t.sol @@ -373,6 +373,7 @@ contract RollupTest is DecoderBase { // and timestamp as if it was created at a different point in time. This allow us to insert it // as if it was the first block, even after we had originally inserted the mixed block. // An example where this could happen would be if no-one could prove the mixed block. + // @note We prune the pending chain as part of the propose call. _testBlock("empty_block_1", false, prunableAt.unwrap()); assertEq(inbox.inProgress(), 3, "Invalid in progress"); diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index ce8994dff5b..23f5f668254 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -278,8 +278,14 @@ export class Archiver implements ArchiveSource { } const localPendingBlockNumber = BigInt(await this.getBlockNumber()); - const [provenBlockNumber, provenArchive, pendingBlockNumber, pendingArchive, archiveForLocalPendingBlockNumber] = - await this.rollup.read.status([localPendingBlockNumber]); + const [ + provenBlockNumber, + provenArchive, + pendingBlockNumber, + pendingArchive, + archiveForLocalPendingBlockNumber, + provenEpochNumber, + ] = await this.rollup.read.status([localPendingBlockNumber]); const updateProvenBlock = async () => { const localBlockForDestinationProvenBlockNumber = await this.getBlock(Number(provenBlockNumber)); @@ -287,8 +293,10 @@ export class Archiver implements ArchiveSource { localBlockForDestinationProvenBlockNumber && provenArchive === localBlockForDestinationProvenBlockNumber.archive.root.toString() ) { - this.log.info(`Updating the proven block number to ${provenBlockNumber}`); + this.log.info(`Updating the proven block number to ${provenBlockNumber} and epoch to ${provenEpochNumber}`); await this.store.setProvenL2BlockNumber(Number(provenBlockNumber)); + // if we are here then we must have a valid proven epoch number + await this.store.setProvenL2EpochNumber(Number(provenEpochNumber)); } }; @@ -509,6 +517,10 @@ export class Archiver implements ArchiveSource { return this.store.getProvenL2BlockNumber(); } + public getProvenL2EpochNumber(): Promise { + return this.store.getProvenL2EpochNumber(); + } + /** Forcefully updates the last proven block number. Use for testing. 
*/ public setProvenBlockNumber(blockNumber: number): Promise { return this.store.setProvenL2BlockNumber(blockNumber); @@ -756,9 +768,15 @@ class ArchiverStoreHelper getProvenL2BlockNumber(): Promise { return this.store.getProvenL2BlockNumber(); } + getProvenL2EpochNumber(): Promise { + return this.store.getProvenL2EpochNumber(); + } setProvenL2BlockNumber(l2BlockNumber: number): Promise { return this.store.setProvenL2BlockNumber(l2BlockNumber); } + setProvenL2EpochNumber(l2EpochNumber: number): Promise { + return this.store.setProvenL2EpochNumber(l2EpochNumber); + } setBlockSynchedL1BlockNumber(l1BlockNumber: bigint): Promise { return this.store.setBlockSynchedL1BlockNumber(l1BlockNumber); } diff --git a/yarn-project/archiver/src/archiver/archiver_store.ts b/yarn-project/archiver/src/archiver/archiver_store.ts index 2218be901d4..b181c44db39 100644 --- a/yarn-project/archiver/src/archiver/archiver_store.ts +++ b/yarn-project/archiver/src/archiver/archiver_store.ts @@ -146,12 +146,24 @@ export interface ArchiverDataStore { */ getProvenL2BlockNumber(): Promise; + /** + * Gets the number of the latest proven L2 epoch. + * @returns The number of the latest proven L2 epoch. + */ + getProvenL2EpochNumber(): Promise; + /** * Stores the number of the latest proven L2 block processed. * @param l2BlockNumber - The number of the latest proven L2 block processed. */ setProvenL2BlockNumber(l2BlockNumber: number): Promise; + /** + * Stores the number of the latest proven L2 epoch. + * @param l2EpochNumber - The number of the latest proven L2 epoch. + */ + setProvenL2EpochNumber(l2EpochNumber: number): Promise; + /** * Stores the l1 block number that blocks have been synched until * @param l1BlockNumber - The l1 block number diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts index b447162c576..6b3b5228220 100644 --- a/yarn-project/archiver/src/archiver/data_retrieval.ts +++ b/yarn-project/archiver/src/archiver/data_retrieval.ts @@ -134,7 +134,9 @@ async function getBlockFromRollupTx( data, }); - if (!(functionName === 'propose')) { + const allowedMethods = ['propose', 'proposeAndClaim']; + + if (!allowedMethods.includes(functionName)) { throw new Error(`Unexpected method called ${functionName}`); } const [headerHex, archiveRootHex, , , , bodyHex] = args! 
as readonly [Hex, Hex, Hex, Hex[], ViemSignature[], Hex]; diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts index a3d91989980..42701f53d14 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts @@ -29,6 +29,9 @@ export class BlockStore { /** Stores l2 block number of the last proven block */ #lastProvenL2Block: AztecSingleton; + /** Stores l2 epoch number of the last proven epoch */ + #lastProvenL2Epoch: AztecSingleton; + /** Index mapping transaction hash (as a string) to its location in a block */ #txIndex: AztecMap; @@ -44,6 +47,7 @@ export class BlockStore { this.#contractIndex = db.openMap('archiver_contract_index'); this.#lastSynchedL1Block = db.openSingleton('archiver_last_synched_l1_block'); this.#lastProvenL2Block = db.openSingleton('archiver_last_proven_l2_block'); + this.#lastProvenL2Epoch = db.openSingleton('archiver_last_proven_l2_epoch'); } /** @@ -235,6 +239,14 @@ export class BlockStore { void this.#lastProvenL2Block.set(blockNumber); } + getProvenL2EpochNumber(): number | undefined { + return this.#lastProvenL2Epoch.get(); + } + + setProvenL2EpochNumber(epochNumber: number) { + void this.#lastProvenL2Epoch.set(epochNumber); + } + #computeBlockRange(start: number, limit: number): Required, 'start' | 'end'>> { if (limit < 1) { throw new Error(`Invalid limit: ${limit}`); diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts index 7544fd0941a..baebf6efc64 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts @@ -247,11 +247,20 @@ export class KVArchiverDataStore implements ArchiverDataStore { return Promise.resolve(this.#blockStore.getProvenL2BlockNumber()); } + getProvenL2EpochNumber(): Promise { + return Promise.resolve(this.#blockStore.getProvenL2EpochNumber()); + } + setProvenL2BlockNumber(blockNumber: number) { this.#blockStore.setProvenL2BlockNumber(blockNumber); return Promise.resolve(); } + setProvenL2EpochNumber(epochNumber: number) { + this.#blockStore.setProvenL2EpochNumber(epochNumber); + return Promise.resolve(); + } + setBlockSynchedL1BlockNumber(l1BlockNumber: bigint) { this.#blockStore.setSynchedL1BlockNumber(l1BlockNumber); return Promise.resolve(); diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts index 9a1fe11ff58..df06ee022de 100644 --- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts @@ -70,6 +70,7 @@ export class MemoryArchiverStore implements ArchiverDataStore { private lastL1BlockNewMessages: bigint | undefined = undefined; private lastProvenL2BlockNumber: number = 0; + private lastProvenL2EpochNumber: number = 0; constructor( /** The max number of logs that can be obtained in 1 "getUnencryptedLogs" call. 
*/ @@ -471,11 +472,20 @@ export class MemoryArchiverStore implements ArchiverDataStore { return Promise.resolve(this.lastProvenL2BlockNumber); } + public getProvenL2EpochNumber(): Promise { + return Promise.resolve(this.lastProvenL2EpochNumber); + } + public setProvenL2BlockNumber(l2BlockNumber: number): Promise { this.lastProvenL2BlockNumber = l2BlockNumber; return Promise.resolve(); } + public setProvenL2EpochNumber(l2EpochNumber: number): Promise { + this.lastProvenL2EpochNumber = l2EpochNumber; + return Promise.resolve(); + } + setBlockSynchedL1BlockNumber(l1BlockNumber: bigint) { this.lastL1BlockNewBlocks = l1BlockNumber; return Promise.resolve(); diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index c8a34e7a1ab..83860d56b89 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -3,6 +3,7 @@ import { BBCircuitVerifier, TestCircuitVerifier } from '@aztec/bb-prover'; import { type AztecNode, type ClientProtocolCircuitVerifier, + type EpochProofQuote, type FromLogType, type GetUnencryptedLogsResponse, type L1ToL2MessageSource, @@ -58,7 +59,6 @@ import { AggregateTxValidator, DataTxValidator, DoubleSpendTxValidator, - InMemoryAttestationPool, MetadataTxValidator, type P2P, TxProofValidator, @@ -121,6 +121,14 @@ export class AztecNodeService implements AztecNode { this.log.info(message); } + addEpochProofQuote(quote: EpochProofQuote): Promise { + return Promise.resolve(this.p2pClient.broadcastEpochProofQuote(quote)); + } + + getEpochProofQuotes(epoch: bigint): Promise { + return this.p2pClient.getEpochProofQuotes(epoch); + } + /** * initializes the Aztec Node, wait for component to sync. * @param config - The configuration to be used by the aztec node. @@ -151,14 +159,7 @@ export class AztecNodeService implements AztecNode { const proofVerifier = config.realProofs ? await BBCircuitVerifier.new(config) : new TestCircuitVerifier(); // create the tx pool and the p2p client, which will need the l2 block source - const p2pClient = await createP2PClient( - config, - new InMemoryAttestationPool(), - archiver, - proofVerifier, - worldStateSynchronizer, - telemetry, - ); + const p2pClient = await createP2PClient(config, archiver, proofVerifier, worldStateSynchronizer, telemetry); // start both and wait for them to sync from the block source await Promise.all([p2pClient.start(), worldStateSynchronizer.start()]); @@ -229,8 +230,8 @@ export class AztecNodeService implements AztecNode { * Method to determine if the node is ready to accept transactions. * @returns - Flag indicating the readiness for tx submission. */ - public async isReady() { - return (await this.p2pClient.isReady()) ?? false; + public isReady() { + return Promise.resolve(this.p2pClient.isReady() ?? 
false); } /** diff --git a/yarn-project/aztec.js/src/index.ts b/yarn-project/aztec.js/src/index.ts index dba2233180c..08f9c3b460c 100644 --- a/yarn-project/aztec.js/src/index.ts +++ b/yarn-project/aztec.js/src/index.ts @@ -133,9 +133,11 @@ export { createAztecNodeClient, merkleTreeIds, mockTx, + mockEpochProofQuote, TaggedLog, L1NotePayload, L1EventPayload, + EpochProofQuote, } from '@aztec/circuit-types'; export { NodeInfo } from '@aztec/types/interfaces'; diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_node.ts b/yarn-project/aztec/src/cli/cmds/start_prover_node.ts index e1e95a9a1f8..ca054a01ec4 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover_node.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_node.ts @@ -63,7 +63,7 @@ export const startProverNode = async ( // Load l1 contract addresses from aztec node if not set. const isRollupAddressSet = proverConfig.l1Contracts?.rollupAddress && !proverConfig.l1Contracts.rollupAddress.isZero(); - const nodeUrl = proverConfig.nodeUrl ?? proverConfig.txProviderNodeUrl; + const nodeUrl = proverConfig.nodeUrl ?? proverConfig.proverCoordinationNodeUrl; if (nodeUrl && !isRollupAddressSet) { userLog(`Loading L1 contract addresses from aztec node at ${nodeUrl}`); proverConfig.l1Contracts = await createAztecNodeClient(nodeUrl).getL1ContractAddresses(); diff --git a/yarn-project/circuit-types/src/aztec_node/rpc/aztec_node_client.ts b/yarn-project/circuit-types/src/aztec_node/rpc/aztec_node_client.ts index 73c2da7cab0..e38f04f9f9e 100644 --- a/yarn-project/circuit-types/src/aztec_node/rpc/aztec_node_client.ts +++ b/yarn-project/circuit-types/src/aztec_node/rpc/aztec_node_client.ts @@ -31,19 +31,19 @@ export function createAztecNodeClient(url: string, fetch = defaultFetch): AztecN url, { AztecAddress, + Buffer32, EthAddress, + EventSelector, ExtendedUnencryptedL2Log, Fr, - EventSelector, FunctionSelector, Header, L2Block, - TxEffect, LogId, - TxHash, - Buffer32, PublicDataWitness, SiblingPath, + TxEffect, + TxHash, }, { EncryptedNoteL2BlockL2Logs, diff --git a/yarn-project/circuit-types/src/index.ts b/yarn-project/circuit-types/src/index.ts index 187539e9736..12c74f0c8f8 100644 --- a/yarn-project/circuit-types/src/index.ts +++ b/yarn-project/circuit-types/src/index.ts @@ -3,6 +3,7 @@ export * from './auth_witness.js'; export * from './aztec_node/rpc/index.js'; export * from './body.js'; export * from './function_call.js'; +export * from './global_variable_builder.js'; export * from './interfaces/index.js'; export * from './l2_block.js'; export * from './l2_block_downloader/index.js'; @@ -12,7 +13,9 @@ export * from './merkle_tree_id.js'; export * from './messaging/index.js'; export * from './mocks.js'; export * from './notes/index.js'; +export * from './p2p/index.js'; export * from './packed_values.js'; +export * from './prover_coordination/index.js'; export * from './public_data_witness.js'; export * from './public_data_write.js'; export * from './public_execution_request.js'; @@ -21,5 +24,3 @@ export * from './simulation_error.js'; export * from './tx/index.js'; export * from './tx_effect.js'; export * from './tx_execution_request.js'; -export * from './p2p/index.js'; -export * from './global_variable_builder.js'; diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.ts index 16c64a1ac42..733982bb432 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.ts @@ -1,38 +1,33 @@ -import { - type 
ARCHIVE_HEIGHT, - type Header, - type L1_TO_L2_MSG_TREE_HEIGHT, - type NOTE_HASH_TREE_HEIGHT, - type NULLIFIER_TREE_HEIGHT, - type PUBLIC_DATA_TREE_HEIGHT, +import type { + ARCHIVE_HEIGHT, + Header, + L1_TO_L2_MSG_TREE_HEIGHT, + NOTE_HASH_TREE_HEIGHT, + NULLIFIER_TREE_HEIGHT, + PUBLIC_DATA_TREE_HEIGHT, } from '@aztec/circuits.js'; -import { type L1ContractAddresses } from '@aztec/ethereum'; -import { type ContractArtifact } from '@aztec/foundation/abi'; -import { type AztecAddress } from '@aztec/foundation/aztec-address'; -import { type Fr } from '@aztec/foundation/fields'; -import { - type ContractClassPublic, - type ContractInstanceWithAddress, - type ProtocolContractAddresses, +import type { L1ContractAddresses } from '@aztec/ethereum'; +import type { ContractArtifact } from '@aztec/foundation/abi'; +import type { AztecAddress } from '@aztec/foundation/aztec-address'; +import type { Fr } from '@aztec/foundation/fields'; +import type { + ContractClassPublic, + ContractInstanceWithAddress, + ProtocolContractAddresses, } from '@aztec/types/contracts'; -import { type L2Block } from '../l2_block.js'; -import { - type FromLogType, - type GetUnencryptedLogsResponse, - type L2BlockL2Logs, - type LogFilter, - type LogType, -} from '../logs/index.js'; -import { type MerkleTreeId } from '../merkle_tree_id.js'; -import { type PublicDataWitness } from '../public_data_witness.js'; -import { type SiblingPath } from '../sibling_path/index.js'; -import { type PublicSimulationOutput, type Tx, type TxHash, type TxReceipt } from '../tx/index.js'; -import { type TxEffect } from '../tx_effect.js'; -import { type SequencerConfig } from './configs.js'; -import { type L2BlockNumber } from './l2_block_number.js'; -import { type NullifierMembershipWitness } from './nullifier_tree.js'; -import { type ProverConfig } from './prover-client.js'; +import type { L2Block } from '../l2_block.js'; +import type { FromLogType, GetUnencryptedLogsResponse, L2BlockL2Logs, LogFilter, LogType } from '../logs/index.js'; +import type { MerkleTreeId } from '../merkle_tree_id.js'; +import type { EpochProofQuote } from '../prover_coordination/epoch_proof_quote.js'; +import type { PublicDataWitness } from '../public_data_witness.js'; +import type { SiblingPath } from '../sibling_path/index.js'; +import type { PublicSimulationOutput, Tx, TxHash, TxReceipt } from '../tx/index.js'; +import type { TxEffect } from '../tx_effect.js'; +import type { SequencerConfig } from './configs.js'; +import type { L2BlockNumber } from './l2_block_number.js'; +import type { NullifierMembershipWitness } from './nullifier_tree.js'; +import type { ProverConfig } from './prover-client.js'; /** * The aztec node. @@ -356,4 +351,16 @@ export interface AztecNode { * Returns the ENR of this node for peer discovery, if available. 
*/ getEncodedEnr(): Promise; + + /** + * Receives a quote for an epoch proof and stores it in its EpochProofQuotePool + * @param quote - The quote to store + */ + addEpochProofQuote(quote: EpochProofQuote): Promise; + + /** + * Returns the received quotes for a given epoch + * @param epoch - The epoch for which to get the quotes + */ + getEpochProofQuotes(epoch: bigint): Promise; } diff --git a/yarn-project/circuit-types/src/interfaces/index.ts b/yarn-project/circuit-types/src/interfaces/index.ts index 1ef99ee52db..63a79b5a4be 100644 --- a/yarn-project/circuit-types/src/interfaces/index.ts +++ b/yarn-project/circuit-types/src/interfaces/index.ts @@ -1,14 +1,14 @@ export * from './aztec-node.js'; -export * from './l2_block_number.js'; -export * from './pxe.js'; -export * from './sync-status.js'; +export * from './block-prover.js'; export * from './configs.js'; +export * from './l2_block_number.js'; +export * from './merkle_tree_operations.js'; export * from './nullifier_tree.js'; +export * from './private_kernel_prover.js'; export * from './prover-client.js'; +export * from './prover-coordination.js'; export * from './proving-job.js'; -export * from './block-prover.js'; +export * from './pxe.js'; export * from './server_circuit_prover.js'; -export * from './private_kernel_prover.js'; -export * from './tx-provider.js'; -export * from './merkle_tree_operations.js'; +export * from './sync-status.js'; export * from './world_state.js'; diff --git a/yarn-project/circuit-types/src/interfaces/prover-coordination.ts b/yarn-project/circuit-types/src/interfaces/prover-coordination.ts new file mode 100644 index 00000000000..01918a34d39 --- /dev/null +++ b/yarn-project/circuit-types/src/interfaces/prover-coordination.ts @@ -0,0 +1,19 @@ +import { type EpochProofQuote } from '../prover_coordination/index.js'; +import { type Tx } from '../tx/tx.js'; +import { type TxHash } from '../tx/tx_hash.js'; + +/** Provides basic operations for ProverNodes to interact with other nodes in the network. */ +export interface ProverCoordination { + /** + * Returns a transaction given its hash if available. + * @param txHash - The hash of the transaction, used as an ID. + * @returns The transaction, if found, 'undefined' otherwise. + */ + getTxByHash(txHash: TxHash): Promise; + + /** + * Receives a quote for an epoch proof and stores it in its EpochProofQuotePool + * @param quote - The quote to store + */ + addEpochProofQuote(quote: EpochProofQuote): Promise; +} diff --git a/yarn-project/circuit-types/src/interfaces/tx-provider.ts b/yarn-project/circuit-types/src/interfaces/tx-provider.ts deleted file mode 100644 index 872f9ce0282..00000000000 --- a/yarn-project/circuit-types/src/interfaces/tx-provider.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { type Tx } from '../tx/tx.js'; -import { type TxHash } from '../tx/tx_hash.js'; - -/** Provider for transaction objects given their hash. */ -export interface TxProvider { - /** - * Returns a transaction given its hash if available. - * @param txHash - The hash of the transaction, used as an ID. - * @returns The transaction, if found, 'undefined' otherwise. 
- */ - getTxByHash(txHash: TxHash): Promise; -} diff --git a/yarn-project/circuit-types/src/l2_block_source.ts b/yarn-project/circuit-types/src/l2_block_source.ts index 45727f2156f..65bcf58d7de 100644 --- a/yarn-project/circuit-types/src/l2_block_source.ts +++ b/yarn-project/circuit-types/src/l2_block_source.ts @@ -33,6 +33,12 @@ export interface L2BlockSource { */ getProvenBlockNumber(): Promise; + /** + * Gets the number of the latest L2 proven epoch seen by the block source implementation. + * @returns The number of the latest L2 proven epoch seen by the block source implementation. + */ + getProvenL2EpochNumber(): Promise; + /** * Gets an l2 block. If a negative number is passed, the block returned is the most recent. * @param number - The block number to return (inclusive). diff --git a/yarn-project/circuit-types/src/mocks.ts b/yarn-project/circuit-types/src/mocks.ts index db9c3bc9f87..5c86312a170 100644 --- a/yarn-project/circuit-types/src/mocks.ts +++ b/yarn-project/circuit-types/src/mocks.ts @@ -2,6 +2,7 @@ import { AztecAddress, CallContext, ClientIvcProof, + EthAddress, GasSettings, LogHash, MAX_ENCRYPTED_LOGS_PER_TX, @@ -27,12 +28,15 @@ import { import { type ContractArtifact, NoteSelector } from '@aztec/foundation/abi'; import { makeTuple } from '@aztec/foundation/array'; import { padArrayEnd, times } from '@aztec/foundation/collection'; -import { randomBytes } from '@aztec/foundation/crypto'; +import { randomBigInt, randomBytes, randomInt } from '@aztec/foundation/crypto'; +import { Signature } from '@aztec/foundation/eth-signature'; import { Fr } from '@aztec/foundation/fields'; import { type ContractInstanceWithAddress, SerializableContractInstance } from '@aztec/types/contracts'; import { EncryptedNoteTxL2Logs, EncryptedTxL2Logs, Note, UnencryptedTxL2Logs } from './logs/index.js'; import { ExtendedNote, UniqueNote } from './notes/index.js'; +import { EpochProofQuote } from './prover_coordination/epoch_proof_quote.js'; +import { EpochProofQuotePayload } from './prover_coordination/epoch_proof_quote_payload.js'; import { PublicExecutionRequest } from './public_execution_request.js'; import { NestedProcessReturnValues, PublicSimulationOutput, SimulatedTx, Tx, TxHash } from './tx/index.js'; @@ -223,6 +227,24 @@ export const mockSimulatedTx = (seed = 1, hasLogs = true) => { return new SimulatedTx(tx, dec, output); }; +export const mockEpochProofQuote = ( + epochToProve: bigint, + validUntilSlot?: bigint, + bondAmount?: bigint, + proverAddress?: EthAddress, + basisPointFee?: number, +) => { + const quotePayload: EpochProofQuotePayload = new EpochProofQuotePayload( + epochToProve, + validUntilSlot ?? randomBigInt(10000n), + bondAmount ?? randomBigInt(10000n) + 1000n, + proverAddress ?? EthAddress.random(), + basisPointFee ?? 
randomInt(100), + ); + const sig: Signature = Signature.empty(); + return new EpochProofQuote(quotePayload, sig); +}; + export const randomContractArtifact = (): ContractArtifact => ({ name: randomBytes(4).toString('hex'), functions: [], diff --git a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote.test.ts b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote.test.ts index c62995559ef..049845921e1 100644 --- a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote.test.ts +++ b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote.test.ts @@ -11,7 +11,7 @@ describe('epoch proof quote', () => { basisPointFee: 5000, bondAmount: 1000000000000000000n, epochToProve: 42n, - rollupAddress: EthAddress.random(), + prover: EthAddress.random(), validUntilSlot: 100n, }); diff --git a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote.ts b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote.ts index d5c2f40e296..8839b257ff7 100644 --- a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote.ts +++ b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote.ts @@ -52,4 +52,17 @@ export class EpochProofQuote extends Gossipable { return this.sender; } + + toViemArgs() { + return { + quote: { + epochToProve: this.payload.epochToProve, + validUntilSlot: this.payload.validUntilSlot, + bondAmount: this.payload.bondAmount, + prover: this.payload.prover.toString(), + basisPointFee: this.payload.basisPointFee, + }, + signature: this.signature.toViemSignature(), + }; + } } diff --git a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote_payload.ts b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote_payload.ts index bd0c92fcdc7..be70356f7d8 100644 --- a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote_payload.ts +++ b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote_payload.ts @@ -11,7 +11,7 @@ export class EpochProofQuotePayload implements Signable { public readonly epochToProve: bigint, public readonly validUntilSlot: bigint, public readonly bondAmount: bigint, - public readonly rollupAddress: EthAddress, + public readonly prover: EthAddress, public readonly basisPointFee: number, ) {} @@ -20,7 +20,7 @@ export class EpochProofQuotePayload implements Signable { fields.epochToProve, fields.validUntilSlot, fields.bondAmount, - fields.rollupAddress, + fields.prover, fields.basisPointFee, ] as const; } @@ -45,7 +45,7 @@ export class EpochProofQuotePayload implements Signable { fields.epochToProve, fields.validUntilSlot, fields.bondAmount, - fields.rollupAddress, + fields.prover, fields.basisPointFee, ); } @@ -56,7 +56,7 @@ export class EpochProofQuotePayload implements Signable { this.epochToProve, this.validUntilSlot, this.bondAmount, - this.rollupAddress.toString(), + this.prover.toString(), this.basisPointFee, ] as const); diff --git a/yarn-project/circuit-types/src/prover_coordination/index.ts b/yarn-project/circuit-types/src/prover_coordination/index.ts new file mode 100644 index 00000000000..331978ec556 --- /dev/null +++ b/yarn-project/circuit-types/src/prover_coordination/index.ts @@ -0,0 +1,2 @@ +export * from './epoch_proof_quote.js'; +export * from './epoch_proof_quote_payload.js'; diff --git a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts index d9c9795adae..7475e1d707c 100644 --- 
a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts +++ b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts @@ -257,7 +257,7 @@ export class FullProverTest { this.logger.verbose('Starting fully proven prover node'); const proverConfig: ProverNodeConfig = { ...this.context.aztecNodeConfig, - txProviderNodeUrl: undefined, + proverCoordinationNodeUrl: undefined, dataDirectory: undefined, proverId: new Fr(81), realProofs: this.realProofs, diff --git a/yarn-project/end-to-end/src/e2e_prover_node.test.ts b/yarn-project/end-to-end/src/e2e_prover_node.test.ts index 4172ac90fb1..5a0a5e3f705 100644 --- a/yarn-project/end-to-end/src/e2e_prover_node.test.ts +++ b/yarn-project/end-to-end/src/e2e_prover_node.test.ts @@ -128,7 +128,7 @@ describe('e2e_prover_node', () => { // snapshot manager does not include events nor txs, so a new archiver would not "see" old blocks. const proverConfig: ProverNodeConfig = { ...ctx.aztecNodeConfig, - txProviderNodeUrl: undefined, + proverCoordinationNodeUrl: undefined, dataDirectory: undefined, proverId, proverNodeMaxPendingJobs: 100, diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index d426142de11..2155c660e7a 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -261,7 +261,7 @@ export async function createAndSyncProverNode( // Prover node config is for simulated proofs const proverConfig: ProverNodeConfig = { ...aztecNodeConfig, - txProviderNodeUrl: undefined, + proverCoordinationNodeUrl: undefined, dataDirectory: undefined, proverId: new Fr(42), realProofs: false, diff --git a/yarn-project/end-to-end/src/prover-coordination/e2e_json_coordination.test.ts b/yarn-project/end-to-end/src/prover-coordination/e2e_json_coordination.test.ts new file mode 100644 index 00000000000..8ebe827b0bc --- /dev/null +++ b/yarn-project/end-to-end/src/prover-coordination/e2e_json_coordination.test.ts @@ -0,0 +1,230 @@ +import { getSchnorrAccount } from '@aztec/accounts/schnorr'; +import { + type AccountWalletWithSecretKey, + type DebugLogger, + type EpochProofQuote, + EthCheatCodes, + createDebugLogger, + mockEpochProofQuote, +} from '@aztec/aztec.js'; +import { AZTEC_EPOCH_DURATION, AZTEC_SLOT_DURATION, type AztecAddress, EthAddress } from '@aztec/circuits.js'; +import { times } from '@aztec/foundation/collection'; +import { RollupAbi } from '@aztec/l1-artifacts'; +import { StatefulTestContract } from '@aztec/noir-contracts.js'; + +import { beforeAll } from '@jest/globals'; +import { type PublicClient, getAddress, getContract } from 'viem'; + +import { + type ISnapshotManager, + type SubsystemsContext, + addAccounts, + createSnapshotManager, +} from '../fixtures/snapshot_manager.js'; + +// Tests simple block building with a sequencer that does not upload proofs to L1, +// and then follows with a prover node run (with real proofs disabled, but +// still simulating all circuits via a prover-client), in order to test +// the coordination through L1 between the sequencer and the prover node. 
+describe('e2e_prover_node', () => { + let ctx: SubsystemsContext; + let wallet: AccountWalletWithSecretKey; + let recipient: AztecAddress; + let contract: StatefulTestContract; + let rollupContract: any; + let publicClient: PublicClient; + let cc: EthCheatCodes; + let publisherAddress: EthAddress; + + let logger: DebugLogger; + let snapshotManager: ISnapshotManager; + + beforeAll(async () => { + logger = createDebugLogger('aztec:prover_coordination:e2e_json_coordination'); + snapshotManager = createSnapshotManager(`prover_coordination/e2e_json_coordination`, process.env.E2E_DATA_PATH); + + await snapshotManager.snapshot('setup', addAccounts(2, logger), async ({ accountKeys }, ctx) => { + const accountManagers = accountKeys.map(ak => getSchnorrAccount(ctx.pxe, ak[0], ak[1], 1)); + await Promise.all(accountManagers.map(a => a.register())); + const wallets = await Promise.all(accountManagers.map(a => a.getWallet())); + wallets.forEach((w, i) => logger.verbose(`Wallet ${i} address: ${w.getAddress()}`)); + wallet = wallets[0]; + recipient = wallets[1].getAddress(); + }); + + await snapshotManager.snapshot( + 'deploy-test-contract', + async () => { + const owner = wallet.getAddress(); + const contract = await StatefulTestContract.deploy(wallet, owner, owner, 42).send().deployed(); + return { contractAddress: contract.address }; + }, + async ({ contractAddress }) => { + contract = await StatefulTestContract.at(contractAddress, wallet); + }, + ); + + ctx = await snapshotManager.setup(); + + await ctx.proverNode.stop(); + + cc = new EthCheatCodes(ctx.aztecNodeConfig.l1RpcUrl); + + publicClient = ctx.deployL1ContractsValues.publicClient; + publisherAddress = EthAddress.fromString(ctx.deployL1ContractsValues.walletClient.account.address); + rollupContract = getContract({ + address: getAddress(ctx.deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString()), + abi: RollupAbi, + client: ctx.deployL1ContractsValues.walletClient, + }); + }); + + const expectProofClaimOnL1 = async (quote: EpochProofQuote, proposerAddress: EthAddress) => { + const claimFromContract = await rollupContract.read.proofClaim(); + expect(claimFromContract[0]).toEqual(quote.payload.epochToProve); + expect(claimFromContract[1]).toEqual(BigInt(quote.payload.basisPointFee)); + expect(claimFromContract[2]).toEqual(quote.payload.bondAmount); + expect(claimFromContract[4]).toEqual(proposerAddress.toChecksumString()); + }; + + const getL1Timestamp = async () => { + return BigInt((await publicClient.getBlock()).timestamp); + }; + + const getSlot = async () => { + const ts = await getL1Timestamp(); + return await rollupContract.read.getSlotAt([ts]); + }; + + const getEpoch = async () => { + const slotNumber = await getSlot(); + return await rollupContract.read.getEpochAtSlot([slotNumber]); + }; + + const getPendingBlockNumber = async () => { + return await rollupContract.read.getPendingBlockNumber(); + }; + + const getProvenBlockNumber = async () => { + return await rollupContract.read.getProvenBlockNumber(); + }; + + const getEpochToProve = async () => { + return await rollupContract.read.getEpochToProve(); + }; + + const logState = async () => { + logger.info(`Pending block: ${await getPendingBlockNumber()}`); + logger.info(`Proven block: ${await getProvenBlockNumber()}`); + logger.info(`Slot number: ${await getSlot()}`); + logger.info(`Epoch number: ${await getEpoch()}`); + logger.info(`Epoch to prove ${await getEpochToProve()}`); + }; + + const advanceToNextEpoch = async () => { + const slot = await getSlot(); + const 
slotsUntilNextEpoch = BigInt(AZTEC_EPOCH_DURATION) - (slot % BigInt(AZTEC_EPOCH_DURATION)) + 1n; + const timeToNextEpoch = slotsUntilNextEpoch * BigInt(AZTEC_SLOT_DURATION); + const l1Timestamp = await getL1Timestamp(); + await cc.warp(Number(l1Timestamp + timeToNextEpoch)); + await logState(); + }; + + it('Sequencer selects best valid proving quote for each block', async () => { + // We want to create a set of proving quotes, some valid and some invalid + // The sequencer should select the cheapest valid quote when it proposes the block + + // Here we are creating a proof quote for epoch 0, this will NOT get used yet + const quoteForEpoch0 = mockEpochProofQuote( + 0n, // epoch 0 + BigInt(AZTEC_EPOCH_DURATION + 10), // valid until slot 10 into epoch 1 + 10000n, + EthAddress.random(), + 1, + ); + + // Send in the quote + await ctx.proverNode.sendEpochProofQuote(quoteForEpoch0); + + // Build a block, this should NOT use the above quote as it is for the current epoch (0) + await contract.methods.create_note(recipient, recipient, 10).send().wait(); + + await logState(); + + const epoch0BlockNumber = await getPendingBlockNumber(); + + // Verify that the claim state on L1 is unitialised + const uninitialisedProofClaim = mockEpochProofQuote( + 0n, // epoch 0 + BigInt(0), + 0n, + EthAddress.random(), + 0, + ); + + // The rollup contract should have an uninitialised proof claim struct + await expectProofClaimOnL1(uninitialisedProofClaim, EthAddress.ZERO); + + // Now go to epoch 1 + await advanceToNextEpoch(); + + await logState(); + + // Build a block in epoch 1, we should see the quote for epoch 0 submitted earlier published to L1 + await contract.methods.create_note(recipient, recipient, 10).send().wait(); + + const epoch1BlockNumber = await getPendingBlockNumber(); + + // Check it was published + await expectProofClaimOnL1(quoteForEpoch0, publisherAddress); + + // now 'prove' epoch 0 + await rollupContract.write.setAssumeProvenThroughBlockNumber([BigInt(epoch0BlockNumber)]); + + await logState(); + + // Now go to epoch 2 + await advanceToNextEpoch(); + + const currentSlot = await getSlot(); + + // Now create a number of quotes, some valid some invalid for epoch 1, the lowest priced valid quote should be chosen + const validQuotes = times(3, (i: number) => + mockEpochProofQuote(1n, currentSlot + 2n, 10000n, EthAddress.random(), 10 + i), + ); + + const proofQuoteInvalidSlot = mockEpochProofQuote(1n, 3n, 10000n, EthAddress.random(), 1); + + const proofQuoteInvalidEpoch = mockEpochProofQuote(2n, currentSlot + 4n, 10000n, EthAddress.random(), 2); + + const proofQuoteInsufficientBond = mockEpochProofQuote(1n, currentSlot + 4n, 0n, EthAddress.random(), 3); + + const allQuotes = [proofQuoteInvalidSlot, proofQuoteInvalidEpoch, ...validQuotes, proofQuoteInsufficientBond]; + + await Promise.all(allQuotes.map(x => ctx.proverNode.sendEpochProofQuote(x))); + + // now build another block and we should see the best valid quote being published + await contract.methods.create_note(recipient, recipient, 10).send().wait(); + + const expectedQuote = validQuotes[0]; + + await expectProofClaimOnL1(expectedQuote, publisherAddress); + + // building another block should succeed, we should not try and submit another quote + await contract.methods.create_note(recipient, recipient, 10).send().wait(); + + await expectProofClaimOnL1(expectedQuote, publisherAddress); + + // now 'prove' epoch 1 + await rollupContract.write.setAssumeProvenThroughBlockNumber([BigInt(epoch1BlockNumber)]); + + // Now go to epoch 3 + await 
advanceToNextEpoch(); + + // now build another block and we should see that no claim is published as nothing is valid + await contract.methods.create_note(recipient, recipient, 10).send().wait(); + + // The quote state on L1 is the same as before + await expectProofClaimOnL1(expectedQuote, publisherAddress); + }); +}); diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index a526c1b9eb4..7a0bc86efe2 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -1,133 +1,133 @@ export type EnvVar = - | 'AZTEC_PORT' + | 'ACVM_BINARY_PATH' + | 'ACVM_WORKING_DIRECTORY' + | 'API_KEY' + | 'API_PREFIX' + | 'ARCHIVER_MAX_LOGS' + | 'ARCHIVER_POLLING_INTERVAL_MS' + | 'ARCHIVER_URL' + | 'ARCHIVER_VIEM_POLLING_INTERVAL_MS' | 'ASSUME_PROVEN_THROUGH_BLOCK_NUMBER' - | 'TEST_ACCOUNTS' + | 'AZTEC_NODE_URL' + | 'AZTEC_PORT' + | 'BB_BINARY_PATH' + | 'BB_SKIP_CLEANUP' + | 'BB_WORKING_DIRECTORY' + | 'BOOTSTRAP_NODES' + | 'BOT_DA_GAS_LIMIT' + | 'BOT_FEE_PAYMENT_METHOD' + | 'BOT_FLUSH_SETUP_TRANSACTIONS' + | 'BOT_FOLLOW_CHAIN' + | 'BOT_L2_GAS_LIMIT' + | 'BOT_MAX_PENDING_TXS' + | 'BOT_NO_START' + | 'BOT_NO_WAIT_FOR_TRANSFERS' + | 'BOT_PRIVATE_KEY' + | 'BOT_PRIVATE_TRANSFERS_PER_TX' + | 'BOT_PUBLIC_TRANSFERS_PER_TX' + | 'BOT_PXE_URL' + | 'BOT_RECIPIENT_ENCRYPTION_SECRET' + | 'BOT_SKIP_PUBLIC_SIMULATION' + | 'BOT_TOKEN_CONTRACT' + | 'BOT_TOKEN_SALT' + | 'BOT_TX_INTERVAL_SECONDS' + | 'BOT_TX_MINED_WAIT_SECONDS' + | 'COINBASE' + | 'DATA_DIRECTORY' + | 'DEBUG' + | 'DEPLOY_AZTEC_CONTRACTS_SALT' + | 'DEPLOY_AZTEC_CONTRACTS' | 'ENABLE_GAS' - | 'API_PREFIX' + | 'ENFORCE_FEES' | 'ETHEREUM_HOST' - | 'L1_CHAIN_ID' - | 'MNEMONIC' - | 'ROLLUP_CONTRACT_ADDRESS' - | 'REGISTRY_CONTRACT_ADDRESS' - | 'INBOX_CONTRACT_ADDRESS' - | 'OUTBOX_CONTRACT_ADDRESS' | 'FEE_JUICE_CONTRACT_ADDRESS' | 'FEE_JUICE_PORTAL_CONTRACT_ADDRESS' - | 'ARCHIVER_URL' - | 'DEPLOY_AZTEC_CONTRACTS' - | 'DEPLOY_AZTEC_CONTRACTS_SALT' + | 'FEE_RECIPIENT' + | 'INBOX_CONTRACT_ADDRESS' + | 'L1_CHAIN_ID' | 'L1_PRIVATE_KEY' | 'L2_QUEUE_SIZE' - | 'WS_BLOCK_CHECK_INTERVAL_MS' - | 'P2P_ENABLED' + | 'LOG_JSON' + | 'LOG_LEVEL' + | 'MNEMONIC' + | 'NETWORK_NAME' + | 'NETWORK' + | 'OTEL_EXPORTER_OTLP_METRICS_ENDPOINT' + | 'OTEL_EXPORTER_OTLP_TRACES_ENDPOINT' + | 'OTEL_SERVICE_NAME' + | 'OUTBOX_CONTRACT_ADDRESS' | 'P2P_BLOCK_CHECK_INTERVAL_MS' - | 'P2P_PEER_CHECK_INTERVAL_MS' - | 'P2P_L2_QUEUE_SIZE' - | 'TCP_LISTEN_ADDR' - | 'UDP_LISTEN_ADDR' - | 'P2P_TCP_ANNOUNCE_ADDR' - | 'P2P_UDP_ANNOUNCE_ADDR' - | 'PEER_ID_PRIVATE_KEY' - | 'BOOTSTRAP_NODES' - | 'P2P_TX_PROTOCOL' - | 'P2P_MIN_PEERS' - | 'P2P_MAX_PEERS' - | 'DATA_DIRECTORY' - | 'TX_GOSSIP_VERSION' - | 'P2P_QUERY_FOR_IP' - | 'P2P_TX_POOL_KEEP_PROVEN_FOR' - | 'P2P_GOSSIPSUB_INTERVAL_MS' + | 'P2P_ENABLED' | 'P2P_GOSSIPSUB_D' - | 'P2P_GOSSIPSUB_DLO' | 'P2P_GOSSIPSUB_DHI' - | 'P2P_GOSSIPSUB_MCACHE_LENGTH' + | 'P2P_GOSSIPSUB_DLO' + | 'P2P_GOSSIPSUB_INTERVAL_MS' | 'P2P_GOSSIPSUB_MCACHE_GOSSIP' - | 'P2P_SEVERE_PEER_PENALTY_BLOCK_LENGTH' - | 'P2P_REQRESP_OVERALL_REQUEST_TIMEOUT_MS' - | 'P2P_REQRESP_INDIVIDUAL_REQUEST_TIMEOUT_MS' - | 'P2P_GOSSIPSUB_TX_TOPIC_WEIGHT' - | 'P2P_GOSSIPSUB_TX_INVALID_MESSAGE_DELIVERIES_WEIGHT' + | 'P2P_GOSSIPSUB_MCACHE_LENGTH' | 'P2P_GOSSIPSUB_TX_INVALID_MESSAGE_DELIVERIES_DECAY' + | 'P2P_GOSSIPSUB_TX_INVALID_MESSAGE_DELIVERIES_WEIGHT' + | 'P2P_GOSSIPSUB_TX_TOPIC_WEIGHT' + | 'P2P_L2_QUEUE_SIZE' + | 'P2P_MAX_PEERS' + | 'P2P_MIN_PEERS' + | 'P2P_PEER_CHECK_INTERVAL_MS' | 'P2P_PEER_PENALTY_VALUES' - | 
'TELEMETRY' - | 'OTEL_SERVICE_NAME' - | 'OTEL_EXPORTER_OTLP_METRICS_ENDPOINT' - | 'OTEL_EXPORTER_OTLP_TRACES_ENDPOINT' - | 'NETWORK_NAME' - | 'NETWORK' - | 'API_KEY' - | 'AZTEC_NODE_URL' - | 'ARCHIVER_POLLING_INTERVAL_MS' - | 'ARCHIVER_VIEM_POLLING_INTERVAL_MS' - | 'ARCHIVER_MAX_LOGS' - | 'SEQ_TX_POLLING_INTERVAL_MS' - | 'SEQ_MAX_TX_PER_BLOCK' - | 'SEQ_MIN_TX_PER_BLOCK' - | 'SEQ_MIN_SECONDS_BETWEEN_BLOCKS' - | 'SEQ_MAX_SECONDS_BETWEEN_BLOCKS' - | 'COINBASE' - | 'FEE_RECIPIENT' - | 'ACVM_WORKING_DIRECTORY' - | 'ACVM_BINARY_PATH' - | 'SEQ_ALLOWED_SETUP_FN' - | 'SEQ_ALLOWED_TEARDOWN_FN' - | 'SEQ_MAX_BLOCK_SIZE_IN_BYTES' - | 'ENFORCE_FEES' - | 'SEQ_PUBLISHER_PRIVATE_KEY' - | 'SEQ_REQUIRED_CONFIRMATIONS' - | 'SEQ_PUBLISH_RETRY_INTERVAL_MS' - | 'VERSION' - | 'SEQ_DISABLED' - | 'PROVER_DISABLED' - | 'PROVER_REAL_PROOFS' + | 'P2P_QUERY_FOR_IP' + | 'P2P_REQRESP_INDIVIDUAL_REQUEST_TIMEOUT_MS' + | 'P2P_REQRESP_OVERALL_REQUEST_TIMEOUT_MS' + | 'P2P_SEVERE_PEER_PENALTY_BLOCK_LENGTH' + | 'P2P_TCP_ANNOUNCE_ADDR' + | 'P2P_TX_POOL_KEEP_PROVEN_FOR' + | 'P2P_TX_PROTOCOL' + | 'P2P_UDP_ANNOUNCE_ADDR' + | 'PEER_ID_PRIVATE_KEY' + | 'PROOF_VERIFIER_L1_START_BLOCK' + | 'PROOF_VERIFIER_POLL_INTERVAL_MS' + | 'PROVER_AGENT_CONCURRENCY' | 'PROVER_AGENT_ENABLED' | 'PROVER_AGENT_POLL_INTERVAL_MS' - | 'PROVER_AGENT_CONCURRENCY' - | 'PROVER_JOB_TIMEOUT_MS' - | 'PROVER_JOB_POLL_INTERVAL_MS' + | 'PROVER_COORDINATION_NODE_URL' + | 'PROVER_DISABLED' | 'PROVER_ID' - | 'WS_L2_BLOCK_QUEUE_SIZE' - | 'WS_PROVEN_BLOCKS_ONLY' + | 'PROVER_JOB_POLL_INTERVAL_MS' + | 'PROVER_JOB_TIMEOUT_MS' + | 'PROVER_NODE_DISABLE_AUTOMATIC_PROVING' + | 'PROVER_NODE_EPOCH_SIZE' + | 'PROVER_NODE_MAX_PENDING_JOBS' | 'PROVER_PUBLISH_RETRY_INTERVAL_MS' | 'PROVER_PUBLISHER_PRIVATE_KEY' + | 'PROVER_REAL_PROOFS' | 'PROVER_REQUIRED_CONFIRMATIONS' | 'PROVER_TEST_DELAY_MS' - | 'TX_PROVIDER_NODE_URL' - | 'TXE_PORT' - | 'LOG_JSON' - | 'BOT_PXE_URL' - | 'BOT_PRIVATE_KEY' - | 'BOT_RECIPIENT_ENCRYPTION_SECRET' - | 'BOT_TOKEN_SALT' - | 'BOT_TX_INTERVAL_SECONDS' - | 'BOT_PRIVATE_TRANSFERS_PER_TX' - | 'BOT_PUBLIC_TRANSFERS_PER_TX' - | 'BOT_FEE_PAYMENT_METHOD' - | 'BOT_NO_START' - | 'BOT_TX_MINED_WAIT_SECONDS' - | 'BOT_NO_WAIT_FOR_TRANSFERS' - | 'BOT_MAX_PENDING_TXS' - | 'BOT_SKIP_PUBLIC_SIMULATION' - | 'BOT_L2_GAS_LIMIT' - | 'BOT_DA_GAS_LIMIT' | 'PXE_BLOCK_POLLING_INTERVAL_MS' - | 'PXE_L2_STARTING_BLOCK' | 'PXE_DATA_DIRECTORY' - | 'BB_BINARY_PATH' - | 'BB_WORKING_DIRECTORY' - | 'BB_SKIP_CLEANUP' + | 'PXE_L2_STARTING_BLOCK' | 'PXE_PROVER_ENABLED' - | 'BOT_FOLLOW_CHAIN' - | 'BOT_FLUSH_SETUP_TRANSACTIONS' - | 'BOT_TOKEN_CONTRACT' - | 'VALIDATOR_PRIVATE_KEY' - | 'VALIDATOR_DISABLED' - | 'VALIDATOR_ATTESTATIONS_WAIT_TIMEOUT_MS' + | 'REGISTRY_CONTRACT_ADDRESS' + | 'ROLLUP_CONTRACT_ADDRESS' + | 'SEQ_ALLOWED_SETUP_FN' + | 'SEQ_ALLOWED_TEARDOWN_FN' + | 'SEQ_DISABLED' + | 'SEQ_MAX_BLOCK_SIZE_IN_BYTES' + | 'SEQ_MAX_SECONDS_BETWEEN_BLOCKS' + | 'SEQ_MAX_TX_PER_BLOCK' + | 'SEQ_MIN_SECONDS_BETWEEN_BLOCKS' + | 'SEQ_MIN_TX_PER_BLOCK' + | 'SEQ_PUBLISH_RETRY_INTERVAL_MS' + | 'SEQ_PUBLISHER_PRIVATE_KEY' + | 'SEQ_REQUIRED_CONFIRMATIONS' + | 'SEQ_TX_POLLING_INTERVAL_MS' + | 'TCP_LISTEN_ADDR' + | 'TELEMETRY' + | 'TEST_ACCOUNTS' + | 'TX_GOSSIP_VERSION' + | 'TXE_PORT' + | 'UDP_LISTEN_ADDR' | 'VALIDATOR_ATTESTATIONS_POOLING_INTERVAL_MS' - | 'PROVER_NODE_DISABLE_AUTOMATIC_PROVING' - | 'PROVER_NODE_MAX_PENDING_JOBS' - | 'PROVER_NODE_EPOCH_SIZE' - | 'PROOF_VERIFIER_POLL_INTERVAL_MS' - | 'PROOF_VERIFIER_L1_START_BLOCK' - | 'LOG_LEVEL' - | 'DEBUG'; + | 'VALIDATOR_ATTESTATIONS_WAIT_TIMEOUT_MS' + | 
'VALIDATOR_DISABLED' + | 'VALIDATOR_PRIVATE_KEY' + | 'VERSION' + | 'WS_BLOCK_CHECK_INTERVAL_MS' + | 'WS_L2_BLOCK_QUEUE_SIZE' + | 'WS_PROVEN_BLOCKS_ONLY'; diff --git a/yarn-project/p2p/src/client/index.ts b/yarn-project/p2p/src/client/index.ts index e36054fd7fd..aaafccba9f1 100644 --- a/yarn-project/p2p/src/client/index.ts +++ b/yarn-project/p2p/src/client/index.ts @@ -6,8 +6,11 @@ import { type TelemetryClient } from '@aztec/telemetry-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type AttestationPool } from '../attestation_pool/attestation_pool.js'; +import { InMemoryAttestationPool } from '../attestation_pool/memory_attestation_pool.js'; import { P2PClient } from '../client/p2p_client.js'; import { type P2PConfig } from '../config.js'; +import { type EpochProofQuotePool } from '../epoch_proof_quote_pool/epoch_proof_quote_pool.js'; +import { MemoryEpochProofQuotePool } from '../epoch_proof_quote_pool/memory_epoch_proof_quote_pool.js'; import { DiscV5Service } from '../service/discV5_service.js'; import { DummyP2PService } from '../service/dummy_service.js'; import { LibP2PService, createLibP2PPeerId } from '../service/index.js'; @@ -18,16 +21,22 @@ export * from './p2p_client.js'; export const createP2PClient = async ( _config: P2PConfig & DataStoreConfig, - attestationsPool: AttestationPool, l2BlockSource: L2BlockSource, proofVerifier: ClientProtocolCircuitVerifier, worldStateSynchronizer: WorldStateSynchronizer, telemetry: TelemetryClient = new NoopTelemetryClient(), - deps: { txPool?: TxPool; store?: AztecKVStore } = {}, + deps: { + txPool?: TxPool; + store?: AztecKVStore; + attestationsPool?: AttestationPool; + epochProofQuotePool?: EpochProofQuotePool; + } = {}, ) => { let config = { ..._config }; const store = deps.store ?? (await createStore('p2p', config, createDebugLogger('aztec:p2p:lmdb'))); const txPool = deps.txPool ?? new AztecKVTxPool(store, telemetry); + const attestationsPool = deps.attestationsPool ?? new InMemoryAttestationPool(); + const epochProofQuotePool = deps.epochProofQuotePool ?? new MemoryEpochProofQuotePool(); let p2pService; @@ -52,7 +61,15 @@ export const createP2PClient = async ( } else { p2pService = new DummyP2PService(); } - return new P2PClient(store, l2BlockSource, txPool, attestationsPool, p2pService, config.keepProvenTxsInPoolFor); + return new P2PClient( + store, + l2BlockSource, + txPool, + attestationsPool, + epochProofQuotePool, + p2pService, + config.keepProvenTxsInPoolFor, + ); }; async function configureP2PClientAddresses(_config: P2PConfig & DataStoreConfig): Promise { diff --git a/yarn-project/p2p/src/client/mocks.ts b/yarn-project/p2p/src/client/mocks.ts index 599769a9d30..1783c83a8bd 100644 --- a/yarn-project/p2p/src/client/mocks.ts +++ b/yarn-project/p2p/src/client/mocks.ts @@ -7,6 +7,7 @@ import { EthAddress } from '@aztec/circuits.js'; export class MockBlockSource implements L2BlockSource { private l2Blocks: L2Block[] = []; private txEffects: TxEffect[] = []; + private provenEpochNumber: number = 0; constructor(numBlocks = 100, private provenBlockNumber?: number) { this.addBlocks(numBlocks); @@ -25,6 +26,10 @@ export class MockBlockSource implements L2BlockSource { this.provenBlockNumber = provenBlockNumber; } + public setProvenEpochNumber(provenEpochNumber: number) { + this.provenEpochNumber = provenEpochNumber; + } + /** * Method to fetch the rollup contract address at the base-layer. * @returns The rollup address. 
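/*
 * Editor's sketch (not part of the diff): how the proven-epoch plumbing added here is meant to be used.
 * MockBlockSource now tracks a proven epoch, and the P2P client prunes its EpochProofQuotePool whenever
 * the proven chain advances. The relative import paths are an assumption based on the p2p package layout
 * shown in this diff.
 */
import { mockEpochProofQuote } from '@aztec/circuit-types';

import { MemoryEpochProofQuotePool } from '../epoch_proof_quote_pool/memory_epoch_proof_quote_pool.js';
import { MockBlockSource } from './mocks.js';

export async function pruneQuotesOnProvenEpoch() {
  const pool = new MemoryEpochProofQuotePool();
  const blockSource = new MockBlockSource();

  pool.addQuote(mockEpochProofQuote(2n));
  pool.addQuote(mockEpochProofQuote(3n));

  // Pretend epoch 2 has been proven.
  blockSource.setProvenEpochNumber(2);

  // Mirrors what P2PClient does after syncing a new proven block.
  const provenEpoch = await blockSource.getProvenL2EpochNumber();
  if (provenEpoch !== undefined) {
    pool.deleteQuotesToEpoch(BigInt(provenEpoch));
  }

  // Quotes for epochs <= 2 are dropped; the epoch 3 quote survives.
  return [pool.getQuotes(2n).length, pool.getQuotes(3n).length]; // [0, 1]
}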
@@ -53,6 +58,10 @@ export class MockBlockSource implements L2BlockSource { return this.provenBlockNumber ?? (await this.getBlockNumber()); } + public getProvenL2EpochNumber(): Promise { + return Promise.resolve(this.provenEpochNumber); + } + /** * Gets an l2 block. * @param number - The block number to return (inclusive). diff --git a/yarn-project/p2p/src/client/p2p_client.test.ts b/yarn-project/p2p/src/client/p2p_client.test.ts index 196a0ee45b7..8e015b5b74c 100644 --- a/yarn-project/p2p/src/client/p2p_client.test.ts +++ b/yarn-project/p2p/src/client/p2p_client.test.ts @@ -1,4 +1,4 @@ -import { mockTx } from '@aztec/circuit-types'; +import { mockEpochProofQuote, mockTx } from '@aztec/circuit-types'; import { retryUntil } from '@aztec/foundation/retry'; import { type AztecKVStore } from '@aztec/kv-store'; import { openTmpStore } from '@aztec/kv-store/utils'; @@ -6,7 +6,7 @@ import { openTmpStore } from '@aztec/kv-store/utils'; import { expect, jest } from '@jest/globals'; import { type AttestationPool } from '../attestation_pool/attestation_pool.js'; -import { type P2PService } from '../index.js'; +import { type EpochProofQuotePool, type P2PService } from '../index.js'; import { type TxPool } from '../tx_pool/index.js'; import { MockBlockSource } from './mocks.js'; import { P2PClient } from './p2p_client.js'; @@ -21,6 +21,7 @@ type Mockify = { describe('In-Memory P2P Client', () => { let txPool: Mockify; let attestationPool: Mockify; + let epochProofQuotePool: Mockify; let blockSource: MockBlockSource; let p2pService: Mockify; let kvStore: AztecKVStore; @@ -55,14 +56,21 @@ describe('In-Memory P2P Client', () => { getAttestationsForSlot: jest.fn().mockReturnValue(undefined), }; + epochProofQuotePool = { + addQuote: jest.fn(), + getQuotes: jest.fn().mockReturnValue([]), + deleteQuotesToEpoch: jest.fn(), + }; + blockSource = new MockBlockSource(); kvStore = openTmpStore(); - client = new P2PClient(kvStore, blockSource, txPool, attestationPool, p2pService, 0); + client = new P2PClient(kvStore, blockSource, txPool, attestationPool, epochProofQuotePool, p2pService, 0); }); - const advanceToProvenBlock = async (getProvenBlockNumber: number) => { + const advanceToProvenBlock = async (getProvenBlockNumber: number, provenEpochNumber = getProvenBlockNumber) => { blockSource.setProvenBlockNumber(getProvenBlockNumber); + blockSource.setProvenEpochNumber(provenEpochNumber); await retryUntil( () => Promise.resolve(client.getSyncedProvenBlockNum() >= getProvenBlockNumber), 'synced', @@ -71,14 +79,20 @@ describe('In-Memory P2P Client', () => { ); }; + afterEach(async () => { + if (client.isReady()) { + await client.stop(); + } + }); + it('can start & stop', async () => { - expect(await client.isReady()).toEqual(false); + expect(client.isReady()).toEqual(false); await client.start(); - expect(await client.isReady()).toEqual(true); + expect(client.isReady()).toEqual(true); await client.stop(); - expect(await client.isReady()).toEqual(false); + expect(client.isReady()).toEqual(false); }); it('adds txs to pool', async () => { @@ -121,7 +135,7 @@ describe('In-Memory P2P Client', () => { await client.start(); await client.stop(); - const client2 = new P2PClient(kvStore, blockSource, txPool, attestationPool, p2pService, 0); + const client2 = new P2PClient(kvStore, blockSource, txPool, attestationPool, epochProofQuotePool, p2pService, 0); expect(client2.getSyncedLatestBlockNum()).toEqual(client.getSyncedLatestBlockNum()); }); @@ -136,7 +150,7 @@ describe('In-Memory P2P Client', () => { }); it('deletes txs after 
waiting the set number of blocks', async () => { - client = new P2PClient(kvStore, blockSource, txPool, attestationPool, p2pService, 10); + client = new P2PClient(kvStore, blockSource, txPool, attestationPool, epochProofQuotePool, p2pService, 10); blockSource.setProvenBlockNumber(0); await client.start(); expect(txPool.deleteTxs).not.toHaveBeenCalled(); @@ -152,5 +166,63 @@ describe('In-Memory P2P Client', () => { await client.stop(); }); + it('stores and returns epoch proof quotes', async () => { + client = new P2PClient(kvStore, blockSource, txPool, attestationPool, epochProofQuotePool, p2pService, 0); + + blockSource.setProvenEpochNumber(2); + await client.start(); + + const proofQuotes = [ + mockEpochProofQuote(3n), + mockEpochProofQuote(2n), + mockEpochProofQuote(3n), + mockEpochProofQuote(4n), + mockEpochProofQuote(2n), + mockEpochProofQuote(3n), + ]; + + for (const quote of proofQuotes) { + client.broadcastEpochProofQuote(quote); + } + expect(epochProofQuotePool.addQuote).toBeCalledTimes(proofQuotes.length); + + for (let i = 0; i < proofQuotes.length; i++) { + expect(epochProofQuotePool.addQuote).toHaveBeenNthCalledWith(i + 1, proofQuotes[i]); + } + expect(epochProofQuotePool.addQuote).toBeCalledTimes(proofQuotes.length); + + await client.getEpochProofQuotes(2n); + + expect(epochProofQuotePool.getQuotes).toBeCalledTimes(1); + expect(epochProofQuotePool.getQuotes).toBeCalledWith(2n); + }); + + it('deletes expired proof quotes', async () => { + client = new P2PClient(kvStore, blockSource, txPool, attestationPool, epochProofQuotePool, p2pService, 0); + + blockSource.setProvenEpochNumber(1); + blockSource.setProvenBlockNumber(1); + await client.start(); + + const proofQuotes = [ + mockEpochProofQuote(3n), + mockEpochProofQuote(2n), + mockEpochProofQuote(3n), + mockEpochProofQuote(4n), + mockEpochProofQuote(2n), + mockEpochProofQuote(3n), + ]; + + for (const quote of proofQuotes) { + client.broadcastEpochProofQuote(quote); + } + + epochProofQuotePool.deleteQuotesToEpoch.mockReset(); + + await advanceToProvenBlock(3, 3); + + expect(epochProofQuotePool.deleteQuotesToEpoch).toBeCalledWith(3n); + }); + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7971): tests for attestation pool pruning }); diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index 4738d153006..b22b42700be 100644 --- a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -1,6 +1,7 @@ import { type BlockAttestation, type BlockProposal, + type EpochProofQuote, type L2Block, L2BlockDownloader, type L2BlockSource, @@ -15,6 +16,7 @@ import { type ENR } from '@chainsafe/enr'; import { type AttestationPool } from '../attestation_pool/attestation_pool.js'; import { getP2PConfigEnvVars } from '../config.js'; +import { type EpochProofQuotePool } from '../epoch_proof_quote_pool/epoch_proof_quote_pool.js'; import { TX_REQ_PROTOCOL } from '../service/reqresp/interface.js'; import type { P2PService } from '../service/service.js'; import { type TxPool } from '../tx_pool/index.js'; @@ -63,6 +65,21 @@ export interface P2P { */ getAttestationsForSlot(slot: bigint, proposalId: string): Promise; + /** + * Queries the EpochProofQuote pool for quotes for the given epoch + * + * @param epoch - the epoch to query + * @returns EpochProofQuotes + */ + getEpochProofQuotes(epoch: bigint): Promise; + + /** + * Broadcasts an EpochProofQuote to other peers. 
+ * + * @param quote - the quote to broadcast + */ + broadcastEpochProofQuote(quote: EpochProofQuote): void; + /** * Registers a callback from the validator client that determines how to behave when * foreign block proposals are received @@ -135,7 +152,7 @@ export interface P2P { * Indicates if the p2p client is ready for transaction submission. * @returns A boolean flag indicating readiness. */ - isReady(): Promise; + isReady(): boolean; /** * Returns the current status of the p2p client. @@ -187,6 +204,7 @@ export class P2PClient implements P2P { private l2BlockSource: L2BlockSource, private txPool: TxPool, private attestationPool: AttestationPool, + private epochProofQuotePool: EpochProofQuotePool, private p2pService: P2PService, private keepProvenTxsFor: number, private log = createDebugLogger('aztec:p2p'), @@ -203,6 +221,22 @@ export class P2PClient implements P2P { this.synchedProvenBlockNumber = store.openSingleton('p2p_pool_last_proven_l2_block'); } + #assertIsReady() { + if (!this.isReady()) { + throw new Error('P2P client not ready'); + } + } + + getEpochProofQuotes(epoch: bigint): Promise { + return Promise.resolve(this.epochProofQuotePool.getQuotes(epoch)); + } + + broadcastEpochProofQuote(quote: EpochProofQuote): void { + this.#assertIsReady(); + this.epochProofQuotePool.addQuote(quote); + return this.p2pService.propagate(quote); + } + /** * Starts the P2P client. * @returns An empty promise signalling the synching process. @@ -364,10 +398,7 @@ export class P2PClient implements P2P { * @returns Empty promise. **/ public async sendTx(tx: Tx): Promise { - const ready = await this.isReady(); - if (!ready) { - throw new Error('P2P client not ready'); - } + this.#assertIsReady(); await this.txPool.addTxs([tx]); this.p2pService.propagate(tx); } @@ -392,10 +423,7 @@ export class P2PClient implements P2P { * @returns Empty promise. **/ public async deleteTxs(txHashes: TxHash[]): Promise { - const ready = await this.isReady(); - if (!ready) { - throw new Error('P2P client not ready'); - } + this.#assertIsReady(); await this.txPool.deleteTxs(txHashes); } @@ -404,7 +432,7 @@ export class P2PClient implements P2P { * @returns True if the P2P client is ready to receive txs. 
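/*
 * Editor's sketch (not part of the diff): intended use of the two quote methods added to the P2P
 * interface. isReady() is now synchronous, and broadcastEpochProofQuote() both stores the quote in the
 * local pool and propagates it to peers. The import path for the P2P type is an assumption based on
 * this file's location.
 */
import { type EpochProofQuote } from '@aztec/circuit-types';

import { type P2P } from './p2p_client.js';

export function shareQuote(p2p: P2P, quote: EpochProofQuote): void {
  // broadcastEpochProofQuote() itself throws if the client is not RUNNING; the guard just makes that explicit.
  if (!p2p.isReady()) {
    throw new Error('P2P client not ready');
  }
  p2p.broadcastEpochProofQuote(quote);
}

export async function quotesForEpoch(p2p: P2P, epoch: bigint): Promise<EpochProofQuote[]> {
  // Served from the EpochProofQuotePool held by the client.
  return p2p.getEpochProofQuotes(epoch);
}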
*/ public isReady() { - return Promise.resolve(this.currentState === P2PClientState.RUNNING); + return this.currentState === P2PClientState.RUNNING; } /** @@ -500,6 +528,10 @@ export class P2PClient implements P2P { await this.synchedProvenBlockNumber.set(lastBlockNum); this.log.debug(`Synched to proven block ${lastBlockNum}`); + const provenEpochNumber = await this.l2BlockSource.getProvenL2EpochNumber(); + if (provenEpochNumber !== undefined) { + this.epochProofQuotePool.deleteQuotesToEpoch(BigInt(provenEpochNumber)); + } await this.startServiceIfSynched(); } diff --git a/yarn-project/p2p/src/epoch_proof_quote_pool/epoch_proof_quote_pool.ts b/yarn-project/p2p/src/epoch_proof_quote_pool/epoch_proof_quote_pool.ts new file mode 100644 index 00000000000..94776d04ac3 --- /dev/null +++ b/yarn-project/p2p/src/epoch_proof_quote_pool/epoch_proof_quote_pool.ts @@ -0,0 +1,7 @@ +import { type EpochProofQuote } from '@aztec/circuit-types'; + +export interface EpochProofQuotePool { + addQuote(quote: EpochProofQuote): void; + getQuotes(epoch: bigint): EpochProofQuote[]; + deleteQuotesToEpoch(epoch: bigint): void; +} diff --git a/yarn-project/p2p/src/epoch_proof_quote_pool/index.ts b/yarn-project/p2p/src/epoch_proof_quote_pool/index.ts new file mode 100644 index 00000000000..8073ff1866f --- /dev/null +++ b/yarn-project/p2p/src/epoch_proof_quote_pool/index.ts @@ -0,0 +1,3 @@ +export * from './epoch_proof_quote_pool.js'; +export * from './memory_epoch_proof_quote_pool.js'; +export * from './test_utils.js'; diff --git a/yarn-project/p2p/src/epoch_proof_quote_pool/memory_epoch_proof_quote_pool.test.ts b/yarn-project/p2p/src/epoch_proof_quote_pool/memory_epoch_proof_quote_pool.test.ts new file mode 100644 index 00000000000..16ea4aeec52 --- /dev/null +++ b/yarn-project/p2p/src/epoch_proof_quote_pool/memory_epoch_proof_quote_pool.test.ts @@ -0,0 +1,55 @@ +import { mockEpochProofQuote } from '@aztec/circuit-types'; + +import { MemoryEpochProofQuotePool } from './memory_epoch_proof_quote_pool.js'; + +describe('MemoryEpochProofQuotePool', () => { + let pool: MemoryEpochProofQuotePool; + + beforeEach(() => { + pool = new MemoryEpochProofQuotePool(); + }); + + it('should add/get quotes to/from pool', () => { + const quote = mockEpochProofQuote(5n); + + pool.addQuote(quote); + + const quotes = pool.getQuotes(quote.payload.epochToProve); + + expect(quotes).toHaveLength(1); + expect(quotes[0]).toEqual(quote); + }); + + it('should delete quotes for expired epochs', () => { + const proofQuotes = [ + mockEpochProofQuote(3n), + mockEpochProofQuote(2n), + mockEpochProofQuote(3n), + mockEpochProofQuote(4n), + mockEpochProofQuote(2n), + mockEpochProofQuote(3n), + ]; + + for (const quote of proofQuotes) { + pool.addQuote(quote); + } + + const quotes3 = pool.getQuotes(3n); + const quotesForEpoch3 = proofQuotes.filter(x => x.payload.epochToProve === 3n); + + expect(quotes3).toHaveLength(quotesForEpoch3.length); + expect(quotes3).toEqual(quotesForEpoch3); + + // should delete all quotes for epochs 2 and 3 + pool.deleteQuotesToEpoch(3n); + + expect(pool.getQuotes(2n)).toHaveLength(0); + expect(pool.getQuotes(3n)).toHaveLength(0); + + const quotes4 = pool.getQuotes(4n); + const quotesForEpoch4 = proofQuotes.filter(x => x.payload.epochToProve === 4n); + + expect(quotes4).toHaveLength(quotesForEpoch4.length); + expect(quotes4).toEqual(quotesForEpoch4); + }); +}); diff --git a/yarn-project/p2p/src/epoch_proof_quote_pool/memory_epoch_proof_quote_pool.ts b/yarn-project/p2p/src/epoch_proof_quote_pool/memory_epoch_proof_quote_pool.ts new 
file mode 100644 index 00000000000..a9166838a1b --- /dev/null +++ b/yarn-project/p2p/src/epoch_proof_quote_pool/memory_epoch_proof_quote_pool.ts @@ -0,0 +1,26 @@ +import { type EpochProofQuote } from '@aztec/circuit-types'; + +import { type EpochProofQuotePool } from './epoch_proof_quote_pool.js'; + +export class MemoryEpochProofQuotePool implements EpochProofQuotePool { + private quotes: Map; + constructor() { + this.quotes = new Map(); + } + addQuote(quote: EpochProofQuote) { + const epoch = quote.payload.epochToProve; + if (!this.quotes.has(epoch)) { + this.quotes.set(epoch, []); + } + this.quotes.get(epoch)!.push(quote); + } + getQuotes(epoch: bigint): EpochProofQuote[] { + return this.quotes.get(epoch) || []; + } + deleteQuotesToEpoch(epoch: bigint): void { + const expiredEpochs = Array.from(this.quotes.keys()).filter(k => k <= epoch); + for (const expiredEpoch of expiredEpochs) { + this.quotes.delete(expiredEpoch); + } + } +} diff --git a/yarn-project/p2p/src/epoch_proof_quote_pool/test_utils.ts b/yarn-project/p2p/src/epoch_proof_quote_pool/test_utils.ts new file mode 100644 index 00000000000..0847e254014 --- /dev/null +++ b/yarn-project/p2p/src/epoch_proof_quote_pool/test_utils.ts @@ -0,0 +1,25 @@ +import { EpochProofQuote, EpochProofQuotePayload } from '@aztec/circuit-types'; +import { EthAddress } from '@aztec/circuits.js'; +import { Secp256k1Signer, randomBigInt, randomInt } from '@aztec/foundation/crypto'; + +export function makeRandomEpochProofQuotePayload(): EpochProofQuotePayload { + return EpochProofQuotePayload.fromFields({ + basisPointFee: randomInt(10000), + bondAmount: 1000000000000000000n, + epochToProve: randomBigInt(1000000n), + prover: EthAddress.random(), + validUntilSlot: randomBigInt(1000000n), + }); +} + +export function makeRandomEpochProofQuote(payload?: EpochProofQuotePayload): { + quote: EpochProofQuote; + signer: Secp256k1Signer; +} { + const signer = Secp256k1Signer.random(); + + return { + quote: EpochProofQuote.new(payload ?? 
makeRandomEpochProofQuotePayload(), signer), + signer, + }; +} diff --git a/yarn-project/p2p/src/index.ts b/yarn-project/p2p/src/index.ts index e69651a7904..4a5c64fda7e 100644 --- a/yarn-project/p2p/src/index.ts +++ b/yarn-project/p2p/src/index.ts @@ -1,7 +1,8 @@ +export * from './attestation_pool/index.js'; +export * from './bootstrap/bootstrap.js'; export * from './client/index.js'; export * from './config.js'; -export * from './tx_pool/index.js'; -export * from './attestation_pool/index.js'; +export * from './epoch_proof_quote_pool/index.js'; export * from './service/index.js'; -export * from './bootstrap/bootstrap.js'; +export * from './tx_pool/index.js'; export * from './tx_validator/index.js'; diff --git a/yarn-project/p2p/src/service/reqresp/p2p_client.integration.test.ts b/yarn-project/p2p/src/service/reqresp/p2p_client.integration.test.ts index b52433a9b7b..0612f7c81f1 100644 --- a/yarn-project/p2p/src/service/reqresp/p2p_client.integration.test.ts +++ b/yarn-project/p2p/src/service/reqresp/p2p_client.integration.test.ts @@ -17,6 +17,7 @@ import { createP2PClient } from '../../client/index.js'; import { MockBlockSource } from '../../client/mocks.js'; import { type P2PClient } from '../../client/p2p_client.js'; import { type P2PConfig, getP2PDefaultConfig } from '../../config.js'; +import { type EpochProofQuotePool } from '../../epoch_proof_quote_pool/epoch_proof_quote_pool.js'; import { AlwaysFalseCircuitVerifier, AlwaysTrueCircuitVerifier } from '../../mocks/index.js'; import { type TxPool } from '../../tx_pool/index.js'; import { convertToMultiaddr } from '../../util.js'; @@ -47,6 +48,7 @@ const NUMBER_OF_PEERS = 2; describe('Req Resp p2p client integration', () => { let txPool: Mockify; let attestationPool: Mockify; + let epochProofQuotePool: Mockify; let blockSource: MockBlockSource; let kvStore: AztecKVStore; let worldStateSynchronizer: WorldStateSynchronizer; @@ -134,22 +136,22 @@ describe('Req Resp p2p client integration', () => { getAttestationsForSlot: jest.fn().mockReturnValue(undefined), }; + epochProofQuotePool = { + addQuote: jest.fn(), + getQuotes: jest.fn().mockReturnValue([]), + deleteQuotesToEpoch: jest.fn(), + }; + blockSource = new MockBlockSource(); proofVerifier = alwaysTrueVerifier ? 
new AlwaysTrueCircuitVerifier() : new AlwaysFalseCircuitVerifier(); kvStore = openTmpStore(); const deps = { txPool: txPool as unknown as TxPool, + attestationPool: attestationPool as unknown as AttestationPool, + epochProofQuotePool: epochProofQuotePool as unknown as EpochProofQuotePool, store: kvStore, }; - const client = await createP2PClient( - config, - attestationPool as unknown as AttestationPool, - blockSource, - proofVerifier, - worldStateSynchronizer, - undefined, - deps, - ); + const client = await createP2PClient(config, blockSource, proofVerifier, worldStateSynchronizer, undefined, deps); await client.start(); clients.push(client); diff --git a/yarn-project/prover-node/src/config.ts b/yarn-project/prover-node/src/config.ts index 9ac8ecb7b0d..1758e391362 100644 --- a/yarn-project/prover-node/src/config.ts +++ b/yarn-project/prover-node/src/config.ts @@ -16,14 +16,18 @@ import { } from '@aztec/sequencer-client'; import { type WorldStateConfig, getWorldStateConfigFromEnv, worldStateConfigMappings } from '@aztec/world-state'; -import { type TxProviderConfig, getTxProviderConfigFromEnv, txProviderConfigMappings } from './tx-provider/config.js'; +import { + type ProverCoordinationConfig, + getTxProviderConfigFromEnv, + proverCoordinationConfigMappings, +} from './prover-coordination/config.js'; export type ProverNodeConfig = ArchiverConfig & ProverClientConfig & WorldStateConfig & PublisherConfig & TxSenderConfig & - TxProviderConfig & { + ProverCoordinationConfig & { proverNodeDisableAutomaticProving?: boolean; proverNodeMaxPendingJobs?: number; proverNodeEpochSize?: number; @@ -55,7 +59,7 @@ export const proverNodeConfigMappings: ConfigMappingsType = { ...worldStateConfigMappings, ...getPublisherConfigMappings('PROVER'), ...getTxSenderConfigMappings('PROVER'), - ...txProviderConfigMappings, + ...proverCoordinationConfigMappings, ...specificProverNodeConfigMappings, }; diff --git a/yarn-project/prover-node/src/factory.ts b/yarn-project/prover-node/src/factory.ts index e6dd1e3524b..a909184bd96 100644 --- a/yarn-project/prover-node/src/factory.ts +++ b/yarn-project/prover-node/src/factory.ts @@ -9,9 +9,9 @@ import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { createWorldStateSynchronizer } from '@aztec/world-state'; import { type ProverNodeConfig } from './config.js'; +import { AztecNodeProverCoordination } from './prover-coordination/aztec-node-prover-coordination.js'; +import { createProverCoordination } from './prover-coordination/factory.js'; import { ProverNode } from './prover-node.js'; -import { AztecNodeTxProvider } from './tx-provider/aztec-node-tx-provider.js'; -import { createTxProvider } from './tx-provider/factory.js'; /** Creates a new prover node given a config. */ export async function createProverNode( @@ -40,8 +40,8 @@ export async function createProverNode( const publisher = new L1Publisher(config, telemetry); const txProvider = deps.aztecNodeTxProvider - ? new AztecNodeTxProvider(deps.aztecNodeTxProvider) - : createTxProvider(config); + ? 
new AztecNodeProverCoordination(deps.aztecNodeTxProvider) + : createProverCoordination(config); return new ProverNode( prover!, diff --git a/yarn-project/prover-node/src/job/epoch-proving-job.ts b/yarn-project/prover-node/src/job/epoch-proving-job.ts index d104bfc52dc..495759ed122 100644 --- a/yarn-project/prover-node/src/job/epoch-proving-job.ts +++ b/yarn-project/prover-node/src/job/epoch-proving-job.ts @@ -6,9 +6,9 @@ import { type L2BlockSource, PROVING_STATUS, type ProcessedTx, + type ProverCoordination, type Tx, type TxHash, - type TxProvider, } from '@aztec/circuit-types'; import { createDebugLogger } from '@aztec/foundation/log'; import { Timer } from '@aztec/foundation/timer'; @@ -35,7 +35,7 @@ export class EpochProvingJob { private publisher: L1Publisher, private l2BlockSource: L2BlockSource, private l1ToL2MessageSource: L1ToL2MessageSource, - private txProvider: TxProvider, + private coordination: ProverCoordination, private metrics: ProverNodeMetrics, private cleanUp: (job: EpochProvingJob) => Promise = () => Promise.resolve(), ) { @@ -148,7 +148,7 @@ export class EpochProvingJob { private async getTxs(txHashes: TxHash[]): Promise { const txs = await Promise.all( - txHashes.map(txHash => this.txProvider.getTxByHash(txHash).then(tx => [txHash, tx] as const)), + txHashes.map(txHash => this.coordination.getTxByHash(txHash).then(tx => [txHash, tx] as const)), ); const notFound = txs.filter(([_, tx]) => !tx); if (notFound.length) { diff --git a/yarn-project/prover-node/src/prover-coordination/aztec-node-prover-coordination.ts b/yarn-project/prover-node/src/prover-coordination/aztec-node-prover-coordination.ts new file mode 100644 index 00000000000..3152cd2ebf8 --- /dev/null +++ b/yarn-project/prover-node/src/prover-coordination/aztec-node-prover-coordination.ts @@ -0,0 +1,14 @@ +import type { AztecNode, EpochProofQuote, ProverCoordination, Tx, TxHash } from '@aztec/circuit-types'; + +/** Implements ProverCoordinator by wrapping an Aztec node */ +export class AztecNodeProverCoordination implements ProverCoordination { + constructor(private node: AztecNode) {} + + getTxByHash(txHash: TxHash): Promise { + return this.node.getTxByHash(txHash); + } + + addEpochProofQuote(quote: EpochProofQuote): Promise { + return this.node.addEpochProofQuote(quote); + } +} diff --git a/yarn-project/prover-node/src/prover-coordination/config.ts b/yarn-project/prover-node/src/prover-coordination/config.ts new file mode 100644 index 00000000000..7940c803e96 --- /dev/null +++ b/yarn-project/prover-node/src/prover-coordination/config.ts @@ -0,0 +1,17 @@ +import { type ConfigMappingsType, getConfigFromMappings } from '@aztec/foundation/config'; + +export type ProverCoordinationConfig = { + proverCoordinationNodeUrl: string | undefined; +}; + +export const proverCoordinationConfigMappings: ConfigMappingsType = { + proverCoordinationNodeUrl: { + env: 'PROVER_COORDINATION_NODE_URL', + description: 'The URL of the tx provider node', + parseEnv: (val: string) => val, + }, +}; + +export function getTxProviderConfigFromEnv(): ProverCoordinationConfig { + return getConfigFromMappings(proverCoordinationConfigMappings); +} diff --git a/yarn-project/prover-node/src/prover-coordination/factory.ts b/yarn-project/prover-node/src/prover-coordination/factory.ts new file mode 100644 index 00000000000..71e5ba8a95e --- /dev/null +++ b/yarn-project/prover-node/src/prover-coordination/factory.ts @@ -0,0 +1,13 @@ +import { type ProverCoordination, createAztecNodeClient } from '@aztec/circuit-types'; + +import { 
AztecNodeProverCoordination } from './aztec-node-prover-coordination.js'; +import { type ProverCoordinationConfig } from './config.js'; + +export function createProverCoordination(config: ProverCoordinationConfig): ProverCoordination { + if (config.proverCoordinationNodeUrl) { + const node = createAztecNodeClient(config.proverCoordinationNodeUrl); + return new AztecNodeProverCoordination(node); + } else { + throw new Error(`Aztec Node URL for Tx Provider is not set.`); + } +} diff --git a/yarn-project/prover-node/src/tx-provider/index.ts b/yarn-project/prover-node/src/prover-coordination/index.ts similarity index 52% rename from yarn-project/prover-node/src/tx-provider/index.ts rename to yarn-project/prover-node/src/prover-coordination/index.ts index bac271dd877..7394c367754 100644 --- a/yarn-project/prover-node/src/tx-provider/index.ts +++ b/yarn-project/prover-node/src/prover-coordination/index.ts @@ -1,3 +1,3 @@ -export * from './aztec-node-tx-provider.js'; -export * from './factory.js'; +export * from './aztec-node-prover-coordination.js'; export * from './config.js'; +export * from './factory.js'; diff --git a/yarn-project/prover-node/src/prover-node.test.ts b/yarn-project/prover-node/src/prover-node.test.ts index 6e52f77a0d4..669c56527a1 100644 --- a/yarn-project/prover-node/src/prover-node.test.ts +++ b/yarn-project/prover-node/src/prover-node.test.ts @@ -3,7 +3,7 @@ import { type L1ToL2MessageSource, type L2BlockSource, type MerkleTreeAdminOperations, - type TxProvider, + type ProverCoordination, WorldStateRunningState, type WorldStateSynchronizer, } from '@aztec/circuit-types'; @@ -24,7 +24,7 @@ describe('prover-node', () => { let l1ToL2MessageSource: MockProxy; let contractDataSource: MockProxy; let worldState: MockProxy; - let txProvider: MockProxy; + let txProvider: MockProxy; let simulator: MockProxy; let proverNode: TestProverNode; @@ -43,7 +43,7 @@ describe('prover-node', () => { l1ToL2MessageSource = mock(); contractDataSource = mock(); worldState = mock(); - txProvider = mock(); + txProvider = mock(); simulator = mock(); const telemetryClient = new NoopTelemetryClient(); diff --git a/yarn-project/prover-node/src/prover-node.ts b/yarn-project/prover-node/src/prover-node.ts index d31e05b215a..24617147814 100644 --- a/yarn-project/prover-node/src/prover-node.ts +++ b/yarn-project/prover-node/src/prover-node.ts @@ -1,9 +1,10 @@ import { + type EpochProofQuote, type EpochProverManager, type L1ToL2MessageSource, type L2BlockSource, type MerkleTreeOperations, - type TxProvider, + type ProverCoordination, type WorldStateSynchronizer, } from '@aztec/circuit-types'; import { compact } from '@aztec/foundation/collection'; @@ -44,7 +45,7 @@ export class ProverNode { private l1ToL2MessageSource: L1ToL2MessageSource, private contractDataSource: ContractDataSource, private worldState: WorldStateSynchronizer, - private txProvider: TxProvider, + private coordination: ProverCoordination, private simulator: SimulationProvider, private telemetryClient: TelemetryClient, options: Partial = {}, @@ -134,6 +135,10 @@ export class ProverNode { } } + public sendEpochProofQuote(quote: EpochProofQuote): Promise { + return this.coordination.addEpochProofQuote(quote); + } + /** * Creates a proof for a block range. Returns once the proof has been submitted to L1. 
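/*
 * Editor's sketch (not part of the diff): wiring a ProverCoordination from config and forwarding a
 * quote, which is what ProverNode.sendEpochProofQuote() does through its injected coordination. The
 * env variable matches PROVER_COORDINATION_NODE_URL added in this PR; the quote argument is a
 * placeholder supplied by the caller, and the import path is an assumption.
 */
import { type EpochProofQuote } from '@aztec/circuit-types';

import { createProverCoordination } from './prover-coordination/factory.js';

export async function forwardQuoteToCoordinator(quote: EpochProofQuote): Promise<void> {
  // Throws if proverCoordinationNodeUrl is not set.
  const coordination = createProverCoordination({
    proverCoordinationNodeUrl: process.env.PROVER_COORDINATION_NODE_URL,
  });
  await coordination.addEpochProofQuote(quote);
}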
*/ @@ -212,7 +217,7 @@ export class ProverNode { this.publisher, this.l2BlockSource, this.l1ToL2MessageSource, - this.txProvider, + this.coordination, this.metrics, cleanUp, ); diff --git a/yarn-project/prover-node/src/tx-provider/aztec-node-tx-provider.ts b/yarn-project/prover-node/src/tx-provider/aztec-node-tx-provider.ts deleted file mode 100644 index 90797602453..00000000000 --- a/yarn-project/prover-node/src/tx-provider/aztec-node-tx-provider.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { type AztecNode, type Tx, type TxHash, type TxProvider } from '@aztec/circuit-types'; - -/** Implements TxProvider by querying an Aztec node for the txs. */ -export class AztecNodeTxProvider implements TxProvider { - constructor(private node: AztecNode) {} - - getTxByHash(txHash: TxHash): Promise { - return this.node.getTxByHash(txHash); - } -} diff --git a/yarn-project/prover-node/src/tx-provider/config.ts b/yarn-project/prover-node/src/tx-provider/config.ts deleted file mode 100644 index 5fc9ed9465d..00000000000 --- a/yarn-project/prover-node/src/tx-provider/config.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { type ConfigMappingsType, getConfigFromMappings } from '@aztec/foundation/config'; - -export type TxProviderConfig = { - txProviderNodeUrl: string | undefined; -}; - -export const txProviderConfigMappings: ConfigMappingsType = { - txProviderNodeUrl: { - env: 'TX_PROVIDER_NODE_URL', - description: 'The URL of the tx provider node', - parseEnv: (val: string) => val, - }, -}; - -export function getTxProviderConfigFromEnv(): TxProviderConfig { - return getConfigFromMappings(txProviderConfigMappings); -} diff --git a/yarn-project/prover-node/src/tx-provider/factory.ts b/yarn-project/prover-node/src/tx-provider/factory.ts deleted file mode 100644 index e17d13e00c0..00000000000 --- a/yarn-project/prover-node/src/tx-provider/factory.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { type TxProvider, createAztecNodeClient } from '@aztec/circuit-types'; - -import { AztecNodeTxProvider } from './aztec-node-tx-provider.js'; -import { type TxProviderConfig } from './config.js'; - -export function createTxProvider(config: TxProviderConfig): TxProvider { - if (config.txProviderNodeUrl) { - const node = createAztecNodeClient(config.txProviderNodeUrl); - return new AztecNodeTxProvider(node); - } else { - throw new Error(`Aztec Node URL for Tx Provider is not set.`); - } -} diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index d2abdef3927..02771aaf1d0 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -1,4 +1,10 @@ -import { ConsensusPayload, type L2Block, type TxHash, getHashedSignaturePayload } from '@aztec/circuit-types'; +import { + ConsensusPayload, + type EpochProofQuote, + type L2Block, + type TxHash, + getHashedSignaturePayload, +} from '@aztec/circuit-types'; import { type L1PublishBlockStats, type L1PublishProofStats } from '@aztec/circuit-types/stats'; import { AGGREGATION_OBJECT_LENGTH, @@ -43,7 +49,7 @@ import type * as chains from 'viem/chains'; import { type PublisherConfig, type TxSenderConfig } from './config.js'; import { L1PublisherMetrics } from './l1-publisher-metrics.js'; -import { prettyLogVeimError } from './utils.js'; +import { prettyLogViemError } from './utils.js'; /** * Stats for a sent transaction. 
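/*
 * Editor's sketch (not part of the diff): validateProofQuote() added below is a soft check. It
 * simulates Rollup.validateEpochProofRightClaim and maps a revert to undefined rather than throwing,
 * so a caller can filter a batch of quotes like this. The import path for L1Publisher is an assumption.
 */
import { type EpochProofQuote } from '@aztec/circuit-types';

import { type L1Publisher } from './l1-publisher.js';

export async function filterValidQuotes(publisher: L1Publisher, quotes: EpochProofQuote[]): Promise<EpochProofQuote[]> {
  const checked = await Promise.all(quotes.map(q => publisher.validateProofQuote(q)));
  // Keep only the quotes the rollup contract would accept.
  return checked.filter((q): q is EpochProofQuote => q !== undefined);
}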
@@ -140,6 +146,7 @@ export class L1Publisher { private account: PrivateKeyAccount; public static PROPOSE_GAS_GUESS: bigint = 500_000n; + public static PROPOSE_AND_CLAIM_GAS_GUESS: bigint = 600_000n; constructor(config: TxSenderConfig & PublisherConfig, client: TelemetryClient) { this.sleepTimeMs = config?.l1PublishRetryIntervalMS ?? 60_000; @@ -186,6 +193,24 @@ export class L1Publisher { return [slot, blockNumber]; } + public async nextEpochToClaim(): Promise { + return await this.rollupContract.read.nextEpochToClaim(); + } + + public async getEpochForSlotNumber(slotNumber: bigint): Promise { + return await this.rollupContract.read.getEpochAtSlot([slotNumber]); + } + + public async validateProofQuote(quote: EpochProofQuote): Promise { + const args = [quote.toViemArgs()] as const; + try { + await this.rollupContract.read.validateEpochProofRightClaim(args, { account: this.account }); + } catch (err) { + return undefined; + } + return quote; + } + /** * @notice Will call `validateHeader` to make sure that it is possible to propose * @@ -253,7 +278,12 @@ export class L1Publisher { * @param block - L2 block to propose. * @returns True once the tx has been confirmed and is successful, false on revert or interrupt, blocks otherwise. */ - public async proposeL2Block(block: L2Block, attestations?: Signature[], txHashes?: TxHash[]): Promise { + public async proposeL2Block( + block: L2Block, + attestations?: Signature[], + txHashes?: TxHash[], + proofQuote?: EpochProofQuote, + ): Promise { const ctx = { blockNumber: block.number, slotNumber: block.header.globalVariables.slotNumber.toBigInt(), @@ -273,7 +303,12 @@ export class L1Publisher { }; // Publish body and propose block (if not already published) - if (!this.interrupted) { + if (this.interrupted) { + this.log.verbose('L2 block data syncing interrupted while processing blocks.', ctx); + return false; + } + + { const timer = new Timer(); // @note This will make sure that we are passing the checks for our header ASSUMING that the data is also made available @@ -321,7 +356,53 @@ export class L1Publisher { await this.sleepOrInterrupted(); } - this.log.verbose('L2 block data syncing interrupted while processing blocks.', ctx); + const timer = new Timer(); + + // @note This will make sure that we are passing the checks for our header ASSUMING that the data is also made available + // This means that we can avoid the simulation issues in later checks. + // By simulation issue, I mean the fact that the block.timestamp is equal to the last block, not the next, which + // make time consistency checks break. + await this.validateBlockForSubmission(block.header, { + digest: digest.toBuffer(), + signatures: attestations ?? [], + }); + + this.log.verbose(`Submitting propose transaction with `); + + const txHash = proofQuote + ? 
await this.sendProposeAndClaimTx(proposeTxArgs, proofQuote) + : await this.sendProposeTx(proposeTxArgs); + + if (!txHash) { + this.log.info(`Failed to publish block ${block.number} to L1`, ctx); + return false; + } + + const receipt = await this.getTransactionReceipt(txHash); + if (!receipt) { + this.log.info(`Failed to get receipt for tx ${txHash}`, ctx); + return false; + } + + // Tx was mined successfully + if (receipt.status) { + const tx = await this.getTransactionStats(txHash); + const stats: L1PublishBlockStats = { + ...pick(receipt, 'gasPrice', 'gasUsed', 'transactionHash'), + ...pick(tx!, 'calldataGas', 'calldataSize'), + ...block.getStats(), + eventName: 'rollup-published-to-l1', + }; + this.log.info(`Published L2 block to L1 rollup contract`, { ...stats, ...ctx }); + this.metrics.recordProcessBlockTx(timer.ms(), stats); + + return true; + } + + this.metrics.recordFailedTx('process'); + + this.log.error(`Rollup.process tx status failed: ${receipt.transactionHash}`, ctx); + await this.sleepOrInterrupted(); return false; } @@ -542,6 +623,40 @@ export class L1Publisher { } } + private async prepareProposeTx(encodedData: L1ProcessArgs, gasGuess: bigint) { + // We have to jump a few hoops because viem is not happy around estimating gas for view functions + const computeTxsEffectsHashGas = await this.publicClient.estimateGas({ + to: this.rollupContract.address, + data: encodeFunctionData({ + abi: this.rollupContract.abi, + functionName: 'computeTxsEffectsHash', + args: [`0x${encodedData.body.toString('hex')}`], + }), + }); + + // @note We perform this guesstimate instead of the usual `gasEstimate` since + // viem will use the current state to simulate against, which means that + // we will fail estimation in the case where we are simulating for the + // first ethereum block within our slot (as current time is not in the + // slot yet). + const gasGuesstimate = computeTxsEffectsHashGas + gasGuess; + + const attestations = encodedData.attestations + ? encodedData.attestations.map(attest => attest.toViemSignature()) + : []; + const txHashes = encodedData.txHashes ? encodedData.txHashes.map(txHash => txHash.to0xString()) : []; + const args = [ + `0x${encodedData.header.toString('hex')}`, + `0x${encodedData.archive.toString('hex')}`, + `0x${encodedData.blockHash.toString('hex')}`, + txHashes, + attestations, + `0x${encodedData.body.toString('hex')}`, + ] as const; + + return { args, gasGuesstimate }; + } + private getSubmitEpochProofArgs(args: { fromBlock: number; toBlock: number; @@ -569,47 +684,41 @@ export class L1Publisher { } private async sendProposeTx(encodedData: L1ProcessArgs): Promise { - if (!this.interrupted) { - try { - // We have to jump a few hoops because viem is not happy around estimating gas for view functions - const computeTxsEffectsHashGas = await this.publicClient.estimateGas({ - to: this.rollupContract.address, - data: encodeFunctionData({ - abi: this.rollupContract.abi, - functionName: 'computeTxsEffectsHash', - args: [`0x${encodedData.body.toString('hex')}`], - }), - }); + if (this.interrupted) { + return; + } + try { + const { args, gasGuesstimate } = await this.prepareProposeTx(encodedData, L1Publisher.PROPOSE_GAS_GUESS); - // @note We perform this guesstimate instead of the usual `gasEstimate` since - // viem will use the current state to simulate against, which means that - // we will fail estimation in the case where we are simulating for the - // first ethereum block within our slot (as current time is not in the - // slot yet). 
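/*
 * Editor's sketch (not part of the diff): how the gas figure attached to propose/proposeAndClaim is
 * built in prepareProposeTx(). It is the estimateGas result for computeTxsEffectsHash plus a fixed
 * guess, with a larger guess when a proof-right claim is bundled. The constants mirror the values
 * added to L1Publisher above; computeTxsEffectsHashGas stands in for the estimateGas result.
 */
const PROPOSE_GAS_GUESS = 500_000n;
const PROPOSE_AND_CLAIM_GAS_GUESS = 600_000n;

export function gasGuesstimate(computeTxsEffectsHashGas: bigint, withClaim: boolean): bigint {
  return computeTxsEffectsHashGas + (withClaim ? PROPOSE_AND_CLAIM_GAS_GUESS : PROPOSE_GAS_GUESS);
}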
- const gasGuesstimate = computeTxsEffectsHashGas + L1Publisher.PROPOSE_GAS_GUESS; - - const attestations = encodedData.attestations - ? encodedData.attestations.map(attest => attest.toViemSignature()) - : []; - const txHashes = encodedData.txHashes ? encodedData.txHashes.map(txHash => txHash.to0xString()) : []; - const args = [ - `0x${encodedData.header.toString('hex')}`, - `0x${encodedData.archive.toString('hex')}`, - `0x${encodedData.blockHash.toString('hex')}`, - txHashes, - attestations, - `0x${encodedData.body.toString('hex')}`, - ] as const; - - return await this.rollupContract.write.propose(args, { - account: this.account, - gas: gasGuesstimate, - }); - } catch (err) { - prettyLogVeimError(err, this.log); - this.log.error(`Rollup publish failed`, err); - return undefined; - } + return await this.rollupContract.write.propose(args, { + account: this.account, + gas: gasGuesstimate, + }); + } catch (err) { + prettyLogViemError(err, this.log); + this.log.error(`Rollup publish failed`, err); + return undefined; + } + } + + private async sendProposeAndClaimTx(encodedData: L1ProcessArgs, quote: EpochProofQuote): Promise { + if (this.interrupted) { + return; + } + try { + const { args, gasGuesstimate } = await this.prepareProposeTx( + encodedData, + L1Publisher.PROPOSE_AND_CLAIM_GAS_GUESS, + ); + + return await this.rollupContract.write.proposeAndClaim([...args, quote.toViemArgs()], { + account: this.account, + gas: gasGuesstimate, + }); + } catch (err) { + prettyLogViemError(err, this.log); + this.log.error(`Rollup publish failed`, err); + return undefined; } } diff --git a/yarn-project/sequencer-client/src/publisher/utils.ts b/yarn-project/sequencer-client/src/publisher/utils.ts index 13842102a2c..8889aa0998c 100644 --- a/yarn-project/sequencer-client/src/publisher/utils.ts +++ b/yarn-project/sequencer-client/src/publisher/utils.ts @@ -2,7 +2,7 @@ import { type Logger } from '@aztec/foundation/log'; import { BaseError, ContractFunctionRevertedError } from 'viem'; -export function prettyLogVeimError(err: any, logger: Logger) { +export function prettyLogViemError(err: any, logger: Logger) { if (err instanceof BaseError) { const revertError = err.walk(err => err instanceof ContractFunctionRevertedError); if (revertError instanceof ContractFunctionRevertedError) { diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 2b20ac57113..bf3e21f0e6a 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -3,6 +3,7 @@ import { BlockProposal, type BlockSimulator, ConsensusPayload, + type EpochProofQuote, type L1ToL2MessageSource, L2Block, type L2BlockSource, @@ -18,9 +19,11 @@ import { WorldStateRunningState, type WorldStateSynchronizer, makeProcessedTx, + mockEpochProofQuote, mockTxForRollup, } from '@aztec/circuit-types'; import { + AZTEC_EPOCH_DURATION, AztecAddress, EthAddress, Fr, @@ -119,12 +122,12 @@ describe('sequencer', () => { blockSimulator = mock(); p2p = mock({ - getStatus: () => Promise.resolve({ state: P2PClientState.IDLE, syncedToL2Block: lastBlockNumber }), + getStatus: mockFn().mockResolvedValue({ state: P2PClientState.IDLE, syncedToL2Block: lastBlockNumber }), }); worldState = mock({ getLatest: () => merkleTreeOps, - status: () => Promise.resolve({ state: WorldStateRunningState.IDLE, syncedToL2Block: lastBlockNumber }), + status: mockFn().mockResolvedValue({ state: WorldStateRunningState.IDLE, 
syncedToL2Block: lastBlockNumber }), }); publicProcessor = mock({ @@ -145,7 +148,7 @@ describe('sequencer', () => { l1ToL2MessageSource = mock({ getL1ToL2Messages: () => Promise.resolve(Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(Fr.ZERO)), - getBlockNumber: () => Promise.resolve(lastBlockNumber), + getBlockNumber: mockFn().mockResolvedValue(lastBlockNumber), }); // all txs use the same allowed FPC class @@ -208,7 +211,7 @@ describe('sequencer', () => { ); // Ok, we have an issue that we never actually call the process L2 block expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); - expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash]); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash], undefined); expect(blockSimulator.cancel).toHaveBeenCalledTimes(0); }); @@ -257,7 +260,7 @@ describe('sequencer', () => { mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash]); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash], undefined); expect(blockSimulator.cancel).toHaveBeenCalledTimes(0); }); @@ -300,7 +303,7 @@ describe('sequencer', () => { mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes, undefined); expect(p2p.deleteTxs).toHaveBeenCalledWith([doubleSpendTx.getTxHash()]); expect(blockSimulator.cancel).toHaveBeenCalledTimes(0); }); @@ -339,7 +342,7 @@ describe('sequencer', () => { mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes, undefined); expect(p2p.deleteTxs).toHaveBeenCalledWith([invalidChainTx.getTxHash()]); expect(blockSimulator.cancel).toHaveBeenCalledTimes(0); }); @@ -380,7 +383,7 @@ describe('sequencer', () => { mockedGlobalVariables, Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); - expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), validTxHashes, undefined); expect(blockSimulator.cancel).toHaveBeenCalledTimes(0); }); @@ -431,7 +434,7 @@ describe('sequencer', () => { Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); - expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), txHashes); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), txHashes, undefined); expect(blockSimulator.cancel).toHaveBeenCalledTimes(0); }); @@ -482,7 +485,7 @@ describe('sequencer', () => { Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); - expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), []); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [], undefined); expect(blockSimulator.cancel).toHaveBeenCalledTimes(0); }); @@ -536,7 +539,7 @@ describe('sequencer', () => { ); expect(publisher.proposeL2Block).toHaveBeenCalledTimes(1); - 
expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), postFlushTxHashes); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), postFlushTxHashes, undefined); expect(blockSimulator.cancel).toHaveBeenCalledTimes(0); }); @@ -580,6 +583,305 @@ describe('sequencer', () => { expect(publisher.proposeL2Block).not.toHaveBeenCalled(); }); + + describe('Handling proof quotes', () => { + let txHash: TxHash; + let currentEpoch = 0n; + const setupForBlockNumber = (blockNumber: number) => { + currentEpoch = BigInt(blockNumber) / BigInt(AZTEC_EPOCH_DURATION); + // Create a new block and header + block = L2Block.random(blockNumber); + + mockedGlobalVariables = new GlobalVariables( + chainId, + version, + block.header.globalVariables.blockNumber, + block.header.globalVariables.slotNumber, + Fr.ZERO, + coinbase, + feeRecipient, + gasFees, + ); + + worldState.status.mockResolvedValue({ + state: WorldStateRunningState.IDLE, + syncedToL2Block: block.header.globalVariables.blockNumber.toNumber() - 1, + }); + + p2p.getStatus.mockResolvedValue({ + syncedToL2Block: block.header.globalVariables.blockNumber.toNumber() - 1, + state: P2PClientState.IDLE, + }); + + l2BlockSource.getBlockNumber.mockResolvedValue(block.header.globalVariables.blockNumber.toNumber() - 1); + + l1ToL2MessageSource.getBlockNumber.mockResolvedValue(block.header.globalVariables.blockNumber.toNumber() - 1); + + globalVariableBuilder.buildGlobalVariables.mockResolvedValue(mockedGlobalVariables); + + publisher.canProposeAtNextEthBlock.mockResolvedValue([ + block.header.globalVariables.slotNumber.toBigInt(), + block.header.globalVariables.blockNumber.toBigInt(), + ]); + + publisher.getEpochForSlotNumber.mockImplementation((slotNumber: bigint) => + Promise.resolve(slotNumber / BigInt(AZTEC_EPOCH_DURATION)), + ); + + const tx = mockTxForRollup(); + tx.data.constants.txContext.chainId = chainId; + txHash = tx.getTxHash(); + const result: ProvingSuccess = { + status: PROVING_STATUS.SUCCESS, + }; + const ticket: ProvingTicket = { + provingPromise: Promise.resolve(result), + }; + + p2p.getTxs.mockReturnValue([tx]); + blockSimulator.startNewBlock.mockResolvedValueOnce(ticket); + blockSimulator.finaliseBlock.mockResolvedValue({ block }); + }; + + it('submits a valid proof quote with a block', async () => { + const blockNumber = AZTEC_EPOCH_DURATION + 1; + setupForBlockNumber(blockNumber); + + const proofQuote = mockEpochProofQuote( + currentEpoch - 1n, + block.header.globalVariables.slotNumber.toBigInt() + 1n, + 10000n, + EthAddress.random(), + 1, + ); + + p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); + publisher.proposeL2Block.mockResolvedValueOnce(true); + publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); + + // The previous epoch can be claimed + publisher.nextEpochToClaim.mockImplementation(() => Promise.resolve(currentEpoch - 1n)); + + await sequencer.initialSync(); + await sequencer.work(); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash], proofQuote); + }); + + it('does not claim the epoch previous to the first', async () => { + const blockNumber = 1; + setupForBlockNumber(blockNumber); + + const proofQuote = mockEpochProofQuote( + 0n, + block.header.globalVariables.slotNumber.toBigInt() + 1n, + 10000n, + EthAddress.random(), + 1, + ); + + p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); + publisher.proposeL2Block.mockResolvedValueOnce(true); + publisher.validateProofQuote.mockImplementation((x: 
EpochProofQuote) => Promise.resolve(x)); + + // The previous epoch can be claimed + publisher.nextEpochToClaim.mockImplementation(() => Promise.resolve(0n)); + + await sequencer.initialSync(); + await sequencer.work(); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash], undefined); + }); + + it('does not submit a quote with an expired slot number', async () => { + const blockNumber = AZTEC_EPOCH_DURATION + 1; + setupForBlockNumber(blockNumber); + + const proofQuote = mockEpochProofQuote( + currentEpoch - 1n, + // Slot number expired + block.header.globalVariables.slotNumber.toBigInt() - 1n, + 10000n, + EthAddress.random(), + 1, + ); + + p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); + publisher.proposeL2Block.mockResolvedValueOnce(true); + publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); + + // The previous epoch can be claimed + publisher.nextEpochToClaim.mockImplementation(() => Promise.resolve(currentEpoch - 1n)); + + await sequencer.initialSync(); + await sequencer.work(); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash], undefined); + }); + + it('does not submit a valid quote if unable to claim epoch', async () => { + const blockNumber = AZTEC_EPOCH_DURATION + 1; + setupForBlockNumber(blockNumber); + + const proofQuote = mockEpochProofQuote( + currentEpoch - 1n, + block.header.globalVariables.slotNumber.toBigInt() + 1n, + 10000n, + EthAddress.random(), + 1, + ); + + p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); + publisher.proposeL2Block.mockResolvedValueOnce(true); + publisher.validateProofQuote.mockImplementation((x: EpochProofQuote) => Promise.resolve(x)); + + // The previous epoch can be claimed + publisher.nextEpochToClaim.mockResolvedValue(currentEpoch); + + await sequencer.initialSync(); + await sequencer.work(); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash], undefined); + }); + + it('does not submit an invalid quote', async () => { + const blockNumber = AZTEC_EPOCH_DURATION + 1; + setupForBlockNumber(blockNumber); + + const proofQuote = mockEpochProofQuote( + currentEpoch - 1n, + block.header.globalVariables.slotNumber.toBigInt() + 1n, + 10000n, + EthAddress.random(), + 1, + ); + + p2p.getEpochProofQuotes.mockResolvedValue([proofQuote]); + publisher.proposeL2Block.mockResolvedValueOnce(true); + + // Quote is reported as invalid + publisher.validateProofQuote.mockImplementation(_ => Promise.resolve(undefined)); + + // The previous epoch can be claimed + publisher.nextEpochToClaim.mockImplementation(() => Promise.resolve(currentEpoch - 1n)); + + await sequencer.initialSync(); + await sequencer.work(); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash], undefined); + }); + + it('only selects valid quotes', async () => { + const blockNumber = AZTEC_EPOCH_DURATION + 1; + setupForBlockNumber(blockNumber); + + // Create 1 valid quote and 3 that have a higher fee but are invalid + const validProofQuote = mockEpochProofQuote( + currentEpoch - 1n, + block.header.globalVariables.slotNumber.toBigInt() + 1n, + 10000n, + EthAddress.random(), + 1, + ); + + const proofQuoteInvalidSlot = mockEpochProofQuote( + currentEpoch - 1n, + block.header.globalVariables.slotNumber.toBigInt() - 1n, + 10000n, + EthAddress.random(), + 2, + ); + + const proofQuoteInvalidEpoch = mockEpochProofQuote( + currentEpoch, + block.header.globalVariables.slotNumber.toBigInt() - 1n, + 10000n, + 
EthAddress.random(), + 2, + ); + + // This is deemed invalid by the contract; we identify it by a fee of 2 + const proofQuoteInvalid = mockEpochProofQuote( + currentEpoch - 1n, + block.header.globalVariables.slotNumber.toBigInt() + 1n, + 10000n, + EthAddress.random(), + 2, + ); + + const allQuotes = [validProofQuote, proofQuoteInvalidSlot, proofQuoteInvalidEpoch, proofQuoteInvalid]; + + p2p.getEpochProofQuotes.mockResolvedValue(allQuotes); + publisher.proposeL2Block.mockResolvedValueOnce(true); + + // Quotes with a basis point fee of 2 are reported as invalid + publisher.validateProofQuote.mockImplementation(p => + Promise.resolve(p.payload.basisPointFee === 2 ? undefined : p), + ); + + // The previous epoch can be claimed + publisher.nextEpochToClaim.mockImplementation(() => Promise.resolve(currentEpoch - 1n)); + + await sequencer.initialSync(); + await sequencer.work(); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash], validProofQuote); + }); + + it('selects the lowest cost valid quote', async () => { + const blockNumber = AZTEC_EPOCH_DURATION + 1; + setupForBlockNumber(blockNumber); + + // Create 3 valid quotes with different fees, + // and 3 invalid quotes with lower fees. + // We should select the lowest-cost valid quote. + const validQuotes = times(3, (i: number) => + mockEpochProofQuote( + currentEpoch - 1n, + block.header.globalVariables.slotNumber.toBigInt() + 1n, + 10000n, + EthAddress.random(), + 10 + i, + ), + ); + + const proofQuoteInvalidSlot = mockEpochProofQuote( + currentEpoch - 1n, + block.header.globalVariables.slotNumber.toBigInt() - 1n, + 10000n, + EthAddress.random(), + 1, + ); + + const proofQuoteInvalidEpoch = mockEpochProofQuote( + currentEpoch, + block.header.globalVariables.slotNumber.toBigInt() - 1n, + 10000n, + EthAddress.random(), + 2, + ); + + // This is deemed invalid by the contract; we identify it by its fee + const proofQuoteInvalid = mockEpochProofQuote( + currentEpoch - 1n, + block.header.globalVariables.slotNumber.toBigInt() + 1n, + 10000n, + EthAddress.random(), + 3, + ); + + const allQuotes = [proofQuoteInvalidSlot, proofQuoteInvalidEpoch, ...validQuotes, proofQuoteInvalid]; + + p2p.getEpochProofQuotes.mockResolvedValue(allQuotes); + publisher.proposeL2Block.mockResolvedValueOnce(true); + + // Quotes with a basis point fee of 3 are reported as invalid + publisher.validateProofQuote.mockImplementation(p => + Promise.resolve(p.payload.basisPointFee === 3 ? undefined : p), + ); + + // The previous epoch can be claimed + publisher.nextEpochToClaim.mockImplementation(() => Promise.resolve(currentEpoch - 1n)); + + await sequencer.initialSync(); + await sequencer.work(); + expect(publisher.proposeL2Block).toHaveBeenCalledWith(block, getSignatures(), [txHash], validQuotes[0]); + }); + }); });
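Taken together, the proof-quote tests above pin down a single selection rule: drop quotes whose validUntilSlot has already passed, drop quotes the rollup contract reports as invalid, then take the cheapest of whatever remains (or nothing at all). A minimal sketch of that rule in isolation — the QuoteLike shape and the validate callback are illustrative stand-ins, not the real EpochProofQuote type or L1Publisher API:

```typescript
// Illustrative stand-in for the fields of EpochProofQuote that the selection logic reads.
interface QuoteLike {
  payload: { epochToProve: bigint; validUntilSlot: bigint; basisPointFee: number };
}

// Mirrors the behaviour the tests assert: expired quotes are filtered locally, quotes the
// contract rejects resolve to undefined, and the lowest basis-point fee wins.
async function selectBestQuote(
  quotes: QuoteLike[],
  currentSlot: bigint,
  validate: (q: QuoteLike) => Promise<QuoteLike | undefined>,
): Promise<QuoteLike | undefined> {
  const unexpired = quotes.filter(q => q.payload.validUntilSlot >= currentSlot);
  const accepted = (await Promise.all(unexpired.map(validate))).filter((q): q is QuoteLike => !!q);
  return accepted.sort((a, b) => a.payload.basisPointFee - b.payload.basisPointFee)[0];
}
```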
class TestSubject extends Sequencer { diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 04426ee93d0..5e7bd9a4ae3 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -1,5 +1,6 @@ import { type BlockAttestation, + type EpochProofQuote, type L1ToL2MessageSource, type L2Block, type L2BlockSource, @@ -34,7 +35,7 @@ import { type ValidatorClient } from '@aztec/validator-client'; import { type BlockBuilderFactory } from '../block_builder/index.js'; import { type GlobalVariableBuilder } from '../global_variable_builder/global_builder.js'; import { type L1Publisher } from '../publisher/l1-publisher.js'; -import { prettyLogVeimError } from '../publisher/utils.js'; +import { prettyLogViemError } from '../publisher/utils.js'; import { type TxValidatorFactory } from '../tx_validator/tx_validator_factory.js'; import { type SequencerConfig } from './config.js'; import { SequencerMetrics } from './metrics.js'; @@ -311,7 +312,7 @@ export class Sequencer { this.log.debug(`Can propose block ${proposalBlockNumber} at slot ${slot}`); return slot; } catch (err) { - prettyLogVeimError(err, this.log); + prettyLogViemError(err, this.log); throw err; } } @@ -404,6 +405,14 @@ export class Sequencer { const newGlobalVariables = proposalHeader.globalVariables; + // Kick off the process of collecting and validating proof quotes here so it runs alongside block building + const proofQuotePromise = this.createProofClaimForPreviousEpoch(newGlobalVariables.slotNumber.toBigInt()).catch( + e => { + this.log.warn(`Failed to create proof claim quote ${e}`); + return undefined; + }, + ); + + this.metrics.recordNewBlock(newGlobalVariables.blockNumber.toNumber(), validTxs.length); const workTimer = new Timer(); this.state = SequencerState.CREATING_BLOCK; @@ -488,8 +497,10 @@ const attestations = await this.collectAttestations(block, txHashes); this.log.verbose('Attestations collected'); + const proofQuote = await proofQuotePromise; + try { - await this.publishL2Block(block, attestations, txHashes); + await this.publishL2Block(block, attestations, txHashes, proofQuote); this.metrics.recordPublishedBlock(workDuration); this.log.info( `Submitted rollup block ${block.number} with ${
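The two hunks above establish a start-early/await-late pattern: the quote lookup is kicked off before block building, any failure is downgraded to undefined so it can never block a proposal, and the promise is only awaited once attestations are in hand. A stripped-down sketch of the same control flow, with hypothetical stand-ins for the real sequencer steps:

```typescript
// Hypothetical stand-ins; only the control flow is the point here.
declare function collectQuote(slot: bigint): Promise<object | undefined>;
declare function buildBlockAndCollectAttestations(): Promise<object>;

async function proposeWithOptionalQuote(slot: bigint) {
  // Start the fallible lookup immediately; degrade to undefined instead of rejecting.
  const quotePromise = collectQuote(slot).catch(() => undefined);

  // Long-running work proceeds while the lookup is in flight.
  const block = await buildBlockAndCollectAttestations();

  // Await only once the result is actually needed; by now it has usually settled.
  const quote = await quotePromise;
  return { block, quote };
}
```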
@@ -540,6 +551,42 @@ export class Sequencer { return orderAttestations(attestations, committee); } + protected async createProofClaimForPreviousEpoch(slotNumber: bigint): Promise<EpochProofQuote | undefined> { + // Find out which epoch we are currently in + const epochForBlock = await this.publisher.getEpochForSlotNumber(slotNumber); + if (epochForBlock < 1n) { + // It's the 0th epoch, nothing to be proven yet + this.log.verbose(`First epoch has no claim`); + return undefined; + } + const epochToProve = epochForBlock - 1n; + // Find out the next epoch that can be claimed + const canClaim = await this.publisher.nextEpochToClaim(); + if (canClaim != epochToProve) { + // It's not the one we are looking to claim + this.log.verbose(`Unable to claim previous epoch (${canClaim} != ${epochToProve})`); + return undefined; + } + // Get quotes for the epoch to be proven + const quotes = await this.p2pClient.getEpochProofQuotes(epochToProve); + this.log.verbose(`Retrieved ${quotes.length} quotes, slot: ${slotNumber}, epoch to prove: ${epochToProve}`); + // Ensure these quotes are still valid for the slot and have the contract validate them + const validQuotesPromise = Promise.all( + quotes.filter(x => x.payload.validUntilSlot >= slotNumber).map(x => this.publisher.validateProofQuote(x)), + ); + + const validQuotes = (await validQuotesPromise).filter((q): q is EpochProofQuote => !!q); + if (!validQuotes.length) { + this.log.verbose(`Failed to find any valid proof quotes`); + return undefined; + } + // Pick the quote with the lowest fee + const sortedQuotes = validQuotes.sort( + (a: EpochProofQuote, b: EpochProofQuote) => a.payload.basisPointFee - b.payload.basisPointFee, + ); + return sortedQuotes[0]; + } + /** * Publishes the L2Block to the rollup contract. * @param block - The L2Block to be published. @@ -547,11 +594,16 @@ @trackSpan('Sequencer.publishL2Block', block => ({ [Attributes.BLOCK_NUMBER]: block.number, })) - protected async publishL2Block(block: L2Block, attestations?: Signature[], txHashes?: TxHash[]) { + protected async publishL2Block( + block: L2Block, + attestations?: Signature[], + txHashes?: TxHash[], + proofQuote?: EpochProofQuote, + ) { // Publishes new block to the network and awaits the tx to be mined this.state = SequencerState.PUBLISHING_BLOCK; - const publishedL2Block = await this.publisher.proposeL2Block(block, attestations, txHashes); + const publishedL2Block = await this.publisher.proposeL2Block(block, attestations, txHashes, proofQuote); if (publishedL2Block) { this.lastPublishedBlock = block.number; } else {