diff --git a/CLAUDE.md b/CLAUDE.md
index 3fbabc71..750eea48 100644
--- a/CLAUDE.md
+++ b/CLAUDE.md
@@ -50,6 +50,10 @@ uvx tox # Everything (checks + tests + docs)
 - Test files/functions must start with `test_`
 - **No example code in docstrings**: Do not include `Example:` sections with code blocks in docstrings. Keep documentation concise and focused on explaining *what* and *why*, not *how to use*. Unit tests serve as usage examples.
 - **No section separator comments**: Never use banner-style separator comments (`# ====...`, `# ----...`, or similar). They add visual clutter with no value. Use blank lines to separate logical sections. If a section needs a heading, a single `#` comment line is enough.
+- **CRITICAL - Preserve existing documentation**: When refactoring or modifying code, NEVER remove or rewrite existing comments and docstrings unless they are directly invalidated by the code change. Removing documentation that still applies creates unnecessary noise in code review diffs and destroys context that was carefully written. Only modify documentation when:
+  - The documented behavior has actually changed
+  - The comment references code that no longer exists
+  - The comment is factually wrong after your change
 
 ### Import Style
 
diff --git a/packages/testing/src/consensus_testing/keys.py b/packages/testing/src/consensus_testing/keys.py
index ba9b75f2..5709cebe 100755
--- a/packages/testing/src/consensus_testing/keys.py
+++ b/packages/testing/src/consensus_testing/keys.py
@@ -45,16 +45,24 @@
     AttestationSignatures,
 )
 from lean_spec.subspecs.containers.slot import Slot
+from lean_spec.subspecs.koalabear import Fp
 from lean_spec.subspecs.xmss.aggregation import (
     AggregatedSignatureProof,
     SignatureKey,
 )
+from lean_spec.subspecs.xmss.constants import TARGET_CONFIG
 from lean_spec.subspecs.xmss.containers import KeyPair, PublicKey, Signature
 from lean_spec.subspecs.xmss.interface import (
     PROD_SIGNATURE_SCHEME,
     TEST_SIGNATURE_SCHEME,
     GeneralizedXmssScheme,
 )
+from lean_spec.subspecs.xmss.types import (
+    HashDigestList,
+    HashDigestVector,
+    HashTreeOpening,
+    Randomness,
+)
 from lean_spec.types import Uint64
 
 __all__ = [
@@ -64,6 +72,7 @@
     "LazyKeyDict",
     "NUM_VALIDATORS",
     "XmssKeyManager",
+    "create_dummy_signature",
     "download_keys",
     "get_keys_dir",
     "get_shared_key_manager",
@@ -94,6 +103,29 @@
 """Default max slot for the shared key manager."""
 
 
+def create_dummy_signature() -> Signature:
+    """
+    Create a structurally valid but cryptographically invalid individual signature.
+
+    The signature has proper structure (correct number of siblings, hashes, etc.)
+    but all values are zeros, so it will fail cryptographic verification.
+    """
+    # Create zero-filled hash digests with correct dimensions
+    zero_digest = HashDigestVector(data=[Fp(0) for _ in range(TARGET_CONFIG.HASH_LEN_FE)])
+
+    # Path needs LOG_LIFETIME siblings for the Merkle authentication path
+    siblings = HashDigestList(data=[zero_digest for _ in range(TARGET_CONFIG.LOG_LIFETIME)])
+
+    # Hashes need DIMENSION vectors for the Winternitz chain hashes
+    hashes = HashDigestList(data=[zero_digest for _ in range(TARGET_CONFIG.DIMENSION)])
+
+    return Signature(
+        path=HashTreeOpening(siblings=siblings),
+        rho=Randomness(data=[Fp(0) for _ in range(TARGET_CONFIG.RAND_LEN_FE)]),
+        hashes=hashes,
+    )
+
+
 def get_shared_key_manager(max_slot: Slot = _SHARED_MANAGER_MAX_SLOT) -> XmssKeyManager:
     """
     Get a shared XMSS key manager for reusing keys across tests.
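A minimal usage sketch for the new create_dummy_signature() helper, not part of the patch: it assumes a SignedAttestation container with the field names used in fork_choice.py below; the import path for SignedAttestation and the helper function name are illustrative assumptions.

# Hypothetical sketch: wrap an attestation with a zero-filled signature so a
# Store rejection path can be exercised. Only the field names come from this diff.
from consensus_testing.keys import create_dummy_signature
from lean_spec.subspecs.containers import SignedAttestation  # import path assumed

def invalidly_signed(attestation) -> SignedAttestation:
    """Return a SignedAttestation whose signature must fail cryptographic verification."""
    return SignedAttestation(
        validator_id=attestation.validator_id,
        message=attestation.data,
        signature=create_dummy_signature(),
    )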
diff --git a/packages/testing/src/consensus_testing/test_fixtures/fork_choice.py b/packages/testing/src/consensus_testing/test_fixtures/fork_choice.py
index 0a0138b6..5633a69b 100644
--- a/packages/testing/src/consensus_testing/test_fixtures/fork_choice.py
+++ b/packages/testing/src/consensus_testing/test_fixtures/fork_choice.py
@@ -33,14 +33,17 @@
 from lean_spec.subspecs.containers.state.state import State
 from lean_spec.subspecs.containers.validator import ValidatorIndex
 from lean_spec.subspecs.forkchoice import Store
-from lean_spec.subspecs.koalabear import Fp
 from lean_spec.subspecs.ssz import hash_tree_root
 from lean_spec.subspecs.xmss.aggregation import SignatureKey
 from lean_spec.subspecs.xmss.containers import Signature
-from lean_spec.subspecs.xmss.types import HashDigestList, HashTreeOpening, Randomness
 from lean_spec.types import Bytes32, Uint64
 
-from ..keys import LEAN_ENV_TO_SCHEMES, XmssKeyManager, get_shared_key_manager
+from ..keys import (
+    LEAN_ENV_TO_SCHEMES,
+    XmssKeyManager,
+    create_dummy_signature,
+    get_shared_key_manager,
+)
 from ..test_types import (
     AggregatedAttestationSpec,
     AttestationStep,
@@ -231,59 +234,62 @@ def make_fixture(self) -> Self:
         # Store follows immutable pattern: each method returns a new Store.
         for i, step in enumerate(self.steps):
             try:
-                if isinstance(step, TickStep):
-                    # Time advancement may trigger slot boundaries.
-                    # At slot boundaries, pending attestations may become active.
-                    # Always act as aggregator to ensure gossip signatures are aggregated
-                    store = store.on_tick(Uint64(step.time), has_proposal=False, is_aggregator=True)
-
-                elif isinstance(step, BlockStep):
-                    # Build a complete signed block from the lightweight spec.
-                    # The spec contains minimal fields; we fill the rest.
-                    signed_block = self._build_block_from_spec(
-                        step.block, store, self._block_registry, key_manager
-                    )
-
-                    # Store the filled Block for serialization
-                    block = signed_block.message.block
-                    step._filled_block = signed_block.message
-
-                    # Register labeled blocks for fork building.
-                    # Later blocks can reference this one as their parent.
-                    if step.block.label is not None:
-                        if step.block.label in self._block_registry:
-                            raise ValueError(
-                                f"Step {i}: duplicate label '{step.block.label}' - "
-                                f"labels must be unique within a test"
-                            )
-                        self._block_registry[step.block.label] = block
-
-                    # Advance time to the block's slot.
-                    # Store rejects blocks from the future.
-                    # This tick includes a block (has proposal).
-                    # Always act as aggregator to ensure gossip signatures are aggregated
-                    slot_duration_seconds = block.slot * SECONDS_PER_SLOT
-                    block_time = store.config.genesis_time + slot_duration_seconds
-                    store = store.on_tick(block_time, has_proposal=True, is_aggregator=True)
-
-                    # Process the block through Store.
-                    # This validates, applies state transition, and updates head.
-                    store = store.on_block(
-                        signed_block,
-                        scheme=LEAN_ENV_TO_SCHEMES[self.lean_env],
-                    )
-
-                elif isinstance(step, AttestationStep):
-                    # Process a gossip attestation.
-                    # Gossip attestations arrive outside of blocks.
-                    # They influence the fork choice weight calculation.
-                    store = store.on_gossip_attestation(
-                        step.attestation,
-                        scheme=LEAN_ENV_TO_SCHEMES[self.lean_env],
-                    )
-
-                else:
-                    raise ValueError(f"Step {i}: unknown step type {type(step).__name__}")
+                match step:
+                    case TickStep():
+                        # Time advancement may trigger slot boundaries.
+                        # At slot boundaries, pending attestations may become active.
+                        # Always act as aggregator to ensure gossip signatures are aggregated
+                        store = store.on_tick(
+                            Uint64(step.time), has_proposal=False, is_aggregator=True
+                        )
+
+                    case BlockStep():
+                        # Build a complete signed block from the lightweight spec.
+                        # The spec contains minimal fields; we fill the rest.
+                        signed_block = self._build_block_from_spec(
+                            step.block, store, self._block_registry, key_manager
+                        )
+
+                        # Store the filled Block for serialization
+                        block = signed_block.message.block
+                        step._filled_block = signed_block.message
+
+                        # Register labeled blocks for fork building.
+                        # Later blocks can reference this one as their parent.
+                        if step.block.label is not None:
+                            if step.block.label in self._block_registry:
+                                raise ValueError(
+                                    f"Step {i}: duplicate label '{step.block.label}' - "
+                                    f"labels must be unique within a test"
+                                )
+                            self._block_registry[step.block.label] = block
+
+                        # Advance time to the block's slot.
+                        # Store rejects blocks from the future.
+                        # This tick includes a block (has proposal).
+                        # Always act as aggregator to ensure gossip signatures are aggregated
+                        slot_duration_seconds = block.slot * SECONDS_PER_SLOT
+                        block_time = store.config.genesis_time + slot_duration_seconds
+                        store = store.on_tick(block_time, has_proposal=True, is_aggregator=True)
+
+                        # Process the block through Store.
+                        # This validates, applies state transition, and updates head.
+                        store = store.on_block(
+                            signed_block,
+                            scheme=LEAN_ENV_TO_SCHEMES[self.lean_env],
+                        )
+
+                    case AttestationStep():
+                        # Process a gossip attestation.
+                        # Gossip attestations arrive outside of blocks.
+                        # They influence the fork choice weight calculation.
+                        store = store.on_gossip_attestation(
+                            step.attestation,
+                            scheme=LEAN_ENV_TO_SCHEMES[self.lean_env],
+                        )
+
+                    case _:
+                        raise ValueError(f"Step {i}: unknown step type {type(step).__name__}")
 
                 # Validate Store state if checks are provided.
                 # Labels in checks are resolved to actual block roots.
@@ -379,117 +385,84 @@ def _build_block_from_spec(
                 sig_key = SignatureKey(attestation.validator_id, attestation.data.data_root_bytes())
                 if sig_key not in valid_signature_keys:
                     continue
-                signature = attestation_signatures.get(sig_key)
-                if signature is None:
+                if (signature := attestation_signatures.get(sig_key)) is None:
                     continue
-                signed_attestation = SignedAttestation(
-                    validator_id=attestation.validator_id,
-                    message=attestation.data,
-                    signature=signature,
-                )
                 working_store = working_store.on_gossip_attestation(
-                    signed_attestation,
+                    SignedAttestation(
+                        validator_id=attestation.validator_id,
+                        message=attestation.data,
+                        signature=signature,
+                    ),
                     scheme=LEAN_ENV_TO_SCHEMES[self.lean_env],
                     is_aggregator=True,
                 )
 
-            # Prepare attestations and aggregated payloads for block construction.
-            #
+            # Aggregate gossip signatures and merge into known payloads.
+            # This makes recently gossiped attestations available for block construction.
+            aggregation_store = working_store.aggregate_committee_signatures()
+            merged_store = aggregation_store.accept_new_attestations()
+
             # Two sources of attestations:
             # 1. Explicit attestations from the spec (always included)
             # 2. Store attestations (only if include_store_attestations is True)
-            #
-            # For all attestations, we need to create aggregated proofs
-            # so build_block can include them in the block body.
-            # Attestations with the same data should be merged into a single proof.
             available_attestations: list[Attestation]
             known_block_roots: set[Bytes32] | None = None
 
-            # First, aggregate any gossip signatures into payloads
-            # This ensures that signatures from previous blocks (like proposer attestations)
-            # are available for extraction
-            aggregation_store = working_store.aggregate_committee_signatures()
-
-            # Now combine aggregated payloads from both sources
-            aggregated_payloads = (
-                dict(store.latest_known_aggregated_payloads)
-                if store.latest_known_aggregated_payloads
-                else {}
-            )
-            # Add newly aggregated payloads from gossip signatures
-            for key, proofs in aggregation_store.latest_new_aggregated_payloads.items():
-                if key not in aggregated_payloads:
-                    aggregated_payloads[key] = []
-                aggregated_payloads[key].extend(proofs)
-
-            # Collect all attestations that need aggregated proofs
-            all_attestations_for_proofs: list[Attestation] = list(attestations)
-
             if spec.include_store_attestations:
-                # Gather all attestations by extracting from aggregated payloads.
-                # This now includes attestations from gossip signatures that were just aggregated.
-                known_attestations = store._extract_attestations_from_aggregated_payloads(
-                    store.latest_known_aggregated_payloads
-                )
-                new_attestations = aggregation_store._extract_attestations_from_aggregated_payloads(
-                    aggregation_store.latest_new_aggregated_payloads
+                # Extract from merged payloads (contains both known and newly aggregated)
+                attestation_map = merged_store.extract_attestations_from_aggregated_payloads(
+                    merged_store.latest_known_aggregated_payloads
                 )
-
-                # Convert to list of Attestations
                 store_attestations = [
-                    Attestation(validator_id=vid, data=data) for vid, data in known_attestations.items()
+                    Attestation(validator_id=vid, data=data) for vid, data in attestation_map.items()
                 ]
-                store_attestations.extend(
-                    Attestation(validator_id=vid, data=data) for vid, data in new_attestations.items()
-                )
-
-                # Add store attestations to the list for proof creation
-                all_attestations_for_proofs.extend(store_attestations)
-
-                # Combine for block construction
                 available_attestations = store_attestations + attestations
                 known_block_roots = set(store.blocks.keys())
             else:
                 # Use only explicit attestations from the spec
                 available_attestations = attestations
 
-            # Update attestation_data_by_root with any new attestation data
-            attestation_data_by_root = dict(aggregation_store.attestation_data_by_root)
-            for attestation in all_attestations_for_proofs:
-                data_root = attestation.data.data_root_bytes()
-                attestation_data_by_root[data_root] = attestation.data
-
-            # Use the aggregated payloads we just created
-            # No need to call aggregate_committee_signatures again since we already did it
-
-            # Build the block using spec logic
+            # Build the block using spec logic.
             #
             # State handles the core block construction.
             # This includes state transition and root computation.
             parent_state = store.states[parent_root]
-            final_block, _, _, _ = parent_state.build_block(
+            final_block, post_state, _, _ = parent_state.build_block(
                 slot=spec.slot,
                 proposer_index=proposer_index,
                 parent_root=parent_root,
                 attestations=available_attestations,
                 available_attestations=available_attestations,
                 known_block_roots=known_block_roots,
-                aggregated_payloads=aggregated_payloads,
+                aggregated_payloads=merged_store.latest_known_aggregated_payloads,
             )
 
-            # Create proposer attestation
-            #
-            # The proposer must also attest to their own block.
-            # This attestation commits to the block they just created.
+            # Create proposer attestation using the spec's attestation data production.
+            # Build a temporary store with the new block integrated so
+            # produce_attestation_data() sees the correct chain view.
             block_root = hash_tree_root(final_block)
+            latest_justified = (
+                post_state.latest_justified
+                if post_state.latest_justified.slot > store.latest_justified.slot
+                else store.latest_justified
+            )
+            latest_finalized = (
+                post_state.latest_finalized
+                if post_state.latest_finalized.slot > store.latest_finalized.slot
+                else store.latest_finalized
+            )
+            temp_store = store.model_copy(
+                update={
+                    "blocks": {**store.blocks, block_root: final_block},
+                    "states": {**store.states, block_root: post_state},
+                    "head": block_root,
+                    "latest_justified": latest_justified,
+                    "latest_finalized": latest_finalized,
+                }
+            )
             proposer_attestation = Attestation(
                 validator_id=proposer_index,
-                data=AttestationData(
-                    slot=spec.slot,
-                    head=Checkpoint(root=block_root, slot=spec.slot),
-                    target=Checkpoint(root=block_root, slot=spec.slot),
-                    source=Checkpoint(root=parent_root, slot=parent_state.latest_block_header.slot),
-                ),
+                data=temp_store.produce_attestation_data(spec.slot),
             )
 
             # Sign everything
@@ -635,11 +608,7 @@ def _build_attestations_from_spec(
             else:
                 # Dummy signature for testing invalid signature handling.
                 # The Store should reject attestations with bad signatures.
-                signature = Signature(
-                    path=HashTreeOpening(siblings=HashDigestList(data=[])),
-                    rho=Randomness(data=[Fp(0) for _ in range(Randomness.LENGTH)]),
-                    hashes=HashDigestList(data=[]),
-                )
+                signature = create_dummy_signature()
 
             # Index signature by validator and data root.
             # This enables lookup during signature aggregation.
@@ -682,13 +651,13 @@ def _build_attestation_data_from_spec(
         """
 
         # Resolve target from label.
         # The label references a block that will be the target checkpoint.
-        if spec.target_root_label not in block_registry:
+        if (target_block := block_registry.get(spec.target_root_label)) is None:
             raise ValueError(
                 f"target_root_label '{spec.target_root_label}' not found - "
                 f"available: {list(block_registry.keys())}"
             )
-        target_root = hash_tree_root(block_registry[spec.target_root_label])
+        target_root = hash_tree_root(target_block)
         target = Checkpoint(root=target_root, slot=spec.target_slot)
 
         # Build the attestation data.
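The temporary store built above keeps whichever justified and finalized checkpoints are more recent by slot before calling produce_attestation_data(). A standalone sketch of that selection rule, not part of the patch (the helper name is hypothetical; Checkpoint and its slot field are taken from this diff):

# Hypothetical helper mirroring the latest_justified / latest_finalized selection
# used when assembling temp_store above.
from lean_spec.subspecs.containers.checkpoint import Checkpoint

def newer_checkpoint(candidate: Checkpoint, current: Checkpoint) -> Checkpoint:
    # Prefer the candidate only when it is strictly ahead; ties keep the store's view.
    return candidate if candidate.slot > current.slot else current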
diff --git a/packages/testing/src/consensus_testing/test_fixtures/verify_signatures.py b/packages/testing/src/consensus_testing/test_fixtures/verify_signatures.py
index a4ec903b..faa84c52 100644
--- a/packages/testing/src/consensus_testing/test_fixtures/verify_signatures.py
+++ b/packages/testing/src/consensus_testing/test_fixtures/verify_signatures.py
@@ -24,48 +24,16 @@
 from lean_spec.subspecs.containers.checkpoint import Checkpoint
 from lean_spec.subspecs.containers.state.state import State
 from lean_spec.subspecs.containers.validator import ValidatorIndex
-from lean_spec.subspecs.koalabear import Fp
 from lean_spec.subspecs.ssz import hash_tree_root
 from lean_spec.subspecs.xmss.aggregation import AggregatedSignatureProof
-from lean_spec.subspecs.xmss.constants import TARGET_CONFIG
-from lean_spec.subspecs.xmss.containers import Signature
-from lean_spec.subspecs.xmss.types import (
-    HashDigestList,
-    HashDigestVector,
-    HashTreeOpening,
-    Randomness,
-)
 from lean_spec.types import Bytes32
 from lean_spec.types.byte_arrays import ByteListMiB
 
-from ..keys import XmssKeyManager, get_shared_key_manager
+from ..keys import XmssKeyManager, create_dummy_signature, get_shared_key_manager
 from ..test_types import AggregatedAttestationSpec, BlockSpec
 from .base import BaseConsensusFixture
 
 
-def _create_dummy_signature() -> Signature:
-    """
-    Create a structurally valid but cryptographically invalid individual signature.
-
-    The signature has proper structure (correct number of siblings, hashes, etc.)
-    but all values are zeros, so it will fail cryptographic verification.
-    """
-    # Create zero-filled hash digests with correct dimensions
-    zero_digest = HashDigestVector(data=[Fp(0) for _ in range(TARGET_CONFIG.HASH_LEN_FE)])
-
-    # Path needs LOG_LIFETIME siblings for the Merkle authentication path
-    siblings = HashDigestList(data=[zero_digest for _ in range(TARGET_CONFIG.LOG_LIFETIME)])
-
-    # Hashes need DIMENSION vectors for the Winternitz chain hashes
-    hashes = HashDigestList(data=[zero_digest for _ in range(TARGET_CONFIG.DIMENSION)])
-
-    return Signature(
-        path=HashTreeOpening(siblings=siblings),
-        rho=Randomness(data=[Fp(0) for _ in range(TARGET_CONFIG.RAND_LEN_FE)]),
-        hashes=hashes,
-    )
-
-
 def _create_dummy_aggregated_proof(validator_ids: list[ValidatorIndex]) -> AggregatedSignatureProof:
     """
     Create a dummy aggregated signature proof with invalid proof data.
@@ -322,7 +290,7 @@ def _build_block_from_spec(
                 proposer_attestation.data,
             )
         else:
-            proposer_attestation_signature = _create_dummy_signature()
+            proposer_attestation_signature = create_dummy_signature()
 
         return SignedBlockWithAttestation(
             message=BlockWithAttestation(
diff --git a/packages/testing/src/consensus_testing/test_types/store_checks.py b/packages/testing/src/consensus_testing/test_types/store_checks.py
index 7070abe7..bd98af5e 100644
--- a/packages/testing/src/consensus_testing/test_types/store_checks.py
+++ b/packages/testing/src/consensus_testing/test_types/store_checks.py
@@ -431,7 +431,7 @@ def validate_against_store(
                 # Extract attestations from aggregated payloads
                 if check.location == "new":
                     extracted_attestations = (
-                        store._extract_attestations_from_aggregated_payloads(
+                        store.extract_attestations_from_aggregated_payloads(
                            store.latest_new_aggregated_payloads
                        )
                    )
@@ -445,7 +445,7 @@
 
                 else:  # check.location == "known"
                     extracted_attestations = (
-                        store._extract_attestations_from_aggregated_payloads(
+                        store.extract_attestations_from_aggregated_payloads(
                            store.latest_known_aggregated_payloads
                        )
                    )
@@ -572,7 +572,7 @@
             # Calculate attestation weight: count attestations voting for this fork
             # An attestation votes for this fork if its head is this block or a descendant
             # Extract attestations from latest_known_aggregated_payloads
-            known_attestations = store._extract_attestations_from_aggregated_payloads(
+            known_attestations = store.extract_attestations_from_aggregated_payloads(
                 store.latest_known_aggregated_payloads
             )
             weight = 0
diff --git a/src/lean_spec/subspecs/forkchoice/store.py b/src/lean_spec/subspecs/forkchoice/store.py
index 7faefd9c..4697d54e 100644
--- a/src/lean_spec/subspecs/forkchoice/store.py
+++ b/src/lean_spec/subspecs/forkchoice/store.py
@@ -690,7 +690,7 @@ def on_block(
 
         return store
 
-    def _extract_attestations_from_aggregated_payloads(
+    def extract_attestations_from_aggregated_payloads(
         self, aggregated_payloads: dict[SignatureKey, list[AggregatedSignatureProof]]
     ) -> dict[ValidatorIndex, AttestationData]:
         """
@@ -841,7 +841,7 @@ def update_head(self) -> "Store":
         """
 
         # Extract attestations from known aggregated payloads
-        attestations = self._extract_attestations_from_aggregated_payloads(
+        attestations = self.extract_attestations_from_aggregated_payloads(
            self.latest_known_aggregated_payloads
        )
 
@@ -930,7 +930,7 @@
         min_target_score = -(-num_validators * 2 // 3)
 
         # Extract attestations from new aggregated payloads
-        attestations = self._extract_attestations_from_aggregated_payloads(
+        attestations = self.extract_attestations_from_aggregated_payloads(
            self.latest_new_aggregated_payloads
        )
 
@@ -1287,7 +1287,7 @@ def produce_block_with_signatures(
         # Extract attestations from known aggregated payloads.
         # These attestations have already influenced fork choice.
         # Including them in the block makes them permanent on-chain.
-        attestation_data_map = store._extract_attestations_from_aggregated_payloads(
+        attestation_data_map = store.extract_attestations_from_aggregated_payloads(
             store.latest_known_aggregated_payloads
         )
         available_attestations = [
diff --git a/tests/consensus/devnet/fc/test_signature_aggregation.py b/tests/consensus/devnet/fc/test_signature_aggregation.py
index e2f3cf51..bdb374c6 100644
--- a/tests/consensus/devnet/fc/test_signature_aggregation.py
+++ b/tests/consensus/devnet/fc/test_signature_aggregation.py
@@ -308,13 +308,15 @@ def test_auto_collect_proposer_attestations(
                 ),
                 checks=StoreChecks(
                     head_slot=Slot(2),
-                    # Proposer 1's attestation should be auto-collected
+                    # Proposer 1's attestation should be auto-collected.
+                    # Target is genesis (slot 0) because the spec's attestation target
+                    # algorithm walks back from head toward safe_target.
                     block_attestation_count=1,
                     block_attestations=[
                         AggregatedAttestationCheck(
                             participants={1},
                             attestation_slot=Slot(1),
-                            target_slot=Slot(1),
+                            target_slot=Slot(0),
                         ),
                     ],
                 ),
@@ -337,10 +339,10 @@ def test_auto_collect_combined_with_explicit_attestations(
 
     Expected
     --------
-    Block body contains merged attestation from all sources:
-    - Proposer 1's attestation (auto-collected)
-    - Validators 0 and 3 (explicitly specified)
-    - All merged into single aggregation (same target)
+    Block body contains attestations from all sources.
+    Proposer 1's attestation targets genesis (slot 0) via the spec's target
+    walk-back algorithm, while explicit attestations target block_1 (slot 1).
+    Different targets produce separate aggregation groups.
     """
     fork_choice_test(
         steps=[
@@ -363,11 +365,16 @@
                 ),
                 checks=StoreChecks(
                     head_slot=Slot(2),
-                    # All attestations merged: proposer 1 + explicit {0, 3}
-                    block_attestation_count=1,
+                    # Two separate groups: proposer targets genesis, explicit targets block_1
+                    block_attestation_count=2,
                     block_attestations=[
                         AggregatedAttestationCheck(
-                            participants={0, 1, 3},
+                            participants={1},
+                            attestation_slot=Slot(1),
+                            target_slot=Slot(0),
+                        ),
+                        AggregatedAttestationCheck(
+                            participants={0, 3},
                             attestation_slot=Slot(1),
                             target_slot=Slot(1),
                         ),
diff --git a/tests/lean_spec/subspecs/forkchoice/test_store_attestations.py b/tests/lean_spec/subspecs/forkchoice/test_store_attestations.py
index f02f73d4..904c8a62 100644
--- a/tests/lean_spec/subspecs/forkchoice/test_store_attestations.py
+++ b/tests/lean_spec/subspecs/forkchoice/test_store_attestations.py
@@ -59,7 +59,7 @@ def test_on_block_processes_multi_validator_aggregations(key_manager: XmssKeyMan
     updated_store = consumer_store.on_block(signed_block)
 
     # Verify attestations can be extracted from aggregated payloads
-    extracted_attestations = updated_store._extract_attestations_from_aggregated_payloads(
+    extracted_attestations = updated_store.extract_attestations_from_aggregated_payloads(
         updated_store.latest_known_aggregated_payloads
     )
     assert ValidatorIndex(1) in extracted_attestations
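A sketch of why the updated check expects two aggregation groups rather than one; this is illustrative only and the grouping key shown is an assumption (the diff itself only shows that attestations with different targets are not merged):

# Attestation data that differs (here, the target checkpoint) hashes to a
# different data root, so the participants end up in separate groups.
from collections import defaultdict

def group_participants_by_data(attestations):
    groups = defaultdict(list)
    for att in attestations:
        # data_root_bytes() is the per-data key used elsewhere in this diff.
        groups[att.data.data_root_bytes()].append(att.validator_id)
    return groups

# Proposer 1 (target slot 0) and validators {0, 3} (target slot 1) yield two
# groups, matching block_attestation_count=2 in the updated StoreChecks.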